Example #1
 /**
  * @param array|string $args
  */
 public function edit_ajax($args = '')
 {
     $error = '';
     if (is_array($args)) {
         // direct call with stored job settings
         $ajax = FALSE;
     } else {
         // AJAX call: check permissions/nonce and read settings from the request
         if (!current_user_can('backwpup_jobs_edit')) {
             wp_die(-1);
         }
         check_ajax_referer('backwpup_ajax_nonce');
         $args['s3accesskey'] = $_POST['s3accesskey'];
         $args['s3secretkey'] = $_POST['s3secretkey'];
         $args['s3bucketselected'] = $_POST['s3bucketselected'];
         $args['s3base_url'] = $_POST['s3base_url'];
         $args['s3region'] = $_POST['s3region'];
         $ajax = TRUE;
     }
     echo '<span id="s3bucketerror" style="color:red;">';
     if (!empty($args['s3accesskey']) && !empty($args['s3secretkey'])) {
         try {
             $s3 = new AmazonS3(array('key' => $args['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($args['s3secretkey']), 'certificate_authority' => TRUE));
             $base_url = $this->get_s3_base_url($args['s3region'], $args['s3base_url']);
             if (stristr($base_url, 'amazonaws.com')) {
                 $s3->set_region(str_replace(array('http://', 'https://'), '', $base_url));
             } else {
                 $s3->set_hostname(str_replace(array('http://', 'https://'), '', $base_url));
                 $s3->allow_hostname_override(FALSE);
                 if (substr($base_url, -1) == '/') {
                     $s3->enable_path_style(TRUE);
                 }
             }
             if (stristr($base_url, 'http://')) {
                 $s3->disable_ssl();
             }
             $buckets = $s3->list_buckets();
         } catch (Exception $e) {
             $error = $e->getMessage();
         }
     }
     if (empty($args['s3accesskey'])) {
         _e('Missing access key!', 'backwpup');
     } elseif (empty($args['s3secretkey'])) {
         _e('Missing secret access key!', 'backwpup');
     } elseif (!empty($error) && $error == 'Access Denied') {
         echo '<input type="text" name="s3bucket" id="s3bucket" value="' . esc_attr($args['s3bucketselected']) . '" >';
     } elseif (!empty($error)) {
         echo esc_html($error);
     } elseif (!isset($buckets) || count($buckets->body->Buckets->Bucket) < 1) {
         _e('No bucket found!', 'backwpup');
     }
     echo '</span>';
     if (!empty($buckets->body->Buckets->Bucket)) {
         echo '<select name="s3bucket" id="s3bucket">';
         foreach ($buckets->body->Buckets->Bucket as $bucket) {
             echo "<option " . selected($args['s3bucketselected'], esc_attr($bucket->Name), FALSE) . ">" . esc_attr($bucket->Name) . "</option>";
         }
         echo '</select>';
     }
     if ($ajax) {
         die;
     }
 }
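For context, a handler like this is typically wired to a WordPress AJAX action so that the nonce and capability checks above run for logged-in requests. A minimal registration sketch; the hook name and the $dest_s3 instance are assumptions, not taken from the code above:

// Hypothetical registration: the action name 'backwpup_dest_s3_bucket' and
// the $dest_s3 object that holds edit_ajax() are illustrative only.
add_action('wp_ajax_backwpup_dest_s3_bucket', array($dest_s3, 'edit_ajax'));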
Example #2
         $s3 = new AmazonS3();
         $s3->create_bucket($_POST['newawsBucket'], $_POST['awsRegion']);
         $jobvalues['awsBucket'] = $_POST['newawsBucket'];
     } catch (Exception $e) {
         $backwpup_message .= __($e->getMessage(), 'backwpup') . '<br />';
     }
 }
 if (!empty($_POST['GStorageAccessKey']) and !empty($_POST['GStorageSecret']) and !empty($_POST['newGStorageBucket'])) {
     //create new google storage bucket if needed
     if (!class_exists('CFRuntime')) {
         require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
     }
     try {
         CFCredentials::set(array('backwpup' => array('key' => $_POST['GStorageAccessKey'], 'secret' => $_POST['GStorageSecret'], 'default_cache_config' => '', 'certificate_authority' => true), '@default' => 'backwpup'));
         $gstorage = new AmazonS3();
         $gstorage->set_hostname('storage.googleapis.com');
         $gstorage->allow_hostname_override(false);
         $gstorage->create_bucket($_POST['newGStorageBucket'], '');
         $jobvalues['GStorageBucket'] = $_POST['newGStorageBucket'];
         sleep(1);
         //creation takes a moment
     } catch (Exception $e) {
         $backwpup_message .= __($e->getMessage(), 'backwpup') . '<br />';
     }
 }
 if (!empty($_POST['newmsazureContainer']) and !empty($_POST['msazureHost']) and !empty($_POST['msazureAccName']) and !empty($_POST['msazureKey'])) {
     //create new Microsoft Azure container if needed
     if (!class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
         require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
     }
     try {
Example #3
function dest_gstorage()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup to Google Storage...', 'backwpup'), $WORKING['DEST_GSTORAGE']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $gstorage = new AmazonS3(array('key' => $STATIC['JOB']['GStorageAccessKey'], 'secret' => $STATIC['JOB']['GStorageSecret'], 'certificate_authority' => true));
        //set up s3 for google
        $gstorage->set_hostname('storage.googleapis.com');
        $gstorage->allow_hostname_override(false);
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            trigger_error(sprintf(__('Connected to GStorage Bucket: %s', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_NOTICE);
            //set cURL progress bar
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Upload to GStorage now started... ', 'backwpup'), E_USER_NOTICE);
            //transfer the file to GStorage
            $result = $gstorage->create_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => 'private', 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'];
                $WORKING['STEPSDONE'][] = 'DEST_GSTORAGE';
                //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to GStorage! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('GStorage Bucket "%s" does not exist!', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            if ($STATIC['JOB']['GStoragemaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $gstorage->list_objects($STATIC['JOB']['GStorageBucket'], array('prefix' => $STATIC['JOB']['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['GStoragemaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($gstorage->delete_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i])) {
                            //delete the old backup file from the GStorage bucket
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on GStorage://%s', 'backwpup'), $STATIC['JOB']['GStorageBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on GStorage Bucket', '%d files deleted on GStorage Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
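The upload above hands cURL a progress callback named curl_progresscallback, which is defined elsewhere in the plugin and not shown here. A minimal sketch of what such a callback could look like, assuming the pre-PHP 5.5 cURL callback signature (no curl handle argument) and the same global job counters:

// Hypothetical sketch only: feed the bytes uploaded so far into the
// $WORKING['STEPDONE'] counter that dest_gstorage() initialises above.
function curl_progresscallback($download_size, $downloaded, $upload_size, $uploaded)
{
    global $WORKING;
    if ($uploaded > 0) {
        $WORKING['STEPDONE'] = 1 + $uploaded;
    }
}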
Example #4
function backwpup_get_backup_files($jobid, $dest)
{
    global $backwpup_message;
    if (empty($jobid) or !in_array(strtoupper($dest), explode(',', strtoupper(BACKWPUP_DESTS))) and $dest != 'FOLDER') {
        return false;
    }
    $jobs = get_option('backwpup_jobs');
    //Load jobs
    $jobvalue = $jobs[$jobid];
    $filecounter = 0;
    $files = array();
    //Get files/file info from the backup folder
    if ($dest == 'FOLDER' and !empty($jobvalue['backupdir']) and is_dir($jobvalue['backupdir'])) {
        if ($dir = opendir($jobvalue['backupdir'])) {
            while (($file = readdir($dir)) !== false) {
                if (substr($file, 0, 1) == '.') {
                    continue;
                }
                if (is_file($jobvalue['backupdir'] . $file)) {
                    $files[$filecounter]['JOBID'] = $jobid;
                    $files[$filecounter]['DEST'] = $dest;
                    $files[$filecounter]['folder'] = $jobvalue['backupdir'];
                    $files[$filecounter]['file'] = $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filename'] = $file;
                    $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=download&file=' . $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filesize'] = filesize($jobvalue['backupdir'] . $file);
                    $files[$filecounter]['time'] = filemtime($jobvalue['backupdir'] . $file);
                    $filecounter++;
                }
            }
            closedir($dir);
        }
    }
    //Get files/file info from Dropbox
    if ($dest == 'DROPBOX' and !empty($jobvalue['dropetoken']) and !empty($jobvalue['dropesecret'])) {
        require_once realpath(dirname(__FILE__) . '/../libs/dropbox.php');
        try {
            $dropbox = new backwpup_Dropbox('dropbox');
            $dropbox->setOAuthTokens($jobvalue['dropetoken'], $jobvalue['dropesecret']);
            $contents = $dropbox->metadata($jobvalue['dropedir']);
            if (is_array($contents)) {
                foreach ($contents['contents'] as $object) {
                    if ($object['is_dir'] != true) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://api-content.dropbox.com/1/files/" . $jobvalue['droperoot'] . "/" . dirname($object['path']) . "/";
                        $files[$filecounter]['file'] = $object['path'];
                        $files[$filecounter]['filename'] = basename($object['path']);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloaddropbox&file=' . $object['path'] . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $object['bytes'];
                        $files[$filecounter]['time'] = strtotime($object['modified']);
                        $filecounter++;
                    }
                }
            }
        } catch (Exception $e) {
            $backwpup_message .= 'DROPBOX: ' . $e->getMessage() . '<br />';
        }
    }
    //Get files/file info from SugarSync
    if ($dest == 'SUGARSYNC' and !empty($jobvalue['sugarrefreshtoken'])) {
        if (!class_exists('SugarSync')) {
            require_once dirname(__FILE__) . '/../libs/sugarsync.php';
        }
        if (class_exists('SugarSync')) {
            try {
                $sugarsync = new SugarSync($jobvalue['sugarrefreshtoken']);
                $dirid = $sugarsync->chdir($jobvalue['sugardir'], $jobvalue['sugarroot']);
                $user = $sugarsync->user();
                $dir = $sugarsync->showdir($dirid);
                $getfiles = $sugarsync->getcontents('file');
                if (is_object($getfiles)) {
                    foreach ($getfiles->file as $getfile) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = 'https://' . $user->nickname . '.sugarsync.com/' . $dir;
                        $files[$filecounter]['file'] = (string) $getfile->ref;
                        $files[$filecounter]['filename'] = utf8_decode((string) $getfile->displayName);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $getfile->ref . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = (int) $getfile->size;
                        $files[$filecounter]['time'] = strtotime((string) $getfile->lastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'SUGARSYNC: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/file info from S3
    if ($dest == 'S3' and !empty($jobvalue['awsAccessKey']) and !empty($jobvalue['awsSecretKey']) and !empty($jobvalue['awsBucket'])) {
        if (!class_exists('AmazonS3')) {
            require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
        }
        if (class_exists('AmazonS3')) {
            try {
                $s3 = new AmazonS3(array('key' => $jobvalue['awsAccessKey'], 'secret' => $jobvalue['awsSecretKey'], 'certificate_authority' => true));
                if (($contents = $s3->list_objects($jobvalue['awsBucket'], array('prefix' => $jobvalue['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://" . $jobvalue['awsBucket'] . ".s3.amazonaws.com/" . dirname((string) $object->Key) . '/';
                        $files[$filecounter]['file'] = (string) $object->Key;
                        $files[$filecounter]['filename'] = basename($object->Key);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $object->Key . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = (string) $object->Size;
                        $files[$filecounter]['time'] = strtotime($object->LastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'Amazon S3: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/file info from Google Storage
    if ($dest == 'GSTORAGE' and !empty($jobvalue['GStorageAccessKey']) and !empty($jobvalue['GStorageSecret']) and !empty($jobvalue['GStorageBucket'])) {
        if (!class_exists('AmazonS3')) {
            require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
        }
        if (class_exists('AmazonS3')) {
            try {
                $gstorage = new AmazonS3(array('key' => $jobvalue['GStorageAccessKey'], 'secret' => $jobvalue['GStorageSecret'], 'certificate_authority' => true));
                $gstorage->set_hostname('storage.googleapis.com');
                $gstorage->allow_hostname_override(false);
                if (($contents = $gstorage->list_objects($jobvalue['GStorageBucket'], array('prefix' => $jobvalue['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://storage.cloud.google.com/" . $jobvalue['GStorageBucket'] . "/" . dirname((string) $object->Key) . '/';
                        $files[$filecounter]['file'] = (string) $object->Key;
                        $files[$filecounter]['filename'] = basename($object->Key);
                        $files[$filecounter]['downloadurl'] = "https://storage.cloud.google.com/" . $jobvalue['GStorageBucket'] . "/" . (string) $object->Key;
                        $files[$filecounter]['filesize'] = (string) $object->Size;
                        $files[$filecounter]['time'] = strtotime($object->LastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()) . '<br />';
            }
        }
    }
    //Get files/file info from Microsoft Azure
    if ($dest == 'MSAZURE' and !empty($jobvalue['msazureHost']) and !empty($jobvalue['msazureAccName']) and !empty($jobvalue['msazureKey']) and !empty($jobvalue['msazureContainer'])) {
        if (!class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
            require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
        }
        if (class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
            try {
                $storageClient = new Microsoft_WindowsAzure_Storage_Blob($jobvalue['msazureHost'], $jobvalue['msazureAccName'], $jobvalue['msazureKey']);
                $blobs = $storageClient->listBlobs($jobvalue['msazureContainer'], $jobvalue['msazuredir']);
                if (is_array($blobs)) {
                    foreach ($blobs as $blob) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://" . $jobvalue['msazureAccName'] . '.' . $jobvalue['msazureHost'] . "/" . $jobvalue['msazureContainer'] . "/" . dirname($blob->Name) . "/";
                        $files[$filecounter]['file'] = $blob->Name;
                        $files[$filecounter]['filename'] = basename($blob->Name);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadmsazure&file=' . $blob->Name . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $blob->size;
                        $files[$filecounter]['time'] = strtotime($blob->lastmodified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'MSAZURE: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/file info from Rackspace Cloud Files (RSC)
    if ($dest == 'RSC' and !empty($jobvalue['rscUsername']) and !empty($jobvalue['rscAPIKey']) and !empty($jobvalue['rscContainer'])) {
        if (!class_exists('CF_Authentication')) {
            require_once dirname(__FILE__) . '/../libs/rackspace/cloudfiles.php';
        }
        if (class_exists('CF_Authentication')) {
            try {
                $auth = new CF_Authentication($jobvalue['rscUsername'], $jobvalue['rscAPIKey']);
                $auth->ssl_use_cabundle();
                if ($auth->authenticate()) {
                    $conn = new CF_Connection($auth);
                    $conn->ssl_use_cabundle();
                    $backwpupcontainer = $conn->get_container($jobvalue['rscContainer']);
                    $contents = $backwpupcontainer->get_objects(0, NULL, NULL, $jobvalue['rscdir']);
                    foreach ($contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "RSC://" . $jobvalue['rscContainer'] . "/" . dirname($object->name) . "/";
                        $files[$filecounter]['file'] = $object->name;
                        $files[$filecounter]['filename'] = basename($object->name);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadrsc&file=' . $object->name . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $object->content_length;
                        $files[$filecounter]['time'] = strtotime($object->last_modified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'RSC: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/file info from FTP
    if ($dest == 'FTP' and !empty($jobvalue['ftphost']) and function_exists('ftp_connect') and !empty($jobvalue['ftpuser']) and !empty($jobvalue['ftppass'])) {
        if (function_exists('ftp_ssl_connect') and $jobvalue['ftpssl']) {
            //make SSL FTP connection
            $ftp_conn_id = ftp_ssl_connect($jobvalue['ftphost'], $jobvalue['ftphostport'], 10);
        } elseif (!$jobvalue['ftpssl']) {
            //make a normal FTP connection if SSL is not enabled
            $ftp_conn_id = ftp_connect($jobvalue['ftphost'], $jobvalue['ftphostport'], 10);
        }
        $loginok = false;
        if ($ftp_conn_id) {
            //FTP Login
            if (@ftp_login($ftp_conn_id, $jobvalue['ftpuser'], backwpup_base64($jobvalue['ftppass']))) {
                $loginok = true;
            } else {
                //if PHP ftp_login() does not work, use a raw login
                ftp_raw($ftp_conn_id, 'USER ' . $jobvalue['ftpuser']);
                $return = ftp_raw($ftp_conn_id, 'PASS ' . backwpup_base64($jobvalue['ftppass']));
                if (substr(trim($return[0]), 0, 3) <= 400) {
                    $loginok = true;
                }
            }
        }
        if ($loginok) {
            ftp_chdir($ftp_conn_id, $jobvalue['ftpdir']);
            $currentftpdir = rtrim(ftp_pwd($ftp_conn_id), '/') . '/';
            ftp_pasv($ftp_conn_id, $jobvalue['ftppasv']);
            if ($ftpfilelist = ftp_nlist($ftp_conn_id, $currentftpdir)) {
                foreach ($ftpfilelist as $ftpfiles) {
                    if (substr(basename($ftpfiles), 0, 1) == '.') {
                        continue;
                    }
                    $files[$filecounter]['JOBID'] = $jobid;
                    $files[$filecounter]['DEST'] = $dest;
                    $files[$filecounter]['folder'] = "ftp://" . $jobvalue['ftphost'] . ':' . $jobvalue['ftphostport'] . dirname($ftpfiles) . "/";
                    $files[$filecounter]['file'] = $ftpfiles;
                    $files[$filecounter]['filename'] = basename($ftpfiles);
                    $files[$filecounter]['downloadurl'] = "ftp://" . rawurlencode($jobvalue['ftpuser']) . ":" . rawurlencode(backwpup_base64($jobvalue['ftppass'])) . "@" . $jobvalue['ftphost'] . ':' . $jobvalue['ftphostport'] . $ftpfiles;
                    $files[$filecounter]['filesize'] = ftp_size($ftp_conn_id, $ftpfiles);
                    $files[$filecounter]['time'] = ftp_mdtm($ftp_conn_id, $ftpfiles);
                    $filecounter++;
                }
            }
        } else {
            $backwpup_message .= 'FTP: ' . __('Login failure!', 'backwpup') . '<br />';
        }
        $donefolders[] = $jobvalue['ftphost'] . '|' . $jobvalue['ftpuser'] . '|' . $jobvalue['ftpdir'];
    }
    return $files;
}
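A direct call to the function above might look like this; the job id and the way the returned array is used are illustrative only:

// Hypothetical usage sketch: list the backups that job 3 stored on Amazon S3.
$backups = backwpup_get_backup_files(3, 'S3');
if (is_array($backups)) {
    foreach ($backups as $backup) {
        echo $backup['filename'] . ' (' . $backup['filesize'] . ' bytes, '
            . date('Y-m-d H:i', $backup['time']) . ')' . "\n";
    }
}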
Example #5
         $s3 = new AmazonS3();
         $s3->create_bucket($_POST['newawsBucket'], $_POST['awsRegion']);
         $jobvalues['awsBucket'] = $_POST['newawsBucket'];
     } catch (Exception $e) {
         $backwpup_message .= __($e->getMessage(), 'backwpup') . '<br />';
     }
 }
 if (!empty($_POST['GStorageAccessKey']) and !empty($_POST['GStorageSecret']) and !empty($_POST['newGStorageBucket'])) {
     //create new google storage bucket if needed
     if (!class_exists('CFRuntime')) {
         require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
     }
     try {
         CFCredentials::set(array('backwpup' => array('key' => $_POST['GStorageAccessKey'], 'secret' => $_POST['GStorageSecret'], 'default_cache_config' => '', 'certificate_authority' => true), '@default' => 'backwpup'));
         $gstorage = new AmazonS3();
         $gstorage->set_hostname('commondatastorage.googleapis.com');
         $gstorage->allow_hostname_override(false);
         $gstorage->create_bucket($_POST['newGStorageBucket'], '');
         $jobvalues['GStorageBucket'] = $_POST['newGStorageBucket'];
         sleep(1);
         //creation takes a moment
     } catch (Exception $e) {
         $backwpup_message .= __($e->getMessage(), 'backwpup') . '<br />';
     }
 }
 if (!empty($_POST['newmsazureContainer']) and !empty($_POST['msazureHost']) and !empty($_POST['msazureAccName']) and !empty($_POST['msazureKey'])) {
     //create new Microsoft Azure container if needed
     if (!class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
         require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
     }
     try {
Example #6
function backwpup_get_gstorage_buckets($args = '')
{
    if (is_array($args)) {
        extract($args);
        $ajax = false;
    } else {
        check_ajax_referer('backwpupeditjob_ajax_nonce');
        if (!current_user_can(BACKWPUP_USER_CAPABILITY)) {
            die('-1');
        }
        $GStorageAccessKey = $_POST['GStorageAccessKey'];
        $GStorageSecret = $_POST['GStorageSecret'];
        $GStorageselected = $_POST['GStorageselected'];
        $ajax = true;
    }
    if (!class_exists('CFRuntime')) {
        require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    }
    if (empty($GStorageAccessKey)) {
        echo '<span id="GStorageBucket" style="color:red;">' . __('Missing access key!', 'backwpup') . '</span>';
        if ($ajax) {
            die;
        } else {
            return;
        }
    }
    if (empty($GStorageSecret)) {
        echo '<span id="GStorageBucket" style="color:red;">' . __('Missing secret access key!', 'backwpup') . '</span>';
        if ($ajax) {
            die;
        } else {
            return;
        }
    }
    try {
        $gstorage = new AmazonS3(array('key' => $GStorageAccessKey, 'secret' => $GStorageSecret, 'certificate_authority' => true));
        $gstorage->set_hostname('storage.googleapis.com');
        $gstorage->allow_hostname_override(false);
        $buckets = $gstorage->list_buckets();
    } catch (Exception $e) {
        echo '<span id="GStorageBucket" style="color:red;">' . $e->getMessage() . '</span>';
        if ($ajax) {
            die;
        } else {
            return;
        }
    }
    if ($buckets->status < 200 or $buckets->status >= 300) {
        echo '<span id="GStorageBucket" style="color:red;">' . $buckets->status . ': ' . $buckets->body->Message . '</span>';
        if ($ajax) {
            die;
        } else {
            return;
        }
    }
    if (count($buckets->body->Buckets->Bucket) < 1) {
        echo '<span id="GStorageBucket" style="color:red;">' . __('No bucket found!', 'backwpup') . '</span>';
        if ($ajax) {
            die;
        } else {
            return;
        }
    }
    echo '<select name="GStorageBucket" id="GStorageBucket">';
    foreach ($buckets->body->Buckets->Bucket as $bucket) {
        echo "<option " . selected(strtolower($GStorageselected), strtolower($bucket->Name), false) . ">" . $bucket->Name . "</option>";
    }
    echo '</select>';
    if ($ajax) {
        die;
    } else {
        return;
    }
}
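Because the function calls extract() on an array argument, the non-AJAX path can render the bucket select directly from stored job settings. A minimal sketch; the $jobvalue array and its keys are assumed to come from a saved job:

// Hypothetical direct (non-AJAX) call: the array keys must match the variable
// names that extract($args) creates inside backwpup_get_gstorage_buckets().
backwpup_get_gstorage_buckets(array(
    'GStorageAccessKey' => $jobvalue['GStorageAccessKey'],
    'GStorageSecret'    => $jobvalue['GStorageSecret'],
    'GStorageselected'  => $jobvalue['GStorageBucket'],
));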