Example #1
 /**
  * @param $job_object
  *
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
         $job_object->log(sprintf(__('%d. Try to send backup file to Dropbox …', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']));
     }
     try {
         $dropbox = new BackWPup_Destination_Dropbox_API($job_object->job['dropboxroot']);
        //change the OAuth1 token to an OAuth2 token
         if (!empty($job_object->job['dropboxsecret']) && empty($job_object->job['dropboxtoken']['access_token'])) {
             $dropbox->setOAuthTokens(array('access_token' => $job_object->job['dropboxtoken'], 'oauth_token_secret' => BackWPup_Encryption::decrypt($job_object->job['dropboxsecret'])));
             $job_object->job['dropboxtoken'] = $dropbox->token_from_oauth1();
             BackWPup_Option::update($job_object->job['jobid'], 'dropboxtoken', $job_object->job['dropboxtoken']);
             BackWPup_Option::delete($job_object->job['jobid'], 'dropboxsecret');
         }
         // set the tokens
         $dropbox->setOAuthTokens($job_object->job['dropboxtoken']);
         //get account info
         if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
             $info = $dropbox->accountInfo();
             if (!empty($info['uid'])) {
                 if ($job_object->is_debug()) {
                     $user = $info['display_name'] . ' (' . $info['email'] . ')';
                 } else {
                     $user = $info['display_name'];
                 }
                 $job_object->log(sprintf(__('Authenticated with Dropbox of user: %s', 'backwpup'), $user));
                 //Quota
                 if ($job_object->is_debug()) {
                    $dropboxfreespace = $info['quota_info']['quota'] - $info['quota_info']['shared'] - $info['quota_info']['normal'];
                    $job_object->log(sprintf(__('%s available on your Dropbox', 'backwpup'), size_format($dropboxfreespace, 2)));
                 }
             } else {
                 $job_object->log(__('Not Authenticated with Dropbox!', 'backwpup'), E_USER_ERROR);
                 return false;
             }
             $job_object->log(__('Uploading to Dropbox …', 'backwpup'));
         }
         // put the file
         self::$backwpup_job_object =& $job_object;
         if ($job_object->substeps_done < $job_object->backup_filesize) {
             //only if upload not complete
             $response = $dropbox->upload($job_object->backup_folder . $job_object->backup_file, $job_object->job['dropboxdir'] . $job_object->backup_file);
             if ($response['bytes'] == $job_object->backup_filesize) {
                 if (!empty($job_object->job['jobid'])) {
                     BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloaddropbox&file=' . ltrim($response['path'], '/') . '&jobid=' . $job_object->job['jobid']);
                 }
                 $job_object->substeps_done = 1 + $job_object->backup_filesize;
                 $job_object->log(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://content.dropboxapi.com/1/files/' . $job_object->job['dropboxroot'] . $response['path']), E_USER_NOTICE);
            } else {
                if (!empty($response['error'])) {
                    $job_object->log(sprintf(__('Error transferring backup to %s.', 'backwpup') . ' ' . $response['error'], __('Dropbox', 'backwpup')), E_USER_ERROR);
                } else {
                    $job_object->log(__('Uploaded file size and local file size don\'t match.', 'backwpup'), E_USER_ERROR);
                }
                return false;
            }
         }
         $backupfilelist = array();
         $filecounter = 0;
         $files = array();
         $metadata = $dropbox->metadata($job_object->job['dropboxdir']);
         if (is_array($metadata)) {
             foreach ($metadata['contents'] as $data) {
                 if ($data['is_dir'] != true) {
                     $file = basename($data['path']);
                     if ($job_object->is_backup_archive($file)) {
                         $backupfilelist[strtotime($data['modified'])] = $file;
                     }
                     $files[$filecounter]['folder'] = "https://content.dropboxapi.com/1/files/" . $job_object->job['dropboxroot'] . dirname($data['path']) . "/";
                     $files[$filecounter]['file'] = $data['path'];
                     $files[$filecounter]['filename'] = basename($data['path']);
                     $files[$filecounter]['downloadurl'] = network_admin_url('admin.php?page=backwpupbackups&action=downloaddropbox&file=' . $data['path'] . '&jobid=' . $job_object->job['jobid']);
                     $files[$filecounter]['filesize'] = $data['bytes'];
                     $files[$filecounter]['time'] = strtotime($data['modified']) + get_option('gmt_offset') * 3600;
                     $filecounter++;
                 }
             }
         }
         if ($job_object->job['dropboxmaxbackups'] > 0 && is_object($dropbox)) {
             //Delete old backups
             if (count($backupfilelist) > $job_object->job['dropboxmaxbackups']) {
                 ksort($backupfilelist);
                $numdeletefiles = 0;
                while ($file = array_shift($backupfilelist)) {
                    if (count($backupfilelist) < $job_object->job['dropboxmaxbackups']) {
                        break;
                    }
                    //delete the file on Dropbox
                    $response = $dropbox->fileopsDelete($job_object->job['dropboxdir'] . $file);
                    if (!empty($response['is_deleted'])) {
                        foreach ($files as $key => $filedata) {
                            if ($filedata['file'] == '/' . $job_object->job['dropboxdir'] . $file) {
                                unset($files[$key]);
                            }
                        }
                        $numdeletefiles++;
                    } else {
                        $job_object->log(sprintf(__('Error while deleting file from Dropbox: %s', 'backwpup'), $file), E_USER_ERROR);
                    }
                }
                if ($numdeletefiles > 0) {
                    $job_object->log(sprintf(_n('One file deleted from Dropbox', '%d files deleted on Dropbox', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
                }
                 }
             }
         }
         set_site_transient('backwpup_' . $job_object->job['jobid'] . '_dropbox', $files, YEAR_IN_SECONDS);
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('Dropbox API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return false;
     }
     $job_object->substeps_done++;
     return true;
 }
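All eight examples implement the same BackWPup destination contract: `substeps_todo` is set to the backup's byte count plus a couple of bookkeeping substeps, `substeps_done` records progress so an interrupted step can resume, and the return value tells the job runner whether the step finished (`true`) or should be retried (`false`). A minimal sketch of that shared skeleton; `upload_to_service()` is a hypothetical placeholder for the service-specific transfer, not a BackWPup method:

 public function job_run_archive(BackWPup_Job $job_object)
 {
     // One substep per uploaded byte, plus two bookkeeping substeps.
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     try {
         // Resume support: skip the transfer if a previous run finished it.
         if ($job_object->substeps_done < $job_object->backup_filesize) {
             $this->upload_to_service($job_object); // hypothetical helper
             $job_object->substeps_done = 1 + $job_object->backup_filesize;
         }
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, $e->getMessage(), $e->getFile(), $e->getLine());
         return false; // not finished; the runner may retry this step
     }
     $job_object->substeps_done++;
     return true; // step complete
 }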
Example #2
 /**
  * @param $job_object
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
         $job_object->log(sprintf(__('%d. Try to send backup file to an FTP server&#160;&hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     }
     if (!empty($job_object->job['ftpssl'])) {
         //make SSL FTP connection
         if (function_exists('ftp_ssl_connect')) {
             $ftp_conn_id = ftp_ssl_connect($job_object->job['ftphost'], $job_object->job['ftphostport'], $job_object->job['ftptimeout']);
             if ($ftp_conn_id) {
                 $job_object->log(sprintf(__('Connected via explicit SSL-FTP to server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_NOTICE);
             } else {
                 $job_object->log(sprintf(__('Cannot connect via explicit SSL-FTP to server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_ERROR);
                 return FALSE;
             }
         } else {
             $job_object->log(__('PHP function to connect with explicit SSL-FTP to server does not exist!', 'backwpup'), E_USER_ERROR);
             return TRUE;
         }
     } else {
        //make a normal FTP connection when SSL is not used
         $ftp_conn_id = ftp_connect($job_object->job['ftphost'], $job_object->job['ftphostport'], $job_object->job['ftptimeout']);
         if ($ftp_conn_id) {
             $job_object->log(sprintf(__('Connected to FTP server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_NOTICE);
         } else {
             $job_object->log(sprintf(__('Cannot connect to FTP server: %s', 'backwpup'), $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport']), E_USER_ERROR);
             return FALSE;
         }
     }
     //FTP Login
     $job_object->log(sprintf(__('FTP client command: %s', 'backwpup'), 'USER ' . $job_object->job['ftpuser']), E_USER_NOTICE);
     if ($loginok = @ftp_login($ftp_conn_id, $job_object->job['ftpuser'], BackWPup_Encryption::decrypt($job_object->job['ftppass']))) {
         $job_object->log(sprintf(__('FTP server response: %s', 'backwpup'), 'User ' . $job_object->job['ftpuser'] . ' logged in.'), E_USER_NOTICE);
     } else {
        //if PHP's ftp_login() does not work, fall back to raw FTP commands
         $return = ftp_raw($ftp_conn_id, 'USER ' . $job_object->job['ftpuser']);
         $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $return[0]), E_USER_NOTICE);
         if (substr(trim($return[0]), 0, 3) <= 400) {
             $job_object->log(sprintf(__('FTP client command: %s', 'backwpup'), 'PASS *******'), E_USER_NOTICE);
             $return = ftp_raw($ftp_conn_id, 'PASS ' . BackWPup_Encryption::decrypt($job_object->job['ftppass']));
             if (substr(trim($return[0]), 0, 3) <= 400) {
                 $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $return[0]), E_USER_NOTICE);
                 $loginok = TRUE;
             } else {
                 $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $return[0]), E_USER_ERROR);
             }
         }
     }
     if (!$loginok) {
         return FALSE;
     }
     //SYSTYPE
     $job_object->log(sprintf(__('FTP client command: %s', 'backwpup'), 'SYST'), E_USER_NOTICE);
     $systype = ftp_systype($ftp_conn_id);
     if ($systype) {
         $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), $systype), E_USER_NOTICE);
     } else {
         $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), __('Error getting SYSTYPE', 'backwpup')), E_USER_ERROR);
     }
    //use the current FTP directory if none is configured
     if (empty($job_object->job['ftpdir'])) {
         $job_object->job['ftpdir'] = trailingslashit(ftp_pwd($ftp_conn_id));
     }
     // prepend actual ftp dir if relative dir
     if (substr($job_object->job['ftpdir'], 0, 1) != '/') {
         $job_object->job['ftpdir'] = trailingslashit(ftp_pwd($ftp_conn_id)) . $job_object->job['ftpdir'];
     }
    //test the FTP directory and create it if it does not exist
     if ($job_object->job['ftpdir'] != '/') {
         @ftp_chdir($ftp_conn_id, '/');
         //go to root
         $ftpdirs = explode('/', trim($job_object->job['ftpdir'], '/'));
         foreach ($ftpdirs as $ftpdir) {
             if (empty($ftpdir)) {
                 continue;
             }
             if (!@ftp_chdir($ftp_conn_id, $ftpdir)) {
                 if (@ftp_mkdir($ftp_conn_id, $ftpdir)) {
                     $job_object->log(sprintf(__('FTP Folder "%s" created!', 'backwpup'), $ftpdir), E_USER_NOTICE);
                     ftp_chdir($ftp_conn_id, $ftpdir);
                 } else {
                     $job_object->log(sprintf(__('FTP Folder "%s" cannot be created!', 'backwpup'), $ftpdir), E_USER_ERROR);
                     return FALSE;
                 }
             }
         }
     }
     // Get the current working directory
     $current_ftp_dir = trailingslashit(ftp_pwd($ftp_conn_id));
     if ($job_object->substeps_done == 0) {
         $job_object->log(sprintf(__('FTP current folder is: %s', 'backwpup'), $current_ftp_dir), E_USER_NOTICE);
     }
     //get file size to resume upload
     @clearstatcache();
     $job_object->substeps_done = @ftp_size($ftp_conn_id, $job_object->job['ftpdir'] . $job_object->backup_file);
     if ($job_object->substeps_done == -1) {
         $job_object->substeps_done = 0;
     }
     //PASV
     $job_object->log(sprintf(__('FTP client command: %s', 'backwpup'), 'PASV'), E_USER_NOTICE);
     if ($job_object->job['ftppasv']) {
         if (ftp_pasv($ftp_conn_id, TRUE)) {
             $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), __('Entering passive mode', 'backwpup')), E_USER_NOTICE);
         } else {
             $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), __('Cannot enter passive mode', 'backwpup')), E_USER_WARNING);
         }
     } else {
         if (ftp_pasv($ftp_conn_id, FALSE)) {
             $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), __('Entering normal mode', 'backwpup')), E_USER_NOTICE);
         } else {
             $job_object->log(sprintf(__('FTP server reply: %s', 'backwpup'), __('Cannot enter normal mode', 'backwpup')), E_USER_WARNING);
         }
     }
     if ($job_object->substeps_done < $job_object->backup_filesize) {
         $job_object->log(__('Starting upload to FTP &#160;&hellip;', 'backwpup'), E_USER_NOTICE);
         if ($fp = fopen($job_object->backup_folder . $job_object->backup_file, 'rb')) {
             //go to actual file pos
             fseek($fp, $job_object->substeps_done);
             $ret = ftp_nb_fput($ftp_conn_id, $current_ftp_dir . $job_object->backup_file, $fp, FTP_BINARY, $job_object->substeps_done);
             while ($ret == FTP_MOREDATA) {
                 $job_object->substeps_done = ftell($fp);
                 $job_object->update_working_data();
                 $job_object->do_restart_time();
                 $ret = ftp_nb_continue($ftp_conn_id);
             }
             if ($ret != FTP_FINISHED) {
                 $job_object->log(__('Cannot transfer backup to FTP server!', 'backwpup'), E_USER_ERROR);
                 return FALSE;
             } else {
                 $job_object->substeps_done = $job_object->backup_filesize + 1;
                 $job_object->log(sprintf(__('Backup transferred to FTP server: %s', 'backwpup'), $current_ftp_dir . $job_object->backup_file), E_USER_NOTICE);
                 if (!empty($job_object->job['jobid'])) {
                     BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', "ftp://" . $job_object->job['ftpuser'] . ":" . BackWPup_Encryption::decrypt($job_object->job['ftppass']) . "@" . $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport'] . $current_ftp_dir . $job_object->backup_file);
                 }
             }
             fclose($fp);
         } else {
             $job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
             return FALSE;
         }
     }
     $backupfilelist = array();
     $filecounter = 0;
     $files = array();
     if ($filelist = ftp_nlist($ftp_conn_id, '.')) {
         foreach ($filelist as $file) {
             if (basename($file) != '.' && basename($file) != '..') {
                 if ($job_object->is_backup_archive($file)) {
                     $time = ftp_mdtm($ftp_conn_id, $file);
                     if ($time != -1) {
                         $backupfilelist[$time] = basename($file);
                     } else {
                         $backupfilelist[] = basename($file);
                     }
                 }
                 $files[$filecounter]['folder'] = 'ftp://' . $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport'] . $job_object->job['ftpdir'];
                 $files[$filecounter]['file'] = $job_object->job['ftpdir'] . basename($file);
                 $files[$filecounter]['filename'] = basename($file);
                 $files[$filecounter]['downloadurl'] = 'ftp://' . rawurlencode($job_object->job['ftpuser']) . ':' . rawurlencode(BackWPup_Encryption::decrypt($job_object->job['ftppass'])) . '@' . $job_object->job['ftphost'] . ':' . $job_object->job['ftphostport'] . $job_object->job['ftpdir'] . basename($file);
                 $files[$filecounter]['filesize'] = ftp_size($ftp_conn_id, $file);
                 $files[$filecounter]['time'] = ftp_mdtm($ftp_conn_id, $file);
                 $filecounter++;
             }
         }
     }
     if (!empty($job_object->job['ftpmaxbackups']) && $job_object->job['ftpmaxbackups'] > 0) {
         //Delete old backups
         if (count($backupfilelist) > $job_object->job['ftpmaxbackups']) {
             ksort($backupfilelist);
            $numdeletefiles = 0;
             while ($file = array_shift($backupfilelist)) {
                 if (count($backupfilelist) < $job_object->job['ftpmaxbackups']) {
                     break;
                 }
                 if (ftp_delete($ftp_conn_id, $file)) {
                     //delete files on ftp
                     foreach ($files as $key => $filedata) {
                         if ($filedata['file'] == $job_object->job['ftpdir'] . $file) {
                             unset($files[$key]);
                         }
                     }
                    $numdeletefiles++;
                 } else {
                     $job_object->log(sprintf(__('Cannot delete "%s" on FTP server!', 'backwpup'), $job_object->job['ftpdir'] . $file), E_USER_ERROR);
                 }
             }
            if ($numdeletefiles > 0) {
                $job_object->log(sprintf(_n('One file deleted on FTP server', '%d files deleted on FTP server', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
             }
         }
     }
     set_site_transient('backwpup_' . $job_object->job['jobid'] . '_ftp', $files, YEAR_IN_SECONDS);
     $job_object->substeps_done++;
     ftp_close($ftp_conn_id);
     return TRUE;
 }
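The upload above resumes by asking the server for the partial file's size (`ftp_size()` returns -1 when the file does not exist yet), seeking the local handle to that offset, and streaming the rest with PHP's non-blocking `ftp_nb_fput()`/`ftp_nb_continue()` pair. The same pattern in isolation; host, credentials, and paths are placeholders:

 $conn = ftp_connect('ftp.example.com', 21, 90);
 ftp_login($conn, 'user', 'secret');
 ftp_pasv($conn, true);

 $offset = ftp_size($conn, 'backup.tar.gz'); // -1 if nothing was uploaded yet
 if ($offset < 0) {
     $offset = 0;
 }

 $fp = fopen('/path/to/backup.tar.gz', 'rb');
 fseek($fp, $offset);
 $ret = ftp_nb_fput($conn, 'backup.tar.gz', $fp, FTP_BINARY, $offset);
 while ($ret === FTP_MOREDATA) {
     // ftell($fp) is the durable resume point; BackWPup stores it in substeps_done.
     $ret = ftp_nb_continue($conn);
 }
 fclose($fp);
 ftp_close($conn);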
Example #3
 /**
  * @param $job_object BackWPup_Job
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     $job_object->log(sprintf(__('%d. Try to send backup to SugarSync&#160;&hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     try {
         $sugarsync = new BackWPup_Destination_SugarSync_API($job_object->job['sugarrefreshtoken']);
         //Check Quota
         $user = $sugarsync->user();
         if (!empty($user->nickname)) {
             $job_object->log(sprintf(__('Authenticated to SugarSync with nickname %s', 'backwpup'), $user->nickname), E_USER_NOTICE);
         }
        $sugarsyncfreespace = (double) $user->quota->limit - (double) $user->quota->usage;
        //casting to float avoids a display bug when no free space is left
        if ($job_object->backup_filesize > $sugarsyncfreespace) {
            $job_object->log(sprintf(_x('Not enough disk space available on SugarSync. Available: %s.', 'Available space on SugarSync', 'backwpup'), size_format($sugarsyncfreespace, 2)), E_USER_ERROR);
            $job_object->substeps_todo = 1 + $job_object->backup_filesize;
            return TRUE;
        } else {
            $job_object->log(sprintf(__('%s available at SugarSync', 'backwpup'), size_format($sugarsyncfreespace, 2)), E_USER_NOTICE);
        }
         //Create and change folder
         $sugarsync->mkdir($job_object->job['sugardir'], $job_object->job['sugarroot']);
         $dirid = $sugarsync->chdir($job_object->job['sugardir'], $job_object->job['sugarroot']);
         //Upload to SugarSync
         $job_object->substeps_done = 0;
         $job_object->log(__('Starting upload to SugarSync&#160;&hellip;', 'backwpup'), E_USER_NOTICE);
         self::$backwpup_job_object =& $job_object;
        $response = $sugarsync->upload($job_object->backup_folder . $job_object->backup_file);
        if (is_object($response)) {
            if (!empty($job_object->job['jobid'])) {
                BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $response . '&jobid=' . $job_object->job['jobid']);
             }
             $job_object->substeps_done++;
             $job_object->log(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://' . $user->nickname . '.sugarsync.com/' . $sugarsync->showdir($dirid) . $job_object->backup_file), E_USER_NOTICE);
         } else {
             $job_object->log(__('Cannot transfer backup to SugarSync!', 'backwpup'), E_USER_ERROR);
             return FALSE;
         }
         $backupfilelist = array();
         $files = array();
         $filecounter = 0;
         $dir = $sugarsync->showdir($dirid);
         $getfiles = $sugarsync->getcontents('file');
         if (is_object($getfiles)) {
             foreach ($getfiles->file as $getfile) {
                 $getfile->displayName = utf8_decode((string) $getfile->displayName);
                 if ($job_object->is_backup_archive($getfile->displayName)) {
                     $backupfilelist[strtotime((string) $getfile->lastModified)] = (string) $getfile->ref;
                 }
                 $files[$filecounter]['folder'] = 'https://' . (string) $user->nickname . '.sugarsync.com/' . $dir;
                 $files[$filecounter]['file'] = (string) $getfile->ref;
                 $files[$filecounter]['filename'] = (string) $getfile->displayName;
                 $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $getfile->ref . '&jobid=' . $job_object->job['jobid'];
                 $files[$filecounter]['filesize'] = (int) $getfile->size;
                 $files[$filecounter]['time'] = strtotime((string) $getfile->lastModified) + get_option('gmt_offset') * 3600;
                 $filecounter++;
             }
         }
         if (!empty($job_object->job['sugarmaxbackups']) && $job_object->job['sugarmaxbackups'] > 0) {
             //Delete old backups
             if (count($backupfilelist) > $job_object->job['sugarmaxbackups']) {
                 ksort($backupfilelist);
                $numdeletefiles = 0;
                 while ($file = array_shift($backupfilelist)) {
                     if (count($backupfilelist) < $job_object->job['sugarmaxbackups']) {
                         break;
                     }
                     $sugarsync->delete($file);
                     //delete files on Cloud
                     foreach ($files as $key => $filedata) {
                         if ($filedata['file'] == $file) {
                             unset($files[$key]);
                         }
                     }
                    $numdeletefiles++;
                 }
                if ($numdeletefiles > 0) {
                    $job_object->log(sprintf(_n('One file deleted on SugarSync folder', '%d files deleted on SugarSync folder', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
                 }
             }
         }
         set_site_transient('BackWPup_' . $job_object->job['jobid'] . '_SUGARSYNC', $files, 60 * 60 * 24 * 7);
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('SugarSync API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     $job_object->substeps_done++;
     return TRUE;
 }
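Every destination ends with the same retention loop: key the archives by modification timestamp, `ksort()` so the oldest comes first, then shift entries off until only the configured maximum would remain. The logic extracted into a standalone, runnable helper (names are illustrative):

 /**
  * Return the file names that must be deleted so that at most
  * $max backups remain. $backupfilelist is keyed by timestamp.
  */
 function backups_to_delete(array $backupfilelist, $max)
 {
     $delete = array();
     ksort($backupfilelist); // oldest first
     while ($file = array_shift($backupfilelist)) {
         if (count($backupfilelist) < $max) {
             break; // enough removed; the remaining files are kept
         }
         $delete[] = $file;
     }
     return $delete;
 }

 // Keep the 3 newest of 5 backups: the two oldest are returned.
 $list = array(100 => 'a.zip', 200 => 'b.zip', 300 => 'c.zip', 400 => 'd.zip', 500 => 'e.zip');
 print_r(backups_to_delete($list, 3)); // ['a.zip', 'b.zip']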
Example #4
 /**
  * @param $job_object BackWPup_Job
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     $job_object->substeps_done = 0;
     $job_object->log(sprintf(__('%d. Trying to send backup file to Rackspace cloud &hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     try {
         $conn = new OpenCloud\Rackspace(self::get_auth_url_by_region($job_object->job['rscregion']), array('username' => $job_object->job['rscusername'], 'apiKey' => BackWPup_Encryption::decrypt($job_object->job['rscapikey'])));
         //connect to cloud files
         $ostore = $conn->objectStoreService('cloudFiles', $job_object->job['rscregion'], 'publicURL');
         $container = $ostore->getContainer($job_object->job['rsccontainer']);
         $job_object->log(sprintf(__('Connected to Rackspace cloud files container %s', 'backwpup'), $job_object->job['rsccontainer']));
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('Rackspace Cloud API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     try {
         //Transfer Backup to Rackspace Cloud
         $job_object->substeps_done = 0;
         $job_object->log(__('Upload to Rackspace cloud started &hellip;', 'backwpup'), E_USER_NOTICE);
         if ($handle = fopen($job_object->backup_folder . $job_object->backup_file, 'rb')) {
            $uploaded = $container->uploadObject($job_object->job['rscdir'] . $job_object->backup_file, $handle);
             fclose($handle);
         } else {
             $job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
             return FALSE;
         }
        // Alternative: chunked upload via the SDK's object transfer helper.
        // $transfer = $container->setupObjectTransfer(array(
        //     'name'        => $job_object->job['rscdir'] . $job_object->backup_file,
        //     'path'        => $job_object->backup_folder . $job_object->backup_file,
        //     'concurrency' => 1,
        //     'partSize'    => 4 * 1024 * 1024,
        // ));
        // $uploaded = $transfer->upload();
        if ($uploaded) {
             $job_object->log(__('Backup File transferred to RSC://', 'backwpup') . $job_object->job['rsccontainer'] . '/' . $job_object->job['rscdir'] . $job_object->backup_file, E_USER_NOTICE);
             $job_object->substeps_done = 1 + $job_object->backup_filesize;
             if (!empty($job_object->job['jobid'])) {
                 BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadrsc&file=' . $job_object->job['rscdir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
             }
         } else {
             $job_object->log(__('Cannot transfer backup to Rackspace cloud.', 'backwpup'), E_USER_ERROR);
             return FALSE;
         }
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('Rackspace Cloud API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     try {
         $backupfilelist = array();
         $filecounter = 0;
         $files = array();
         $objlist = $container->objectList(array('prefix' => $job_object->job['rscdir']));
         while ($object = $objlist->next()) {
             $file = basename($object->getName());
             if ($job_object->job['rscdir'] . $file == $object->getName()) {
                 //only in the folder and not in complete bucket
                 if ($job_object->is_backup_archive($file)) {
                     $backupfilelist[strtotime($object->getLastModified())] = $object;
                 }
             }
             $files[$filecounter]['folder'] = "RSC://" . $job_object->job['rsccontainer'] . "/" . dirname($object->getName()) . "/";
             $files[$filecounter]['file'] = $object->getName();
             $files[$filecounter]['filename'] = basename($object->getName());
             $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadrsc&file=' . $object->getName() . '&jobid=' . $job_object->job['jobid'];
             $files[$filecounter]['filesize'] = $object->getContentLength();
             $files[$filecounter]['time'] = strtotime($object->getLastModified());
             $filecounter++;
         }
         if (!empty($job_object->job['rscmaxbackups']) && $job_object->job['rscmaxbackups'] > 0) {
             //Delete old backups
             if (count($backupfilelist) > $job_object->job['rscmaxbackups']) {
                 ksort($backupfilelist);
                $numdeletefiles = 0;
                 while ($file = array_shift($backupfilelist)) {
                     if (count($backupfilelist) < $job_object->job['rscmaxbackups']) {
                         break;
                     }
                     foreach ($files as $key => $filedata) {
                         if ($filedata['file'] == $file->getName()) {
                             unset($files[$key]);
                         }
                     }
                     $file->delete();
                    $numdeletefiles++;
                 }
                if ($numdeletefiles > 0) {
                    $job_object->log(sprintf(_n('One file deleted on Rackspace cloud container.', '%d files deleted on Rackspace cloud container.', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
                 }
             }
         }
         set_site_transient('backwpup_' . $job_object->job['jobid'] . '_rsc', $files, 60 * 60 * 24 * 7);
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('Rackspace Cloud API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     $job_object->substeps_done++;
     return TRUE;
 }
Example #5
 /**
  * @param $job_object BackWPup_Job
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
         $job_object->log(sprintf(__('%d. Trying to send backup file to S3 Service&#160;&hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     }
     try {
         $s3 = Aws\S3\S3Client::factory(array('key' => $job_object->job['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']), 'region' => $job_object->job['s3region'], 'base_url' => $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']), 'scheme' => 'https', 'ssl.certificate_authority' => BackWPup::get_plugin_data('cacert')));
         if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY'] && $job_object->substeps_done < $job_object->backup_filesize) {
             if ($s3->doesBucketExist($job_object->job['s3bucket'])) {
                 $bucketregion = $s3->getBucketLocation(array('Bucket' => $job_object->job['s3bucket']));
                 $job_object->log(sprintf(__('Connected to S3 Bucket "%1$s" in %2$s', 'backwpup'), $job_object->job['s3bucket'], $bucketregion->get('Location')), E_USER_NOTICE);
             } else {
                 $job_object->log(sprintf(__('S3 Bucket "%s" does not exist!', 'backwpup'), $job_object->job['s3bucket']), E_USER_ERROR);
                 return TRUE;
             }
             if ($job_object->job['s3multipart'] && empty($job_object->steps_data[$job_object->step_working]['UploadId'])) {
                //Check for multipart uploads that were not aborted
                 $job_object->log(__('Checking for not aborted multipart Uploads&#160;&hellip;', 'backwpup'));
                 $multipart_uploads = $s3->listMultipartUploads(array('Bucket' => $job_object->job['s3bucket'], 'Prefix' => (string) $job_object->job['s3dir']));
                 $uploads = $multipart_uploads->get('Uploads');
                 if (!empty($uploads)) {
                     foreach ($uploads as $upload) {
                         $s3->abortMultipartUpload(array('Bucket' => $job_object->job['s3bucket'], 'Key' => $upload['Key'], 'UploadId' => $upload['UploadId']));
                         $job_object->log(sprintf(__('Upload for %s aborted.', 'backwpup'), $upload['Key']));
                     }
                 }
             }
             //transfer file to S3
             $job_object->log(__('Starting upload to S3 Service&#160;&hellip;', 'backwpup'));
         }
         if (!$job_object->job['s3multipart'] || $job_object->backup_filesize < 1048576 * 6) {
             //Prepare Upload
             if (!($up_file_handle = fopen($job_object->backup_folder . $job_object->backup_file, 'rb'))) {
                 $job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
                 return FALSE;
             }
             $create_args = array();
             $create_args['Bucket'] = $job_object->job['s3bucket'];
             $create_args['ACL'] = 'private';
            //encryption
             if (!empty($job_object->job['s3ssencrypt'])) {
                 $create_args['ServerSideEncryption'] = $job_object->job['s3ssencrypt'];
             }
             //Storage Class
             if (!empty($job_object->job['s3storageclass'])) {
                 $create_args['StorageClass'] = $job_object->job['s3storageclass'];
             }
             $create_args['Metadata'] = array('BackupTime' => date('Y-m-d H:i:s', $job_object->start_time));
             $create_args['Body'] = $up_file_handle;
             $create_args['Key'] = $job_object->job['s3dir'] . $job_object->backup_file;
             $create_args['ContentType'] = $job_object->get_mime_type($job_object->backup_folder . $job_object->backup_file);
             try {
                 $s3->putObject($create_args);
             } catch (Aws\Common\Exception\MultipartUploadException $e) {
                 $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
                 return FALSE;
             }
         } else {
             //Prepare Upload
             if ($file_handle = fopen($job_object->backup_folder . $job_object->backup_file, 'rb')) {
                 fseek($file_handle, $job_object->substeps_done);
                 try {
                     if (empty($job_object->steps_data[$job_object->step_working]['UploadId'])) {
                         $args = array('ACL' => 'private', 'Bucket' => $job_object->job['s3bucket'], 'ContentType' => $job_object->get_mime_type($job_object->backup_folder . $job_object->backup_file), 'Key' => $job_object->job['s3dir'] . $job_object->backup_file);
                         if (!empty($job_object->job['s3ssencrypt'])) {
                             $args['ServerSideEncryption'] = $job_object->job['s3ssencrypt'];
                         }
                         if (!empty($job_object->job['s3storageclass'])) {
                             $args['StorageClass'] = $job_object->job['s3storageclass'];
                         }
                         $upload = $s3->createMultipartUpload($args);
                         $job_object->steps_data[$job_object->step_working]['UploadId'] = $upload->get('UploadId');
                         $job_object->steps_data[$job_object->step_working]['Parts'] = array();
                         $job_object->steps_data[$job_object->step_working]['Part'] = 1;
                     }
                     while (!feof($file_handle)) {
                         $chunk_upload_start = microtime(TRUE);
                         $part_data = fread($file_handle, 1048576 * 5);
                         //5MB Minimum part size
                         $part = $s3->uploadPart(array('Bucket' => $job_object->job['s3bucket'], 'UploadId' => $job_object->steps_data[$job_object->step_working]['UploadId'], 'Key' => $job_object->job['s3dir'] . $job_object->backup_file, 'PartNumber' => $job_object->steps_data[$job_object->step_working]['Part'], 'Body' => $part_data));
                         $chunk_upload_time = microtime(TRUE) - $chunk_upload_start;
                         $job_object->substeps_done = $job_object->substeps_done + strlen($part_data);
                         $job_object->steps_data[$job_object->step_working]['Parts'][] = array('ETag' => $part->get('ETag'), 'PartNumber' => $job_object->steps_data[$job_object->step_working]['Part']);
                         $job_object->steps_data[$job_object->step_working]['Part']++;
                         $time_remaining = $job_object->do_restart_time();
                         if ($time_remaining < $chunk_upload_time) {
                             $job_object->do_restart_time(TRUE);
                         }
                         $job_object->update_working_data();
                     }
                     $s3->completeMultipartUpload(array('Bucket' => $job_object->job['s3bucket'], 'UploadId' => $job_object->steps_data[$job_object->step_working]['UploadId'], 'Key' => $job_object->job['s3dir'] . $job_object->backup_file, 'Parts' => $job_object->steps_data[$job_object->step_working]['Parts']));
                 } catch (Exception $e) {
                     $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
                     if (!empty($job_object->steps_data[$job_object->step_working]['UploadId'])) {
                         $s3->abortMultipartUpload(array('Bucket' => $job_object->job['s3bucket'], 'UploadId' => $job_object->steps_data[$job_object->step_working]['UploadId'], 'Key' => $job_object->job['s3dir'] . $job_object->backup_file));
                     }
                     unset($job_object->steps_data[$job_object->step_working]['UploadId']);
                     unset($job_object->steps_data[$job_object->step_working]['Parts']);
                     unset($job_object->steps_data[$job_object->step_working]['Part']);
                     $job_object->substeps_done = 0;
                     if (is_resource($file_handle)) {
                         fclose($file_handle);
                     }
                     return FALSE;
                 }
                 fclose($file_handle);
             } else {
                 $job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
                 return FALSE;
             }
         }
         $result = $s3->headObject(array('Bucket' => $job_object->job['s3bucket'], 'Key' => $job_object->job['s3dir'] . $job_object->backup_file));
         if ($result->get('ContentLength') == filesize($job_object->backup_folder . $job_object->backup_file)) {
             $job_object->substeps_done = 1 + $job_object->backup_filesize;
             $job_object->log(sprintf(__('Backup transferred to %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $job_object->backup_file), E_USER_NOTICE);
             if (!empty($job_object->job['jobid'])) {
                 BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $job_object->job['s3dir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
             }
         } else {
             $job_object->log(sprintf(__('Cannot transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result->get("status"), $result->get("Message")), E_USER_ERROR);
         }
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     try {
         $backupfilelist = array();
         $filecounter = 0;
         $files = array();
         $args = array('Bucket' => $job_object->job['s3bucket'], 'Prefix' => (string) $job_object->job['s3dir']);
         $objects = $s3->getIterator('ListObjects', $args);
         if (is_object($objects)) {
             foreach ($objects as $object) {
                 $file = basename($object['Key']);
                 $changetime = strtotime($object['LastModified']) + get_option('gmt_offset') * 3600;
                 if ($job_object->is_backup_archive($file)) {
                     $backupfilelist[$changetime] = $file;
                 }
                 $files[$filecounter]['folder'] = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . dirname($object['Key']);
                 $files[$filecounter]['file'] = $object['Key'];
                 $files[$filecounter]['filename'] = basename($object['Key']);
                 if (!empty($object['StorageClass'])) {
                     $files[$filecounter]['info'] = sprintf(__('Storage Class: %s', 'backwpup'), $object['StorageClass']);
                 }
                 $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $object['Key'] . '&jobid=' . $job_object->job['jobid'];
                 $files[$filecounter]['filesize'] = $object['Size'];
                 $files[$filecounter]['time'] = $changetime;
                 $filecounter++;
             }
         }
         if ($job_object->job['s3maxbackups'] > 0 && is_object($s3)) {
             //Delete old backups
             if (count($backupfilelist) > $job_object->job['s3maxbackups']) {
                 ksort($backupfilelist);
                $numdeletefiles = 0;
                 while ($file = array_shift($backupfilelist)) {
                     if (count($backupfilelist) < $job_object->job['s3maxbackups']) {
                         break;
                     }
                     //delete files on S3
                     $args = array('Bucket' => $job_object->job['s3bucket'], 'Key' => $job_object->job['s3dir'] . $file);
                     if ($s3->deleteObject($args)) {
                         foreach ($files as $key => $filedata) {
                             if ($filedata['file'] == $job_object->job['s3dir'] . $file) {
                                 unset($files[$key]);
                             }
                         }
                        $numdeletefiles++;
                     } else {
                         $job_object->log(sprintf(__('Cannot delete backup from %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $file), E_USER_ERROR);
                     }
                 }
                if ($numdeletefiles > 0) {
                    $job_object->log(sprintf(_n('One file deleted on S3 Bucket.', '%d files deleted on S3 Bucket', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
                 }
             }
         }
         set_site_transient('backwpup_' . $job_object->job['jobid'] . '_s3', $files, YEAR_IN_SECONDS);
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     $job_object->substeps_done = 2 + $job_object->backup_filesize;
     return TRUE;
 }
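The multipart loop above (and the Azure block loop in Example #8) guards against PHP's execution limit with the same trick: time each chunk, ask `do_restart_time()` how many seconds remain, and force an early restart if another chunk of the same size would not fit. The pattern in isolation, assuming a `$job_object` as above:

 $chunk_upload_start = microtime(true);
 // ... upload one chunk and record its part/block ID in steps_data ...
 $chunk_upload_time = microtime(true) - $chunk_upload_start;

 $time_remaining = $job_object->do_restart_time(); // seconds left for this run
 if ($time_remaining < $chunk_upload_time) {
     // Another chunk of this size would not finish in time: restart now,
     // resuming later from the persisted substeps_done offset.
     $job_object->do_restart_time(true);
 }
 $job_object->update_working_data(); // persist progress for the resume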
Example #6
 /**
  * @param $job_object
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = 1;
     if (!empty($job_object->job['jobid'])) {
         BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', add_query_arg(array('page' => 'backwpupbackups', 'action' => 'downloadfolder', 'file' => basename($job_object->backup_file), 'jobid' => $job_object->job['jobid']), network_admin_url('admin.php')));
     }
     //Delete old Backupfiles
     $backupfilelist = array();
     $files = array();
     if (is_writable($job_object->backup_folder) && ($dir = opendir($job_object->backup_folder))) {
         //make file list
         while (FALSE !== ($file = readdir($dir))) {
            if (is_writable($job_object->backup_folder . $file) && !is_dir($job_object->backup_folder . $file) && !is_link($job_object->backup_folder . $file)) {
                 //list for deletion
                 if ($job_object->is_backup_archive($file)) {
                     $backupfilelist[filemtime($job_object->backup_folder . $file)] = $file;
                 }
             }
         }
         closedir($dir);
     }
     if ($job_object->job['maxbackups'] > 0) {
         if (count($backupfilelist) > $job_object->job['maxbackups']) {
             ksort($backupfilelist);
            $numdeletefiles = 0;
             while ($file = array_shift($backupfilelist)) {
                 if (count($backupfilelist) < $job_object->job['maxbackups']) {
                     break;
                 }
                 unlink($job_object->backup_folder . $file);
                 foreach ($files as $key => $filedata) {
                     if ($filedata['file'] == $job_object->backup_folder . $file) {
                         unset($files[$key]);
                     }
                 }
                $numdeletefiles++;
             }
            if ($numdeletefiles > 0) {
                $job_object->log(sprintf(_n('One backup file deleted', '%d backup files deleted', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
             }
         }
     }
     $job_object->substeps_done++;
     return TRUE;
 }
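Note that in this local-folder variant `$files` is declared but never populated, so the `foreach` inside the deletion loop never matches anything and, unlike the other destinations, no file-list transient is stored. A hypothetical sketch of how an entry would be built here, reusing the field names of the transients above and the `add_query_arg()` call from this example:

 // Hypothetical: populate $files the way the remote destinations do.
 $files[] = array(
     'folder'      => $job_object->backup_folder,
     'file'        => $job_object->backup_folder . $file,
     'filename'    => $file,
     'downloadurl' => add_query_arg(array(
         'page'   => 'backwpupbackups',
         'action' => 'downloadfolder',
         'file'   => $file,
         'jobid'  => $job_object->job['jobid'],
     ), network_admin_url('admin.php')),
     'filesize'    => filesize($job_object->backup_folder . $file),
     'time'        => filemtime($job_object->backup_folder . $file),
 );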
Example #7
 /**
  * @param $job_object BackWPup_Job
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     $job_object->log(sprintf(__('%d. Trying to send backup file to S3 Service&#160;&hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     try {
         $s3 = new AmazonS3(array('key' => $job_object->job['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']), 'certificate_authority' => TRUE));
         $base_url = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']);
         if (stristr($base_url, 'amazonaws.com')) {
             $s3->set_region(str_replace(array('http://', 'https://'), '', $base_url));
         } else {
             $s3->set_hostname(str_replace(array('http://', 'https://'), '', $base_url));
             $s3->allow_hostname_override(FALSE);
             if (substr($base_url, -1) == '/') {
                 $s3->enable_path_style(TRUE);
             }
         }
         if (stristr($base_url, 'http://')) {
             $s3->disable_ssl();
         }
         if ($s3->if_bucket_exists($job_object->job['s3bucket'])) {
             $job_object->log(sprintf(__('Connected to S3 Bucket "%1$s" in %2$s', 'backwpup'), $job_object->job['s3bucket'], $base_url), E_USER_NOTICE);
         } else {
             $job_object->log(sprintf(__('S3 Bucket "%s" does not exist!', 'backwpup'), $job_object->job['s3bucket']), E_USER_ERROR);
             return TRUE;
         }
         //transfer file to S3
         $job_object->log(__('Starting upload to S3 Service&#160;&hellip;', 'backwpup'), E_USER_NOTICE);
         //Transfer Backup to S3
         if ($job_object->job['s3storageclass'] == 'REDUCED_REDUNDANCY') {
             //set reduced redundancy or not
             $storage = AmazonS3::STORAGE_REDUCED;
         } else {
             $storage = AmazonS3::STORAGE_STANDARD;
         }
         if (empty($job_object->job['s3ssencrypt'])) {
             $job_object->job['s3ssencrypt'] = NULL;
         }
         //set progress bar
         $s3->register_streaming_read_callback(array($job_object, 'curl_read_callback'));
         $result = $s3->create_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $job_object->backup_file, array('fileUpload' => $job_object->backup_folder . $job_object->backup_file, 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'encryption' => $job_object->job['s3ssencrypt']));
         if ($result->status >= 200 and $result->status < 300) {
             $job_object->substeps_done = 1 + $job_object->backup_filesize;
             $job_object->log(sprintf(__('Backup transferred to %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $job_object->backup_file), E_USER_NOTICE);
             if (!empty($job_object->job['jobid'])) {
                 BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $job_object->job['s3dir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
             }
         } else {
             $job_object->log(sprintf(__('Cannot transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result->status, $result->body), E_USER_ERROR);
         }
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     try {
         $backupfilelist = array();
         $filecounter = 0;
         $files = array();
         $objects = $s3->list_objects($job_object->job['s3bucket'], array('prefix' => $job_object->job['s3dir']));
         if (is_object($objects)) {
             foreach ($objects->body->Contents as $object) {
                 $file = basename((string) $object->Key);
                 $changetime = strtotime((string) $object->LastModified) + get_option('gmt_offset') * 3600;
                 if ($job_object->is_backup_archive($file)) {
                     $backupfilelist[$changetime] = $file;
                 }
                 $files[$filecounter]['folder'] = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . dirname((string) $object->Key);
                 $files[$filecounter]['file'] = (string) $object->Key;
                 $files[$filecounter]['filename'] = basename($object->Key);
                 $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . (string) $object->Key . '&jobid=' . $job_object->job['jobid'];
                 $files[$filecounter]['filesize'] = (int) $object->Size;
                 $files[$filecounter]['time'] = $changetime;
                 $filecounter++;
             }
         }
         if ($job_object->job['s3maxbackups'] > 0 && is_object($s3)) {
             //Delete old backups
             if (count($backupfilelist) > $job_object->job['s3maxbackups']) {
                 ksort($backupfilelist);
                $numdeletefiles = 0;
                 while ($file = array_shift($backupfilelist)) {
                     if (count($backupfilelist) < $job_object->job['s3maxbackups']) {
                         break;
                     }
                     //delete files on S3
                     $delete_s3 = $s3->delete_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $file);
                     if ($delete_s3) {
                         foreach ($files as $key => $filedata) {
                             if ($filedata['file'] == $job_object->job['s3dir'] . $file) {
                                 unset($files[$key]);
                             }
                         }
                        $numdeletefiles++;
                     } else {
                         $job_object->log(sprintf(__('Cannot delete backup from %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $file), E_USER_ERROR);
                     }
                 }
                if ($numdeletefiles > 0) {
                    $job_object->log(sprintf(_n('One file deleted on S3 Bucket.', '%d files deleted on S3 Bucket', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
                 }
             }
         }
         set_site_transient('backwpup_' . $job_object->job['jobid'] . '_s3', $files, 60 * 60 * 24 * 7);
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     $job_object->substeps_done = 2 + $job_object->backup_filesize;
     return TRUE;
 }
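Examples #5 and #7 target the same service with different SDK generations, which is why the client setup differs. Both constructions, taken from the examples above, side by side:

 // Legacy AWS SDK for PHP 1.x (Example #7): plain constructor; region and
 // hostname are set afterwards via set_region()/set_hostname().
 $s3 = new AmazonS3(array(
     'key'                   => $job_object->job['s3accesskey'],
     'secret'                => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']),
     'certificate_authority' => true,
 ));

 // AWS SDK for PHP 2.x (Example #5): static factory; region and base_url are
 // passed up front, and multipart uploads go through createMultipartUpload().
 $s3 = Aws\S3\S3Client::factory(array(
     'key'    => $job_object->job['s3accesskey'],
     'secret' => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']),
     'region' => $job_object->job['s3region'],
 ));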
Example #8
 /**
  * @param $job_object
  * @return bool
  */
 public function job_run_archive(BackWPup_Job $job_object)
 {
     $job_object->substeps_todo = $job_object->backup_filesize + 2;
     if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
         $job_object->log(sprintf(__('%d. Try sending backup to a Microsoft Azure (Blob)&#160;&hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     }
     try {
         set_include_path(get_include_path() . PATH_SEPARATOR . BackWPup::get_plugin_data('plugindir') . '/vendor/PEAR/');
         /* @var $blobRestProxy   WindowsAzure\Blob\BlobRestProxy */
        //HTTPS caused an 'SSL: Connection reset by peer' error, which is why HTTP is used here
         $blobRestProxy = WindowsAzure\Common\ServicesBuilder::getInstance()->createBlobService('DefaultEndpointsProtocol=http;AccountName=' . $job_object->job['msazureaccname'] . ';AccountKey=' . BackWPup_Encryption::decrypt($job_object->job['msazurekey']));
         if ($job_object->steps_data[$job_object->step_working]['SAVE_STEP_TRY'] != $job_object->steps_data[$job_object->step_working]['STEP_TRY']) {
            //check that the container exists
             $containers = $blobRestProxy->listContainers()->getContainers();
             $job_object->steps_data[$job_object->step_working]['container_url'] = '';
             foreach ($containers as $container) {
                 if ($container->getName() == $job_object->job['msazurecontainer']) {
                     $job_object->steps_data[$job_object->step_working]['container_url'] = $container->getUrl();
                     break;
                 }
             }
             if (!$job_object->steps_data[$job_object->step_working]['container_url']) {
                 $job_object->log(sprintf(__('MS Azure container "%s" does not exist!', 'backwpup'), $job_object->job['msazurecontainer']), E_USER_ERROR);
                 return TRUE;
             } else {
                 $job_object->log(sprintf(__('Connected to MS Azure container "%s".', 'backwpup'), $job_object->job['msazurecontainer']), E_USER_NOTICE);
             }
             $job_object->log(__('Starting upload to MS Azure&#160;&hellip;', 'backwpup'), E_USER_NOTICE);
         }
         //Prepare Upload
        if ($file_handle = fopen($job_object->backup_folder . $job_object->backup_file, 'rb')) {
            fseek($file_handle, $job_object->substeps_done);
            if (empty($job_object->steps_data[$job_object->step_working]['BlockList'])) {
                $job_object->steps_data[$job_object->step_working]['BlockList'] = array();
            }
            while (!feof($file_handle)) {
                $data = fread($file_handle, 1048576 * 4);
                 //4MB
                 if (strlen($data) == 0) {
                     continue;
                 }
                 $chunk_upload_start = microtime(TRUE);
                 $block_count = count($job_object->steps_data[$job_object->step_working]['BlockList']) + 1;
                 $block_id = md5($data) . str_pad($block_count, 6, "0", STR_PAD_LEFT);
                 $blobRestProxy->createBlobBlock($job_object->job['msazurecontainer'], $job_object->job['msazuredir'] . $job_object->backup_file, $block_id, $data);
                 $job_object->steps_data[$job_object->step_working]['BlockList'][] = $block_id;
                 $chunk_upload_time = microtime(TRUE) - $chunk_upload_start;
                 $job_object->substeps_done = $job_object->substeps_done + strlen($data);
                 $time_remaining = $job_object->do_restart_time();
                 if ($time_remaining < $chunk_upload_time) {
                     $job_object->do_restart_time(TRUE);
                 }
                 $job_object->update_working_data();
             }
            fclose($file_handle);
         } else {
             $job_object->log(__('Can not open source file for transfer.', 'backwpup'), E_USER_ERROR);
             return FALSE;
         }
        //create the block list from the uploaded block IDs
         $blocklist = new WindowsAzure\Blob\Models\BlockList();
         foreach ($job_object->steps_data[$job_object->step_working]['BlockList'] as $block_id) {
             $blocklist->addUncommittedEntry($block_id);
         }
         unset($job_object->steps_data[$job_object->step_working]['BlockList']);
         //Commit Blocks
         $blobRestProxy->commitBlobBlocks($job_object->job['msazurecontainer'], $job_object->job['msazuredir'] . $job_object->backup_file, $blocklist->getEntries());
         $job_object->substeps_done++;
         $job_object->log(sprintf(__('Backup transferred to %s', 'backwpup'), $job_object->steps_data[$job_object->step_working]['container_url'] . '/' . $job_object->job['msazuredir'] . $job_object->backup_file), E_USER_NOTICE);
         if (!empty($job_object->job['jobid'])) {
             BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadmsazure&file=' . $job_object->job['msazuredir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
         }
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('Microsoft Azure API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         $job_object->substeps_done = 0;
         unset($job_object->steps_data[$job_object->step_working]['BlockList']);
        if (isset($file_handle) && is_resource($file_handle)) {
            fclose($file_handle);
         }
         return FALSE;
     }
     try {
         $backupfilelist = array();
         $filecounter = 0;
         $files = array();
         $blob_options = new WindowsAzure\Blob\Models\ListBlobsOptions();
         $blob_options->setPrefix($job_object->job['msazuredir']);
         $blobs = $blobRestProxy->listBlobs($job_object->job['msazurecontainer'], $blob_options)->getBlobs();
         if (is_array($blobs)) {
             foreach ($blobs as $blob) {
                 $file = basename($blob->getName());
                 if ($job_object->is_backup_archive($file)) {
                     $backupfilelist[$blob->getProperties()->getLastModified()->getTimestamp()] = $file;
                 }
                 $files[$filecounter]['folder'] = $job_object->steps_data[$job_object->step_working]['container_url'] . "/" . dirname($blob->getName()) . "/";
                 $files[$filecounter]['file'] = $blob->getName();
                 $files[$filecounter]['filename'] = basename($blob->getName());
                 $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloadmsazure&file=' . $blob->getName() . '&jobid=' . $job_object->job['jobid'];
                 $files[$filecounter]['filesize'] = $blob->getProperties()->getContentLength();
                 $files[$filecounter]['time'] = $blob->getProperties()->getLastModified()->getTimestamp() + get_option('gmt_offset') * 3600;
                 $filecounter++;
             }
         }
         // Delete old backups
         if (!empty($job_object->job['msazuremaxbackups']) && $job_object->job['msazuremaxbackups'] > 0) {
             if (count($backupfilelist) > $job_object->job['msazuremaxbackups']) {
                 ksort($backupfilelist);
                $numdeletefiles = 0;
                 while ($file = array_shift($backupfilelist)) {
                     if (count($backupfilelist) < $job_object->job['msazuremaxbackups']) {
                         break;
                     }
                     $blobRestProxy->deleteBlob($job_object->job['msazurecontainer'], $job_object->job['msazuredir'] . $file);
                     foreach ($files as $key => $filedata) {
                         if ($filedata['file'] == $job_object->job['msazuredir'] . $file) {
                             unset($files[$key]);
                         }
                     }
                    $numdeletefiles++;
                 }
                if ($numdeletefiles > 0) {
                    $job_object->log(sprintf(_n('One file deleted on Microsoft Azure container.', '%d files deleted on Microsoft Azure container.', $numdeletefiles, 'backwpup'), $numdeletefiles), E_USER_NOTICE);
                 }
             }
         }
         set_site_transient('backwpup_' . $job_object->job['jobid'] . '_msazure', $files, YEAR_IN_SECONDS);
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('Microsoft Azure API: %s', 'backwpup'), $e->getMessage()), $e->getFile(), $e->getLine());
         return FALSE;
     }
     $job_object->substeps_done = $job_object->backup_filesize + 2;
     return TRUE;
 }
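A closing note on the Azure block IDs built above: Azure Blob Storage requires all block IDs within one blob to have the same length, so the code concatenates the fixed 32-character MD5 of the chunk with a zero-padded six-digit counter, giving uniform 38-character IDs that also encode the upload order. In isolation:

 $data = 'example chunk of backup data';
 $block_count = 7;
 $block_id = md5($data) . str_pad($block_count, 6, '0', STR_PAD_LEFT);
 echo strlen($block_id); // always 38, whatever the chunk contains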