/**
 * Copy a backup file from a remote destination down into the local backup directory.
 *
 * Supported destination types: 'stash2' (direct download via URL), 'stash'
 * (Stash API + Amazon S3), 'gdrive' (currently disabled — see note in that
 * branch), and 's3'. Any other type logs an error and fails.
 *
 * If a backup with the same name already exists locally, the copy is saved
 * under a randomized 'backup_copy_XXXXX-' name instead of overwriting it.
 *
 * @param string $destination_type  Destination slug ('stash2', 'stash', 'gdrive', 's3').
 * @param string $file              Remote file name; only its basename is used locally.
 * @param array  $settings          Destination settings (e.g. itxapi_username /
 *                                  itxapi_password for the 'stash' type).
 * @param string $url               Direct download URL; required for the 'stash2' type.
 *                                  NEW optional parameter: previously the stash2 branch
 *                                  read an undefined $url variable, so it could never
 *                                  succeed. Existing callers are unaffected.
 *
 * @return bool  True on success; false on failure (details are logged and alerted).
 */
function process_remote_copy($destination_type, $file, $settings, $url = '') {
	pb_backupbuddy::status('details', 'Copying remote `' . $destination_type . '` file `' . $file . '` down to local.');
	pb_backupbuddy::set_greedy_script_limits();

	if (!class_exists('backupbuddy_core')) {
		require_once pb_backupbuddy::plugin_path() . '/classes/core.php';
	}

	// Determine destination filename; avoid clobbering an existing local backup.
	$destination_file = backupbuddy_core::getBackupDirectory() . basename($file);
	if (file_exists($destination_file)) {
		$destination_file = str_replace('backup-', 'backup_copy_' . pb_backupbuddy::random_string(5) . '-', $destination_file);
	}
	pb_backupbuddy::status('details', 'Filename of resulting local copy: `' . $destination_file . '`.');

	if ($destination_type == 'stash2') {
		require_once ABSPATH . 'wp-admin/includes/file.php'; // Provides download_url().

		// Bug fix: $url used to be undefined here. It is now an explicit parameter;
		// fail fast with a clear message if the caller did not supply it.
		if ('' == $url) {
			$error = 'Error #832989324: No download URL provided for stash2 remote copy of file `' . $file . '`.';
			pb_backupbuddy::status('error', $error);
			pb_backupbuddy::alert($error);
			return false;
		}

		pb_backupbuddy::status('details', 'About to begin downloading from URL.');
		$download = download_url($url); // Downloads to a temp file; returns WP_Error on failure.
		pb_backupbuddy::status('details', 'Download process complete.');

		if (is_wp_error($download)) {
			$error = 'Error #832989323: Unable to download file `' . $file . '` from URL: `' . $url . '`. Details: `' . $download->get_error_message() . '`.';
			pb_backupbuddy::status('error', $error);
			pb_backupbuddy::alert($error);
			return false;
		}

		// Move the temp file into the backup directory; always clean up the temp file.
		if (false === copy($download, $destination_file)) {
			$error = 'Error #3329383: Unable to copy file from `' . $download . '` to `' . $destination_file . '`.';
			pb_backupbuddy::status('error', $error);
			pb_backupbuddy::alert($error);
			@unlink($download);
			return false;
		}

		pb_backupbuddy::status('details', 'File saved to `' . $destination_file . '`.');
		@unlink($download);
		return true;
	} // end stash2.

	if ($destination_type == 'stash') {
		$itxapi_username = $settings['itxapi_username'];
		$itxapi_password = $settings['itxapi_password'];

		// Load required files.
		pb_backupbuddy::status('details', 'Load Stash files.');
		require_once pb_backupbuddy::plugin_path() . '/destinations/stash/init.php';
		require_once dirname(dirname(__FILE__)) . '/destinations/_s3lib/aws-sdk/sdk.class.php';
		require_once pb_backupbuddy::plugin_path() . '/destinations/stash/lib/class.itx_helper.php';

		// Talk with the Stash API to get temporary management credentials.
		pb_backupbuddy::status('details', 'Authenticating Stash for remote copy to local.');
		$stash = new ITXAPI_Helper(pb_backupbuddy_destination_stash::ITXAPI_KEY, pb_backupbuddy_destination_stash::ITXAPI_URL, $itxapi_username, $itxapi_password);
		$manage_url = $stash->get_manage_url();
		$request = new RequestCore($manage_url);
		$response = $request->send_request(true);

		// Validate response: HTTP status, JSON decode, then API-level error key.
		if (!$response->isOK()) {
			$error = 'Request for management credentials failed.';
			pb_backupbuddy::status('error', $error);
			pb_backupbuddy::alert($error);
			return false;
		}
		if (!($manage_data = json_decode($response->body, true))) {
			$error = 'Did not get valid JSON response.';
			pb_backupbuddy::status('error', $error);
			pb_backupbuddy::alert($error);
			return false;
		}
		if (isset($manage_data['error'])) {
			$error = 'Error: ' . implode(' - ', $manage_data['error']);
			pb_backupbuddy::status('error', $error);
			pb_backupbuddy::alert($error);
			return false;
		}

		// Connect to S3 with the returned credentials and stream the object to disk.
		pb_backupbuddy::status('details', 'Instantiating S3 object.');
		$s3 = new AmazonS3($manage_data['credentials']);
		pb_backupbuddy::status('details', 'About to get Stash object `' . $file . '`...');
		try {
			$response = $s3->get_object($manage_data['bucket'], $manage_data['subkey'] . pb_backupbuddy_destination_stash::get_remote_path() . $file, array('fileDownload' => $destination_file));
		} catch (Exception $e) {
			pb_backupbuddy::status('error', 'Error #5443984: ' . $e->getMessage());
			error_log('err:' . $e->getMessage());
			return false;
		}

		if ($response->isOK()) {
			pb_backupbuddy::status('details', 'Stash copy to local success.');
			return true;
		} else {
			pb_backupbuddy::status('error', 'Error #894597845. Stash copy to local FAILURE. Details: `' . print_r($response, true) . '`.');
			return false;
		}
	} elseif ($destination_type == 'gdrive') {
		// NOTE(review): this branch is intentionally disabled; everything below the
		// die() is unreachable dead code kept for reference. Remove the die() only
		// once the Google Drive path is confirmed working.
		die('Not implemented here.');
		require_once pb_backupbuddy::plugin_path() . '/destinations/gdrive/init.php';
		$settings = array_merge(pb_backupbuddy_destination_gdrive::$default_settings, $settings);
		if (true === pb_backupbuddy_destination_gdrive::getFile($settings, $file, $destination_file)) { // success
			pb_backupbuddy::status('details', 'Google Drive copy to local success.');
			return true;
		} else { // fail
			pb_backupbuddy::status('details', 'Error #2332903. Google Drive copy to local FAILURE.');
			return false;
		}
	} elseif ($destination_type == 's3') {
		require_once pb_backupbuddy::plugin_path() . '/destinations/s3/init.php';
		if (true === pb_backupbuddy_destination_s3::download_file($settings, $file, $destination_file)) { // success
			pb_backupbuddy::status('details', 'S3 copy to local success.');
			return true;
		} else { // fail
			pb_backupbuddy::status('details', 'Error #85448774. S3 copy to local FAILURE.');
			return false;
		}
	} else {
		pb_backupbuddy::status('error', 'Error #859485. Unknown destination type for remote copy `' . $destination_type . '`.');
		return false;
	}
}
/**
 * Copy a remote backup file down to the local backup directory.
 *
 * Handles the 'stash' destination (Stash API credentials + Amazon S3 download)
 * and the 's3' destination (delegated to pb_backupbuddy_destination_s3).
 * Any other destination type is rejected with a logged error.
 *
 * An existing local file of the same name is never overwritten; a randomized
 * 'backup_copy_XXXXX-' name is used instead.
 *
 * @param string $destination_type  Destination slug ('stash' or 's3').
 * @param string $file              Remote file name; only its basename is used locally.
 * @param array  $settings          Destination settings (itxapi_username /
 *                                  itxapi_password are read for 'stash').
 *
 * @return bool  True on success; false on failure (details logged/alerted).
 */
function process_remote_copy($destination_type, $file, $settings) {
	pb_backupbuddy::status('details', 'Copying remote `' . $destination_type . '` file `' . $file . '` down to local.');
	pb_backupbuddy::set_greedy_script_limits();

	if (!class_exists('backupbuddy_core')) {
		require_once pb_backupbuddy::plugin_path() . '/classes/core.php';
	}

	// Choose the local target path, dodging any existing backup of the same name.
	$local_file = backupbuddy_core::getBackupDirectory() . basename($file);
	if (file_exists($local_file)) {
		$local_file = str_replace('backup-', 'backup_copy_' . pb_backupbuddy::random_string(5) . '-', $local_file);
	}
	pb_backupbuddy::status('details', 'Filename of resulting local copy: `' . $local_file . '`.');

	if ('stash' == $destination_type) {
		// Load required files.
		require_once pb_backupbuddy::plugin_path() . '/destinations/stash/init.php';
		require_once pb_backupbuddy::plugin_path() . '/destinations/stash/lib/class.itx_helper.php';

		// Talk with the Stash API to obtain temporary management credentials.
		$api = new ITXAPI_Helper(
			pb_backupbuddy_destination_stash::ITXAPI_KEY,
			pb_backupbuddy_destination_stash::ITXAPI_URL,
			$settings['itxapi_username'],
			$settings['itxapi_password']
		);
		$request = new RequestCore($api->get_manage_url());
		$response = $request->send_request(true);

		// Validate the response: HTTP status first, then JSON shape, then API error key.
		$failure = '';
		$account_info = null;
		if (!$response->isOK()) {
			$failure = 'Request for management credentials failed.';
		} elseif (!($account_info = json_decode($response->body, true))) {
			$failure = 'Did not get valid JSON response.';
		} elseif (isset($account_info['error'])) {
			$failure = 'Error: ' . implode(' - ', $account_info['error']);
		}
		if ('' !== $failure) {
			pb_backupbuddy::status('error', $failure);
			pb_backupbuddy::alert($failure);
			return false;
		}

		// Connect to Amazon S3 with the returned credentials and fetch the object to disk.
		pb_backupbuddy::status('details', 'Instantiating S3 object.');
		$storage = new AmazonS3($account_info['credentials']);
		pb_backupbuddy::status('details', 'About to get Stash object `' . $file . '`...');
		$get_response = $storage->get_object(
			$account_info['bucket'],
			$account_info['subkey'] . '/' . $file,
			array('fileDownload' => $local_file)
		);

		if (!$get_response->isOK()) {
			pb_backupbuddy::status('error', 'Error #894597845. Stash copy to local FAILURE. Details: `' . print_r($get_response, true) . '`.');
			return false;
		}
		pb_backupbuddy::status('details', 'Stash copy to local success.');
		return true;
	}

	if ('s3' == $destination_type) {
		require_once pb_backupbuddy::plugin_path() . '/destinations/s3/init.php';
		if (true === pb_backupbuddy_destination_s3::download_file($settings, $file, $local_file)) { // success
			pb_backupbuddy::status('details', 'S3 copy to local success.');
			return true;
		}
		// fail
		pb_backupbuddy::status('details', 'Error #85448774. S3 copy to local FAILURE.');
		return false;
	}

	pb_backupbuddy::status('error', 'Error #859485. Unknown destination type for remote copy `' . $destination_type . '`.');
	return false;
}
// Management-page setup for an Amazon S3 destination: normalize settings, obtain
// credentials, connect, resolve the bucket's region, then handle bulk deletion.
// NOTE(review): this chunk is truncated mid-foreach at the end of the visible
// source; the loop body continues outside this view.
$settings = array_merge(pb_backupbuddy_destination_s3::$default_settings, $settings);
$settings['bucket'] = strtolower($settings['bucket']); // Buckets must be lowercase.
$remote_path = pb_backupbuddy_destination_s3::get_remote_path($settings['directory']);
// Obtain credentials (key, secret, token) for this destination.
$manage_data = pb_backupbuddy_destination_s3::get_credentials($settings);
// Connect to S3.
$s3 = new AmazonS3($manage_data); // the key, secret, token
if ($settings['ssl'] == '0') { @$s3->disable_ssl(true); } // Plain HTTP when SSL is disabled in settings; @ hides SDK notices.
// The bucket must be in existence and we must get its region to be able to proceed.
$region = '';
pb_backupbuddy::status('details', 'Getting region for bucket: `' . $settings['bucket'] . "`.");
$response = pb_backupbuddy_destination_s3::get_bucket_region($s3, $settings['bucket']);
if (!$response->isOK()) {
	// Region lookup failed; log the S3 error message. NOTE(review): execution continues
	// with $region = '' (us-east-1 endpoint) rather than aborting — confirm intended.
	$this_error = 'Bucket region could not be determined for management operation. Message details: `' . (string) $response->body->Message . '`.';
	pb_backupbuddy::status('error', $this_error);
} else {
	// An empty body means the classic/default region (us-east-1).
	pb_backupbuddy::status('details', 'Bucket exists in region: ' . ($response->body === "" ? 'us-east-1' : $response->body));
	$region = $response->body; // Must leave as is for actual operational usage.
}
// Set region context for later operations - will be s3.amazonaws.com or s3-<region>.amazonaws.com.
$s3->set_region('s3' . ($region == "" ? "" : '-' . $region) . '.amazonaws.com');
// Handle deletion of the selected backup files (POSTed bulk action).
if (pb_backupbuddy::_POST('bulk_action') == 'delete_backup') {
	pb_backupbuddy::verify_nonce(); // CSRF protection before any destructive action.
	$deleted_files = array();
	foreach ((array) pb_backupbuddy::_POST('items') as $item) {
</script>
<?php
// Amazon S3 destination management template: load the SDK, resolve this
// destination's settings from the request, connect, and handle bulk deletion.
// NOTE(review): this chunk is truncated mid-else at the end of the visible
// source; the error branch of the deletion loop continues outside this view.
// Load required files.
require_once dirname(__FILE__) . '/init.php';
require_once dirname(dirname(__FILE__)) . '/_s3lib/aws-sdk/sdk.class.php';
// Settings. NOTE(review): if the destination_id GET parameter does not match a
// configured destination, $settings is left unset and the array_merge below
// would warn — presumably the caller guarantees a valid id; verify.
if (isset(pb_backupbuddy::$options['remote_destinations'][pb_backupbuddy::_GET('destination_id')])) {
	$settings =& pb_backupbuddy::$options['remote_destinations'][pb_backupbuddy::_GET('destination_id')];
}
$settings = array_merge(pb_backupbuddy_destination_s3::$default_settings, $settings);
$remote_path = pb_backupbuddy_destination_s3::get_remote_path(); // Has leading and trailing slashes. $settings['directory']
// NOTE(review): the "$settings['directory']" text above survives from the original
// source; it may have been intended as the argument to get_remote_path() (other
// variants of this page pass it) — confirm before relying on the default path.
// NOTE(review): $destination is not defined in this visible chunk — presumably set earlier in the full file.
pb_backupbuddy::$ui->title(__('Amazon S3 Destination', 'it-l10n-backupbuddy') . ' "' . $destination['title'] . '"');
$manage_data = pb_backupbuddy_destination_s3::get_manage_data($settings);
// Connect to S3.
$s3 = new AmazonS3($manage_data); // the key, secret, token
if ($settings['ssl'] == '0') { @$s3->disable_ssl(true); } // Plain HTTP when SSL is disabled; @ hides SDK notices.
// Handle deletion of the selected backup files (POSTed bulk action).
if (pb_backupbuddy::_POST('bulk_action') == 'delete_backup') {
	pb_backupbuddy::verify_nonce(); // CSRF protection before any destructive action.
	$deleted_files = array();
	foreach ((array) pb_backupbuddy::_POST('items') as $item) {
		// Object key = subkey + remote path + file name.
		$response = $s3->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . $item);
		if ($response->isOK()) {
			$deleted_files[] = $item;
		} else {
/**
 * Remote housekeeping: abort stalled Amazon S3 multipart (chunked) uploads.
 *
 * Lists in-progress multipart uploads with the 'backup' prefix in the
 * destination bucket and aborts any BackupBuddy upload (key containing
 * 'backup-') older than 72 hours, freeing the storage held by orphaned parts.
 *
 * @param array $settings  Destination settings; 'bucket' and 'ssl' are read here,
 *                         the rest is passed to the credential/bucket helpers.
 * @param bool  $lessLogs  When true (default) emit terse status logging; when
 *                         false, log full upload details for debugging.
 *
 * @return bool  True when the cleanup pass completed; false when the bucket/region
 *               could not be prepared or the multipart listing failed.
 *               (Fix: the listing-failure path previously used a bare `return;`,
 *               yielding null — now consistently returns false.)
 */
public static function multipart_cleanup($settings, $lessLogs = true) {
	$settings['bucket'] = strtolower($settings['bucket']); // Buckets must be lowercase.
	$max_age = 60 * 60 * 72; // Seconds of max age to allow a stalled multipart upload (72 hours).

	require_once dirname(dirname(__FILE__)) . '/_s3lib/aws-sdk/sdk.class.php';
	pb_backupbuddy::status('details', 'Amazon S3 Multipart Remote Housekeeping Starting ...');

	$manage_data = pb_backupbuddy_destination_s3::get_credentials($settings);

	// Create S3 instance.
	pb_backupbuddy::status('details', 'Creating S3 instance.');
	$s3 = new AmazonS3($manage_data); // the key, secret, token
	if ($settings['ssl'] == 0) {
		@$s3->disable_ssl(true); // Plain HTTP when SSL is disabled; @ hides SDK notices.
	}
	pb_backupbuddy::status('details', 'S3 instance created. Listing in progress multipart uploads ...');

	// Verify bucket exists (do NOT create it here) and set region to the region the bucket lives in.
	if (false === self::_prepareBucketAndRegion($s3, $settings, $createBucket = false)) {
		return false;
	}

	// Get the in-progress multipart uploads.
	$response = $s3->list_multipart_uploads($settings['bucket'], array('prefix' => 'backup'));
	if (!$response->isOK()) {
		pb_backupbuddy::status('error', 'Error listing multipart uploads. Details: `' . print_r($response, true) . '`');
		return false; // Consistency fix: was a bare `return;` (null) while all other paths return bool.
	}

	if (true !== $lessLogs) {
		pb_backupbuddy::status('details', 'Multipart upload check retrieved. Found `' . count($response->body->Upload) . '` multipart uploads in progress / stalled. Details: `' . print_r($response, true) . '`');
	} else {
		pb_backupbuddy::status('details', 'Multipart upload check retrieved. Found `' . count($response->body->Upload) . '` multipart uploads in progress / stalled. Old BackupBuddy parts will be cleaned up (if any found) ...');
	}

	foreach ($response->body->Upload as $upload) {
		if (true !== $lessLogs) {
			pb_backupbuddy::status('details', 'Checking upload: ' . print_r($upload, true));
		}
		if (FALSE === stristr($upload->Key, 'backup-')) {
			continue; // Not a BackupBuddy backup file; leave other uploads alone.
		}

		$initiated = strtotime($upload->Initiated);
		if (true !== $lessLogs) {
			pb_backupbuddy::status('details', 'BackupBuddy Multipart Chunked Upload(s) detected in progress. Age: `' . pb_backupbuddy::$format->time_ago($initiated) . '`.');
		}

		if ($initiated + $max_age < time()) { // Older than the allowed age: abort it.
			$abort_response = $s3->abort_multipart_upload($settings['bucket'], $upload->Key, $upload->UploadId);
			if (!$abort_response->isOK()) { // abort fail.
				pb_backupbuddy::status('error', 'Stalled Amazon S3 Multipart Chunked abort of file `' . $upload->Key . '` with ID `' . $upload->UploadId . '` FAILED. Manually abort it.');
			} else { // aborted.
				pb_backupbuddy::status('details', 'Stalled Amazon S3 Multipart Chunked Uploads ABORTED ID `' . $upload->UploadId . '` of age `' . pb_backupbuddy::$format->time_ago($initiated) . '`.');
			}
		} else {
			if (true !== $lessLogs) {
				pb_backupbuddy::status('details', 'Amazon S3 Multipart Chunked Uploads not aborted as not too old.');
			}
		}
	} // end foreach uploads.

	pb_backupbuddy::status('details', 'Amazon S3 Multipart Remote Housekeeping Finished.');
	return true;
}