/**
 * Send a file to Stash (v2).
 *
 * Thin wrapper over the s32 destination: for a brand-new transfer it calls the
 * Stash API `upload` action to obtain the bucket, temporary credentials, and
 * remote object name, then delegates the actual byte transfer to
 * pb_backupbuddy_destination_s32::send(). Multipart continuations (non-empty
 * `_multipart_id`) skip straight to the s32 send.
 *
 * @param array        $settings      Destination settings (normalized by self::_init()).
 * @param string|array $file          Local file to send. If an array is given, only the first entry is sent.
 * @param string       $send_id       Fileoptions send ID used for tracking this transfer.
 * @param bool         $delete_after  Whether to delete the local file after a successful send (passed through).
 * @param bool         $clear_uploads Passed through to the s32 destination.
 *
 * @return bool|array false on failure; otherwise the pb_backupbuddy_destination_s32::send() result.
 */
public static function send($settings = array(), $file, $send_id = '', $delete_after = false, $clear_uploads = false) {
    $settings = self::_init($settings);

    if ('1' == $settings['disabled']) {
        self::_error(__('Error #48933: This destination is currently disabled. Enable it under this destination\'s Advanced Settings.', 'it-l10n-backupbuddy'));
        return false;
    }

    if (is_array($file)) {
        $file = $file[0]; // Bugfix: previously read from undefined variable `$files`.
    }

    if ('' == $settings['_multipart_id']) { // New transfer. Populate initial Stash settings.
        $file_size   = filesize($file);
        $remote_path = self::get_remote_path(); // Has leading and trailing slashes.

        // Known backup types are stored in a per-type subdirectory.
        $backup_type = backupbuddy_core::getBackupTypeFromFile($file);
        if ('' == $backup_type) { // Unknown backup type.
            $backup_type_path = '';
        } else { // Known backup type. Store in subdir.
            $backup_type_path = $backup_type . '/';
        }

        $additionalParams = array(
            'filename' => $remote_path . $backup_type_path . basename($file),
            'size'     => $file_size,
            'timezone' => get_option('timezone_string'),
        );

        // Ask the Stash API for upload credentials / target object.
        $response = self::stashAPI($settings, 'upload', $additionalParams);
        if (!is_array($response)) {
            $error = 'Error #832973: Unable to initiate Stash (v2) upload. Details: `' . $response . '`.';
            self::_error($error);
            return false;
        }

        if (pb_backupbuddy::$options['log_level'] == '3') { // Full logging enabled.
            pb_backupbuddy::status('details', 'Stash API upload action response due to logging level: `' . print_r($response, true) . '`. Call params: `' . print_r($additionalParams, true) . ' `.');
        }

        // Hand the Stash-provided bucket/credentials over to the s32 destination.
        $settings['stash_mode']       = '1'; // Stash is calling the s32 destination.
        $settings['bucket']           = $response['bucket'];
        $settings['credentials']      = $response['credentials'];
        $settings['_stash_object']    = $response['object'];
        $settings['_stash_upload_id'] = $response['upload_id'];
    }

    // Send file via the s32 destination.
    $result = pb_backupbuddy_destination_s32::send($settings, $file, $send_id, $delete_after, $clear_uploads);
    if (false === $result) { // Notify Stash of the failure so it can clean up server-side.
        self::_uploadFailed($settings);
    }

    return $result;
}
// Do NOT display any files within a deeper subdirectory. continue; } if (!preg_match(pb_backupbuddy_destination_s3::BACKUP_FILENAME_PATTERN, $file) && 'importbuddy.php' !== $file) { // Do not display any files that do not appear to be a BackupBuddy backup file (except importbuddy.php). continue; } /* Unsure whether to include this here or not? if ( FALSE === ( strpos( $file, 'backup-' . $prefix . '-' ) ) ) { // Not a backup for THIS site. Skip. continue; } */ $last_modified = strtotime($object->LastModified); $size = (double) $object->Size; $backup_type = backupbuddy_core::getBackupTypeFromFile($file); // Generate array of table rows. while (isset($backup_list_temp[$last_modified])) { // Avoid collisions. $last_modified += 0.1; } $backup_list_temp[$last_modified] = array($file, pb_backupbuddy::$format->date(pb_backupbuddy::$format->localize_time($last_modified)) . '<br /><span class="description">(' . pb_backupbuddy::$format->time_ago($last_modified) . ' ago)</span>', pb_backupbuddy::$format->file_size($size), backupbuddy_core::pretty_backup_type($backup_type)); } krsort($backup_list_temp); $backup_list = array(); foreach ($backup_list_temp as $backup_item) { $backup_list[$backup_item[0]] = $backup_item; } unset($backup_list_temp); $urlPrefix = pb_backupbuddy::ajax_url('remoteClient') . '&destination_id=' . htmlentities(pb_backupbuddy::_GET('destination_id')); // Render table listing files.
/**
 * Enforce remote archive (backup file count) limits for this destination.
 *
 * Lists this site's backups on the remote server (limited by the site's
 * backup filename prefix), keeps only files matching the given backup type,
 * and deletes the oldest files beyond the configured limit for that type.
 *
 * @param array  $settings    Destination settings; uses bucket, directory, and
 *                            full/db/files_archive_limit keys.
 * @param string $backup_type Backup type: 'full', 'db', or 'files'. Any other
 *                            value disables limiting for this run.
 *
 * @return bool|mixed true on success or nothing to enforce; self::_error()
 *                    result if the remote file listing fails.
 */
public static function archiveLimit($settings, $backup_type) {
    // Determine the archive limit for this backup type from destination settings.
    if ($backup_type == 'full') {
        $limit = $settings['full_archive_limit'];
        pb_backupbuddy::status('details', 'Full backup archive limit of `' . $limit . '` of type `full` based on destination settings.');
    } elseif ($backup_type == 'db') {
        $limit = $settings['db_archive_limit'];
        pb_backupbuddy::status('details', 'Database backup archive limit of `' . $limit . '` of type `db` based on destination settings.');
    } elseif ($backup_type == 'files') {
        $limit = $settings['files_archive_limit'];
        // Bugfix: log message previously mislabeled this as a "Database" limit.
        pb_backupbuddy::status('details', 'Files backup archive limit of `' . $limit . '` of type `files` based on destination settings.');
    } else {
        $limit = 0;
        pb_backupbuddy::status('warning', 'Warning #237332. Unable to determine backup type (reported: `' . $backup_type . '`) so archive limits NOT enforced for this backup.');
    }

    if ($limit > 0) {
        pb_backupbuddy::status('details', 'Archive limit enforcement beginning.');

        // Get file listing. List all of this user's files in this directory that are a backup for this site (limited by prefix).
        try {
            $response_manage = self::$_client->listObjects(array('Bucket' => $settings['bucket'], 'Prefix' => $settings['directory'] . 'backup-' . backupbuddy_core::backup_prefix()));
        } catch (Exception $e) {
            return self::_error('Error #9338292: Unable to list files for archive limiting. Details: `' . $e->getMessage() . '`.');
        }
        if (!is_array($response_manage['Contents'])) {
            $response_manage['Contents'] = array();
        }

        // List backups of the matching type for this site, keyed by filename, valued by timestamp.
        $backups = array();
        foreach ($response_manage['Contents'] as $object) {
            $file = str_replace($settings['directory'], '', $object['Key']);
            if ($backup_type != backupbuddy_core::getBackupTypeFromFile($file, $quiet = true)) {
                continue; // Not of the same backup type.
            }
            $backups[$file] = strtotime($object['LastModified']);
        }
        arsort($backups); // Newest first.
        pb_backupbuddy::status('details', 'Found `' . count($backups) . '` backups of this type when checking archive limits out of `' . count($response_manage['Contents']) . '` total files in this location.');

        if (count($backups) > $limit) {
            pb_backupbuddy::status('details', 'More archives (' . count($backups) . ') than limit (' . $limit . ') allows. Trimming...');
            $i = 0;
            $delete_fail_count = 0;
            foreach ($backups as $buname => $butime) {
                $i++;
                if ($i > $limit) { // Beyond the allowed count; delete this (older) backup.
                    pb_backupbuddy::status('details', 'Trimming excess file `' . $buname . '`...');
                    try {
                        $response = self::$_client->deleteObject(array('Bucket' => $settings['bucket'], 'Key' => $settings['directory'] . $buname));
                    } catch (Exception $e) {
                        self::_error('Unable to delete excess Stash file `' . $buname . '`. Details: `' . $e->getMessage() . '`.');
                        $delete_fail_count++;
                    }
                }
            } // end foreach.
            pb_backupbuddy::status('details', 'Finished trimming excess backups.');
            if ($delete_fail_count !== 0) {
                $error_message = 'Stash remote limit could not delete ' . $delete_fail_count . ' backups.';
                pb_backupbuddy::status('error', $error_message);
                backupbuddy_core::mail_error($error_message);
            }
        }

        pb_backupbuddy::status('details', 'Stash completed archive limiting.');
    } else {
        pb_backupbuddy::status('details', 'No Stash archive file limit to enforce.');
    } // End remote backup limit.

    return true;
}
/* echo '<br><pre>'; print_r( $file ); echo '</pre>'; */ if (!preg_match(pb_backupbuddy_destination_s32::BACKUP_FILENAME_PATTERN, $file['basename']) && 'importbuddy.php' !== $file) { // Do not display any files that do not appear to be a BackupBuddy backup file (except importbuddy.php). continue; } if ('' != $remotePath && !backupbuddy_core::startsWith(basename($file['filename']), $remotePath)) { // Only show backups for this site unless set to show all. continue; } $last_modified = $file['uploaded_timestamp']; $size = (double) $file['size']; $backup_type = backupbuddy_core::getBackupTypeFromFile($file['filename']); // Generate array of table rows. while (isset($backup_list_temp[$last_modified])) { // Avoid collisions. $last_modified += 0.1; } $backup_list_temp[$last_modified] = array(array(base64_encode($file['url']), $file['filename']), pb_backupbuddy::$format->date(pb_backupbuddy::$format->localize_time($last_modified)) . '<br /><span class="description">(' . pb_backupbuddy::$format->time_ago($last_modified) . ' ago)</span>', pb_backupbuddy::$format->file_size($size), backupbuddy_core::pretty_backup_type($backup_type)); } krsort($backup_list_temp); $backup_list = array(); foreach ($backup_list_temp as $backup_item) { $backup_list[$backup_item[0][0]] = $backup_item; } unset($backup_list_temp); $urlPrefix = pb_backupbuddy::ajax_url('remoteClient') . '&destination_id=' . htmlentities(pb_backupbuddy::_GET('destination_id')); $quota = pb_backupbuddy_destination_stash2::get_quota($settings);
/**
 * Send a file to the remote S3 (v2/s32) destination.
 *
 * All transfers are handled as multipart uploads for simplicity. Each PHP
 * run sends as many chunk "bursts" as fit within the allowed runtime; when
 * time runs short, a cron event is scheduled so a fresh PHP process picks
 * up the remaining parts.
 *
 * @param array  $settings     Destination settings (normalized by self::_init()).
 * @param string $file         Full path of the local file to send.
 * @param string $send_id      Fileoptions send ID used for tracking this transfer.
 * @param bool   $delete_after Whether caller wants the local file deleted after a
 *                             successful send (passed through via cron args).
 *
 * @return bool|array true on full success; false on failure;
 *                    array( multipart_id, status_message ) when chunking
 *                    continues in another PHP process.
 */
public static function send($settings = array(), $file, $send_id = '', $delete_after = false) {
    global $pb_backupbuddy_destination_errors;

    if ('1' == $settings['disabled']) {
        $pb_backupbuddy_destination_errors[] = __('Error #48933: This destination is currently disabled. Enable it under this destination\'s Advanced Settings.', 'it-l10n-backupbuddy');
        return false;
    }

    $settings = self::_init($settings); // Handles formatting & sanitizing settings.
    $chunkSizeBytes = $settings['max_burst'] * 1024 * 1024; // Send X mb at a time to limit memory usage.
    self::$_timeStart = microtime(true);

    // Initiate multipart upload.
    if ('' == $settings['_multipart_id']) { // New transfer. Note: All transfers are handled as presumed multiparts for ease.
        $file_size = filesize($file);
        pb_backupbuddy::status('details', 'File size of `' . pb_backupbuddy::$format->file_size($file_size) . '`.');

        // About to chunk so cleanup any previous hanging multipart transfers.
        self::multipart_cleanup($settings);

        // Initiate multipart upload with S3.
        pb_backupbuddy::status('details', 'Initiating multipart transfer.');
        try {
            $response = self::$_client->createMultipartUpload(array(
                'Bucket'               => $settings['bucket'],
                'Key'                  => $settings['directory'] . basename($file),
                'StorageClass'         => $settings['storage'],
                'ServerSideEncryption' => 'AES256',
            ));
        } catch (Exception $e) {
            return self::_error('Error #389383: Unable to initiate multipart upload. Details: `' . $e->getMessage() . '`.');
        }

        // Made it here so SUCCESS initiating multipart!
        $upload_id = (string) $response['UploadId'];
        pb_backupbuddy::status('details', 'Initiated multipart upload with ID `' . $upload_id . '`.');
        $backup_type = backupbuddy_core::getBackupTypeFromFile($file);

        // Calculate multipart settings carried between PHP runs via cron args.
        $multipart_destination_settings = $settings;
        $multipart_destination_settings['_multipart_id'] = $upload_id;
        $multipart_destination_settings['_multipart_partnumber'] = 0;
        $multipart_destination_settings['_multipart_file'] = $file;
        $multipart_destination_settings['_multipart_remotefile'] = $settings['directory'] . basename($file);
        $multipart_destination_settings['_multipart_counts'] = self::_get_multipart_counts($file_size, $settings['max_burst'] * 1024 * 1024); // Size of chunks expected to be in bytes.
        $multipart_destination_settings['_multipart_backup_type'] = $backup_type;
        $multipart_destination_settings['_multipart_backup_size'] = $file_size;
        $multipart_destination_settings['_multipart_etag_parts'] = array();
        // NOTE(review): the following status assignment was partially garbled in the
        // original source; reconstructed from the surrounding pattern — verify against upstream.
        $multipart_destination_settings['_multipart_status'] = 'Starting send of ' . count($multipart_destination_settings['_multipart_counts']) . ' parts.';
        pb_backupbuddy::status('details', 'Multipart initiated; passing over to send first chunk this run.');
        $settings = $multipart_destination_settings; // Copy over settings.
        unset($multipart_destination_settings);
    } // end initiating multipart.

    // Send parts.
    $backup_type = str_replace('/', '', $settings['_multipart_backup_type']); // For use later by file limiting.
    $backup_size = $settings['_multipart_backup_size'];

    $maxTime = $settings['max_time'];
    if ('' == $maxTime || !is_numeric($maxTime)) {
        pb_backupbuddy::status('details', 'Max time not set in settings so detecting server max PHP runtime.');
        $maxTime = backupbuddy_core::detectMaxExecutionTime();
    }
    pb_backupbuddy::status('details', 'Using max runtime: `' . $maxTime . '`.');

    // Open file for streaming.
    $f = @fopen($settings['_multipart_file'], 'r');
    if (false === $f) {
        return self::_error('Error #437734. Unable to open file `' . $settings['_multipart_file'] . '` to send. Did it get deleted?');
    }

    $fileDone = false;
    while (!$fileDone && !feof($f)) {
        $sendStart = microtime(true);

        // Advance to the next part number for this burst.
        $settings['_multipart_partnumber']++;
        if (!isset($settings['_multipart_counts'][$settings['_multipart_partnumber'] - 1]['seekTo'])) {
            pb_backupbuddy::status('error', 'Error #8239933: Missing multipart partnumber to seek to. Settings array: `' . print_r($settings, true) . '`.');
        }
        if (-1 == fseek($f, (int) $settings['_multipart_counts'][$settings['_multipart_partnumber'] - 1]['seekTo'])) {
            return self::_error('Error #833838: Unable to fseek file.');
        }

        pb_backupbuddy::status('details', 'Beginning upload of part `' . $settings['_multipart_partnumber'] . '` of `' . count($settings['_multipart_counts']) . '` parts of file `' . $settings['_multipart_file'] . '` to remote location `' . $settings['_multipart_remotefile'] . '` with multipart ID `' . $settings['_multipart_id'] . '`.');
        $contentLength = (int) $settings['_multipart_counts'][$settings['_multipart_partnumber'] - 1]['length'];
        try {
            $uploadArr = array(
                'Bucket'        => $settings['bucket'],
                'Key'           => $settings['_multipart_remotefile'],
                'UploadId'      => $settings['_multipart_id'],
                'PartNumber'    => $settings['_multipart_partnumber'],
                'ContentLength' => $contentLength,
                'Body'          => fread($f, $contentLength),
            );
            $response = self::$_client->uploadPart($uploadArr);
        } catch (Exception $e) {
            @fclose($f);
            return self::_error('Unable to upload file part for multipart upload of ID `' . $settings['_multipart_id'] . '`. Details: `' . $e->getMessage() . '`.');
        }
        self::$_chunksSentThisRound++;
        $settings['_multipart_etag_parts'][] = array('PartNumber' => $settings['_multipart_partnumber'], 'ETag' => $response['ETag']);

        if (pb_backupbuddy::$options['log_level'] == '3') { // Full logging enabled.
            pb_backupbuddy::status('details', 'Success sending chunk. Upload details due to log level: `' . print_r($response, true) . '`.');
        } else {
            pb_backupbuddy::status('details', 'Success sending chunk. Enable full logging for upload result details.');
        }

        // Burst timing / speed stats.
        $uploaded_size = $contentLength;
        $elapseTime = microtime(true) - $sendStart;
        if (0 == $elapseTime) {
            $elapseTime = 1; // Guard against division by zero on sub-resolution sends.
        }
        $uploaded_speed = $uploaded_size / $elapseTime;
        pb_backupbuddy::status('details', 'Uploaded size this burst: `' . pb_backupbuddy::$format->file_size($uploaded_size) . '`, Start time: `' . $sendStart . '`. Finish time: `' . microtime(true) . '`. Elapsed: `' . (microtime(true) - $sendStart) . '`. Speed: `' . pb_backupbuddy::$format->file_size($uploaded_speed) . '`/sec.');

        // Load fileoptions for the send (once per PHP run).
        if (isset($fileoptions_obj)) {
            pb_backupbuddy::status('details', 'fileoptions already loaded from prior pass.');
        } else { // load fileoptions
            pb_backupbuddy::status('details', 'About to load fileoptions data.');
            require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
            pb_backupbuddy::status('details', 'Fileoptions instance #10.');
            $fileoptions_obj = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/send-' . $send_id . '.txt', $read_only = false, $ignore_lock = false, $create_file = false);
            if (true !== ($result = $fileoptions_obj->is_ok())) {
                return self::_error(__('Fatal Error #9034.2344848. Unable to access fileoptions data.', 'it-l10n-backupbuddy') . ' Error: ' . $result);
            }
            pb_backupbuddy::status('details', 'Fileoptions data loaded.');
            $fileoptions =& $fileoptions_obj->options;
        }

        if (!isset($settings['_multipart_counts'][$settings['_multipart_partnumber']])) {
            // No more parts exist for this file. Tell S3 the multipart upload is complete and move on.
            pb_backupbuddy::status('details', 'S3 getting parts with etags to notify S3 of completed multipart send.');
            $update_status = 'Sent part ' . $settings['_multipart_partnumber'] . ' of ' . count($settings['_multipart_counts']) . ' parts.';
            pb_backupbuddy::status('details', 'Notifying server of multipart upload completion.');
            try {
                $response = self::$_client->completeMultipartUpload(array('Bucket' => $settings['bucket'], 'UploadId' => $settings['_multipart_id'], 'Key' => $settings['_multipart_remotefile'], 'Parts' => $settings['_multipart_etag_parts']));
            } catch (Exception $e) {
                return self::_error('Unable to notify server of completion of all parts for multipart upload `' . $settings['_multipart_id'] . '`. Details: `' . $e->getMessage() . '`.');
            }
            pb_backupbuddy::status('details', 'Server notified of multipart completion.');

            pb_backupbuddy::status('details', 'No more parts left for this multipart upload. Clearing multipart instance variables.');
            $settings['_multipart_partnumber'] = 0;
            $settings['_multipart_id'] = '';
            $settings['_multipart_file'] = '';
            $settings['_multipart_remotefile'] = ''; // Multipart completed so safe to prevent housekeeping of incomplete multipart uploads.
            $settings['_multipart_transferspeeds'][] = $uploaded_speed;

            // Overall upload speed average.
            $uploaded_speed = array_sum($settings['_multipart_transferspeeds']) / count($settings['_multipart_counts']);
            pb_backupbuddy::status('details', 'Upload speed average of all chunks: `' . pb_backupbuddy::$format->file_size($uploaded_speed) . '`.');
            $settings['_multipart_counts'] = array();

            // Update stats.
            $fileoptions['_multipart_status'] = $update_status;
            $fileoptions['finish_time'] = microtime(true);
            $fileoptions['status'] = 'success';
            if (isset($uploaded_speed)) {
                $fileoptions['write_speed'] = $uploaded_speed;
            }
            $fileoptions_obj->save();
            unset($fileoptions);

            $fileDone = true;
            @fclose($f);
        } else {
            // Parts remain. Schedule to continue if anything is left to upload for this multipart of any individual files.
            pb_backupbuddy::status('details', 'S3 multipart upload has more parts left.');

            // Build progress bar HTML from bytes sent so far.
            $update_status = '<br>';
            $totalSent = 0;
            for ($i = 0; $i < $settings['_multipart_partnumber']; $i++) {
                $totalSent += $settings['_multipart_counts'][$i]['length'];
            }
            $percentSent = ceil($totalSent / $settings['_multipart_backup_size'] * 100);
            $update_status .= '<div class="backupbuddy-progressbar" data-percent="' . $percentSent . '"><div class="backupbuddy-progressbar-label"></div></div>';

            if ('0' != $maxTime) { // Not unlimited time so see if we can send more bursts this time or if we need to chunk.
                // If we are within X seconds of reaching maximum PHP runtime then stop here so that it can be picked up in another PHP process.
                $totalSizeSent = self::$_chunksSentThisRound * $chunkSizeBytes; // Total bytes sent this PHP load.
                $bytesPerSec = $totalSizeSent / (microtime(true) - $sendStart);
                $timeRemaining = $maxTime - (microtime(true) - self::$_timeStart + self::TIME_WIGGLE_ROOM);
                if ($timeRemaining < 0) {
                    $timeRemaining = 0;
                }
                $bytesWeCouldSendWithTimeLeft = $bytesPerSec * $timeRemaining;

                if ($bytesWeCouldSendWithTimeLeft < $chunkSizeBytes) { // NOT enough time left to send a whole additional chunk; hand off to a fresh PHP process via cron. (Original comment here was inverted.)
                    pb_backupbuddy::status('message', 'Not enough time left (~`' . $timeRemaining . '`) with max time of `' . $maxTime . '` sec to send another chunk at `' . pb_backupbuddy::$format->file_size($bytesPerSec) . '` / sec. Ran for ' . round(microtime(true) - self::$_timeStart, 3) . ' sec. Proceeding to use chunking.');
                    @fclose($f); // Bugfix: previously closed undefined handle `$fs` and then double-closed `$f`.

                    $cronTime = time();
                    $cronArgs = array($settings, $file, $send_id, $delete_after);
                    $cronHashID = md5($cronTime . serialize($cronArgs));
                    $cronArgs[] = $cronHashID;
                    $schedule_result = backupbuddy_core::schedule_single_event($cronTime, pb_backupbuddy::cron_tag('destination_send'), $cronArgs);
                    if (true === $schedule_result) {
                        pb_backupbuddy::status('details', 'Next S3 chunk step cron event scheduled.');
                    } else {
                        pb_backupbuddy::status('error', 'Next S3 chunk step cron even FAILED to be scheduled.');
                    }
                    spawn_cron(time() + 150); // Adds > 60 seconds to get around once per minute cron running limit.
                    update_option('_transient_doing_cron', 0); // Prevent cron-blocking for next item.

                    unset($fileoptions);
                    return array($settings['_multipart_id'], 'Sent part ' . $settings['_multipart_partnumber'] . ' of ' . count($settings['_multipart_counts']) . ' parts.' . $update_status);
                } else {
                    pb_backupbuddy::status('details', 'Not approaching limits. Proceeding to next burst this run.');
                }
            } else {
                pb_backupbuddy::status('details', 'Max time of zero (0) so assuming unlimited time.');
            }

            $fileoptions['_multipart_status'] = 'Sent part ' . $settings['_multipart_partnumber'] . ' of ' . count($settings['_multipart_counts']) . ' parts.' . $update_status;
            $fileoptions_obj->save();
        } // end more parts remain.
    } // End while not feof.

    /***** BEGIN FILE ARCHIVE LIMITS *****/
    if ('1' == $settings['stash_mode']) { // Being wrapped by the Stash destination, which uses the Stash API for archive limiting.
        pb_backupbuddy_destination_stash2::archiveLimit($settings, $backup_type);
    } else { // Normal. This is just a s32 destination.
        self::archiveLimit($settings, $backup_type);
    }
    /***** END FILE ARCHIVE LIMITS *****/

    if (isset($fileoptions_obj)) {
        unset($fileoptions_obj);
    }

    // Success if we made it this far.
    return true;
}
/**
 * Send a file to the remote S3 destination, chunking into a multipart upload
 * when the file exceeds the configured chunk size. One part is sent per PHP
 * process; a cron event is scheduled to continue the transfer.
 *
 * @param array  $settings     Destination settings (normalized by self::_init()).
 * @param string $file         Full path of the local file to send.
 * @param string $send_id      Fileoptions send ID used for tracking this transfer.
 * @param bool   $delete_after Whether caller wants the local file deleted after a
 *                             successful send (passed through via cron args).
 *
 * @return bool|array true on full success; error result on failure;
 *                    array( multipart_id, status_message ) when chunking
 *                    continues in another PHP process.
 */
public static function send($settings = array(), $file, $send_id = '', $delete_after = false) {
    $settings = self::_init($settings); // Handles formatting & sanitizing settings.

    // Process multipart transfer that we already initiated in a previous runthrough.
    if ($settings['_multipart_id'] != '') { // Multipart upload initiated and needs parts sent.
        $backup_type = str_replace('/', '', $settings['_multipart_backup_type']); // For use later by file limiting.
        $backup_size = $settings['_multipart_backup_size'];
        $this_part_number = $settings['_multipart_partnumber'] + 1;

        // Open file for streaming and seek to this part's offset.
        $f = @fopen($settings['_multipart_file'], 'rb');
        if (false === $f) {
            return self::_error('Error #437734. Unable to open file `' . $settings['_multipart_file'] . '` to send. Did it get deleted?');
        }
        if (-1 == @fseek($f, (int) $settings['_multipart_counts'][$settings['_multipart_partnumber']]['seekTo'])) {
            return self::_error('Error #833838: Unable to fseek file.');
        }

        $sendStart = microtime(true); // Bugfix: was time( true ); time() takes no arguments.
        pb_backupbuddy::status('details', 'Beginning upload of part `' . $this_part_number . '` of `' . count($settings['_multipart_counts']) . '` parts of file `' . $settings['_multipart_file'] . '` to remote location `' . $settings['_multipart_remotefile'] . '` with multipart ID `' . $settings['_multipart_id'] . '`.');
        try {
            $response = self::$_client->uploadPart(array(
                'Bucket'        => $settings['bucket'],
                'Key'           => $settings['_multipart_remotefile'],
                'UploadId'      => $settings['_multipart_id'],
                'PartNumber'    => $this_part_number,
                'ContentLength' => (int) $settings['_multipart_counts'][$settings['_multipart_partnumber']]['length'],
                'Body'          => $f,
            ));
        } catch (Exception $e) {
            @fclose($f);
            return self::_error('Unable to upload file part for multipart upload `' . $settings['_multipart_id'] . '`. Details: `' . $e->getMessage() . '`.');
        }
        @fclose($f);
        pb_backupbuddy::status('details', 'Success sending chunk. Upload details: `' . print_r($response, true) . '`.');

        $uploaded_size = $backup_size;
        $elapsed = microtime(true) - $sendStart;
        if (0 == $elapsed) {
            $elapsed = 1; // Guard against division by zero.
        }
        $uploaded_speed = $uploaded_size / $elapsed; // Bugfix: previously stored elapsed seconds as the "speed".
        pb_backupbuddy::status('details', 'Uploaded size: ' . pb_backupbuddy::$format->file_size($uploaded_size) . ', Speed: ' . pb_backupbuddy::$format->file_size($uploaded_speed) . '/sec.');

        // Load fileoptions for the send.
        pb_backupbuddy::status('details', 'About to load fileoptions data.');
        require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
        pb_backupbuddy::status('details', 'Fileoptions instance #10.');
        $fileoptions_obj = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/send-' . $send_id . '.txt', $read_only = false, $ignore_lock = false, $create_file = false);
        if (true !== ($result = $fileoptions_obj->is_ok())) {
            return self::_error(__('Fatal Error #9034.2344848. Unable to access fileoptions data.', 'it-l10n-backupbuddy') . ' Error: ' . $result);
        }
        pb_backupbuddy::status('details', 'Fileoptions data loaded.');
        $fileoptions =& $fileoptions_obj->options;
        $update_status = 'Sent part ' . $this_part_number . ' of ' . count($settings['_multipart_counts']) . '.';

        // Made it here so success sending part. Increment for next part to send.
        $settings['_multipart_partnumber']++;
        if (!isset($settings['_multipart_counts'][$settings['_multipart_partnumber']])) { // No more parts exist for this file. Tell S3 the multipart upload is complete and move on.
            pb_backupbuddy::status('details', 'S3 getting parts with etags to notify S3 of completed multipart send.');
            try {
                $response = self::$_client->listParts(array('Bucket' => $settings['bucket'], 'UploadId' => $settings['_multipart_id'], 'Key' => $settings['_multipart_remotefile']));
                // Bugfix: previously read `$etag_parts->Parts` from an undefined variable instead of the listParts() response.
                $etag_parts = $response['Parts'];
            } catch (Exception $e) {
                return self::_error('Error #8332893: Unable to list parts on server. Details: `' . $e->getMessage() . '`.');
            }
            pb_backupbuddy::status('details', 'Got parts list. Details: ' . print_r($etag_parts, true));
            pb_backupbuddy::status('details', 'Notifying server of multipart upload completion.');
            try {
                $response = self::$_client->completeMultipartUpload(array('Bucket' => $settings['bucket'], 'UploadId' => $settings['_multipart_id'], 'Key' => $settings['_multipart_remotefile'], 'Parts' => $etag_parts));
            } catch (Exception $e) {
                return self::_error('Unable to notify server of completion of all parts for multipart upload `' . $settings['_multipart_id'] . '`. Details: `' . $e->getMessage() . '`.');
            }
            pb_backupbuddy::status('details', 'Server notified of multipart completion.');

            pb_backupbuddy::status('details', 'No more parts left for this multipart upload. Clearing multipart instance variables.');
            $settings['_multipart_partnumber'] = 0;
            $settings['_multipart_id'] = '';
            $settings['_multipart_file'] = '';
            $settings['_multipart_remotefile'] = ''; // Multipart completed so safe to prevent housekeeping of incomplete multipart uploads.
            $settings['_multipart_transferspeeds'][] = $uploaded_speed;

            // Overall upload speed average.
            $uploaded_speed = array_sum($settings['_multipart_transferspeeds']) / count($settings['_multipart_counts']);
            pb_backupbuddy::status('details', 'Upload speed average of all chunks: `' . pb_backupbuddy::$format->file_size($uploaded_speed) . '`.');
            $settings['_multipart_counts'] = array();

            // Update stats.
            $fileoptions['_multipart_status'] = $update_status;
            $fileoptions['finish_time'] = time();
            $fileoptions['status'] = 'success';
            if (isset($uploaded_speed)) {
                $fileoptions['write_speed'] = $uploaded_speed;
            }
            $fileoptions_obj->save();
            unset($fileoptions);
        }

        // Schedule to continue if anything is left to upload for this multipart of any individual files.
        if ($settings['_multipart_id'] != '') {
            pb_backupbuddy::status('details', 'S3 multipart upload has more parts left. Scheduling next part send.');
            $cronTime = time();
            $cronArgs = array($settings, $file, $send_id, $delete_after);
            $cronHashID = md5($cronTime . serialize($cronArgs));
            $cronArgs[] = $cronHashID;
            $schedule_result = backupbuddy_core::schedule_single_event($cronTime, pb_backupbuddy::cron_tag('destination_send'), $cronArgs);
            if (true === $schedule_result) {
                pb_backupbuddy::status('details', 'Next S3 chunk step cron event scheduled.');
            } else {
                pb_backupbuddy::status('error', 'Next S3 chunk step cron even FAILED to be scheduled.');
            }
            spawn_cron(time() + 150); // Adds > 60 seconds to get around once per minute cron running limit.
            update_option('_transient_doing_cron', 0); // Prevent cron-blocking for next item.
            return array($settings['_multipart_id'], 'Sent part ' . $this_part_number . ' of ' . count($settings['_multipart_counts']) . ' parts.');
        }
    } else { // Not a multipart continuation.
        // Handle chunking of file into a multipart upload (if applicable).
        $file_size = filesize($file);
        if ($settings['max_chunk_size'] >= self::MINIMUM_CHUNK_SIZE && $file_size / 1024 / 1024 > $settings['max_chunk_size']) { // Minimum chunk size is 5mb. Anything under 5mb we will not chunk.
            pb_backupbuddy::status('details', 'File size of ' . pb_backupbuddy::$format->file_size($file_size) . ' exceeds max chunk size of ' . $settings['max_chunk_size'] . 'MB set in settings for sending file as multipart upload.');

            // About to chunk so cleanup any previous hanging multipart transfers.
            self::multipart_cleanup($settings, $lessLogs = false);

            // Initiate multipart upload with S3.
            pb_backupbuddy::status('details', 'Initiating multipart transfer.');
            try {
                $response = self::$_client->createMultipartUpload(array('Bucket' => $settings['bucket'], 'Key' => $settings['directory'] . basename($file), 'StorageClass' => $settings['storage'], 'ServerSideEncryption' => 'AES256'));
            } catch (Exception $e) {
                return self::_error('Error #389383: Unable to initiate multipart upload. Details: `' . $e->getMessage() . '`.');
            }

            // Made it here so SUCCESS initiating multipart!
            $upload_id = (string) $response['UploadId'];
            pb_backupbuddy::status('details', 'Initiated multipart upload with ID `' . $upload_id . '`.');
            $backup_type = backupbuddy_core::getBackupTypeFromFile($file);

            // Calculate multipart settings carried between PHP runs via cron args.
            $multipart_destination_settings = $settings;
            $multipart_destination_settings['_multipart_id'] = $upload_id;
            $multipart_destination_settings['_multipart_partnumber'] = 0;
            $multipart_destination_settings['_multipart_file'] = $file;
            $multipart_destination_settings['_multipart_remotefile'] = $settings['directory'] . basename($file);
            $multipart_destination_settings['_multipart_counts'] = self::_get_multipart_counts($file_size, $settings['max_chunk_size'] * 1024 * 1024); // Size of chunks expected to be in bytes.
            $multipart_destination_settings['_multipart_backup_type'] = $backup_type;
            $multipart_destination_settings['_multipart_backup_size'] = $file_size;
            // NOTE(review): the following status line was partially garbled in the original
            // source; reconstructed from the surrounding pattern — verify against upstream.
            pb_backupbuddy::status('details', 'Multipart settings to pass: ' . print_r($multipart_destination_settings, true));
            pb_backupbuddy::status('details', 'Scheduling send of next part.');
            $cronTime = time();
            $cronArgs = array($multipart_destination_settings, $file, $send_id, $delete_after);
            $cronHashID = md5($cronTime . serialize($cronArgs));
            $cronArgs[] = $cronHashID;
            backupbuddy_core::schedule_single_event($cronTime, pb_backupbuddy::cron_tag('destination_send'), $cronArgs);
            spawn_cron(time() + 150); // Adds > 60 seconds to get around once per minute cron running limit.
            update_option('_transient_doing_cron', 0); // Prevent cron-blocking for next item.
            pb_backupbuddy::status('details', 'Scheduled send of next part(s). Done for this cycle.');
            return array($upload_id, 'Starting send of ' . count($multipart_destination_settings['_multipart_counts']) . ' parts.');
        } else { // Did not meet chunking criteria.
            if ($settings['max_chunk_size'] != '0') {
                if ($file_size / 1024 / 1024 > self::MINIMUM_CHUNK_SIZE) {
                    pb_backupbuddy::status('details', 'File size of ' . pb_backupbuddy::$format->file_size($file_size) . ' is less than the max chunk size of ' . $settings['max_chunk_size'] . 'MB; not chunking into multipart upload.');
                } else {
                    pb_backupbuddy::status('details', 'File size of ' . pb_backupbuddy::$format->file_size($file_size) . ' is less than the minimum allowed chunk size of ' . self::MINIMUM_CHUNK_SIZE . 'MB; not chunking into multipart upload.');
                }
            } else {
                pb_backupbuddy::status('details', 'Max chunk size set to zero so not chunking into multipart upload.');
            }

            // Bugfix: $backup_type was previously undefined in this branch, silently
            // disabling archive limit enforcement below for non-chunked sends.
            $backup_type = backupbuddy_core::getBackupTypeFromFile($file);

            // Open file for streaming.
            $f = @fopen($file, 'rb');
            if (false === $f) {
                return self::_error('Error #2379327. Unable to open file `' . $file . '` to send. Did it get deleted?');
            }

            // Initiate SINGLE PART upload.
            $startSend = microtime(true); // Bugfix: was time( true ).
            pb_backupbuddy::status('details', 'Initiating non-chunked transfer.');
            try {
                $response = self::$_client->upload($settings['bucket'], $settings['directory'] . basename($file), $f, 'private', array('StorageClass' => $settings['storage'], 'ServerSideEncryption' => 'AES256'));
            } catch (Exception $e) {
                return self::_error('Error #389383: Unable to initiate non-chunked upload. Details: `' . $e->getMessage() . '`.');
            }
            @fclose($f);

            // Made it here so SUCCESS sending.
            $uploaded_size = $file_size;
            $elapsed = microtime(true) - $startSend;
            if (0 == $elapsed) {
                $elapsed = 1; // Guard against division by zero.
            }
            $uploaded_speed = $file_size / $elapsed;
            pb_backupbuddy::status('details', 'Success uploading file `' . basename($file) . '`. Upload details: `' . print_r($response, true) . '`. Uploaded size: ' . pb_backupbuddy::$format->file_size($uploaded_size) . ', Speed: ' . pb_backupbuddy::$format->file_size($uploaded_speed) . '/sec.');

            // Load destination fileoptions.
            pb_backupbuddy::status('details', 'About to load fileoptions data.');
            require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
            pb_backupbuddy::status('details', 'Fileoptions instance #882.');
            $fileoptions_obj = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/send-' . $send_id . '.txt', $read_only = false, $ignore_lock = false, $create_file = false);
            if (true !== ($result = $fileoptions_obj->is_ok())) {
                return self::_error(__('Fatal Error #9034.23737. Unable to access fileoptions data.', 'it-l10n-backupbuddy') . ' Error: ' . $result);
            }
            pb_backupbuddy::status('details', 'Fileoptions data loaded.');
            $fileoptions =& $fileoptions_obj->options;

            // Save stats.
            if (isset($uploaded_speed)) {
                $fileoptions['write_speed'] = $uploaded_speed;
                $fileoptions_obj->save();
            }
            unset($fileoptions_obj);
        } // End non-chunked upload.
    } // end not multipart continuation.

    // BEGIN FILE LIMIT PROCESSING. Enforce archive limits if applicable.
    if ($backup_type == 'full') {
        $limit = $settings['full_archive_limit']; // Bugfix: previously read undefined local $full_archive_limit.
        pb_backupbuddy::status('details', 'Full backup archive limit of `' . $limit . '` of type `full` based on destination settings.');
    } elseif ($backup_type == 'db') {
        $limit = $settings['db_archive_limit']; // Bugfix: previously read undefined local $db_archive_limit.
        pb_backupbuddy::status('details', 'Database backup archive limit of `' . $limit . '` of type `db` based on destination settings.');
    } elseif ($backup_type == 'files') {
        $limit = $settings['files_archive_limit']; // Bugfix: previously used the db limit for the files type.
        pb_backupbuddy::status('details', 'Files backup archive limit of `' . $limit . '` of type `files` based on destination settings.');
    } else {
        $limit = 0;
        pb_backupbuddy::status('warning', 'Warning #237332. Unable to determine backup type (reported: `' . $backup_type . '`) so archive limits NOT enforced for this backup.');
    }

    if ($limit > 0) {
        pb_backupbuddy::status('details', 'Archive limit enforcement beginning.');

        // Get file listing. List all of this user's files in this directory that are a backup for this site (limited by prefix).
        try {
            // Bugfix: was bare backup_prefix(), an undefined function (cf. backupbuddy_core::backup_prefix() elsewhere in this file).
            $response_manage = self::$_client->listObjects(array('Bucket' => $settings['bucket'], 'Prefix' => $settings['directory'] . 'backup-' . backupbuddy_core::backup_prefix()));
        } catch (Exception $e) {
            // Bugfix: previously fell through with $response_manage undefined, fataling below.
            return self::_error('Error #9338292: Unable to list files for archive limiting. Details: `' . $e->getMessage() . '`.');
        }

        // List backups associated with this site by date.
        // NOTE(review): object-style ->Contents access preserved from original; this
        // destination appears to use an SDK response object — confirm against SDK version.
        $backups = array();
        foreach ($response_manage->Contents as $object) {
            $file = str_replace($settings['directory'], '', $object['Key']);
            $backups[$file] = strtotime($object['LastModified']);
        }
        arsort($backups); // Newest first.
        pb_backupbuddy::status('details', 'Stash found `' . count($backups) . '` backups of this type when checking archive limits.');

        if (count($backups) > $limit) {
            pb_backupbuddy::status('details', 'More archives (' . count($backups) . ') than limit (' . $limit . ') allows. Trimming...');
            $i = 0;
            $delete_fail_count = 0;
            foreach ($backups as $buname => $butime) {
                $i++;
                if ($i > $limit) { // Beyond the allowed count; delete this (older) backup.
                    pb_backupbuddy::status('details', 'Trimming excess file `' . $buname . '`...');
                    try {
                        $response = self::$_client->deleteObject(array('Bucket' => $settings['bucket'], 'Key' => $settings['directory'] . $buname));
                    } catch (Exception $e) {
                        self::_error('Unable to delete excess Stash file `' . $buname . '`. Details: `' . $e->getMessage() . '`.');
                        $delete_fail_count++;
                    }
                }
            }
            pb_backupbuddy::status('details', 'Finished trimming excess backups.');
            if ($delete_fail_count !== 0) {
                $error_message = 'Stash remote limit could not delete ' . $delete_fail_count . ' backups.';
                pb_backupbuddy::status('error', $error_message);
                backupbuddy_core::mail_error($error_message);
            }
        }

        pb_backupbuddy::status('details', 'Stash completed archive limiting.');
    } else {
        pb_backupbuddy::status('details', 'No Stash archive file limit to enforce.');
    } // End remote backup limit.

    if (isset($fileoptions_obj)) {
        unset($fileoptions_obj);
    }

    // Success if we made it this far.
    return true;
}
echo '<br><pre>'; print_r( $file ); echo '</pre>'; */
/* if ( ( ! preg_match( pb_backupbuddy_destination_s32::BACKUP_FILENAME_PATTERN, $file['basename'] ) ) && ( 'importbuddy.php' !== $file ) ) { // Do not display any files that do not appear to be a BackupBuddy backup file (except importbuddy.php). continue; } */
// NOTE(review): the two blocks above are commented-out debug/filter code; the opening
// `/*` of the first one lies earlier in the file, outside this view.
if ('' != $remotePath && !backupbuddy_core::startsWith(basename($file['filename']), $remotePath)) { // Only show backups for this site unless set to show all.
	continue;
}
$last_modified = $file['uploaded_timestamp']; // Timestamp from the remote listing; used as the sort key below.
$size = (double) $file['size'];
// $quiet / $skip_fileoptions assignments are inline "named flag" style only; PHP passes them positionally.
$backup_type = backupbuddy_core::getBackupTypeFromFile($file['filename'], $quiet = false, $skip_fileoptions = true);
// Generate array of table rows.
while (isset($backup_list_temp[$last_modified])) { // Avoid collisions: bump the key until it is unique.
	$last_modified += 0.1;
}
if ('live' == $destination['type']) {
	// Live destinations get a richer title cell: localized date + time-ago + filename.
	$backup_list_temp[$last_modified] = array(array(base64_encode($file['url']), '<span class="backupbuddy-stash-file-list-title">' . pb_backupbuddy::$format->date(pb_backupbuddy::$format->localize_time($last_modified)) . ' <span class="description">(' . pb_backupbuddy::$format->time_ago($last_modified) . ' ago)</span></span><br><span title="' . $file['filename'] . '">' . basename($file['filename']) . '</span>'), pb_backupbuddy::$format->date(pb_backupbuddy::$format->localize_time($last_modified)) . '<br /><span class="description">(' . pb_backupbuddy::$format->time_ago($last_modified) . ' ago)</span>', pb_backupbuddy::$format->file_size($size), backupbuddy_core::pretty_backup_type($backup_type));
} else {
	// Non-live destinations: plain filename title cell, same date/size/type columns.
	$backup_list_temp[$last_modified] = array(array(base64_encode($file['url']), '<span title="' . $file['filename'] . '">' . basename($file['filename']) . '</span>'), pb_backupbuddy::$format->date(pb_backupbuddy::$format->localize_time($last_modified)) . '<br /><span class="description">(' . pb_backupbuddy::$format->time_ago($last_modified) . ' ago)</span>', pb_backupbuddy::$format->file_size($size), backupbuddy_core::pretty_backup_type($backup_type));
}
} // Closes the per-file loop opened earlier in the file, outside this view.
// Sort newest first: keys are the (possibly collision-bumped) timestamps.
krsort($backup_list_temp);
$backup_list = array();
// Re-key each row by its base64-encoded file URL (row id); loop continues past this view.
foreach ($backup_list_temp as $backup_item) {
	$backup_list[$backup_item[0][0]] = $backup_item;