Code Example #1
File: init.php Project: adrianjonmiller/animalhealth
 public static function send($settings = array(), $files = array(), $send_id = '', $clear_uploads = false)
 {
     global $pb_backupbuddy_destination_errors;
     if (!is_array($files)) {
         $files = array($files);
     }
     if ($clear_uploads === false) {
         // Uncomment the following line to override and always clear.
         //$clear_uploads = true;
     }
     $itxapi_username = $settings['itxapi_username'];
     $itxapi_password = $settings['itxapi_password'];
     $db_archive_limit = $settings['db_archive_limit'];
     $full_archive_limit = $settings['full_archive_limit'];
     $files_archive_limit = $settings['files_archive_limit'];
     $max_chunk_size = $settings['max_chunk_size'];
     $remote_path = self::get_remote_path($settings['directory']);
      // Has leading and trailing slashes.
     if ($settings['ssl'] == '0') {
         $disable_ssl = true;
     } else {
         $disable_ssl = false;
     }
     $multipart_id = $settings['_multipart_id'];
     $multipart_counts = $settings['_multipart_counts'];
     pb_backupbuddy::status('details', 'Stash remote path set to `' . $remote_path . '`.');
     require_once dirname(__FILE__) . '/lib/class.itx_helper.php';
     require_once dirname(dirname(__FILE__)) . '/_s3lib/aws-sdk/sdk.class.php';
     // Stash API talk.
     $stash = new ITXAPI_Helper(pb_backupbuddy_destination_stash::ITXAPI_KEY, pb_backupbuddy_destination_stash::ITXAPI_URL, $itxapi_username, $itxapi_password);
     $manage_data = pb_backupbuddy_destination_stash::get_manage_data($settings);
     if (!is_array($manage_data['credentials'])) {
         pb_backupbuddy::status('error', 'Error #8484383b: Your authentication credentials for Stash failed. Verify your login and password to Stash. You may need to update the Stash destination settings. Perhaps you recently changed your password?');
         return false;
     }
     // Wipe all current uploads.
     if ($clear_uploads === true) {
         pb_backupbuddy::status('details', 'Clearing any current uploads via Stash call to `abort-all`.');
         $abort_url = $stash->get_upload_url(null, 'abort-all');
         $request = new RequestCore($abort_url);
         $response = $request->send_request(true);
     }
     // Process multipart transfer that we already initiated in a previous PHP load.
     if ($multipart_id != '') {
         // Multipart upload initiated and needs parts sent.
         // Create S3 instance.
         pb_backupbuddy::status('details', 'Creating Stash S3 instance.');
         $s3 = new AmazonS3($settings['_multipart_upload_data']['credentials']);
         // the key, secret, token
         if ($disable_ssl === true) {
             @$s3->disable_ssl(true);
         }
         pb_backupbuddy::status('details', 'Stash S3 instance created.');
         $backup_type = str_replace('/', '', $settings['_multipart_backup_type_dir']);
         // For use later by file limiting.
         $this_part_number = $settings['_multipart_partnumber'] + 1;
         pb_backupbuddy::status('details', 'Stash beginning upload of part `' . $this_part_number . '` of `' . count($settings['_multipart_counts']) . '` parts of file `' . $settings['_multipart_file'] . '` with multipart ID `' . $settings['_multipart_id'] . '`.');
         $response = $s3->upload_part($settings['_multipart_upload_data']['bucket'], $settings['_multipart_upload_data']['object'], $settings['_multipart_id'], array('expect' => '100-continue', 'fileUpload' => $settings['_multipart_file'], 'partNumber' => $this_part_number, 'seekTo' => (int) $settings['_multipart_counts'][$settings['_multipart_partnumber']]['seekTo'], 'length' => (int) $settings['_multipart_counts'][$settings['_multipart_partnumber']]['length']));
         if (!$response->isOK()) {
             $this_error = 'Stash unable to upload file part for multipart upload `' . $settings['_multipart_id'] . '`. Details: `' . print_r($response, true) . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             $uploaded_size = $response->header['_info']['size_upload'];
             $uploaded_speed = $response->header['_info']['speed_upload'];
             pb_backupbuddy::status('details', 'Uploaded size: ' . pb_backupbuddy::$format->file_size($uploaded_size) . ', Speed: ' . pb_backupbuddy::$format->file_size($uploaded_speed) . '/sec.');
         }
         // Load fileoptions to the send.
         pb_backupbuddy::status('details', 'About to load fileoptions data.');
         require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
         $fileoptions_obj = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/send-' . $send_id . '.txt', $read_only = false, $ignore_lock = false, $create_file = false);
         if (true !== ($result = $fileoptions_obj->is_ok())) {
             pb_backupbuddy::status('error', __('Fatal Error #9034.2344848. Unable to access fileoptions data.', 'it-l10n-backupbuddy') . ' Error: ' . $result);
             return false;
         }
         pb_backupbuddy::status('details', 'Fileoptions data loaded.');
         $fileoptions =& $fileoptions_obj->options;
         $update_status = 'Sent part ' . $this_part_number . ' of ' . count($settings['_multipart_counts']) . '.';
         // Made it here so success sending part. Increment for next part to send.
         $settings['_multipart_partnumber']++;
         if (!isset($settings['_multipart_counts'][$settings['_multipart_partnumber']])) {
             // No more parts exist for this file. Tell S3 the multipart upload is complete and move on.
             pb_backupbuddy::status('details', 'Stash getting parts with etags to notify S3 of completed multipart send.');
             $etag_parts = $s3->list_parts($settings['_multipart_upload_data']['bucket'], $settings['_multipart_upload_data']['object'], $settings['_multipart_id']);
             pb_backupbuddy::status('details', 'Stash got parts list. Notifying S3 of multipart upload completion.');
             $response = $s3->complete_multipart_upload($settings['_multipart_upload_data']['bucket'], $settings['_multipart_upload_data']['object'], $settings['_multipart_id'], $etag_parts);
             if (!$response->isOK()) {
                 $this_error = 'Stash unable to notify S3 of completion of all parts for multipart upload `' . $settings['_multipart_id'] . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             } else {
                 pb_backupbuddy::status('details', 'Stash notified S3 of multipart completion.');
             }
             $backup_type_dir = $settings['_multipart_backup_type_dir'];
              // Notify Stash API that things were successful.
             $done_url = $stash->get_upload_url($settings['_multipart_file'], 'done', $remote_path . $backup_type_dir . basename($settings['_multipart_file']));
             pb_backupbuddy::status('details', 'Notifying Stash of completed multipart upload with done url `' . $done_url . '`.');
             $request = new RequestCore($done_url);
             $response = $request->send_request(true);
             if (!$response->isOK()) {
                 $this_error = 'Error #756834682. Could not finalize Stash upload. Response code: `' . $response->get_response_code() . '`; Response body: `' . $response->get_response_body() . '`; Response headers: `' . $response->get_response_header() . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             } else {
                 // Good server response.
                 // See if we got an optional json response.
                 $upload_data = @json_decode($response->body, true);
                 if (isset($upload_data['error'])) {
                     $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
                     $pb_backupbuddy_destination_errors[] = $this_error;
                     pb_backupbuddy::status('error', $this_error);
                     return false;
                 }
                 pb_backupbuddy::status('details', 'Stash success sending file `' . basename($settings['_multipart_file']) . '`. File uploaded via multipart across `' . $this_part_number . '` parts and reported to Stash as completed.');
             }
             pb_backupbuddy::status('details', 'Stash has no more parts left for this multipart upload. Clearing multipart instance variables.');
             $settings['_multipart_partnumber'] = 0;
             $settings['_multipart_id'] = '';
             $settings['_multipart_file'] = '';
             $settings['_multipart_upload_data'] = array();
             $settings['_multipart_transferspeeds'][] = $uploaded_speed;
             // Overall upload speed average.
             $uploaded_speed = array_sum($settings['_multipart_transferspeeds']) / count($settings['_multipart_counts']);
             pb_backupbuddy::status('details', 'Upload speed average of all chunks: `' . pb_backupbuddy::$format->file_size($uploaded_speed) . '`.');
             $settings['_multipart_counts'] = array();
             // Update stats.
             $fileoptions['_multipart_status'] = $update_status;
             $fileoptions['finish_time'] = time();
             $fileoptions['status'] = 'success';
             if (isset($uploaded_speed)) {
                 $fileoptions['write_speed'] = $uploaded_speed;
             }
             $fileoptions_obj->save();
             unset($fileoptions);
         }
         delete_transient('pb_backupbuddy_stashquota_' . $settings['itxapi_username']);
         // Delete quota transient since it probably has changed now.
         // Schedule to continue if anything is left to upload for this multipart of any individual files.
         if ($settings['_multipart_id'] != '' || count($files) > 0) {
             pb_backupbuddy::status('details', 'Stash multipart upload has more parts left. Scheduling next part send.');
             $schedule_result = backupbuddy_core::schedule_single_event(time(), pb_backupbuddy::cron_tag('destination_send'), array($settings, $files, $send_id));
             if (true === $schedule_result) {
                 pb_backupbuddy::status('details', 'Next Stash chunk step cron event scheduled.');
             } else {
                  pb_backupbuddy::status('error', 'Next Stash chunk step cron event FAILED to be scheduled.');
             }
             spawn_cron(time() + 150);
             // Adds > 60 seconds to get around once per minute cron running limit.
             update_option('_transient_doing_cron', 0);
             // Prevent cron-blocking for next item.
             return array($settings['_multipart_id'], 'Sent part ' . $this_part_number . ' of ' . count($settings['_multipart_counts']) . ' parts.');
         }
     }
     // end if multipart continuation.
     require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
     // Upload each file.
     foreach ($files as $file_id => $file) {
         // Determine backup type directory (if zip).
         $backup_type_dir = '';
         $backup_type = '';
         if (stristr($file, '.zip') !== false) {
             // If a zip try to determine backup type.
             pb_backupbuddy::status('details', 'Stash: Zip file. Detecting backup type if possible.');
             $serial = backupbuddy_core::get_serial_from_file($file);
             // See if we can get backup type from fileoptions data.
             $backup_options = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/' . $serial . '.txt', $read_only = true, $ignore_lock = true);
             if (true !== ($result = $backup_options->is_ok())) {
                 pb_backupbuddy::status('error', 'Unable to open fileoptions file `' . backupbuddy_core::getLogDirectory() . 'fileoptions/' . $serial . '.txt' . '`.');
             } else {
                 if (isset($backup_options->options['integrity']['detected_type'])) {
                     pb_backupbuddy::status('details', 'Stash: Detected backup type as `' . $backup_options->options['integrity']['detected_type'] . '` via integrity check data.');
                     $backup_type_dir = $backup_options->options['integrity']['detected_type'] . '/';
                     $backup_type = $backup_options->options['integrity']['detected_type'];
                 }
             }
             // If still do not know backup type then attempt to deduce it from filename.
             if ($backup_type == '') {
                 if (stristr($file, '-db-') !== false) {
                     pb_backupbuddy::status('details', 'Stash: Detected backup type as `db` via filename.');
                     $backup_type_dir = 'db/';
                     $backup_type = 'db';
                 } elseif (stristr($file, '-full-') !== false) {
                     pb_backupbuddy::status('details', 'Stash: Detected backup type as `full` via filename.');
                     $backup_type_dir = 'full/';
                     $backup_type = 'full';
                 } elseif (stristr($file, '-files-') !== false) {
                     pb_backupbuddy::status('details', 'Stash: Detected backup type as `files` via filename.');
                     $backup_type_dir = 'files/';
                     $backup_type = 'files';
                 } else {
                     pb_backupbuddy::status('details', 'Stash: Could not detect backup type via integrity details nor filename.');
                 }
             }
         }
         // Interact with Stash API.
          pb_backupbuddy::status('details', 'Determining Stash upload URL for `' . $file . '` with destination remote path `' . $remote_path . $backup_type_dir . basename($file) . '`.');
         $upload_url = $stash->get_upload_url($file, 'request', $remote_path . $backup_type_dir . basename($file));
         pb_backupbuddy::status('details', 'Determined upload url: `' . $upload_url . '`.');
         $request = new RequestCore($upload_url);
         pb_backupbuddy::status('details', 'Sending Stash API request.');
         $response = $request->send_request(true);
         // Validate response.
         if (!$response->isOK()) {
             $this_error = 'Stash request for upload credentials failed.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         }
         if (!($upload_data = json_decode($response->body, true))) {
             $this_error = 'Stash API did not give a valid JSON response.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         }
         if (isset($upload_data['error'])) {
             $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         }
         // Create S3 instance.
         pb_backupbuddy::status('details', 'Creating Stash S3 instance.');
         $s3 = new AmazonS3($upload_data['credentials']);
         // the key, secret, token
         if ($disable_ssl === true) {
             @$s3->disable_ssl(true);
         }
         pb_backupbuddy::status('details', 'Stash S3 instance created.');
         // Handle chunking of file into a multipart upload (if applicable).
         $file_size = filesize($file);
         if ($max_chunk_size >= self::MINIMUM_CHUNK_SIZE && $file_size / 1024 / 1024 > $max_chunk_size) {
             // minimum chunk size is 5mb. Anything under 5mb we will not chunk.
             pb_backupbuddy::status('details', 'Stash file size of ' . pb_backupbuddy::$format->file_size($file_size) . ' exceeds max chunk size of ' . $max_chunk_size . 'MB set in settings for sending file as multipart upload.');
             // Initiate multipart upload with S3.
             pb_backupbuddy::status('details', 'Initiating Stash multipart upload.');
             $response = $s3->initiate_multipart_upload($upload_data['bucket'], $upload_data['object'], array('encryption' => 'AES256'));
             if (!$response->isOK()) {
                 $this_error = 'Stash was unable to initiate multipart upload.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             } else {
                 $upload_id = (string) $response->body->UploadId;
                 pb_backupbuddy::status('details', 'Stash initiated multipart upload with ID `' . $upload_id . '`.');
             }
             // Get chunk parts for multipart transfer.
             pb_backupbuddy::status('details', 'Stash getting multipart counts.');
             $parts = $s3->get_multipart_counts($file_size, $max_chunk_size * 1024 * 1024);
             // Size of chunks expected to be in bytes.
             $multipart_destination_settings = $settings;
             $multipart_destination_settings['_multipart_id'] = $upload_id;
             $multipart_destination_settings['_multipart_partnumber'] = 0;
             $multipart_destination_settings['_multipart_file'] = $file;
             $multipart_destination_settings['_multipart_counts'] = $parts;
             $multipart_destination_settings['_multipart_upload_data'] = $upload_data;
             $multipart_destination_settings['_multipart_backup_type_dir'] = $backup_type_dir;
              pb_backupbuddy::status('details', 'Stash multipart settings to pass: ' . print_r($multipart_destination_settings, true));
              pb_backupbuddy::status('details', 'Stash scheduling send of next part(s).');
             backupbuddy_core::schedule_single_event(time(), pb_backupbuddy::cron_tag('destination_send'), array($multipart_destination_settings, $files, $send_id));
             spawn_cron(time() + 150);
             // Adds > 60 seconds to get around once per minute cron running limit.
             update_option('_transient_doing_cron', 0);
             // Prevent cron-blocking for next item.
             pb_backupbuddy::status('details', 'Stash scheduled send of next part(s). Done for this cycle.');
             return array($upload_id, 'Starting send of ' . count($multipart_destination_settings['_multipart_counts']) . ' parts.');
         } else {
             // did not meet chunking criteria.
             if ($max_chunk_size != '0') {
                 if ($file_size / 1024 / 1024 > self::MINIMUM_CHUNK_SIZE) {
                     pb_backupbuddy::status('details', 'File size of ' . pb_backupbuddy::$format->file_size($file_size) . ' is less than the max chunk size of ' . $max_chunk_size . 'MB; not chunking into multipart upload.');
                 } else {
                     pb_backupbuddy::status('details', 'File size of ' . pb_backupbuddy::$format->file_size($file_size) . ' is less than the minimum allowed chunk size of ' . self::MINIMUM_CHUNK_SIZE . 'MB; not chunking into multipart upload.');
                 }
             } else {
                 pb_backupbuddy::status('details', 'Max chunk size set to zero so not chunking into multipart upload.');
             }
         }
         // SEND file.
         pb_backupbuddy::status('details', 'About to put (upload) object to Stash.');
         $response = $s3->create_object($upload_data['bucket'], $upload_data['object'], array('fileUpload' => $file, 'encryption' => 'AES256'));
         // Validate response. On failure notify Stash API that things went wrong.
         if (!$response->isOK()) {
             // Send FAILED.
             pb_backupbuddy::status('details', 'Sending upload abort.');
             $request = new RequestCore($abort_url);
             $response = $request->send_request(true);
             $this_error = 'Could not upload to Stash, attempt aborted.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             // Send SUCCESS.
             pb_backupbuddy::status('details', 'Success uploading file to Stash storage. Notifying Stash API next. Upload details: `' . print_r($response, true) . '`.');
             $uploaded_size = $response->header['_info']['size_upload'];
             $uploaded_speed = $response->header['_info']['speed_upload'];
             pb_backupbuddy::status('details', 'Uploaded size: ' . pb_backupbuddy::$format->file_size($uploaded_size) . ', Speed: ' . pb_backupbuddy::$format->file_size($uploaded_speed) . '/sec.');
         }
         delete_transient('pb_backupbuddy_stashquota_' . $settings['itxapi_username']);
         // Delete quota transient since it probably has changed now.
          // Notify Stash API that things were successful.
         $done_url = $stash->get_upload_url($file, 'done', $remote_path . $backup_type_dir . basename($file));
         pb_backupbuddy::status('details', 'Notifying Stash of completed upload with done url `' . $done_url . '`.');
         $request = new RequestCore($done_url);
         $response = $request->send_request(true);
         if (!$response->isOK()) {
             $this_error = 'Error #247568834682. Could not finalize Stash upload. Response code: `' . $response->get_response_code() . '`; Response body: `' . $response->get_response_body() . '`; Response headers: `' . $response->get_response_header() . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             // Good server response.
             // See if we got an optional json response.
             $upload_data = @json_decode($response->body, true);
             if (isset($upload_data['error'])) {
                 // Some kind of error.
                 $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             }
             unset($files[$file_id]);
             // Remove from list of files we have not sent yet.
             pb_backupbuddy::status('details', 'Stash success sending file `' . basename($file) . '`. File uploaded and reported to Stash as completed.');
             // Load destination fileoptions.
             pb_backupbuddy::status('details', 'About to load fileoptions data.');
             require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
             $fileoptions_obj = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/send-' . $send_id . '.txt', $read_only = false, $ignore_lock = false, $create_file = false);
             if (true !== ($result = $fileoptions_obj->is_ok())) {
                 pb_backupbuddy::status('error', __('Fatal Error #9034.84838. Unable to access fileoptions data.', 'it-l10n-backupbuddy') . ' Error: ' . $result);
                 return false;
             }
             pb_backupbuddy::status('details', 'Fileoptions data loaded.');
             $fileoptions =& $fileoptions_obj->options;
             // Save stats.
             if (isset($uploaded_speed)) {
                 $fileoptions['write_speed'] = $uploaded_speed;
                 $fileoptions_obj->save();
             }
             //$fileoptions['finish_time'] = time();
             //$fileoptions['status'] = 'success';
             unset($fileoptions_obj);
         }
     }
     // end foreach.
     // BEGIN FILE LIMIT PROCESSING. Enforce archive limits if applicable.
     if ($backup_type == 'full') {
         $limit = $full_archive_limit;
         pb_backupbuddy::status('details', 'Stash full backup archive limit of `' . $limit . '` of type `full` based on destination settings.');
     } elseif ($backup_type == 'db') {
         $limit = $db_archive_limit;
         pb_backupbuddy::status('details', 'Stash database backup archive limit of `' . $limit . '` of type `db` based on destination settings.');
     } elseif ($backup_type == 'files') {
          $limit = $files_archive_limit;
          pb_backupbuddy::status('details', 'Stash files backup archive limit of `' . $limit . '` of type `files` based on destination settings.');
     } else {
         $limit = 0;
         pb_backupbuddy::status('warning', 'Warning #54854895. Stash was unable to determine backup type (reported: `' . $backup_type . '`) so archive limits NOT enforced for this backup.');
     }
     if ($limit > 0) {
         pb_backupbuddy::status('details', 'Stash archive limit enforcement beginning.');
         // S3 object for managing files.
         $s3_manage = new AmazonS3($manage_data['credentials']);
         if ($disable_ssl === true) {
             @$s3_manage->disable_ssl(true);
         }
         // Get file listing.
         $response_manage = $s3_manage->list_objects($manage_data['bucket'], array('prefix' => $manage_data['subkey'] . $remote_path . $backup_type_dir));
         // list all the files in the subscriber account
         // Create array of backups and organize by date
         $prefix = backupbuddy_core::backup_prefix();
         // List backups associated with this site by date.
         $backups = array();
         foreach ($response_manage->body->Contents as $object) {
             $file = str_replace($manage_data['subkey'] . $remote_path . $backup_type_dir, '', $object->Key);
             // Stash stores files in a directory per site so no need to check prefix here! if ( false !== strpos( $file, 'backup-' . $prefix . '-' ) ) { // if backup has this site prefix...
             $backups[$file] = strtotime($object->LastModified);
         }
         arsort($backups);
         pb_backupbuddy::status('details', 'Stash found `' . count($backups) . '` backups of this type when checking archive limits.');
         if (count($backups) > $limit) {
             pb_backupbuddy::status('details', 'More archives (' . count($backups) . ') than limit (' . $limit . ') allows. Trimming...');
             $i = 0;
             $delete_fail_count = 0;
             foreach ($backups as $buname => $butime) {
                 $i++;
                 if ($i > $limit) {
                     pb_backupbuddy::status('details', 'Trimming excess file `' . $buname . '`...');
                     $response = $s3_manage->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . $backup_type_dir . $buname);
                     if (!$response->isOK()) {
                         pb_backupbuddy::status('details', 'Unable to delete excess Stash file `' . $buname . '`. Details: `' . print_r($response, true) . '`.');
                         $delete_fail_count++;
                     }
                 }
             }
             pb_backupbuddy::status('details', 'Finished trimming excess backups.');
             if ($delete_fail_count !== 0) {
                 $error_message = 'Stash remote limit could not delete ' . $delete_fail_count . ' backups.';
                 pb_backupbuddy::status('error', $error_message);
                 backupbuddy_core::mail_error($error_message);
             }
         }
         pb_backupbuddy::status('details', 'Stash completed archive limiting.');
     } else {
         pb_backupbuddy::status('details', 'No Stash archive file limit to enforce.');
     }
     // End remote backup limit
     if (isset($fileoptions_obj)) {
         unset($fileoptions_obj);
     }
     // END FILE LIMIT PROCESSING.
     // Success if we made it this far.
     return true;
 }
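
The send() method above returns true when a transfer completes in a single pass, false on failure, or an array when it has only initiated a chunked (multipart) upload and scheduled the remaining parts via cron. The following is a minimal, hypothetical invocation sketch, not code from the project: the settings keys mirror the ones read at the top of send(), and every value (credentials, path, send ID) is a placeholder.

 // Hypothetical usage sketch (not part of either project on this page).
 // The keys mirror those read at the top of send(); all values are placeholders.
 $settings = array(
     'itxapi_username'     => 'example-user',
     'itxapi_password'     => 'example-password',
     'db_archive_limit'    => 5,
     'full_archive_limit'  => 3,
     'files_archive_limit' => 3,
     'max_chunk_size'      => 80,      // in MB; 0 disables multipart chunking
     'directory'           => '',
     'ssl'                 => '1',
     '_multipart_id'       => '',      // empty = not resuming an earlier multipart upload
     '_multipart_counts'   => array(),
 );
 $result = pb_backupbuddy_destination_stash::send($settings, array('/path/to/backup-example-db.zip'), 'send1234');
 if (true === $result) {
     // Entire file uploaded and reported to Stash in this pass.
 } elseif (is_array($result)) {
     // Chunked upload started; the scheduled cron event sends the remaining parts.
     list($multipart_id, $status_message) = $result;
 } else {
     // Failure; details were written via pb_backupbuddy::status('error', ...).
 }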
Code Example #2
File: init.php Project: CherylMuniz/fashion
 public static function send($settings = array(), $files = array(), $clear_uploads = false)
 {
     global $pb_backupbuddy_destination_errors;
     if (!is_array($files)) {
         $files = array($files);
     }
     if ($clear_uploads === false) {
         // Uncomment the following line to override and always clear.
         //$clear_uploads = true;
     }
     $itxapi_username = $settings['itxapi_username'];
     $itxapi_password = $settings['itxapi_password'];
     $db_archive_limit = $settings['db_archive_limit'];
     $full_archive_limit = $settings['full_archive_limit'];
     $max_chunk_size = $settings['max_chunk_size'];
     $remote_path = self::get_remote_path($settings['directory']);
      // Has leading and trailing slashes.
     if ($settings['ssl'] == '0') {
         $disable_ssl = true;
     } else {
         $disable_ssl = false;
     }
     $multipart_id = $settings['_multipart_id'];
     $multipart_counts = $settings['_multipart_counts'];
     pb_backupbuddy::status('details', 'Stash remote path set to `' . $remote_path . '`.');
     require_once dirname(__FILE__) . '/lib/class.itx_helper.php';
     require_once dirname(__FILE__) . '/lib/aws-sdk/sdk.class.php';
     // Stash API talk.
     $stash = new ITXAPI_Helper(pb_backupbuddy_destination_stash::ITXAPI_KEY, pb_backupbuddy_destination_stash::ITXAPI_URL, $itxapi_username, $itxapi_password);
     $manage_data = pb_backupbuddy_destination_stash::get_manage_data($settings);
     // Wipe all current uploads.
     if ($clear_uploads === true) {
         pb_backupbuddy::status('details', 'Clearing any current uploads via Stash call to `abort-all`.');
         $abort_url = $stash->get_upload_url(null, 'abort-all');
         $request = new RequestCore($abort_url);
         //pb_backupbuddy::status('details', print_r( $request , true ) );
         $response = $request->send_request(true);
     }
     // Process multipart transfer that we already initiated in a previous PHP load.
     if ($multipart_id != '') {
         // Multipart upload initiated and needs parts sent.
         // Create S3 instance.
         pb_backupbuddy::status('details', 'Creating Stash S3 instance.');
         $s3 = new AmazonS3($settings['_multipart_upload_data']['credentials']);
         // the key, secret, token
         if ($disable_ssl === true) {
             @$s3->disable_ssl(true);
         }
         pb_backupbuddy::status('details', 'Stash S3 instance created.');
         $this_part_number = $settings['_multipart_partnumber'] + 1;
         pb_backupbuddy::status('details', 'Stash beginning upload of part `' . $this_part_number . '` of `' . count($settings['_multipart_counts']) . '` parts of file `' . $settings['_multipart_file'] . '` with multipart ID `' . $settings['_multipart_id'] . '`.');
         $response = $s3->upload_part($settings['_multipart_upload_data']['bucket'], $settings['_multipart_upload_data']['object'], $settings['_multipart_id'], array('expect' => '100-continue', 'fileUpload' => $settings['_multipart_file'], 'partNumber' => $this_part_number, 'seekTo' => (int) $settings['_multipart_counts'][$settings['_multipart_partnumber']]['seekTo'], 'length' => (int) $settings['_multipart_counts'][$settings['_multipart_partnumber']]['length']));
         if (!$response->isOK()) {
             $this_error = 'Stash unable to upload file part for multipart upload `' . $settings['_multipart_id'] . '`. Details: `' . print_r($response, true) . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         }
         // Update stats.
         foreach (pb_backupbuddy::$options['remote_sends'] as $identifier => $remote_send) {
             if (isset($remote_send['_multipart_id']) && $remote_send['_multipart_id'] == $multipart_id) {
                 // this item.
                 pb_backupbuddy::$options['remote_sends'][$identifier]['_multipart_status'] = 'Sent part ' . $this_part_number . ' of ' . count($settings['_multipart_counts']) . '.';
                 if ($this_part_number == count($settings['_multipart_counts'])) {
                     pb_backupbuddy::$options['remote_sends'][$identifier]['_multipart_status'] .= '<br>Success.';
                     pb_backupbuddy::$options['remote_sends'][$identifier]['finish_time'] = time();
                 }
                 pb_backupbuddy::save();
                 break;
             }
         }
         // Made it here so success sending part. Increment for next part to send.
         $settings['_multipart_partnumber']++;
         if (!isset($settings['_multipart_counts'][$settings['_multipart_partnumber']])) {
             // No more parts exist for this file. Tell S3 the multipart upload is complete and move on.
             pb_backupbuddy::status('details', 'Stash getting parts with etags to notify S3 of completed multipart send.');
             $etag_parts = $s3->list_parts($settings['_multipart_upload_data']['bucket'], $settings['_multipart_upload_data']['object'], $settings['_multipart_id']);
             pb_backupbuddy::status('details', 'Stash got parts list. Notifying S3 of multipart upload completion.');
             $response = $s3->complete_multipart_upload($settings['_multipart_upload_data']['bucket'], $settings['_multipart_upload_data']['object'], $settings['_multipart_id'], $etag_parts);
             if (!$response->isOK()) {
                 $this_error = 'Stash unable to notify S3 of completion of all parts for multipart upload `' . $settings['_multipart_id'] . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             } else {
                 pb_backupbuddy::status('details', 'Stash notified S3 of multipart completion.');
             }
              // Notify Stash API that things were successful.
             $done_url = $stash->get_upload_url($settings['_multipart_file'], 'done', $remote_path . $settings['_multipart_backup_type_dir'] . basename($settings['_multipart_file']));
             pb_backupbuddy::status('details', 'Notifying Stash of completed multipart upload with done url `' . $done_url . '`.');
             $request = new RequestCore($done_url);
             $response = $request->send_request(true);
             if (!$response->isOK()) {
                 $this_error = 'Error #756834682. Could not finalize Stash upload. Response code: `' . $response->get_response_code() . '`; Response body: `' . $response->get_response_body() . '`; Response headers: `' . $response->get_response_header() . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             } else {
                 // Good server response.
                 // See if we got an optional json response.
                 $upload_data = @json_decode($response->body, true);
                 if (isset($upload_data['error'])) {
                     $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
                     $pb_backupbuddy_destination_errors[] = $this_error;
                     pb_backupbuddy::status('error', $this_error);
                     return false;
                 }
                 pb_backupbuddy::status('details', 'Stash success sending file `' . basename($settings['_multipart_file']) . '`. File uploaded via multipart across `' . $this_part_number . '` parts and reported to Stash as completed.');
             }
             pb_backupbuddy::status('details', 'Stash has no more parts left for this multipart upload. Clearing multipart instance variables.');
             $settings['_multipart_partnumber'] = 0;
             $settings['_multipart_id'] = '';
             $settings['_multipart_file'] = '';
             $settings['_multipart_counts'] = array();
             $settings['_multipart_upload_data'] = array();
         }
         delete_transient('pb_backupbuddy_stashquota_' . $settings['itxapi_username']);
         // Delete quota transient since it probably has changed now.
         // Schedule to continue if anything is left to upload for this multipart of any individual files.
         if ($settings['_multipart_id'] != '' || count($files) > 0) {
             pb_backupbuddy::status('details', 'Stash multipart upload has more parts left. Scheduling next part send.');
             wp_schedule_single_event(time(), pb_backupbuddy::cron_tag('destination_send'), array($settings, $files, 'multipart', false));
             spawn_cron(time() + 150);
             // Adds > 60 seconds to get around once per minute cron running limit.
             update_option('_transient_doing_cron', 0);
             // Prevent cron-blocking for next item.
             pb_backupbuddy::status('details', 'Stash scheduled send of next part(s). Done for this cycle.');
              return array($settings['_multipart_id'], 'Sent ' . $this_part_number . ' of ' . count($settings['_multipart_counts']) . ' parts.');
         }
     }
     // Upload each file.
     foreach ($files as $file_id => $file) {
         // Determine backup type directory (if zip).
         $backup_type_dir = '';
         $backup_type = '';
         if (stristr($file, '.zip') !== false) {
             // If a zip try to determine backup type.
             pb_backupbuddy::status('details', 'Stash: Zip file. Detecting backup type if possible.');
             $serial = pb_backupbuddy::$classes['core']->get_serial_from_file($file);
             if (isset(pb_backupbuddy::$options['backups'][$serial]['integrity']['detected_type'])) {
                 pb_backupbuddy::status('details', 'Stash: Detected backup type as `' . pb_backupbuddy::$options['backups'][$serial]['integrity']['detected_type'] . '` via integrity check data.');
                 $backup_type_dir = pb_backupbuddy::$options['backups'][$serial]['integrity']['detected_type'] . '/';
                 $backup_type = pb_backupbuddy::$options['backups'][$serial]['integrity']['detected_type'];
             } else {
                 if (stristr($file, '-db-') !== false) {
                     pb_backupbuddy::status('details', 'Stash: Detected backup type as `db` via filename.');
                     $backup_type_dir = 'db/';
                     $backup_type = 'db';
                 } elseif (stristr($file, '-full-') !== false) {
                     pb_backupbuddy::status('details', 'Stash: Detected backup type as `full` via filename.');
                     $backup_type_dir = 'full/';
                     $backup_type = 'full';
                 } else {
                     pb_backupbuddy::status('details', 'Stash: Could not detect backup type via integrity details nor filename.');
                 }
             }
         }
         // Interact with Stash API.
          pb_backupbuddy::status('details', 'Determining Stash upload URL for `' . $file . '` with destination remote path `' . $remote_path . $backup_type_dir . basename($file) . '`.');
         $upload_url = $stash->get_upload_url($file, 'request', $remote_path . $backup_type_dir . basename($file));
         pb_backupbuddy::status('details', 'Determined upload url: `' . $upload_url . '`.');
         $request = new RequestCore($upload_url);
         pb_backupbuddy::status('details', 'Sending Stash API request.');
         $response = $request->send_request(true);
         // Validate response.
         if (!$response->isOK()) {
             $this_error = 'Stash request for upload credentials failed.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         }
         if (!($upload_data = json_decode($response->body, true))) {
             $this_error = 'Stash API did not give a valid JSON response.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         }
         if (isset($upload_data['error'])) {
             $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         }
         // Calculate meta data to send.
         /*
         $meta_array = array();
         if ( stristr( $file, '.zip' ) !== false ) { // If a zip try to determine backup type.
         	pb_backupbuddy::status( 'details', 'Stash: Zip file. Detecting backup type if possible.' );
         	$serial = pb_backupbuddy::$classes['core']->get_serial_from_file( $file );
         	if ( isset( pb_backupbuddy::$options['backups'][$serial]['integrity']['detected_type'] ) ) {
         		pb_backupbuddy::status( 'details', 'Stash: Detected backup type as `' . pb_backupbuddy::$options['backups'][$serial]['integrity']['detected_type'] . '` via integrity check data.' );
         		$meta_array['backup_type'] = pb_backupbuddy::$options['backups'][$serial]['integrity']['detected_type'];
         	} else {
         		if ( stristr( $file, '-db-' ) !== false ) {
         			pb_backupbuddy::status( 'details', 'Stash: Detected backup type as `db` via filename.' );
         			$meta_array['backup_type'] = 'db';
         		} elseif ( stristr( $file, '-full-' ) !== false ) {
         			pb_backupbuddy::status( 'details', 'Stash: Detected backup type as `full` via filename.' );
         			$meta_array['backup_type'] = 'full';
         		} else {
         			pb_backupbuddy::status( 'details', 'Stash: Could not detect backup type via integrity details nor filename.' );
         		}
         	}
         }
         */
         // Create S3 instance.
         pb_backupbuddy::status('details', 'Creating Stash S3 instance.');
         $s3 = new AmazonS3($upload_data['credentials']);
         // the key, secret, token
         if ($disable_ssl === true) {
             @$s3->disable_ssl(true);
         }
         pb_backupbuddy::status('details', 'Stash S3 instance created.');
         // Handle chunking of file into a multipart upload (if applicable).
         $file_size = filesize($file);
         if ($max_chunk_size >= 5 && $file_size / 1024 / 1024 > $max_chunk_size) {
             // minimum chunk size is 5mb. Anything under 5mb we will not chunk.
             pb_backupbuddy::status('details', 'Stash file size of ' . $file_size / 1024 / 1024 . 'MB exceeds max chunk size of ' . $max_chunk_size . 'MB set in settings for sending file as multipart upload.');
             // Initiate multipart upload with S3.
             pb_backupbuddy::status('details', 'Initiating Stash multipart upload.');
             $response = $s3->initiate_multipart_upload($upload_data['bucket'], $upload_data['object'], array('encryption' => 'AES256'));
             if (!$response->isOK()) {
                 $this_error = 'Stash was unable to initiate multipart upload.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             } else {
                 $upload_id = (string) $response->body->UploadId;
                 pb_backupbuddy::status('details', 'Stash initiated multipart upload with ID `' . $upload_id . '`.');
             }
             // Get chunk parts for multipart transfer.
             pb_backupbuddy::status('details', 'Stash getting multipart counts.');
             $parts = $s3->get_multipart_counts($file_size, $max_chunk_size * 1024 * 1024);
             // Size of chunks expected to be in bytes.
             $multipart_destination_settings = $settings;
             $multipart_destination_settings['_multipart_id'] = $upload_id;
             $multipart_destination_settings['_multipart_partnumber'] = 0;
             $multipart_destination_settings['_multipart_file'] = $file;
             $multipart_destination_settings['_multipart_counts'] = $parts;
             $multipart_destination_settings['_multipart_upload_data'] = $upload_data;
             $multipart_destination_settings['_multipart_backup_type_dir'] = $backup_type_dir;
              pb_backupbuddy::status('details', 'Stash multipart settings to pass: ' . print_r($multipart_destination_settings, true));
              pb_backupbuddy::status('details', 'Stash scheduling send of next part(s).');
             wp_schedule_single_event(time(), pb_backupbuddy::cron_tag('destination_send'), array($multipart_destination_settings, $files, 'multipart', false));
             spawn_cron(time() + 150);
             // Adds > 60 seconds to get around once per minute cron running limit.
             update_option('_transient_doing_cron', 0);
             // Prevent cron-blocking for next item.
             pb_backupbuddy::status('details', 'Stash scheduled send of next part(s). Done for this cycle.');
             return array($upload_id, 'Starting send of ' . count($multipart_destination_settings['_multipart_counts']) . ' parts.');
         } else {
             if ($max_chunk_size != '0') {
                 pb_backupbuddy::status('details', 'File size of ' . $file_size / 1024 / 1024 . 'MB is less than the max chunk size of ' . $max_chunk_size . 'MB; not chunking into multipart upload.');
             } else {
                 pb_backupbuddy::status('details', 'Max chunk size set to zero so not chunking into multipart upload.');
             }
         }
         // SEND file.
         pb_backupbuddy::status('details', 'About to put (upload) object to Stash.');
         $response = $s3->create_object($upload_data['bucket'], $upload_data['object'], array('fileUpload' => $file, 'encryption' => 'AES256'));
         //  we can also utilize the multi-part-upload to create an object
         //  $response = $s3->create_mpu_object($upload_data['bucket'], $upload_data['object'], array('fileUpload'=>$upload_file));
         // Validate response. On failure notify Stash API that things went wrong.
         if (!$response->isOK()) {
             pb_backupbuddy::status('details', 'Sending upload abort.');
             $request = new RequestCore($abort_url);
             $response = $request->send_request(true);
             $this_error = 'Could not upload to Stash, attempt aborted.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             //	pb_backupbuddy::status( 'details', 'Stash file upload speed: ' . ( $response->header['_info']['speed_upload'] / 1024 / 1024 ) . 'MB/sec. This number may be invalid for small file transfers.' );
              pb_backupbuddy::status('details', 'Stash put success. Need to notify Stash of upload completion. Details: `' . print_r($response, true) . '`.');
         }
         delete_transient('pb_backupbuddy_stashquota_' . $settings['itxapi_username']);
         // Delete quota transient since it probably has changed now.
          // Notify Stash API that things were successful.
         $done_url = $stash->get_upload_url($file, 'done', $remote_path . $backup_type_dir . basename($file));
         pb_backupbuddy::status('details', 'Notifying Stash of completed upload with done url `' . $done_url . '`.');
         $request = new RequestCore($done_url);
         $response = $request->send_request(true);
         if (!$response->isOK()) {
             $this_error = 'Error #756834682. Could not finalize Stash upload. Response code: `' . $response->get_response_code() . '`; Response body: `' . $response->get_response_body() . '`; Response headers: `' . $response->get_response_header() . '`.';
             $pb_backupbuddy_destination_errors[] = $this_error;
             pb_backupbuddy::status('error', $this_error);
             return false;
         } else {
             // Good server response.
             // See if we got an optional json response.
             $upload_data = @json_decode($response->body, true);
             if (isset($upload_data['error'])) {
                 // Some kind of error.
                 $this_error = 'Stash error(s): `' . implode(' - ', $upload_data['error']) . '`.';
                 $pb_backupbuddy_destination_errors[] = $this_error;
                 pb_backupbuddy::status('error', $this_error);
                 return false;
             }
             unset($files[$file_id]);
             // Remove from list of files we have not sent yet.
             pb_backupbuddy::status('details', 'Stash success sending file `' . basename($file) . '`. File uploaded and reported to Stash as completed.');
         }
         // Enforce archive limits if applicable.
         if ($backup_type == 'full') {
             $limit = $full_archive_limit;
             pb_backupbuddy::status('details', 'Stash full backup archive limit of `' . $limit . '` based on destination settings.');
         } elseif ($backup_type == 'db') {
             $limit = $db_archive_limit;
             pb_backupbuddy::status('details', 'Stash database backup archive limit of `' . $limit . '` based on destination settings.');
         } else {
             $limit = 0;
             pb_backupbuddy::status('error', 'Error #54854895. Stash was unable to determine backup type so archive limits NOT enforced for this backup.');
         }
         if ($limit > 0) {
             pb_backupbuddy::status('details', 'Stash archive limit enforcement beginning.');
             // S3 object for managing files.
             $s3_manage = new AmazonS3($manage_data['credentials']);
             if ($disable_ssl === true) {
                 @$s3_manage->disable_ssl(true);
             }
             // Get file listing.
             $response_manage = $s3_manage->list_objects($manage_data['bucket'], array('prefix' => $manage_data['subkey'] . $remote_path . $backup_type_dir));
             // list all the files in the subscriber account
             // Create array of backups and organize by date
             $prefix = pb_backupbuddy::$classes['core']->backup_prefix();
             // List backups associated with this site by date.
             $backups = array();
             foreach ($response_manage->body->Contents as $object) {
                 $file = str_replace($manage_data['subkey'] . $remote_path . $backup_type_dir, '', $object->Key);
                 // Stash stores files in a directory per site so no need to check prefix here! if ( false !== strpos( $file, 'backup-' . $prefix . '-' ) ) { // if backup has this site prefix...
                 $backups[$file] = strtotime($object->LastModified);
                 //}
             }
             arsort($backups);
             //error_log( 'backups: ' . print_r( $backups, true ) );
             pb_backupbuddy::status('details', 'Stash found `' . count($backups) . '` backups of this type when checking archive limits.');
             if (count($backups) > $limit) {
                 pb_backupbuddy::status('details', 'More archives (' . count($backups) . ') than limit (' . $limit . ') allows. Trimming...');
                 $i = 0;
                 $delete_fail_count = 0;
                 foreach ($backups as $buname => $butime) {
                     $i++;
                     if ($i > $limit) {
                         pb_backupbuddy::status('details', 'Trimming excess file `' . $buname . '`...');
                         $response = $s3_manage->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . $backup_type_dir . $buname);
                         if (!$response->isOK()) {
                             pb_backupbuddy::status('details', 'Unable to delete excess Stash file `' . $buname . '`. Details: `' . print_r($response, true) . '`.');
                             $delete_fail_count++;
                         }
                     }
                 }
                 pb_backupbuddy::status('details', 'Finished trimming excess backups.');
                 if ($delete_fail_count !== 0) {
                     $error_message = 'Stash remote limit could not delete ' . $delete_fail_count . ' backups.';
                     pb_backupbuddy::status('error', $error_message);
                     pb_backupbuddy::$classes['core']->mail_error($error_message);
                 }
             }
             pb_backupbuddy::status('details', 'Stash completed archive limiting.');
         } else {
             pb_backupbuddy::status('details', 'No Stash archive file limit to enforce.');
         }
         // End remote backup limit
     }
     // end foreach.
     // Success if we made it this far.
     return true;
 }
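
Both versions chunk a file only when max_chunk_size is at least the 5 MB minimum and the file size in MB exceeds that setting; the part boundaries come from the SDK's get_multipart_counts(), and each entry's seekTo/length pair is replayed by upload_part() one part per cron pass. As an assumption about the shape of that part map (the helper below is a hypothetical illustration, not the SDK's code), it can be sketched as:

 // Illustrative sketch only (an assumption, not the AWS SDK implementation):
 // the shape of the seekTo/length part map that upload_part() consumes above.
 function example_multipart_counts($file_size, $chunk_bytes)
 {
     $parts = array();
     for ($offset = 0; $offset < $file_size; $offset += $chunk_bytes) {
         $parts[] = array(
             'seekTo' => $offset,                                 // byte offset where this part starts
             'length' => min($chunk_bytes, $file_size - $offset), // bytes in this part
         );
     }
     return $parts;
 }

 // Example: a 200 MB file with an 80 MB max chunk size yields three parts
 // (80 MB, 80 MB, 40 MB), matching the part count reported in the status log.
 $parts = example_multipart_counts(200 * 1024 * 1024, 80 * 1024 * 1024);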