/**
 * Uploads backup files to Amazon S3 using the legacy (backward-compatible) SDK.
 * Small files (<= 5 MB) are sent with a single create_object() call; larger
 * files go through a resumable multipart upload whose state (upload ID, next
 * part number, uploaded parts) is persisted between calls so the upload can
 * continue across requests.
 *
 * @param int          $historyID ID of the backup task being processed.
 * @param array|string $args      S3 credentials/settings on the first call;
 *                                empty string on follow-up calls (state is
 *                                then restored from the database).
 * @return array Status array, or an error array on failure.
 */
function amazons3_backup_bwd_comp($historyID, $args = '')
{
    $s3StartTime = $this->iwpScriptStartTime;
    $this->backup_settings_vals = get_option('iwp_client_multi_backup_temp_values');
    $backup_settings_values = $this->backup_settings_vals;

    if (!empty($backup_settings_values['s3_retrace_count'][$historyID])) {
        $s3_retrace_count = $backup_settings_values['s3_retrace_count'][$historyID];
    } else {
        $s3_retrace_count = 0;
    }

    // Fetch the stored request parameters for this backup task.
    $requestParams = $this->getRequiredData($historyID, "requestParams");
    $upload_loop_break_time = $requestParams['account_info']['upload_loop_break_time'];

    // S3 multipart uploads require every part except the last to be at least 5 MB.
    $upload_file_block_size = $requestParams['account_info']['upload_file_block_size'];
    if ($upload_file_block_size < 5 * 1024 * 1024) {
        $upload_file_block_size = 5 * 1024 * 1024 + 1;
    }

    $del_host_file = $requestParams['args']['del_host_file'];
    $task_result   = $this->getRequiredData($historyID, "taskResults");

    @set_time_limit(0);

    $this->hisID        = $historyID;
    $uploadLoopCount    = 0;
    $upload_id          = 'start';
    $partsArray         = array();
    $nextPart           = 1;
    $retrace            = 'notSet';
    $doComplete         = false;
    $result_arr         = array();
    $resArray           = array();
    $singleUploadResult = true;

    if ($args == '') {
        // On follow-up calls $args is empty: restore $args, the upload ID and
        // the offset from the values saved during the previous call.
        $responseParams = $this->getRequiredData($historyID, "responseParams");
        if (!$responseParams) {
            return $this->statusLog($this->hisID, array('stage' => 's3Upload', 'status' => 'error', 'statusMsg' => 'S3 upload failed: error while fetching table data.', 'statusCode' => 's3_upload_failed_error_while_fetching_table_data'));
        }
        $args             = $responseParams['s3Args'];
        $prevChunkResults = $responseParams['response_data'];
        $upload_id        = $prevChunkResults['upload_id'];
        $nextPart         = $prevChunkResults['nextPart'];
        $partsArray       = $prevChunkResults['partsArray'];
        $current_file_num = $responseParams['current_file_num'];
        $dont_retrace     = isset($responseParams['dont_retrace']) ? $responseParams['dont_retrace'] : false;
        $start_new_backup = isset($responseParams['start_new_backup']) ? $responseParams['start_new_backup'] : false;
    }
    if (empty($current_file_num)) {
        $current_file_num = 0;
    }

    // Retrace: if the upload ID was lost, recover it from the stored settings
    // and resume the multipart upload (at most 3 attempts).
    if (!$upload_id && empty($dont_retrace)) {
        if ($s3_retrace_count <= 3) {
            $args = $requestParams['secure']['account_info']['iwp_amazon_s3'];
            if (isset($backup_settings_values['s3_upload_id'][$historyID])) {
                $upload_id = $backup_settings_values['s3_upload_id'][$historyID];
            } else {
                return $this->statusLog($this->hisID, array('stage' => 's3Upload Retrace', 'status' => 'error', 'statusMsg' => 'S3 upload failed: error while fetching table data during retrace.', 'statusCode' => 's3_upload_failed_error_while_fetching_table_data_during_retrace'));
            }
            $backup_file = $backup_settings_values['backup_file'];
            $retrace = 'set';
            $s3_retrace_count++;
            $backup_settings_values['s3_retrace_count'][$historyID] = $s3_retrace_count;
            update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
        } else {
            return $this->statusLog($this->hisID, array('stage' => 's3Upload', 'status' => 'error', 'statusMsg' => 'S3 upload failed: retrace limit reached.', 'statusCode' => 's3_upload_failed_retrace_limit_reached'));
        }
    }

    if (!$this->iwp_mmb_function_exists('curl_init')) {
        return array('error' => 'You cannot use Amazon S3 on your server. Please enable cURL first.', 'partial' => 1, 'error_code' => 'cannot_use_s3_enable_curl_first');
    }
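    /*
     * Overview of the multipart flow used below (a sketch; these are the
     * CloudFusion-style methods of the legacy SDK bundled with this plugin):
     *
     *   $upload_id = initiate_multipart_upload($bucket, $key)   // once per file
     *   upload_part($bucket, $key, $upload_id, ...)             // one call per >= 5 MB part
     *   complete_multipart_upload($bucket, $key, $upload_id, $parts)
     *
     * Between calls, $upload_id / $nextPart / $partsArray are persisted via
     * statusLog() so an interrupted upload can resume from the next part.
     */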
    require_once $GLOBALS['iwp_mmb_plugin_dir'] . '/lib/amazon_s3_bwd_comp/sdk.class.php';

    $tempArgs = $args;
    // extract() pulls the S3 settings into scope: $as3_access_key, $as3_secure_key,
    // $as3_bucket, $as3_directory, $as3_site_folder, $backup_file, ...
    extract($args);

    // Normalise $backup_file to the single file handled in this call.
    if (!is_array($backup_file)) {
        $temp_backup_file = $backup_file;
        $backup_file      = array();
        $backup_file[]    = $temp_backup_file;
    }
    if (is_array($backup_file)) {
        $backup_files_count = count($backup_file);
        $temp_single_file   = $backup_file[$current_file_num];
        unset($backup_file);
        $backup_file = $temp_single_file;
    }

    if ($as3_site_folder == true) {
        if (!empty($as3_directory)) {
            $as3_directory .= '/' . $this->site_name;
        } else {
            $as3_directory = $this->site_name;
        }
    }

    try {
        CFCredentials::set(array(
            'development' => array(
                'key'                   => trim($as3_access_key),
                'secret'                => trim(str_replace(' ', '+', $as3_secure_key)),
                'default_cache_config'  => '',
                'certificate_authority' => true,
                'use_ssl'               => false,
                'ssl_verification'      => false,
            ),
            '@default' => 'development',
        ));
        $s3      = new AmazonS3();
        $cfu_obj = new CFUtilities();

        // The multi-call upload starts here.
        if (!empty($as3_directory)) {
            $as3_file = $as3_directory . '/' . basename($backup_file);
        } else {
            $as3_file = basename($backup_file);
        }

        if (iwp_mmb_get_file_size($backup_file) <= 5 * 1024 * 1024) {
            // Small backup: a single PUT is enough, no multipart upload needed.
            echo "<br>small backup so single upload<br>";
            $response = $s3->create_object($as3_bucket, $as3_file, array('fileUpload' => $backup_file));
            if ($response->isOK()) {
                $current_file_num += 1;
                $resArray = array('status' => "completed", 'backupParentHID' => $historyID);
                $result_arr = array();
                $result_arr['status']           = 'completed';
                $result_arr['nextFunc']         = 'amazons3_backup_over';
                $result_arr['s3Args']           = $tempArgs;
                $result_arr['current_file_num'] = $current_file_num;
                $result_arr['dont_retrace']     = true;
                $task_result['task_results'][$historyID]['amazons3'][$current_file_num - 1] = basename($backup_file);
                $task_result['amazons3'][$current_file_num - 1] = basename($backup_file);
                if ($current_file_num >= $backup_files_count) {
                    unset($task_result['task_results'][$historyID]['server']);
                    @unlink($backup_file);
                } else {
                    // More zip split parts remain: queue up the next file.
                    $resArray['status'] = 'partiallyCompleted';
                    $chunkResult = array();
                    $chunkResult['partsArray'] = array();
                    $chunkResult['nextPart']   = 1;
                    $chunkResult['upload_id']  = 'start';
                    $result_arr['response_data']    = $chunkResult;
                    $result_arr['nextFunc']         = 'amazons3_backup';
                    $result_arr['status']           = 'partiallyCompleted';
                    $result_arr['start_new_backup'] = true;
                    @unlink($backup_file);
                }
                $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'nextCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                return $resArray;
            } else {
                return array('error' => 'Failed to upload to Amazon S3.');
            }
        }

        if ($upload_id == 'start') {
            echo "initiating multiCall upload";
            // Initiate the multipart upload and read the upload ID from the response.
            $response       = $s3->initiate_multipart_upload($as3_bucket, $as3_file);
            $response_array = $cfu_obj->convert_response_to_array($response);
            $upload_id      = $response_array['body']['UploadId'];
            // Persist the upload ID so an interrupted upload can be retraced.
            $backup_settings_values['s3_upload_id'][$historyID] = $upload_id;
            $backup_settings_values['backup_file'] = $backup_file;
            update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
        }

        // Split the file into parts of $upload_file_block_size bytes (>= 5 MB each).
        $parts = $s3->get_multipart_counts(iwp_mmb_get_file_size($backup_file), $upload_file_block_size);
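        /*
         * Illustration with hypothetical numbers: a 23 MB file split with a
         * 5 MB block size yields five parts, e.g.
         *   array(
         *     array('seekTo' => 0,        'length' => 5242880),
         *     ...
         *     array('seekTo' => 20971520, 'length' => 3145728), // last part may be shorter
         *   )
         * Each entry maps to one upload_part() call in the loop below.
         */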
        if ($retrace == 'set') {
            // Resuming after a lost response: ask S3 which parts were already
            // uploaded and continue from the next part number.
            $list_parts_response = $s3->list_parts($as3_bucket, $as3_file, $upload_id);
            $partsArray = CFUtilities::convert_response_to_array($list_parts_response);
            $nextPart   = count($partsArray) + 1;
            $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'partiallyCompleted', 'statusMsg' => 'retracingValues', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
            $retrace = 'unset';
        }

        // Main upload loop: upload one part per iteration and break out once
        // the time limit is reached.
        $partsArraySize   = count($parts);
        $s3ChunkTimeTaken = 0;
        $reloop           = false;
        $reloopCount      = 0;
        $status           = '';
        $chunkResult      = array();
        do {
            $uploadLoopCount = 0;
            if ($reloopCount == 0) {
                $s3ChunkStartTime = $s3StartTime;
            } else {
                $s3ChunkStartTime = microtime(true);
            }
            foreach ($parts as $i => $part) {
                $uploadLoopCount += 1;
                if ($uploadLoopCount == $nextPart) {
                    $singleUploadResponse = $s3->upload_part($as3_bucket, $as3_file, $upload_id, array('fileUpload' => $backup_file, 'partNumber' => $i + 1, 'seekTo' => $part['seekTo'], 'length' => $part['length']));
                    $singleUploadResult = $singleUploadResponse->isOk();
                    echo "singleUploadResult - " . $singleUploadResult;
                    $singleUploadResponseArray = $cfu_obj->convert_response_to_array($singleUploadResponse);

                    // Record the part number and ETag; both are required later
                    // by complete_multipart_upload().
                    $nextPart = $uploadLoopCount;
                    $partsArray[$i + 1]['PartNumber'] = $i + 1;
                    $partsArray[$i + 1]['ETag']       = $singleUploadResponseArray['header']['etag'];

                    $chunkResult = array();
                    $chunkResult['partsArray'] = $partsArray;
                    $chunkResult['nextPart']   = $nextPart + 1;
                    $chunkResult['upload_id']  = $upload_id;
                    $nextPart = $nextPart + 1;

                    // A part went through, so reset the retrace counter.
                    $backup_settings_values['s3_retrace_count'][$historyID] = 0;
                    update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);

                    $status = 'partiallyCompleted';
                    if ($nextPart == $partsArraySize + 1) {
                        $doComplete = true;
                        $status = 'completed';
                    }

                    $result_arr = array();
                    $result_arr['response_data']    = $chunkResult;
                    $result_arr['status']           = $status;
                    $result_arr['nextFunc']         = 'amazons3_backup';
                    $result_arr['s3Args']           = $tempArgs;
                    $result_arr['current_file_num'] = $current_file_num;
                    $task_result['task_results'][$historyID]['amazons3'][$current_file_num] = basename($backup_file);
                    $task_result['amazons3'][$current_file_num] = basename($backup_file);
                    $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'nextCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                    $resArray = array('status' => $status, 'backupParentHID' => $historyID);
                    break;
                } else {
                    if ($nextPart == $partsArraySize + 1) {
                        $doComplete = true;
                        break;
                    }
                }
            }
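            /*
             * Note on completing the upload: complete_multipart_upload() expects
             * the accumulated $partsArray in this shape (ETag values illustrative):
             *   array(
             *     1 => array('PartNumber' => 1, 'ETag' => '"25e317773f308e446cc84c503a6d1f85"'),
             *     2 => array('PartNumber' => 2, 'ETag' => '"a6d1f85f58498973f308e446cc84c503"'),
             *   )
             */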
            if ($doComplete) {
                // All parts are uploaded: complete the multipart upload, or
                // abort it if completion fails.
                $response = $s3->complete_multipart_upload($as3_bucket, $as3_file, $upload_id, $partsArray);
                if ($response->isOK() != true) {
                    $response = $s3->abort_multipart_upload($as3_bucket, $as3_file, $upload_id);
                }
                $response_array = $cfu_obj->convert_response_to_array($response);
                $current_file_num += 1;
                $result_arr = array();
                $result_arr['response_data']    = $chunkResult;
                $result_arr['status']           = 'completed';
                $result_arr['nextFunc']         = 'amazons3_backup_over';
                $result_arr['s3Args']           = $tempArgs;
                $result_arr['dont_retrace']     = true;
                $result_arr['current_file_num'] = $current_file_num;
                $resArray = array('status' => 'completed', 'backupParentHID' => $historyID);
                if ($current_file_num >= $backup_files_count) {
                    $task_result['task_results'][$historyID]['amazons3'][$current_file_num - 1] = basename($backup_file);
                    $task_result['amazons3'][$current_file_num - 1] = basename($backup_file);
                    unset($task_result['task_results'][$historyID]['server']);
                } else {
                    // More zip split parts remain: queue up the next file.
                    $status = 'partiallyCompleted';
                    $chunkResult = array();
                    $chunkResult['partsArray'] = array();
                    $chunkResult['nextPart']   = 1;
                    $chunkResult['upload_id']  = 'start';
                    $result_arr['response_data']    = $chunkResult;
                    $result_arr['status']           = 'partiallyCompleted';
                    $result_arr['nextFunc']         = 'amazons3_backup';
                    $result_arr['start_new_backup'] = true;
                    $resArray['status'] = 'partiallyCompleted';
                }
                $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'finalCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                $upload = $response->isOk();
            }

            // Time bookkeeping: stop relooping when the next chunk would not
            // fit into the remaining script time (5-second safety margin).
            $s3ChunkEndTime = microtime(true);
            // Rough per-chunk time estimate used to decide whether another chunk fits.
            $s3ChunkTimeTaken = $s3ChunkEndTime - $s3ChunkStartTime + $s3ChunkTimeTaken / ($reloopCount + 1);
            $s3EndTime   = microtime(true);
            $s3TimeTaken = $s3EndTime - $s3StartTime;
            $s3TimeLeft  = $upload_loop_break_time - $s3TimeTaken;
            $s3TimeLeft  = $s3TimeLeft - 5; // safety margin
            if (!empty($chunkResult['nextPart'])) {
                echo 'parts' . $chunkResult['nextPart'];
            }
            echo " s3TimeTaken " . $s3TimeTaken;
            $s3UploadedSize = $uploadLoopCount * 5; // approximate MB written, assuming 5 MB parts
            echo " s3 approx file size written " . $s3UploadedSize;
            iwp_mmb_print_flush("s3loop");
            echo " s3TimeLeft " . $s3TimeLeft;
            echo " s3ChunkTimeTaken " . $s3ChunkTimeTaken;
            if ($s3TimeLeft <= $s3ChunkTimeTaken || !$singleUploadResult || $doComplete) {
                $reloop = false;
                echo "reloop stopped";
            } else {
                $reloop = true;
                $reloopCount++;
            }
        } while ($reloop);

        if (!$doComplete) {
            return $resArray;
        }
        if ($doComplete && $upload) {
            $status = 'completed';
            iwp_mmb_print_flush('Amazon S3 upload: End');
            if ($status == 'completed') {
                // Post-upload verification: compare the uploaded object against
                // the local file before (optionally) deleting it.
                $verificationResult = $this->postUploadVerification($s3, $backup_file, $as3_file, $type = "amazons3", $as3_bucket);
                if (!$verificationResult) {
                    return $this->statusLog($historyID, array('stage' => 'uploadAmazons3', 'status' => 'error', 'statusMsg' => 'S3 verification failed: file may be corrupted.', 'statusCode' => 'docomplete_S3_verification_failed_file_may_be_corrupted'));
                }
                if ($del_host_file) {
                    @unlink($backup_file);
                }
            }
            return $resArray;
        } else {
            return array('error' => 'Failed to upload to Amazon S3. Please check your details and set upload/delete permissions on your bucket.', 'partial' => 1, 'error_code' => 'failed_to_upload_to_s3_check_your_details_and_set_upload_delete_permissions_on_your_bucket');
        }
    } catch (Exception $e) {
        $err = $e->getMessage();
        if ($err) {
            return array('error' => 'Failed to upload to Amazon S3 (' . $err . ').', 'error_code' => 'failed_to_upload_s3_err');
        } else {
            return array('error' => 'Failed to upload to Amazon S3.', 'error_code' => 'failed_to_upload_s3');
        }
    }
}
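/*
 * Usage sketch (illustrative only; in the plugin the resumption actually
 * happens across separate HTTP requests driven by the admin panel): the
 * caller invokes this method repeatedly until it reports 'completed',
 * passing the S3 settings only on the first call. Later calls restore
 * their state from the values persisted via statusLog().
 *
 *   $result = $this->amazons3_backup_bwd_comp($historyID, $s3Args); // first call
 *   while (!empty($result['status']) && $result['status'] == 'partiallyCompleted') {
 *       $result = $this->amazons3_backup_bwd_comp($historyID); // resumes from DB state
 *   }
 */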