Example #1
0
 /**
  * Uploads a local file to Dropbox, choosing between a single-request
  * transfer and a chunked transfer based on the file size.
  *
  * @param string $file      Absolute path of the local file to upload.
  * @param string $path      Destination path inside the Dropbox root.
  * @param bool   $overwrite Whether an existing remote file is replaced.
  * @return mixed Response from request() / chunked_upload_single_call().
  * @throws IWP_DropboxException If the file is missing, unreadable, or cannot be opened.
  */
 public function upload($file, $path = '', $overwrite = true)
 {
     // Normalize Windows-style separators so the readability checks below
     // and the remote path construction behave consistently.
     $file = str_replace("\\", "/", $file);
     if (!is_readable($file) || !is_file($file)) {
         throw new IWP_DropboxException("Error: File \"{$file}\" is not readable or doesn't exist.");
     }
     $filesize = iwp_mmb_get_file_size($file);
     if ($filesize < 1024 * 1024 * 50) {
         // Single-request transfer for files smaller than 50 MB.
         // 'rb' (binary mode) prevents newline translation corrupting the
         // payload on Windows; plain 'r' is text mode there.
         $filehandle = fopen($file, 'rb');
         if ($filehandle === false) {
             throw new IWP_DropboxException("Error: File \"{$file}\" could not be opened for reading.");
         }
         $url = self::API_CONTENT_URL . self::API_VERSION_URL . 'files_put/' . $this->root . '/' . trim($path, '/');
         $output = $this->request($url, array('overwrite' => $overwrite ? 'true' : 'false'), 'PUT', $filehandle, $filesize);
         fclose($filehandle);
     } else {
         // Chunked transfer for uploads of 50 MB or more.
         $output = $this->chunked_upload_single_call($file, $path, $overwrite);
     }
     return $output;
 }
Example #2
0
 /**
  * Uploads a backup file (or one part of a split backup) to Amazon S3 using
  * the legacy bundled SDK (lib/amazon_s3_bwd_comp/sdk.class.php).
  *
  * The upload is resumable across multiple HTTP calls: progress (upload_id,
  * nextPart, partsArray) is persisted via statusLog()/getRequiredData() and
  * the 'iwp_client_multi_backup_temp_values' option, and reloaded when the
  * function is re-entered with $args == ''. Files of 5 MB or less are sent
  * with a single create_object() call; larger files use a multipart upload
  * whose chunk loop is time-boxed by upload_loop_break_time.
  *
  * @param int|string   $historyID Backup task/history id; keys all persisted state.
  * @param array|string $args      S3 account/settings array on the first call;
  *                                '' on follow-up calls (state is then reloaded
  *                                from the DB — see the $args == '' branch).
  * @return array Status array ('status' / 'backupParentHID', or 'error' keys),
  *               or the result of statusLog() on fatal errors.
  */
 function amazons3_backup_bwd_comp($historyID, $args = '')
 {
     $s3StartTime = $this->iwpScriptStartTime;
     $this->backup_settings_vals = get_option('iwp_client_multi_backup_temp_values');
     $backup_settings_values = $this->backup_settings_vals;
     // Per-history retrace counter bounds how often a lost upload_id is retried (max 3).
     if (isset($backup_settings_values['s3_retrace_count']) && !empty($backup_settings_values['s3_retrace_count'])) {
         $s3_retrace_count = $backup_settings_values['s3_retrace_count'][$historyID];
     } else {
         $s3_retrace_count = 0;
     }
     //get the settings by other method
     $requestParams = $this->getRequiredData($historyID, "requestParams");
     $upload_loop_break_time = $requestParams['account_info']['upload_loop_break_time'];
     //darkcode changed
     $upload_file_block_size = $requestParams['account_info']['upload_file_block_size'];
     // Force each multipart chunk to be strictly larger than 5 MB.
     if ($upload_file_block_size < 5 * 1024 * 1024) {
         $upload_file_block_size = 5 * 1024 * 1024 + 1;
     }
     $del_host_file = $requestParams['args']['del_host_file'];
     $task_result = $this->getRequiredData($historyID, "taskResults");
     @set_time_limit(0);
     $this->hisID = $historyID;
     $uploadLoopCount = 0;
     $upload_id = 'start';
     $partsArray = array();
     $nextPart = 1;
     $retrace = 'notSet';
     $doComplete = false;
     if ($args == '') {
         //on the next call $args would be ''
         //set $args, $uploadid, $offset  from the DB
         $responseParams = $this->getRequiredData($historyID, "responseParams");
         if (!$responseParams) {
             return $this->statusLog($this->hisID, array('stage' => 's3Upload', 'status' => 'error', 'statusMsg' => 'S3 Upload failed: Error while fetching table data.', 'statusCode' => 's3_upload_failed_error_while_fetching_table_data'));
         }
         $args = $responseParams['s3Args'];
         $prevChunkResults = $responseParams['response_data'];
         $upload_id = $prevChunkResults['upload_id'];
         $nextPart = $prevChunkResults['nextPart'];
         $partsArray = $prevChunkResults['partsArray'];
         $current_file_num = $responseParams['current_file_num'];
         $dont_retrace = $responseParams['dont_retrace'];
         $start_new_backup = $responseParams['start_new_backup'];
     }
     // First call (or missing stored value): start from the first file of the set.
     if (empty($current_file_num)) {
         $current_file_num = 0;
     }
     //traceback options and setting values
     if (!$upload_id && empty($dont_retrace)) {
         if ($s3_retrace_count <= 3) {
             $args = $requestParams['secure']['account_info']['iwp_amazon_s3'];
             if ($backup_settings_values['s3_upload_id']) {
                 $upload_id = $backup_settings_values['s3_upload_id'][$historyID];
             } else {
                 return $this->statusLog($this->hisID, array('stage' => 's3Upload Retrace', 'status' => 'error', 'statusMsg' => 'S3 Upload failed: Error while fetching table data during retrace', 'statusCode' => 's3_upload_failed_error_while_fetching_table_data_during_retrace'));
             }
             $backup_file = $backup_settings_values['backup_file'];
             $retrace = 'set';
             $s3_retrace_count++;
             $backup_settings_values['s3_retrace_count'][$historyID] = $s3_retrace_count;
             update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
         } else {
             return $this->statusLog($this->hisID, array('stage' => 's3Upload', 'status' => 'error', 'statusMsg' => 'S3 upload failed: Retrace limit reached.', 'statusCode' => 's3_upload_failed_retrace_limit_reached'));
         }
     }
     if (!$this->iwp_mmb_function_exists('curl_init')) {
         return array('error' => 'You cannot use Amazon S3 on your server. Please enable curl first.', 'partial' => 1, 'error_code' => 'cannot_use_s3_enable_curl_first');
     }
     require_once $GLOBALS['iwp_mmb_plugin_dir'] . '/lib/amazon_s3_bwd_comp/sdk.class.php';
     $tempArgs = $args;
     // NOTE(review): extract() imports $as3_access_key, $as3_secure_key, $as3_bucket,
     // $as3_directory, $as3_site_folder, $backup_file, ... from the stored args array.
     // Fragile and hard to audit — keys must be validated upstream.
     extract($args);
     if (!is_array($backup_file)) {
         $temp_backup_file = $backup_file;
         $backup_file = array();
         $backup_file[] = $temp_backup_file;
     }
     // Collapse the file list down to the single file being uploaded this call.
     if (is_array($backup_file)) {
         $backup_files_count = count($backup_file);
         $temp_single_file = $backup_file[$current_file_num];
         unset($backup_file);
         $backup_file = $temp_single_file;
     }
     if ($as3_site_folder == true) {
         if (!empty($as3_directory)) {
             $as3_directory .= '/' . $this->site_name;
         } else {
             $as3_directory = $this->site_name;
         }
     }
     try {
         CFCredentials::set(array('development' => array('key' => trim($as3_access_key), 'secret' => trim(str_replace(' ', '+', $as3_secure_key)), 'default_cache_config' => '', 'certificate_authority' => true, 'use_ssl' => false, 'ssl_verification' => false), '@default' => 'development'));
         $s3 = new AmazonS3();
         $cfu_obj = new CFUtilities();
         //the mulitCall upload starts - darkCode starts
         //$this->statusLog($this -> hisID, array('stage' => 'uploadingFiles', 'status' => 'partiallyCompleted', 'statusMsg' => 's3MultiCallStartsHere'));
         if (!empty($as3_directory)) {
             $as3_file = $as3_directory . '/' . basename($backup_file);
         } else {
             $as3_file = basename($backup_file);
         }
         // Small file (<= 5 MB): single-request upload, no multipart machinery.
         if (iwp_mmb_get_file_size($backup_file) <= 5 * 1024 * 1024) {
             echo "<br>small backup so single upload<br>";
             $response = $s3->create_object($as3_bucket, $as3_file, array('fileUpload' => $backup_file));
             if ($response->isOK()) {
                 $current_file_num += 1;
                 $resArray = array('status' => "completed", 'backupParentHID' => $historyID);
                 $result_arr = array();
                 $result_arr['status'] = 'completed';
                 $result_arr['nextFunc'] = 'amazons3_backup_over';
                 $result_arr['s3Args'] = $tempArgs;
                 $result_arr['current_file_num'] = $current_file_num;
                 $result_arr['dont_retrace'] = true;
                 $task_result['task_results'][$historyID]['amazons3'][$current_file_num - 1] = basename($backup_file);
                 $task_result['amazons3'][$current_file_num - 1] = basename($backup_file);
                 if ($current_file_num >= $backup_files_count) {
                     unset($task_result['task_results'][$historyID]['server']);
                     @unlink($backup_file);
                 } else {
                     //to continue zip split parts
                     $resArray['status'] = 'partiallyCompleted';
                     $chunkResult = array();
                     $chunkResult['partsArray'] = array();
                     $chunkResult['nextPart'] = 1;
                     $chunkResult['upload_id'] = 'start';
                     $result_arr['response_data'] = $chunkResult;
                     $result_arr['nextFunc'] = 'amazons3_backup';
                     $result_arr['status'] = 'partiallyCompleted';
                     $result_arr['start_new_backup'] = true;
                     @unlink($backup_file);
                 }
                 $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'nextCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                 return $resArray;
             } else {
                 return array('error' => 'Failed to upload to Amazon S3.');
             }
         }
         if ($upload_id == 'start') {
             echo "initiating multiCall upload";
             //initiate the multiPartUpload to get the uploadID from its response
             $response = $s3->initiate_multipart_upload($as3_bucket, $as3_file);
             //createMultipartUpload
             //convert the response into an array
             $response_array = $cfu_obj->convert_response_to_array($response);
             //get the uploadID
             $upload_id = $response_array['body']['UploadId'];
             //storing the uploadID in DB
             $backup_settings_values['s3_upload_id'][$historyID] = $upload_id;
             $backup_settings_values['backup_file'] = $backup_file;
             update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
         }
         //get the parts of the big file
         $parts = $s3->get_multipart_counts(iwp_mmb_get_file_size($backup_file), $upload_file_block_size);
         // chunks of $upload_file_block_size bytes each (>= 5 MB, enforced above)
         if ($retrace == 'set') {
             // Retrace path: ask S3 which parts already arrived and resume after them.
             $list_parts_response = $s3->list_parts($as3_bucket, $as3_file, $upload_id);
             $partsArray = CFUtilities::convert_response_to_array($list_parts_response);
             $nextPart = count($partsArray) + 1;
             // NOTE(review): $result_arr is not yet defined on this path — statusLog()
             // receives an undefined variable here. Confirm statusLog tolerates null.
             $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'partiallyCompleted', 'statusMsg' => 'retracingValues', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
             $retrace = 'unset';
         }
         //this is the main upload loop break it on when the timeLimit is reached
         //chunk upload loop
         $partsArraySize = count($parts);
         $s3ChunkTimeTaken = 0;
         $s3ChunkCount = 0;
         $reloop = false;
         $reloopCount = 0;
         $status = '';
         do {
             $uploadLoopCount = 0;
             if ($reloopCount == 0) {
                 $s3ChunkStartTime = $s3StartTime;
             } else {
                 $s3ChunkStartTime = microtime(true);
             }
             // Scan all parts; only the one matching $nextPart is uploaded this pass.
             foreach ($parts as $i => $part) {
                 $uploadLoopCount += 1;
                 if ($uploadLoopCount == $nextPart) {
                     $singleUploadResponse = $s3->upload_part($as3_bucket, $as3_file, $upload_id, array('fileUpload' => $backup_file, 'partNumber' => $i + 1, 'seekTo' => $part['seekTo'], 'length' => $part['length']));
                     $singleUploadResult = $singleUploadResponse->isOk();
                     echo "singleUploadResult - " . $singleUploadResult;
                     $singleUploadResponseArray = $cfu_obj->convert_response_to_array($singleUploadResponse);
                     /* $response = $s3->complete_multipart_upload($bucket, $filename, $upload_id, array(
                             array('PartNumber' => 1, 'ETag' => '"25e317773f308e446cc84c503a6d1f85"'),
                             array('PartNumber' => 2, 'ETag' => '"a6d1f85f58498973f308e446cc84c503"'),
                             array('PartNumber' => 3, 'ETag' => '"bed3c0a4a1407f584989b4009e9ce33f"'),
                         )); */
                     // Record the uploaded part's ETag; complete_multipart_upload needs it later.
                     $nextPart = $uploadLoopCount;
                     $partsArray[$i + 1]['PartNumber'] = $i + 1;
                     $partsArray[$i + 1]['ETag'] = $singleUploadResponseArray['header']['etag'];
                     $chunkResult = array();
                     $chunkResult['partsArray'] = $partsArray;
                     $chunkResult['nextPart'] = $nextPart + 1;
                     $chunkResult['upload_id'] = $upload_id;
                     $nextPart = $nextPart + 1;
                     // A successful part resets the retrace counter.
                     $backup_settings_values['s3_retrace_count'][$historyID] = 0;
                     update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
                     $status = 'partiallyCompleted';
                     if ($nextPart == $partsArraySize + 1) {
                         $doComplete = true;
                         $status = 'completed';
                     }
                     $result_arr = array();
                     $result_arr['response_data'] = $chunkResult;
                     $result_arr['status'] = $status;
                     $result_arr['nextFunc'] = 'amazons3_backup';
                     $result_arr['s3Args'] = $tempArgs;
                     $result_arr['current_file_num'] = $current_file_num;
                     $task_result['task_results'][$historyID]['amazons3'][$current_file_num] = basename($backup_file);
                     $task_result['amazons3'][$current_file_num] = basename($backup_file);
                     $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'nextCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                     $resArray = array('status' => $status, 'backupParentHID' => $historyID);
                     /* $resArray = array (
                           'status' => 'completed',
                           'backupParentHID' => $historyID,
                         ); */
                     break;
                     //return $resArray;
                     //exit;
                 } else {
                     if ($nextPart == $partsArraySize + 1) {
                         $doComplete = true;
                         break;
                     }
                 }
             }
             if ($doComplete) {
                 // complete the multipart upload
                 $response = $s3->complete_multipart_upload($as3_bucket, $as3_file, $upload_id, $partsArray);
                 if ($response->isOK() != true) {
                     // Completion failed: abort so S3 frees the stored parts.
                     $response = $s3->abort_multipart_upload($as3_bucket, $as3_file, $upload_id);
                 }
                 $response_array = $cfu_obj->convert_response_to_array($response);
                 $current_file_num += 1;
                 $result_arr = array();
                 $result_arr['response_data'] = $chunkResult;
                 $result_arr['status'] = 'completed';
                 $result_arr['nextFunc'] = 'amazons3_backup_over';
                 $result_arr['s3Args'] = $tempArgs;
                 $result_arr['dont_retrace'] = true;
                 $result_arr['current_file_num'] = $current_file_num;
                 $resArray = array('status' => 'completed', 'backupParentHID' => $historyID);
                 if ($current_file_num >= $backup_files_count) {
                     $task_result['task_results'][$historyID]['amazons3'][$current_file_num - 1] = basename($backup_file);
                     $task_result['amazons3'][$current_file_num - 1] = basename($backup_file);
                     unset($task_result['task_results'][$historyID]['server']);
                 } else {
                     //to continue zip split parts
                     $status = 'partiallyCompleted';
                     $chunkResult = array();
                     $chunkResult['partsArray'] = array();
                     $chunkResult['nextPart'] = 1;
                     $chunkResult['upload_id'] = 'start';
                     $result_arr['response_data'] = $chunkResult;
                     $result_arr['status'] = 'partiallyCompleted';
                     $result_arr['nextFunc'] = 'amazons3_backup';
                     $result_arr['start_new_backup'] = true;
                     $resArray['status'] = 'partiallyCompleted';
                 }
                 $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'finalCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                 $upload = $response->isOk();
             }
             //check time
             $s3ChunkEndTime = microtime(true);
             // Running estimate of per-chunk time, damped by the reloop count.
             $s3ChunkTimeTaken = $s3ChunkEndTime - $s3ChunkStartTime + $s3ChunkTimeTaken / ($reloopCount + 1);
             $s3EndTime = microtime(true);
             $s3TimeTaken = $s3EndTime - $s3StartTime;
             $s3TimeLeft = $upload_loop_break_time - $s3TimeTaken;
             $s3TimeLeft = $s3TimeLeft - 5;
             //for safe timeLimit
             if (!empty($chunkResult['nextPart'])) {
                 echo 'parts' . $chunkResult['nextPart'];
             }
             echo " s3TimeTaken " . $s3TimeTaken;
             // Rough progress estimate — assumes ~5 MB per uploaded part.
             $s3UploadedSize = $uploadLoopCount * 5;
             echo " s3 approx file size written " . $s3UploadedSize;
             iwp_mmb_print_flush("s3loop");
             echo " s3TimeLeft " . $s3TimeLeft;
             echo " s3ChunkTimeTaken " . $s3ChunkTimeTaken;
             // Keep looping while there is time for another chunk; stop on failure,
             // completion, or when the remaining budget is below one chunk's cost.
             // NOTE(review): $singleUploadResult is undefined here if no part was
             // uploaded during this pass — confirm intended.
             if ($s3TimeLeft <= $s3ChunkTimeTaken || !$singleUploadResult || $doComplete) {
                 $reloop = false;
                 echo "reloop stopped";
             } else {
                 $reloop = true;
                 $reloopCount++;
             }
         } while ($reloop);
         if (!$doComplete) {
             // NOTE(review): $resArray may be undefined if the loop uploaded nothing
             // (returns null) — verify callers tolerate that.
             return $resArray;
         }
         if ($doComplete && $upload) {
             $status = 'completed';
             iwp_mmb_print_flush('Amazon S3 upload: End');
             if ($status == 'completed') {
                 //file verification
                 //checking file size and comparing
                 //getting the hash value
                 $partArrayLength = count($partsArray);
                 $verificationResult = $this->postUploadVerification($s3, $backup_file, $as3_file, $type = "amazons3", $as3_bucket);
                 if (!$verificationResult) {
                     return $this->statusLog($historyID, array('stage' => 'uploadAmazons3', 'status' => 'error', 'statusMsg' => 'S3 verification failed: File may be corrupted.', 'statusCode' => 'docomplete_S3_verification_failed_file_may_be_corrupted'));
                 }
                 if ($del_host_file) {
                     @unlink($backup_file);
                 }
             }
             return $resArray;
         } else {
             return array('error' => 'Failed to upload to Amazon S3. Please check your details and set upload/delete permissions on your bucket.', 'partial' => 1, 'error_code' => 'failed_to_upload_to_s3_check_your_details_and_set_upload_delete_permissions_on_your_bucket');
         }
     } catch (Exception $e) {
         $err = $e->getMessage();
         if ($err) {
             return array('error' => 'Failed to upload to AmazonS3 (' . $err . ').', 'error_code' => 'failed_to_upload_s3_err');
         } else {
             return array('error' => 'Failed to upload to Amazon S3.', 'error_code' => 'failed_to_upload_s3');
         }
     }
 }
Example #3
0
 function amazons3_backup($historyID, $args = '')
 {
     if (!$this->iwp_mmb_function_exists('curl_init')) {
         return array('error' => 'You cannot use Amazon S3 on your server. Please enable curl first.', 'partial' => 1, 'error_code' => 'cannot_use_s3_enable_curl_first');
     }
     if (!class_exists('S3Client')) {
         require_once $GLOBALS['iwp_mmb_plugin_dir'] . '/lib/amazon/autoload.php';
     }
     $s3StartTime = $this->iwpScriptStartTime;
     $this->backup_settings_vals = get_option('iwp_client_multi_backup_temp_values');
     $backup_settings_values = $this->backup_settings_vals;
     if (isset($backup_settings_values['s3_retrace_count']) && !empty($backup_settings_values['s3_retrace_count'])) {
         $s3_retrace_count = $backup_settings_values['s3_retrace_count'][$historyID];
     } else {
         $s3_retrace_count = 0;
     }
     //get the settings by other method
     $requestParams = $this->getRequiredData($historyID, "requestParams");
     $upload_loop_break_time = $requestParams['account_info']['upload_loop_break_time'];
     //darkcode changed
     $upload_file_block_size = $requestParams['account_info']['upload_file_block_size'];
     if ($upload_file_block_size < 5 * 1024 * 1024) {
         $upload_file_block_size = 5 * 1024 * 1024 + 1;
     }
     $del_host_file = $requestParams['args']['del_host_file'];
     $task_result = $this->getRequiredData($historyID, "taskResults");
     @set_time_limit(0);
     $this->hisID = $historyID;
     $uploadLoopCount = 0;
     $uploadId = 'start';
     $parts = array();
     $nextPart = 1;
     $retrace = 'notSet';
     $doComplete = false;
     if ($args == '') {
         //on the next call $args would be ''
         //set $args, $uploadid, $offset  from the DB
         $responseParams = $this->getRequiredData($historyID, "responseParams");
         if (!$responseParams) {
             return $this->statusLog($this->hisID, array('stage' => 's3Upload', 'status' => 'error', 'statusMsg' => 'S3 Upload failed: Error while fetching table data.', 'statusCode' => 's3_upload_failed_error_while_fetching_table_data'));
         }
         $args = $responseParams['s3Args'];
         $prevChunkResults = $responseParams['response_data'];
         $uploadId = $prevChunkResults['uploadId'];
         $nextPart = $prevChunkResults['nextPart'];
         $partsArray = $prevChunkResults['partsArray'];
         $parts = $prevChunkResults['parts'];
         $current_file_num = $responseParams['current_file_num'];
         $dont_retrace = $responseParams['dont_retrace'];
         $start_new_backup = $responseParams['start_new_backup'];
     }
     if (empty($current_file_num)) {
         $current_file_num = 0;
     }
     //traceback options and setting values
     if (!$uploadId && empty($dont_retrace)) {
         if ($s3_retrace_count <= 3) {
             $args = $requestParams['secure']['account_info']['iwp_amazon_s3'];
             if ($backup_settings_values['s3_upload_id']) {
                 $uploadId = $backup_settings_values['s3_upload_id'][$historyID];
             } else {
                 return $this->statusLog($this->hisID, array('stage' => 's3Upload Retrace', 'status' => 'error', 'statusMsg' => 'S3 Upload failed: Error while fetching table data during retrace', 'statusCode' => 's3_upload_failed_error_while_fetching_table_data_during_retrace'));
             }
             $backup_file = $backup_settings_values['backup_file'];
             $retrace = 'set';
             $s3_retrace_count++;
             $backup_settings_values['s3_retrace_count'][$historyID] = $s3_retrace_count;
             update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
         } else {
             return $this->statusLog($this->hisID, array('stage' => 's3Upload', 'status' => 'error', 'statusMsg' => 'S3 upload failed: Retrace limit reached.', 'statusCode' => 's3_upload_failed_retrace_limit_reached'));
         }
     }
     //tracback ends
     $tempArgs = $args;
     extract($args);
     if (!is_array($backup_file)) {
         $temp_backup_file = $backup_file;
         $backup_file = array();
         $backup_file[] = $temp_backup_file;
     }
     if (is_array($backup_file)) {
         $backup_files_count = count($backup_file);
         $temp_single_file = $backup_file[$current_file_num];
         unset($backup_file);
         $backup_file = $temp_single_file;
     } else {
         $backup_files_count = 1;
     }
     if ($as3_site_folder == true) {
         if (!empty($as3_directory)) {
             $as3_directory .= '/' . $this->site_name;
         } else {
             $as3_directory = $this->site_name;
         }
     }
     if ($s3_retrace_count <= 3) {
         try {
             $s3 = S3Client::factory(array('key' => trim($as3_access_key), 'secret' => trim(str_replace(' ', '+', $as3_secure_key)), 'region' => $as3_bucket_region, 'signature' => 'v4', 'ssl.certificate_authority' => false));
             $objects = $s3->getIterator('ListObjects', array('Bucket' => $as3_bucket));
             foreach ($objects as $object) {
                 echo $s3->getObjectUrl($as3_bucket, $object['Key']);
                 break;
             }
             //the mulitCall upload starts				darkCode starts
             if (!empty($as3_directory)) {
                 $as3_file = $as3_directory . '/' . basename($backup_file);
             } else {
                 $as3_file = basename($backup_file);
             }
             if (iwp_mmb_get_file_size($backup_file) <= 5 * 1024 * 1024) {
                 //new starts
                 echo "<br>small backup so single upload<br>";
                 $s3->putObject(array('Bucket' => $as3_bucket, 'SourceFile' => $backup_file, 'Key' => $as3_file, 'ACL' => 'public-read'));
                 $current_file_num += 1;
                 $resArray = array('status' => "completed", 'backupParentHID' => $historyID);
                 $result_arr = array();
                 $result_arr['status'] = 'completed';
                 $result_arr['nextFunc'] = 'amazons3_backup_over';
                 $result_arr['s3Args'] = $tempArgs;
                 $result_arr['current_file_num'] = $current_file_num;
                 $result_arr['dont_retrace'] = true;
                 $task_result['task_results'][$historyID]['amazons3'][$current_file_num - 1] = basename($backup_file);
                 $task_result['amazons3'][$current_file_num - 1] = basename($backup_file);
                 if ($current_file_num >= $backup_files_count) {
                     unset($task_result['task_results'][$historyID]['server']);
                     @unlink($backup_file);
                 } else {
                     //to continue zip split parts
                     $resArray['status'] = 'partiallyCompleted';
                     $chunkResult = array();
                     $chunkResult['partsArray'] = array();
                     $chunkResult['nextPart'] = 1;
                     $chunkResult['uploadId'] = 'start';
                     $result_arr['response_data'] = $chunkResult;
                     $result_arr['nextFunc'] = 'amazons3_backup';
                     $result_arr['status'] = 'partiallyCompleted';
                     $result_arr['start_new_backup'] = true;
                     @unlink($backup_file);
                 }
                 $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'nextCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                 return $resArray;
             } else {
                 if ($uploadId == 'start' && isset($parts)) {
                     echo "iwpmsg initiating multiCall upload";
                     //get the uploadID
                     $filename = $backup_file;
                     $result = $s3->createMultipartUpload(array('Bucket' => $as3_bucket, 'Key' => $as3_file, 'ACL' => 'public-read'));
                     $parts = array();
                     $uploadId = $result['UploadId'];
                     //storing the uploadID in DB
                     $backup_settings_values['s3_upload_id'][$historyID] = $uploadId;
                     $backup_settings_values['backup_file'] = $backup_file;
                     update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
                 }
                 $s3ChunkTimeTaken = 0;
                 $s3ChunkCount = 0;
                 $reloopCount = 0;
                 try {
                     $filename = $backup_file;
                     $file = fopen($filename, 'r');
                     $partNumber = 1;
                     echo $partNumber;
                     $reloopCount = 0;
                     while (!feof($file)) {
                         if ($reloopCount == 0) {
                             $s3ChunkStartTime = $s3StartTime;
                             $reloopCount++;
                         } else {
                             $s3ChunkStartTime = microtime(true);
                         }
                         if ($partNumber == $nextPart) {
                             $result = $s3->uploadPart(array('Bucket' => $as3_bucket, 'Key' => $as3_file, 'UploadId' => $uploadId, 'PartNumber' => $partNumber, 'Body' => fread($file, 5 * 1024 * 1024)));
                             $parts[] = array('PartNumber' => $partNumber++, 'ETag' => $result['ETag']);
                             echo "Uploading part {$partNumber} of {$filename}.\n";
                             $chunkResult['nextPart'] = $nextPart + 1;
                             $chunkResult['uploadId'] = $uploadId;
                             $chunkResult['parts'] = $parts;
                             $nextPart = $nextPart + 1;
                             $backup_settings_values['s3_retrace_count'][$historyID] = 0;
                             update_option('iwp_client_multi_backup_temp_values', $backup_settings_values);
                             $status = 'partiallyCompleted';
                             $result_arr = array();
                             $result_arr['response_data'] = $chunkResult;
                             $result_arr['status'] = $status;
                             $result_arr['nextFunc'] = 'amazons3_backup';
                             $result_arr['s3Args'] = $tempArgs;
                             $result_arr['current_file_num'] = $current_file_num;
                             $task_result['task_results'][$historyID]['amazons3'][$current_file_num] = basename($backup_file);
                             $task_result['amazons3'][$current_file_num] = basename($backup_file);
                             $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'nextCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                             $resArray = array('status' => $status, 'backupParentHID' => $historyID);
                             $s3ChunkEndTime = microtime(true);
                             $s3ChunkTimeTaken = $s3ChunkEndTime - $s3ChunkStartTime;
                             $s3EndTime = microtime(true);
                             $s3TimeTaken = $s3EndTime - $s3StartTime;
                             $s3TimeLeft = $upload_loop_break_time - $s3TimeTaken;
                             if (!empty($chunkResult['nextPart'])) {
                                 echo 'parts' . $chunkResult['nextPart'];
                             }
                             echo " s3TimeTaken " . $s3TimeTaken;
                             $s3UploadedSize = $uploadLoopCount * 5;
                             echo " s3 approx file size written " . $s3UploadedSize;
                             iwp_mmb_print_flush("s3loop");
                             echo " s3TimeLeft " . $s3TimeLeft;
                             echo " s3ChunkTimeTaken " . $s3ChunkTimeTaken;
                             if ($s3TimeLeft <= $s3ChunkTimeTaken) {
                                 $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'partiallyCompleted', 'statusMsg' => 'nextCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                                 fclose($file);
                                 break;
                             }
                         } else {
                             fread($file, 5 * 1024 * 1024);
                             $partNumber++;
                         }
                     }
                     fclose($file);
                 } catch (S3Exception $e) {
                     $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'partiallyCompleted', 'statusMsg' => 'retracingValues', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                 }
                 if ($nextPart == ceil(iwp_mmb_get_file_size($backup_file) / 1024 / 1024 / 5) + 1) {
                     $result = $s3->completeMultipartUpload(array('Bucket' => $as3_bucket, 'Key' => $as3_file, 'UploadId' => $uploadId, 'Parts' => $parts));
                     $url = $result['Location'];
                     $current_file_num += 1;
                     $result_arr = array();
                     $result_arr['response_data'] = $chunkResult;
                     $result_arr['status'] = 'completed';
                     $result_arr['nextFunc'] = 'amazons3_backup_over';
                     $result_arr['s3Args'] = $tempArgs;
                     $result_arr['dont_retrace'] = true;
                     $result_arr['current_file_num'] = $current_file_num;
                     $resArray = array('status' => 'completed', 'backupParentHID' => $historyID);
                     if ($current_file_num >= $backup_files_count) {
                         $task_result['task_results'][$historyID]['amazons3'][$current_file_num - 1] = basename($backup_file);
                         $task_result['amazons3'][$current_file_num - 1] = basename($backup_file);
                         unset($task_result['task_results'][$historyID]['server']);
                     } else {
                         //to continue zip split parts
                         $chunkResult = array();
                         $chunkResult['partsArray'] = array();
                         $chunkResult['nextPart'] = 1;
                         $chunkResult['uploadId'] = 'start';
                         $chunkResult['parts'] = '';
                         $result_arr['response_data'] = $chunkResult;
                         $result_arr['status'] = 'partiallyCompleted';
                         $result_arr['nextFunc'] = 'amazons3_backup';
                         $result_arr['start_new_backup'] = true;
                         $resArray['status'] = 'partiallyCompleted';
                     }
                     $this->statusLog($this->hisID, array('stage' => 's3MultiCall', 'status' => 'completed', 'statusMsg' => 'finalCall', 'nextFunc' => 'amazons3_backup', 'task_result' => $task_result, 'responseParams' => $result_arr));
                     $status = 'completed';
                     iwp_mmb_print_flush('Amazon S3 upload: End');
                     if ($status == 'completed') {
                         $partArrayLength = count($partsArray);
                         $verificationResult = $this->postUploadVerification($s3, $backup_file, $as3_file, $type = "amazons3", $as3_bucket, $as3_access_key, $as3_secure_key, $as3_bucket_region);
                         if (!$verificationResult) {
                             return $this->statusLog($historyID, array('stage' => 'uploadAmazons3', 'status' => 'error', 'statusMsg' => 'S3 verification failed: File may be corrupted.', 'statusCode' => 'docomplete_S3_verification_failed_file_may_be_corrupted'));
                         }
                         if ($del_host_file) {
                             @unlink($backup_file);
                         }
                         return $resArray;
                     }
                     echo "Uploaded {$filename} to {$backup_file}.\n";
                 } else {
                     return $resArray;
                 }
             }
         } catch (Exception $e) {
             $result = $s3->abortMultipartUpload(array('Bucket' => $as3_bucket, 'Key' => $as3_file, 'UploadId' => $uploadId));
             $err = $e->getMessage();
             if ($err) {
                 return array('error' => 'Failed to upload to AmazonS3 (' . $err . ').', 'error_code' => 'failed_to_upload_s3_err');
             } else {
                 return array('error' => 'Failed to upload to Amazon S3.', 'error_code' => 'failed_to_upload_s3');
             }
         }
     } else {
         return array('error' => 'Failed to upload to Amazon S3. Could not connect amazon server at the moment', 'partial' => 1, 'error_code' => 'failed_to_upload_to_s3_Could_not_connect_amazon_server_at_the_moment');
     }
 }
示例#4
0
 /**
  * Creates an Amazon S3 object using the multipart upload APIs. It is analogous to <create_object()>.
  *
  * While each individual part of a multipart upload can hold up to 5 GB of data, this method limits the
  * part size to a maximum of 50 MB. The combined size of all parts can not exceed 5 TB of data. When an
  * object is stored in Amazon S3, the data is streamed to multiple storage servers in multiple data
  * centers. This ensures the data remains available in the event of internal network or hardware failure.
  *
  * Amazon S3 charges for storage as well as requests to the service. Smaller part sizes (and more
  * requests) allow for faster failures and better upload reliability. Larger part sizes (and fewer
  * requests) costs slightly less but has lower upload reliability.
  *
  * In certain cases with large objects, it's possible for this method to attempt to open more file system
  * connections than allowed by the OS. In this case, either
  * <a href="https://forums.aws.amazon.com/thread.jspa?threadID=70216">increase the number of connections
  * allowed</a> or increase the value of the <code>partSize</code> parameter to use a larger part size.
  *
  * @param string $bucket (Required) The name of the bucket to use.
  * @param string $filename (Required) The file name for the object.
  * @param array $opt (Optional) An associative array of parameters that can have the following keys: <ul>
  * 	<li><code>fileUpload</code> - <code>string|resource</code> - Required - The URL/path for the file to upload, or an open resource.</li>
  * 	<li><code>acl</code> - <code>string</code> - Optional - The ACL settings for the specified object. [Allowed values: <code>AmazonS3::ACL_PRIVATE</code>, <code>AmazonS3::ACL_PUBLIC</code>, <code>AmazonS3::ACL_OPEN</code>, <code>AmazonS3::ACL_AUTH_READ</code>, <code>AmazonS3::ACL_OWNER_READ</code>, <code>AmazonS3::ACL_OWNER_FULL_CONTROL</code>]. The default value is <code>ACL_PRIVATE</code>.</li>
  * 	<li><code>contentType</code> - <code>string</code> - Optional - The type of content that is being sent in the body. The default value is <code>application/octet-stream</code>.</li>
  * 	<li><code>headers</code> - <code>array</code> - Optional - Standard HTTP headers to send along in the request. Accepts an associative array of key-value pairs.</li>
  * 	<li><code>length</code> - <code>integer</code> - Optional - The size of the object in bytes. For more information, see <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13">RFC 2616, section 14.13</a>. The value can also be passed to the <code>header</code> option as <code>Content-Length</code>.</li>
  * 	<li><code>limit</code> - <code>integer</code> - Optional - The maximum number of concurrent uploads done by cURL. Gets passed to <code>CFBatchRequest</code>.</li>
  * 	<li><code>meta</code> - <code>array</code> - Optional - An associative array of key-value pairs. Any header starting with <code>x-amz-meta-:</code> is considered user metadata. It will be stored with the object and returned when you retrieve the object. The total size of the HTTP request, not including the body, must be less than 4 KB.</li>
  * 	<li><code>partSize</code> - <code>integer</code> - Optional - The size of an individual part. The size may not be smaller than 5 MB or larger than 50 MB. The default value is 50 MB.</li>
  * 	<li><code>seekTo</code> - <code>integer</code> - Optional - The starting position in bytes for the first piece of the file/stream to upload.</li>
  * 	<li><code>storage</code> - <code>string</code> - Optional - Whether to use Standard or Reduced Redundancy storage. [Allowed values: <code>AmazonS3::STORAGE_STANDARD</code>, <code>AmazonS3::STORAGE_REDUCED</code>]. The default value is <code>STORAGE_STANDARD</code>.</li>
  * 	<li><code>uploadId</code> - <code>string</code> - Optional - An upload ID identifying an existing multipart upload to use. If this option is not set, one will be created automatically.</li>
  * 	<li><code>curlopts</code> - <code>array</code> - Optional - A set of values to pass directly into <code>curl_setopt()</code>, where the key is a pre-defined <code>CURLOPT_*</code> constant.</li>
  * 	<li><code>returnCurlHandle</code> - <code>boolean</code> - Optional - A private toggle specifying that the cURL handle be returned rather than actually completing the request. This toggle is useful for manually managed batch requests.</li></ul>
  * @return CFResponse A <CFResponse> object containing a parsed HTTP response.
  * @link http://docs.amazonwebservices.com/AmazonS3/latest/dev/RESTAccessPolicy.html REST Access Control Policy
  */
 public function create_mpu_object($bucket, $filename, $opt = null)
 {
     // This method drives its own internal batch of upload_part() requests,
     // so it cannot itself be wrapped in a batch flow.
     if ($this->use_batch_flow) {
         throw new S3_Exception(__FUNCTION__ . '() cannot be batch requested');
     }
     if (!$opt) {
         $opt = array();
     }
     // Handle content length. Can also be passed as an HTTP header.
     if (isset($opt['length'])) {
         $opt['headers']['Content-Length'] = $opt['length'];
         unset($opt['length']);
     }
     if (!isset($opt['fileUpload'])) {
         throw new S3_Exception('The `fileUpload` option is required in ' . __FUNCTION__ . '().');
     } elseif (is_resource($opt['fileUpload'])) {
         // A stream resource can only be read sequentially, so limit the
         // batch to one concurrent upload (see the `limit` option docs).
         $opt['limit'] = 1;
         // We can only read from this one resource.
         $upload_position = isset($opt['seekTo']) ? (int) $opt['seekTo'] : ftell($opt['fileUpload']);
         $upload_filesize = isset($opt['headers']['Content-Length']) ? (int) $opt['headers']['Content-Length'] : null;
         // No explicit Content-Length: derive the size from fstat(), minus
         // the starting offset. Note ftell() above may have returned false.
         if (!isset($upload_filesize) && $upload_position !== false) {
             $stats = fstat($opt['fileUpload']);
             if ($stats && $stats['size'] >= 0) {
                 $upload_filesize = $stats['size'] - $upload_position;
             }
         }
     } else {
         // `fileUpload` is a path/URL string: seek position defaults to 0 and
         // the size comes from the header override or the filesystem.
         $upload_position = isset($opt['seekTo']) ? (int) $opt['seekTo'] : 0;
         if (isset($opt['headers']['Content-Length'])) {
             $upload_filesize = (int) $opt['headers']['Content-Length'];
         } else {
             $upload_filesize = iwp_mmb_get_file_size($opt['fileUpload']);
             if ($upload_filesize !== false) {
                 $upload_filesize -= $upload_position;
             }
         }
     }
     // Bail out when neither the position nor a non-negative size could be determined.
     if ($upload_position === false || !isset($upload_filesize) || $upload_filesize === false || $upload_filesize < 0) {
         throw new S3_Exception('The size of `fileUpload` cannot be determined in ' . __FUNCTION__ . '().');
     }
     // Handle part size: clamp into the [5 MB, 50 MB] range, default 50 MB.
     if (isset($opt['partSize'])) {
         // If less than 5 MB, raise to the S3 minimum part size.
         if ((int) $opt['partSize'] < 5242880) {
             $opt['partSize'] = 5242880;
             // 5 MB
         } elseif ((int) $opt['partSize'] > 52428800) {
             $opt['partSize'] = 52428800;
             // 50 MB
         }
     } else {
         $opt['partSize'] = 52428800;
         // 50 MB
     }
     // If the upload size is smaller than the piece size, failover to create_object().
     if ($upload_filesize < $opt['partSize'] && !isset($opt['uploadId'])) {
         return $this->create_object($bucket, $filename, $opt);
     }
     // Initiate multipart upload, or resume an existing one when `uploadId` is given.
     if (isset($opt['uploadId'])) {
         $upload_id = $opt['uploadId'];
     } else {
         // Compose options for initiate_multipart_upload(), forwarding only
         // the parameters that call understands.
         $_opt = array();
         foreach (array('contentType', 'acl', 'storage', 'headers', 'meta') as $param) {
             if (isset($opt[$param])) {
                 $_opt[$param] = $opt[$param];
             }
         }
         $upload = $this->initiate_multipart_upload($bucket, $filename, $_opt);
         if (!$upload->isOK()) {
             return $upload;
         }
         // Fetch the UploadId
         $upload_id = (string) $upload->body->UploadId;
     }
     // Get the list of pieces (seekTo/length pairs) covering the upload size.
     $pieces = $this->get_multipart_counts($upload_filesize, (int) $opt['partSize']);
     // Queue batch requests, one upload_part() per piece; part numbers are 1-based.
     $batch = new CFBatchRequest(isset($opt['limit']) ? (int) $opt['limit'] : null);
     foreach ($pieces as $i => $piece) {
         iwp_mmb_auto_print('amazonS3_chucked_upload');
         $this->batch($batch)->upload_part($bucket, $filename, $upload_id, array('expect' => '100-continue', 'fileUpload' => $opt['fileUpload'], 'partNumber' => $i + 1, 'seekTo' => $upload_position + (int) $piece['seekTo'], 'length' => (int) $piece['length']));
     }
     iwp_mmb_auto_print('amazonS3_chucked_upload');
     // Send batch requests; on any part failure, return the raw responses to the caller.
     $batch_responses = $this->batch($batch)->send();
     if (!$batch_responses->areOK()) {
         return $batch_responses;
     }
     // Compose completion XML from each part's ETag response header.
     $parts = array();
     foreach ($batch_responses as $i => $response) {
         $parts[] = array('PartNumber' => $i + 1, 'ETag' => $response->header['etag']);
     }
     return $this->complete_multipart_upload($bucket, $filename, $upload_id, $parts);
 }
示例#5
0
 function privDuplicate($p_archive_filename)
 {
     // Copy the raw bytes of an existing archive file into the freshly
     // opened zip descriptor ($this->zip_fd).
     // Returns 1 on success, or an IWPPclZip error code on failure.
     $v_result = 1;
     // ----- Nothing to duplicate when the source archive does not exist: success.
     if (!is_file($p_archive_filename)) {
         return $v_result;
     }
     // ----- Open the destination zip file in binary write mode
     if (($v_result = $this->privOpenFd('wb')) != 1) {
         // ----- Return
         return $v_result;
     }
     // ----- Open the source archive in binary read mode.
     // Strict `=== false` check: fopen() returns false on failure, and the
     // previous message wrongly claimed the file was opened for writing.
     if (($v_zip_temp_fd = @fopen($p_archive_filename, 'rb')) === false) {
         $this->privCloseFd();
         IWPPclZip::privErrorLog(IWP_PCLZIP_ERR_READ_OPEN_FAIL, 'Unable to open archive file \'' . $p_archive_filename . '\' in binary read mode');
         // ----- Return
         return IWPPclZip::errorCode();
     }
     // ----- Copy the files from the archive to the temporary file
     // TBC : Here I should better append the file and go back to erase the central dir
     $v_size = iwp_mmb_get_file_size($p_archive_filename);
     while ($v_size > 0) {
         $v_read_size = $v_size < IWP_PCLZIP_READ_BLOCK_SIZE ? $v_size : IWP_PCLZIP_READ_BLOCK_SIZE;
         $v_buffer = fread($v_zip_temp_fd, $v_read_size);
         if ($v_buffer === false || $v_buffer === '') {
             // Failed or empty read (e.g. unexpected EOF): stop copying
             // instead of looping forever on a handle that yields no data.
             break;
         }
         @fwrite($this->zip_fd, $v_buffer, $v_read_size);
         $v_size -= $v_read_size;
     }
     // ----- Close the destination zip descriptor
     $this->privCloseFd();
     // ----- Close the source archive handle
     @fclose($v_zip_temp_fd);
     // ----- Return
     return $v_result;
 }
 function backup_db_php($file)
 {
     // Write a plain-SQL dump of every table matching the site prefix to $file
     // using $wpdb. Returns the dump file path on success, or an
     // array('error' => ..., 'error_code' => ...) on failure.
     global $wpdb;
     if (empty($GLOBALS['fail_safe_db'])) {
         iwp_mmb_print_flush('DB DUMP PHP Normal: Start');
         $fp = fopen($file, 'w');
         if ($fp === false) {
             // Fail fast instead of silently fwrite()-ing to an invalid handle
             // for every row of the dump.
             return array('error' => 'Database backup failed. Could not open the dump file for writing.', 'error_code' => 'database_backup_failed_enable_MySQL_dump_server');
         }
         $_count = 0;
         $insert_sql = '';
         $result = $wpdb->get_results('SHOW TABLES LIKE "' . $wpdb->base_prefix . '%"');
         if (!$result) {
             fclose($fp);
             return array('error' => 'MySQL ' . $wpdb->print_error() . " ", 'error_code' => 'MySQL ' . str_replace(" ", "_", $wpdb->print_error()) . " ");
         }
         // Flatten the SHOW TABLES result rows into a simple list of table names.
         $tables = array();
         foreach ($result as $index => $value) {
             foreach ($value as $tableName) {
                 $tables[] = $tableName;
             }
         }
         foreach ($tables as $table) {
             iwp_mmb_auto_print('backup_db_php_normal');
             // Emit DROP + CREATE so the dump can be restored over an existing schema.
             $insert_sql .= "DROP TABLE IF EXISTS {$table};";
             $table_descr_query = $wpdb->get_results("SHOW CREATE TABLE `{$table}`", ARRAY_N);
             $insert_sql .= "\n\n" . $table_descr_query[0][1] . ";\n\n";
             fwrite($fp, $insert_sql);
             $insert_sql = '';
             $table_query = $wpdb->get_results("SELECT * FROM `{$table}`", ARRAY_N);
             foreach ($table_query as $final) {
                 $counts = count($final);
                 $insert_sql .= "INSERT INTO {$table} VALUES(";
                 for ($i = 0; $i < $counts; $i++) {
                     if ($final[$i] === NULL) {
                         // Preserve SQL NULLs (matches the mysql_* fail-over dumper).
                         // The previous loose `== NULL` check also matched 0 and ''
                         // and serialized real NULLs as '', corrupting restored data.
                         $insert_sql .= "NULL, ";
                     } else {
                         $insert_sql .= "'" . esc_sql($final[$i]) . "', ";
                     }
                 }
                 // Drop the trailing ", " appended after the last column value.
                 $insert_sql = substr($insert_sql, 0, -2);
                 $insert_sql .= ");\n";
                 fwrite($fp, $insert_sql);
                 $insert_sql = '';
                 $_count++;
                 if ($_count >= 400) {
                     // Emit a byte every 400 rows to keep the HTTP connection alive.
                     echo ' ';
                     flush();
                     $_count = 0;
                 }
             }
             $insert_sql .= "\n\n\n";
             if ($wpdb->check_connection()) {
                 // Still connected to database.
                 $wpdb->flush();
                 // Free memory.
             }
             // Help keep HTTP alive.
             echo ' ';
             flush();
         }
         fclose($fp);
         unset($fp);
         iwp_mmb_print_flush('DB DUMP PHP Normal: End');
     } else {
         iwp_mmb_print_flush('DB DUMP PHP Fail-safe: Start');
         // Truncate any stale dump before appending fresh data.
         file_put_contents($file, '');
         $tables = $wpdb->get_results('SHOW TABLES LIKE "' . $wpdb->base_prefix . '%"', ARRAY_N);
         foreach ($tables as $table) {
             // Drop + recreate the table on restore.
             $dump_data = "DROP TABLE IF EXISTS {$table['0']};";
             file_put_contents($file, $dump_data, FILE_APPEND);
             $create_table = $wpdb->get_row("SHOW CREATE TABLE {$table['0']}", ARRAY_N);
             $dump_data = "\n\n" . $create_table[1] . ";\n\n";
             file_put_contents($file, $dump_data, FILE_APPEND);
             // Page through the rows 100 at a time to bound memory usage.
             $count = $wpdb->get_var("SELECT count(*) FROM {$table['0']}");
             if ($count > 100) {
                 $count = ceil($count / 100);
             } else {
                 if ($count > 0) {
                     $count = 1;
                 }
             }
             for ($i = 0; $i < $count; $i++) {
                 iwp_mmb_auto_print('backup_db_php_fail_safe');
                 $low_limit = $i * 100;
                 $qry = "SELECT * FROM {$table['0']} LIMIT {$low_limit}, 100";
                 $rows = $wpdb->get_results($qry, ARRAY_A);
                 if (is_array($rows)) {
                     foreach ($rows as $row) {
                         // Serialize a single row as one INSERT statement.
                         $dump_data = "INSERT INTO {$table['0']} VALUES(";
                         $num_values = count($row);
                         $j = 1;
                         foreach ($row as $value) {
                             $value = addslashes($value);
                             // Replace literal newlines with the two-character escape "\n".
                             $value = preg_replace("/\n/Ui", "\\n", $value);
                             $num_values == $j ? $dump_data .= "'" . $value . "'" : ($dump_data .= "'" . $value . "', ");
                             $j++;
                             unset($value);
                         }
                         $dump_data .= ");\n";
                         file_put_contents($file, $dump_data, FILE_APPEND);
                     }
                 }
             }
             $dump_data = "\n\n\n";
             file_put_contents($file, $dump_data, FILE_APPEND);
             unset($rows);
             unset($dump_data);
         }
         iwp_mmb_print_flush('DB DUMP PHP Fail-safe: End');
     }
     // A missing or zero-byte dump means the backup failed; clean it up.
     if (!is_file($file) || iwp_mmb_get_file_size($file) == 0) {
         @unlink($file);
         return array('error' => 'Database backup failed. Try to enable MySQL dump on your server.', 'error_code' => 'database_backup_failed_enable_MySQL_dump_server');
     }
     return $file;
 }
 function backup_db_php($file)
 {
     // Write a plain-SQL dump of every table matching the site prefix to $file.
     // Returns the dump file path on success, or an
     // array('error' => ..., 'error_code' => ...) on failure.
     // NOTE(review): the normal branch uses the legacy mysql_* extension, which
     // was removed in PHP 7 — this code path only works on PHP 5.x. Verify the
     // plugin gates this variant by PHP version before it is ever called.
     global $wpdb;
     if (empty($GLOBALS['fail_safe_db'])) {
         iwp_mmb_print_flush('DB DUMP PHP Normal: Start');
         $fp = fopen($file, 'w');
         // Reconnect on a dropped connection before querying.
         // NOTE(review): the new link/db selection is not assigned back to
         // $wpdb->dbh; the code relies on mysql_*'s implicit default link.
         if (!mysql_ping($wpdb->dbh)) {
             mysql_connect(DB_HOST, DB_USER, DB_PASSWORD);
             mysql_select_db(DB_NAME);
         }
         $_count = 0;
         $insert_sql = '';
         //$result = mysql_query( 'SHOW TABLES' );
         $result = mysql_query('SHOW TABLES LIKE "' . $wpdb->base_prefix . '%"');
         if (!$result) {
             return array('error' => 'MySQL ' . mysql_error() . " ", 'error_code' => 'MySQL ' . str_replace(" ", "_", mysql_error()) . " ");
         }
         // Collect the matching table names.
         while ($row = mysql_fetch_row($result)) {
             $tables[] = $row[0];
             //array_push( $tables, $row[0] );
         }
         //$tables = $wpdb->get_results('SHOW TABLES', ARRAY_N);
         foreach ($tables as $table) {
             iwp_mmb_auto_print('backup_db_php_normal');
             // Emit DROP + CREATE so the dump restores over an existing schema.
             $insert_sql .= "DROP TABLE IF EXISTS {$table};";
             //create table
             $table_descr_query = mysql_query("SHOW CREATE TABLE `{$table}`");
             $fetch_table_descr_row = mysql_fetch_array($table_descr_query);
             $insert_sql .= "\n\n" . $fetch_table_descr_row[1] . ";\n\n";
             fwrite($fp, $insert_sql);
             $insert_sql = '';
             // Stream every row of the table as individual INSERT statements.
             $table_query = mysql_query("SELECT * FROM `{$table}`");
             $num_fields = mysql_num_fields($table_query);
             while ($fetch_row = mysql_fetch_array($table_query)) {
                 $insert_sql .= "INSERT INTO {$table} VALUES(";
                 for ($n = 1; $n <= $num_fields; $n++) {
                     $m = $n - 1;
                     // Strict === NULL keeps SQL NULLs distinct from 0 / ''.
                     if ($fetch_row[$m] === NULL) {
                         $insert_sql .= "NULL, ";
                     } else {
                         $insert_sql .= "'" . mysql_real_escape_string($fetch_row[$m]) . "', ";
                     }
                 }
                 // Drop the trailing ", " appended after the last column value.
                 $insert_sql = substr($insert_sql, 0, -2);
                 $insert_sql .= ");\n";
                 fwrite($fp, $insert_sql);
                 $insert_sql = '';
                 // Help keep HTTP alive: emit a byte every 400 rows.
                 $_count++;
                 if ($_count >= 400) {
                     echo ' ';
                     flush();
                     $_count = 0;
                 }
             }
             // End foreach $tables.
             $insert_sql .= "\n\n\n";
             // testing: mysql_close( $wpdb->dbh );
             // Verify database is still connected and working properly. Sometimes mysql runs out of memory and dies in the above foreach.
             // No point in reconnecting as we can NOT trust that our dump was succesful anymore (it most likely was not).
             if (@mysql_ping($wpdb->dbh)) {
                 // Still connected to database.
                 mysql_free_result($table_query);
                 // Free memory.
             }
             /*else { // Database not connected.
             		
             				return false;
             			}*/
             // Help keep HTTP alive.
             echo ' ';
             flush();
             //unset( $tables[$table_key] );
         }
         fclose($fp);
         unset($fp);
         iwp_mmb_print_flush('DB DUMP PHP Normal: End');
     } else {
         // Fail-safe branch: uses $wpdb and file_put_contents(FILE_APPEND)
         // only, paging rows 100 at a time to bound memory usage.
         iwp_mmb_print_flush('DB DUMP PHP Fail-safe: Start');
         file_put_contents($file, '');
         //safe  to reset any old data
         //$tables = $wpdb->get_results('SHOW TABLES', ARRAY_N);
         $tables = $wpdb->get_results('SHOW TABLES LIKE "' . $wpdb->base_prefix . '%"', ARRAY_N);
         foreach ($tables as $table) {
             //drop existing table
             $dump_data = "DROP TABLE IF EXISTS {$table['0']};";
             file_put_contents($file, $dump_data, FILE_APPEND);
             //create table
             $create_table = $wpdb->get_row("SHOW CREATE TABLE {$table['0']}", ARRAY_N);
             $dump_data = "\n\n" . $create_table[1] . ";\n\n";
             file_put_contents($file, $dump_data, FILE_APPEND);
             // Convert the row count into the number of 100-row pages to fetch.
             $count = $wpdb->get_var("SELECT count(*) FROM {$table['0']}");
             if ($count > 100) {
                 $count = ceil($count / 100);
             } else {
                 if ($count > 0) {
                     $count = 1;
                 }
             }
             for ($i = 0; $i < $count; $i++) {
                 iwp_mmb_auto_print('backup_db_php_fail_safe');
                 $low_limit = $i * 100;
                 $qry = "SELECT * FROM {$table['0']} LIMIT {$low_limit}, 100";
                 $rows = $wpdb->get_results($qry, ARRAY_A);
                 if (is_array($rows)) {
                     foreach ($rows as $row) {
                         //insert single row
                         $dump_data = "INSERT INTO {$table['0']} VALUES(";
                         $num_values = count($row);
                         $j = 1;
                         foreach ($row as $value) {
                             $value = addslashes($value);
                             // Replace literal newlines with the two-character escape "\n".
                             $value = preg_replace("/\n/Ui", "\\n", $value);
                             $num_values == $j ? $dump_data .= "'" . $value . "'" : ($dump_data .= "'" . $value . "', ");
                             $j++;
                             unset($value);
                         }
                         $dump_data .= ");\n";
                         file_put_contents($file, $dump_data, FILE_APPEND);
                     }
                 }
             }
             $dump_data = "\n\n\n";
             file_put_contents($file, $dump_data, FILE_APPEND);
             unset($rows);
             unset($dump_data);
         }
         iwp_mmb_print_flush('DB DUMP PHP Fail-safe: End');
     }
     // A missing or zero-byte dump means the backup failed; clean it up.
     if (iwp_mmb_get_file_size($file) == 0 || !is_file($file)) {
         @unlink($file);
         return array('error' => 'Database backup failed. Try to enable MySQL dump on your server.', 'error_code' => 'database_backup_failed_enable_MySQL_dump_server');
     }
     return $file;
 }