/**
 * Rename a file or directory.
 *
 * @return array
 */
public function rename()
{
    $oldPath = $this->sanitizePath($this->get['old']);
    $isDir = $this->isDir($this->metadata("{$oldPath}/"));
    $oldPath .= $isDir ? '/' : '';

    // NOTE: we list the prefix instead of relying on isDir, as we only care
    // whether there are files under this prefix.
    $contents = $isDir
        ? $this->s3->get_object_list($this->bucket, array('prefix' => $oldPath))
        : array();
    if (count($contents) > 1) {
        $this->error('Renaming a non-empty directory is currently not supported.', false);
    }

    $pathInfo = pathinfo($oldPath);
    $dirName = $pathInfo['dirname'];
    $baseName = $pathInfo['basename'];
    $newFile = $this->get['new'];
    $newPath = join('/', array($dirName, $newFile));

    if ($isDir) {
        $response = $this->createDirectory($newPath);
    } else {
        // S3 has no native rename: copy to the new key, then delete the old one.
        $response = $this->s3->copy_object(
            array('bucket' => $this->bucket, 'filename' => $oldPath),
            array('bucket' => $this->bucket, 'filename' => $newPath),
            array('acl' => AmazonS3::ACL_PUBLIC)
        );
    }
    if ($response->isOK()) {
        $this->s3->delete_object($this->bucket, $oldPath);
    }

    return array(
        'Error' => '',
        'Code' => 0,
        'Old Path' => $oldPath,
        'Old Name' => $baseName,
        'New Path' => $newPath,
        'New Name' => $newFile,
    );
}
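Since S3 has no atomic rename operation, the copy-then-delete pattern above is the standard approach. Below is a minimal standalone sketch of that pattern, assuming the legacy AWS SDK for PHP 1.x (`sdk.class.php`); the credentials, bucket, and key names are placeholders.

<?php
// Minimal copy-then-delete "rename" sketch for the legacy AWS SDK for PHP 1.x.
// SDK path, credentials, bucket, and keys are placeholder assumptions.
require_once 'sdk.class.php';

$s3 = new AmazonS3(array('key' => 'YOUR_KEY', 'secret' => 'YOUR_SECRET'));

$bucket = 'example-bucket';
$oldKey = 'path/old-name.txt';
$newKey = 'path/new-name.txt';

// Copy the object to its new key first...
$copy = $s3->copy_object(
    array('bucket' => $bucket, 'filename' => $oldKey),
    array('bucket' => $bucket, 'filename' => $newKey)
);

// ...and delete the original only if the copy succeeded, so a failed
// copy never leaves you without either object.
if ($copy->isOK()) {
    $delete = $s3->delete_object($bucket, $oldKey);
    if (!$delete->isOK()) {
        error_log("Copied but failed to delete {$oldKey}");
    }
}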
/**
 * Move a file or folder to a specific location.
 *
 * @param string $from The location to move from
 * @param string $to   The location to move to
 * @param string $point
 * @return boolean
 */
public function moveObject($from, $to, $point = 'append')
{
    $this->xpdo->lexicon->load('source');
    $success = false;

    // Folders (keys ending in "/") cannot be moved.
    if (substr($from, -1) == '/') {
        $this->xpdo->error->message = $this->xpdo->lexicon('s3_no_move_folder', array('from' => $from));
        return $success;
    }
    if (!$this->driver->if_object_exists($this->bucket, $from)) {
        $this->xpdo->error->message = $this->xpdo->lexicon('file_err_ns') . ': ' . $from;
        return $success;
    }
    if ($to != '/') {
        if (!$this->driver->if_object_exists($this->bucket, $to)) {
            $this->xpdo->error->message = $this->xpdo->lexicon('file_err_ns') . ': ' . $to;
            return $success;
        }
        $toPath = rtrim($to, '/') . '/' . basename($from);
    } else {
        $toPath = basename($from);
    }

    // S3 has no native move: copy, then delete the source on success.
    $response = $this->driver->copy_object(
        array('bucket' => $this->bucket, 'filename' => $from),
        array('bucket' => $this->bucket, 'filename' => $toPath),
        array('acl' => AmazonS3::ACL_PUBLIC)
    );
    $success = $response->isOK();
    if ($success) {
        $deleteResponse = $this->driver->delete_object($this->bucket, $from);
        $success = $deleteResponse->isOK();
    } else {
        $this->xpdo->error->message = $this->xpdo->lexicon('file_folder_err_rename') . ': ' . $from . ' -> ' . $to;
    }
    return $success;
}
/**
 * {@inheritDoc}
 */
public function remove(array $paths, array $filters)
{
    if (empty($paths) && empty($filters)) {
        return;
    }

    if (empty($paths)) {
        if (!$this->storage->delete_all_objects($this->bucket, sprintf('/%s/i', implode('|', $filters)))) {
            $this->logError('The objects could not be deleted from Amazon S3.', array(
                'filters' => implode(', ', $filters),
                'bucket' => $this->bucket,
            ));
        }
        return;
    }

    foreach ($filters as $filter) {
        foreach ($paths as $path) {
            $objectPath = $this->getObjectPath($path, $filter);
            if (!$this->objectExists($objectPath)) {
                continue;
            }
            if (!$this->storage->delete_object($this->bucket, $objectPath)->isOK()) {
                $this->logError('The objects could not be deleted from Amazon S3.', array(
                    'filter' => $filter,
                    'bucket' => $this->bucket,
                    'path' => $path,
                ));
            }
        }
    }
}
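The bulk branch above relies on the legacy SDK's delete_all_objects(), which takes a PCRE pattern and deletes every key matching it. A minimal sketch of that call follows; the bucket and filter names are placeholders.

<?php
// Bulk-delete every object whose key matches one of the filter names,
// using the legacy SDK 1.x delete_all_objects() PCRE matcher.
// SDK path, credentials, bucket, and filter names are placeholder assumptions.
require_once 'sdk.class.php';

$s3 = new AmazonS3(array('key' => 'YOUR_KEY', 'secret' => 'YOUR_SECRET'));

$filters = array('thumbnail_small', 'thumbnail_large');

// Keys containing any filter name are removed in a single call.
$pattern = sprintf('/%s/i', implode('|', $filters));
if (!$s3->delete_all_objects('example-media-cache', $pattern)) {
    error_log('Bulk delete failed for pattern ' . $pattern);
}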
function delete_xid($db, $xid)
{
    $data = $db->Raw("SELECT server, link FROM userdb_uploads WHERE xid='{$xid}'");
    $server = $data[0]['server'];
    $link = $data[0]['link'];

    if ($server == 's3') {
        $s3 = new AmazonS3();
        $s3->delete_object('fb-music', $link);
    } else {
        // split() was removed in PHP 7; explode() is the drop-in replacement here.
        $parts = explode('/', $data[0]['link']);
        $link = '/var/www/music/users/' . $parts[4] . '/' . $parts[5] . '/' . $parts[6];
        unlink($link);
    }
    $db->Raw("DELETE FROM userdb_uploads WHERE xid='{$xid}'");
}
function remove_amazons3_backup_bwd_comp($args)
{
    if ($this->iwp_mmb_function_exists('curl_init')) {
        require_once $GLOBALS['iwp_mmb_plugin_dir'] . '/lib/amazon_s3_bwd_comp/sdk.class.php';
        extract($args);

        if (!is_array($backup_file)) {
            $temp_backup_file = $backup_file;
            $backup_file = array();
            $backup_file[] = $temp_backup_file;
        }
        if ($as3_site_folder == true) {
            if (!empty($as3_directory)) {
                $as3_directory .= '/' . $this->site_name;
            } else {
                $as3_directory = $this->site_name;
            }
        }

        try {
            CFCredentials::set(array(
                'development' => array(
                    'key' => trim($as3_access_key),
                    'secret' => trim(str_replace(' ', '+', $as3_secure_key)),
                    'default_cache_config' => '',
                    'certificate_authority' => true,
                ),
                '@default' => 'development',
            ));
            $s3 = new AmazonS3();
            foreach ($backup_file as $single_backup_file) {
                if (empty($as3_directory)) {
                    $single_as3_file = $single_backup_file;
                } else {
                    $single_as3_file = $as3_directory . '/' . $single_backup_file;
                }
                $s3->delete_object($as3_bucket, $single_as3_file);
            }
        } catch (Exception $e) {
            // Deletion failures are intentionally ignored here.
        }
    }
}
function dest_s3()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup file to Amazon S3...', 'backwpup'), $WORKING['DEST_S3']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;

    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);

    try {
        $s3 = new AmazonS3(array(
            'key' => $STATIC['JOB']['awsAccessKey'],
            'secret' => $STATIC['JOB']['awsSecretKey'],
            'certificate_authority' => true,
        ));
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            trigger_error(sprintf(__('Connected to S3 Bucket: %s', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_NOTICE);
            // Use reduced redundancy storage if configured.
            if ($STATIC['JOB']['awsrrs']) {
                $storage = AmazonS3::STORAGE_REDUCED;
            } else {
                $storage = AmazonS3::STORAGE_STANDARD;
            }
            // Set up the cURL progress callback.
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(
                    CURLOPT_NOPROGRESS => false,
                    CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback',
                    CURLOPT_BUFFERSIZE => 1048576,
                );
            }
            trigger_error(__('Upload to Amazon S3 now started... ', 'backwpup'), E_USER_NOTICE);
            // Transfer the backup file to S3.
            $result = $s3->create_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $STATIC['backupfile'], array(
                'fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'],
                'acl' => AmazonS3::ACL_PRIVATE,
                'storage' => $storage,
                'curlopts' => $curlops,
            ));
            $result = (array) $result;
            if ($result['status'] >= 200 and $result['status'] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), $result['header']['_info']['url']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloads3&file=' . $STATIC['JOB']['awsdir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
                $WORKING['STEPSDONE'][] = 'DEST_S3'; // Mark step done.
            } else {
                trigger_error(sprintf(__('Cannot transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result['status'], $result['Message']), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('S3 Bucket "%s" does not exist!', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }

    try {
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            if ($STATIC['JOB']['awsmaxbackups'] > 0) {
                // Delete old backups beyond the configured maximum.
                $backupfilelist = array();
                if (($contents = $s3->list_objects($STATIC['JOB']['awsBucket'], array('prefix' => $STATIC['JOB']['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeletedfiles = 0;
                    for ($i = $STATIC['JOB']['awsmaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($s3->delete_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $backupfilelist[$i])) {
                            // Delete surplus files on S3.
                            $numdeletedfiles++;
                        } else {
                            trigger_error(sprintf(__('Cannot delete backup on S3://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['awsdir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeletedfiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on S3 Bucket', '%d files deleted on S3 Bucket', $numdeletedfiles, 'backwpup'), $numdeletedfiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
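The retention logic above (keep the newest N backups under a prefix, delete the rest) is easy to exercise in isolation. Here is a minimal sketch of the same pruning rule, assuming the legacy AWS SDK for PHP 1.x; the bucket, prefix, and retention count are placeholders, and the chronological ordering relies on timestamped filenames sorting lexically, as in the job code above.

<?php
// Prune a prefix down to the newest $maxBackups files, mirroring the
// rotation rule in dest_s3(). SDK path, credentials, bucket, and prefix
// are placeholder assumptions.
require_once 'sdk.class.php';

$s3 = new AmazonS3(array('key' => 'YOUR_KEY', 'secret' => 'YOUR_SECRET'));
$bucket = 'example-backups';
$prefix = 'site1/';
$maxBackups = 5;

// Collect candidate keys under the prefix.
$files = $s3->get_object_list($bucket, array('prefix' => $prefix));

// Timestamped filenames sort chronologically, so a reverse sort puts the
// newest backups first; everything past $maxBackups gets deleted.
rsort($files);
foreach (array_slice($files, $maxBackups) as $key) {
    if (!$s3->delete_object($bucket, $key)->isOK()) {
        error_log("Could not delete s3://{$bucket}/{$key}");
    }
}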
public function removeFileFromApplicationBucket($filename, $prefix)
{
    ProjectConfiguration::registerAws();
    $s3 = new AmazonS3();
    $bucket = ProjectConfiguration::getApplicationAmazonBucketName();
    if ($s3->if_bucket_exists($bucket)) {
        $response = $s3->delete_object($bucket, $prefix . '/' . $filename);
        if (!$response->isOK()) {
            throw new Exception("Error deleting file!");
        }
    } else {
        throw new Exception("Amazon bucket '{$bucket}' does not exist!");
    }
    return $response;
}
function dest_gstorage()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup to Google Storage...', 'backwpup'), $WORKING['DEST_GSTORAGE']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;

    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);

    try {
        // Google Cloud Storage speaks the S3 API in interoperability mode,
        // so the AmazonS3 client is pointed at the GStorage endpoint.
        $gstorage = new AmazonS3(array(
            'key' => $STATIC['JOB']['GStorageAccessKey'],
            'secret' => $STATIC['JOB']['GStorageSecret'],
            'certificate_authority' => true,
        ));
        $gstorage->set_hostname('storage.googleapis.com');
        $gstorage->allow_hostname_override(false);

        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            trigger_error(sprintf(__('Connected to GStorage Bucket: %s', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_NOTICE);
            // Set up the cURL progress callback.
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(
                    CURLOPT_NOPROGRESS => false,
                    CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback',
                    CURLOPT_BUFFERSIZE => 1048576,
                );
            }
            trigger_error(__('Upload to GStorage now started... ', 'backwpup'), E_USER_NOTICE);
            // Transfer the backup file to GStorage.
            $result = $gstorage->create_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'], array(
                'fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'],
                'acl' => 'private',
                'curlopts' => $curlops,
            ));
            $result = (array) $result;
            if ($result['status'] >= 200 and $result['status'] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://storage.cloud.google.com/' . $STATIC['JOB']['GStorageBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = 'https://storage.cloud.google.com/' . $STATIC['JOB']['GStorageBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'];
                $WORKING['STEPSDONE'][] = 'DEST_GSTORAGE'; // Mark step done.
            } else {
                trigger_error(sprintf(__('Cannot transfer backup to GStorage! (%1$d) %2$s', 'backwpup'), $result['status'], $result['Message']), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('GStorage Bucket "%s" does not exist!', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }

    try {
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            if ($STATIC['JOB']['GStoragemaxbackups'] > 0) {
                // Delete old backups beyond the configured maximum.
                $backupfilelist = array();
                if (($contents = $gstorage->list_objects($STATIC['JOB']['GStorageBucket'], array('prefix' => $STATIC['JOB']['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeletedfiles = 0;
                    for ($i = $STATIC['JOB']['GStoragemaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($gstorage->delete_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i])) {
                            // Delete surplus files on GStorage.
                            $numdeletedfiles++;
                        } else {
                            trigger_error(sprintf(__('Cannot delete backup on GStorage://%s', 'backwpup'), $STATIC['JOB']['GStorageBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeletedfiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on GStorage Bucket', '%d files deleted on GStorage Bucket', $numdeletedfiles, 'backwpup'), $numdeletedfiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
public static function test($settings)
{
    $remote_path = self::get_remote_path($settings['directory']); // Has leading and trailing slashes.

    // Try sending a file.
    $test_result = self::send($settings, dirname(__FILE__) . '/icon.png', true); // 3rd param true forces clearing of any current uploads.

    // S3 object for managing files.
    $manage_data = pb_backupbuddy_destination_stash::get_manage_data($settings);
    $s3_manage = new AmazonS3($manage_data['credentials']);
    if ($settings['ssl'] == 0) {
        @$s3_manage->disable_ssl(true);
    }

    // Delete the sent test file.
    $response = $s3_manage->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . 'icon.png');
    if (!$response->isOK()) {
        pb_backupbuddy::status('details', 'Unable to delete test Stash file `icon.png`. Details: `' . print_r($response, true) . '`.');
    }

    delete_transient('pb_backupbuddy_stashquota_' . $settings['itxapi_username']); // Delete quota transient since it probably has changed now.
    return $test_result;
}
public function updateRemote($myrole, $drivers)
{
    if ($this->_options['update'] == 'simulate') {
        $simulate = true;
        $this->_out->logWarning("only SIMULATION mode");
    } else {
        if ($this->_options['update'] === false || (int) $this->_options['update'] === 0) {
            $this->_out->logNotice("skipped, not requested and not needed");
            return;
        }
        $simulate = false;
    }

    /** @var $compare Compare_Interface */
    $compare = $drivers['compare'];
    /** @var $local Storage_Interface */
    $local = $drivers['local'];

    if (!$compare->initChangesOn("remote")) {
        // TODO not sure, but maybe we will need it
    }

    $job = $this->_out->jobStart("updating remote storage");
    $this->_out->jobSetProgressStep($job, 1000);

    foreach ($compare as $task) {
        $repeat = 3;
        do {
            $msg = "";
            try {
                $path = $this->_getPathWithBasedir($task->path, self::ADD_BASE_DIR);
                switch ($task->action) {
                    case Compare_Interface::CMD_MKDIR:
                        $msg = "mkdir " . $path . " into s3 bucket";
                        $this->_out->logDebug($msg);
                        if (!$simulate) {
                            // Create "folders" as zero-byte objects.
                            $this->_s3->create_object($this->getBucket(), $path, array(
                                'body' => '',
                                'storage' => $this->_defaultRedundancyStorage,
                            ));
                        }
                        break;
                    case Compare_Interface::CMD_PUT:
                        $msg = "put " . $path . " into s3 bucket";
                        $this->_out->logDebug($msg);
                        $uploadPath = $local->getBaseDir() . $task->path;
                        // Fix for Windows encoding issue.
                        $uploadPath = $local->convertEncodingPath($uploadPath);
                        if (!file_exists($uploadPath)) {
                            $this->_out->logError("file {$uploadPath} does not exist anymore locally");
                            continue 3; // Skip to the next task; inside a switch, a bare "continue" only acts like "break".
                        }
                        if (!$simulate) {
                            if (substr($path, -1) === '/') {
                                // Keys ending in "/" denote empty directories and need slightly different options.
                                $this->_out->logWarning("TODO putting empty folder {$path} ... is it possible ?");
                                $this->_s3->create_object($this->getBucket(), $path, array(
                                    'body' => '',
                                    'storage' => $this->_defaultRedundancyStorage,
                                ));
                            } else {
                                $options = array(
                                    'fileUpload' => $uploadPath,
                                    'storage' => $this->_defaultRedundancyStorage,
                                );
                                // TODO it should be possible to speed up uploads of small files by using S3 batch
                                if ($this->_options['multipart']['big-files']) {
                                    // Multipart upload for big files.
                                    if ($this->_options['multipart']['part-size']) {
                                        $options['partSize'] = $this->_options['multipart']['part-size'];
                                    }
                                    $this->_s3->create_mpu_object($this->getBucket(), $path, $options);
                                } else {
                                    // Normal upload.
                                    $this->_s3->create_object($this->getBucket(), $path, $options);
                                }
                            }
                        }
                        break;
                    case Compare_Interface::CMD_DELETE:
                        $msg = "deleting " . $path . " from s3 bucket";
                        $this->_out->logDebug($msg);
                        if (!$simulate) {
                            $this->_s3->delete_object($this->getBucket(), $path);
                        }
                        break;
                    case Compare_Interface::CMD_TS:
                        // Storing this information as metadata is too slow to be used.
                        // $this->_out->logDebug("remember local timestamp for " . $path . " into s3 bucket");
                        // if (!$simulate) {
                        //     $this->_s3->update_object($this->getBucket(), $path, array(
                        //         'meta' => array('localts' => $task->ltime),
                        //     ));
                        // }
                        break;
                    default:
                        $this->_out->logError("ignored command {$task->action}");
                }
                $repeat = 0;
            } catch (Exception $e) {
                $repeat--;
                if ($repeat) {
                    $this->_out->logError("need to repeat: {$msg}");
                } else {
                    if ($msg) {
                        $this->_out->logError($msg);
                    }
                    throw new Exception($e->getMessage(), $e->getCode());
                }
            }
        } while ($repeat);

        if (!$simulate) {
            $compare->remoteHasDone($task);
        }
        $this->_out->jobStep($job);
    }
    $this->_out->jobEnd($job, "remote storage updated");
}
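The `continue 3` in the missing-file branch matters: in PHP, `switch` counts as a breakable structure for `break` and `continue`, so a bare `continue` inside a `switch` only exits the `switch` (PHP 7.3+ even emits a warning about exactly this). A minimal self-contained demonstration of the difference:

<?php
// Demonstrates why a bare "continue" inside a switch does not skip the
// enclosing loop iteration: switch counts as a breakable structure, so
// you must give continue an explicit level to reach the loop.
foreach (array('a', 'b', 'c') as $item) {
    switch ($item) {
        case 'b':
            continue 2; // Skips to the next foreach iteration.
        default:
            break;      // A bare "continue" here would behave like this break.
    }
    echo $item, "\n";   // Prints "a" and "c" only; "b" is skipped.
}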
public static function test($settings)
{
    $remote_path = self::get_remote_path($settings['directory']); // Has leading and trailing slashes.

    // Try sending a file.
    $test_result = self::send($settings, dirname(__FILE__) . '/icon.png', true); // 3rd param true forces clearing of any current uploads.

    // S3 object for managing files.
    $manage_data = self::get_manage_data($settings);
    $s3_manage = new AmazonS3($manage_data);
    if ($settings['ssl'] == 0) {
        @$s3_manage->disable_ssl(true);
    }

    // Delete the sent test file.
    $response = $s3_manage->delete_object($settings['bucket'], $remote_path . 'icon.png');
    if (!$response->isOK()) {
        pb_backupbuddy::status('details', 'Unable to delete test S3 file `' . $settings['bucket'] . $remote_path . 'icon.png' . '`. Details: `' . print_r($response, true) . '`.');
    }
    return $test_result;
}
public function deleteEpisodeFileFromAmazon($filename = null, $bucket = null)
{
    ProjectConfiguration::registerAws();
    $s3 = new AmazonS3();
    $bucket = is_null($bucket) ? $this->getSubreddit()->getBucketName() : $bucket;
    if (!$s3->if_bucket_exists($bucket)) {
        throw new Exception("Amazon bucket '{$bucket}' does not exist!");
    }
    $filename = is_null($filename) ? $this->getNiceFilename() : $filename;
    $response = $s3->delete_object($bucket, $filename);
    if (!$response->isOK()) {
        throw new Exception('Failed to remove file from Amazon!');
    }
}
if (isset($_GET['updateList'])) {
    $origPlaylist = $db->Raw("SELECT `xid` FROM `userdb_uploads` WHERE `user`='{$_GET['uid']}'");
    if (count($_POST['playlist']) == 0) {
        $newList = array();
    } else {
        $newList = $_POST['playlist'];
    }
    foreach ($origPlaylist as $origSong) {
        if (!in_array($origSong['xid'], $newList)) {
            $id = $origSong['xid'];
            $deleteData = $db->Raw("SELECT `type`,`link`,`server`,`drive` FROM `userdb_uploads` WHERE `id`='{$id}' LIMIT 1");
            $server = $deleteData[0]['server'];
            if ($deleteData[0]['type'] == 'upload') {
                if ($server == 's3') {
                    $s3 = new AmazonS3();
                    $s3->delete_object('fb-music', $deleteData[0]['link']);
                } else {
                    $serverData = $db->Raw("SELECT `internal_uri` FROM `servers` WHERE `name`='{$server}'");
                    $userFolder = array_sum(str_split($_GET['id']));
                    $localPath = $serverData[0]['internal_uri'] . 'users/' . $deleteData[0]['drive'] . '/' . $userFolder . '/' . basename($deleteData[0]['link']);
                    if (file_exists($localPath)) {
                        unlink($localPath);
                    }
                }
            }
            // $db->Raw("INSERT INTO `delete_queue` (`xid`,`file`) VALUES ('$id','$deleteData[0][link]')");
            $db->Raw("DELETE FROM `userdb_uploads` WHERE `id`='{$id}'");
        }
    }
    // Update the playlist order as long as there are still songs that exist in the playlist.
    if (count($_POST['playlist']) !== 0) {
        foreach ($_POST['playlist'] as $key => $song) {
public function remove_leftover()
{
    require_once '/var/www/application_xdcmgh32Rw/aws/sdk.class.php';
    $s3 = new AmazonS3();
    $menurad_id = $this->session->userdata('menurad_id');
    $path = $this->config->item('menurad_bucket') . 'assets/img/stores/menu/';
    if ($this->url_exists($path . $menurad_id . '-' . $_POST['category_id'] . '.jpg')) {
        $response = $s3->delete_object('menurad', 'assets/img/stores/menu/' . $menurad_id . '-' . $_POST['category_id'] . '.jpg');
    }
}
</form>
<?php
    die;
}

$s3 = new AmazonS3($manage_data['credentials']); // The key, secret, and token.
if ($settings['ssl'] == '0') {
    @$s3->disable_ssl(true);
}

// Handle deletion.
if (pb_backupbuddy::_POST('bulk_action') == 'delete_backup') {
    pb_backupbuddy::verify_nonce();
    $deleted_files = array();
    foreach ((array) pb_backupbuddy::_POST('items') as $item) {
        $response = $s3->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . $item);
        if ($response->isOK()) {
            $deleted_files[] = $item;
        } else {
            pb_backupbuddy::alert('Error: Unable to delete `' . $item . '`. Verify permissions.');
        }
    }
    if (count($deleted_files) > 0) {
        pb_backupbuddy::alert('Deleted ' . implode(', ', $deleted_files) . '.');
        delete_transient('pb_backupbuddy_stashquota_' . $itxapi_username); // Delete quota transient since it probably has changed now.
    }
    echo '<br>';
}

// Handle copying files to local.
if (pb_backupbuddy::_GET('cpy_file') != '') {
/**
 * @param $jobdest
 * @param $backupfile
 */
public function file_delete($jobdest, $backupfile)
{
    $files = get_site_transient('backwpup_' . strtolower($jobdest), array());
    list($jobid, $dest) = explode('_', $jobdest);

    if (BackWPup_Option::get($jobid, 's3accesskey') && BackWPup_Option::get($jobid, 's3secretkey') && BackWPup_Option::get($jobid, 's3bucket')) {
        try {
            $s3 = new AmazonS3(array(
                'key' => BackWPup_Option::get($jobid, 's3accesskey'),
                'secret' => BackWPup_Encryption::decrypt(BackWPup_Option::get($jobid, 's3secretkey')),
                'certificate_authority' => TRUE,
            ));
            $base_url = $this->get_s3_base_url(BackWPup_Option::get($jobid, 's3region'), BackWPup_Option::get($jobid, 's3base_url'));
            if (stristr($base_url, 'amazonaws.com')) {
                // Genuine AWS endpoint: let the SDK treat it as a region.
                $s3->set_region(str_replace(array('http://', 'https://'), '', $base_url));
            } else {
                // S3-compatible service: pin the hostname and, for base URLs ending in "/", use path-style addressing.
                $s3->set_hostname(str_replace(array('http://', 'https://'), '', $base_url));
                $s3->allow_hostname_override(FALSE);
                if (substr($base_url, -1) == '/') {
                    $s3->enable_path_style(TRUE);
                }
            }
            if (stristr($base_url, 'http://')) {
                $s3->disable_ssl();
            }

            $s3->delete_object(BackWPup_Option::get($jobid, 's3bucket'), $backupfile);

            // Update the cached file list.
            foreach ($files as $key => $file) {
                if (is_array($file) && $file['file'] == $backupfile) {
                    unset($files[$key]);
                }
            }
            unset($s3);
        } catch (Exception $e) {
            BackWPup_Admin::message(sprintf(__('S3 Service API: %s', 'backwpup'), $e->getMessage()), TRUE);
        }
    }
    set_site_transient('backwpup_' . strtolower($jobdest), $files, 60 * 60 * 24 * 7);
}
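The region-versus-hostname branch above is the general pattern for pointing the legacy SDK at either AWS or an S3-compatible endpoint. A condensed sketch of that decision, assuming SDK 1.x; the helper name and endpoint URL are illustrative, not part of the original code.

<?php
// Configure a legacy AmazonS3 (SDK 1.x) client for either a real AWS
// region or an S3-compatible service, mirroring file_delete() above.
// The helper name and endpoint URLs are illustrative assumptions.
require_once 'sdk.class.php';

function configure_endpoint(AmazonS3 $s3, $base_url)
{
    $host = str_replace(array('http://', 'https://'), '', $base_url);
    if (stristr($base_url, 'amazonaws.com')) {
        // Genuine AWS: treat the host as a region endpoint.
        $s3->set_region($host);
    } else {
        // Third-party S3-compatible service: pin the hostname so the SDK
        // does not rewrite it, and use path-style URLs when requested.
        $s3->set_hostname($host);
        $s3->allow_hostname_override(false);
        if (substr($base_url, -1) == '/') {
            $s3->enable_path_style(true);
        }
    }
    if (stristr($base_url, 'http://')) {
        $s3->disable_ssl(); // Plain-HTTP endpoints cannot be verified over SSL.
    }
}

$s3 = new AmazonS3(array('key' => 'YOUR_KEY', 'secret' => 'YOUR_SECRET'));
configure_endpoint($s3, 'https://s3-eu-west-1.amazonaws.com');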
                $backwpup_message .= 'Amazon S3: ' . $e->getMessage() . '<br />';
            }
        }
    }
} elseif ($dest == 'GSTORAGE') {
    if (!class_exists('AmazonS3')) {
        require_once realpath(dirname(__FILE__) . '/../libs/aws/sdk.class.php');
    }
    if (class_exists('AmazonS3')) {
        if (!empty($jobvalue['GStorageAccessKey']) and !empty($jobvalue['GStorageSecret']) and !empty($jobvalue['GStorageBucket'])) {
            try {
                // Google Storage is addressed through the S3-compatible client.
                $gstorage = new AmazonS3(array(
                    'key' => $jobvalue['GStorageAccessKey'],
                    'secret' => $jobvalue['GStorageSecret'],
                    'certificate_authority' => true,
                ));
                $gstorage->ssl_verification = false;
                $gstorage->set_hostname('storage.googleapis.com');
                $gstorage->allow_hostname_override(false);
                $gstorage->delete_object($jobvalue['GStorageBucket'], $backupfile);
                unset($gstorage);
            } catch (Exception $e) {
                $backwpup_message .= sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()) . '<br />';
            }
        }
    }
} elseif ($dest == 'MSAZURE') {
    if (!class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
        require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
    }
    if (class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
        if (!empty($jobvalue['msazureHost']) and !empty($jobvalue['msazureAccName']) and !empty($jobvalue['msazureKey']) and !empty($jobvalue['msazureContainer'])) {
            try {
                $storageClient = new Microsoft_WindowsAzure_Storage_Blob($jobvalue['msazureHost'], $jobvalue['msazureAccName'], $jobvalue['msazureKey']);
                $storageClient->deleteBlob($jobvalue['msazureContainer'], $backupfile);
public static function test($settings)
{
    $remote_path = self::get_remote_path($settings['directory']); // Has leading and trailing slashes.

    $manage_data = pb_backupbuddy_destination_stash::get_manage_data($settings);
    if (!is_array($manage_data['credentials'])) {
        // Credentials were somehow faulty. User changed password after prior page? Unlikely but you never know...
        $error_msg = 'Error #8484383c: Your authentication credentials for Stash failed. Verify your login and password to Stash. You may need to update the Stash destination settings. Perhaps you recently changed your password?';
        pb_backupbuddy::status('error', $error_msg);
        return $error_msg;
    }

    // Try sending a file.
    $send_response = pb_backupbuddy_destinations::send($settings, dirname(dirname(__FILE__)) . '/remote-send-test.php', $send_id = 'TEST-' . pb_backupbuddy::random_string(12));
    if (false === $send_response) {
        $send_response = 'Error sending test file to Stash.';
    } else {
        $send_response = 'Success.';
    }

    // S3 object for managing files.
    $s3_manage = new AmazonS3($manage_data['credentials']);
    if ($settings['ssl'] == 0) {
        @$s3_manage->disable_ssl(true);
    }

    // Delete the sent test file, keeping the raw response so failures can be reported.
    $response = $s3_manage->delete_object($manage_data['bucket'], $manage_data['subkey'] . $remote_path . 'remote-send-test.php');
    if (!$response->isOK()) {
        $delete_response = 'Unable to delete test Stash file `remote-send-test.php`. Details: `' . print_r($response, true) . '`.';
        pb_backupbuddy::status('details', $delete_response);
    } else {
        $delete_response = 'Success.';
    }

    // Load destination fileoptions.
    pb_backupbuddy::status('details', 'About to load fileoptions data.');
    require_once pb_backupbuddy::plugin_path() . '/classes/fileoptions.php';
    $fileoptions_obj = new pb_backupbuddy_fileoptions(backupbuddy_core::getLogDirectory() . 'fileoptions/send-' . $send_id . '.txt', $read_only = false, $ignore_lock = false, $create_file = false);
    if (true !== ($result = $fileoptions_obj->is_ok())) {
        pb_backupbuddy::status('error', __('Fatal Error #9034.84838. Unable to access fileoptions data.', 'it-l10n-backupbuddy') . ' Error: ' . $result);
        return false;
    }
    pb_backupbuddy::status('details', 'Fileoptions data loaded.');
    $fileoptions =& $fileoptions_obj->options;

    if ('Success.' != $send_response || 'Success.' != $delete_response) {
        $fileoptions['status'] = 'failure';
        $fileoptions_obj->save();
        unset($fileoptions_obj);
        return 'Send details: `' . $send_response . '`. Delete details: `' . $delete_response . '`.';
    } else {
        $fileoptions['status'] = 'success';
        $fileoptions['finish_time'] = time();
    }
    $fileoptions_obj->save();
    unset($fileoptions_obj);
    return true;
}
$region = ""; if (!empty($authToken) && !empty($deviceID)) { $conn = mysql_connect(DB_HOST, DB_USERNAME, DB_PASSWORD) or die("Error:Couldn't connect to server"); $db = mysql_select_db(DB_DBNAME, $conn) or die("Error:Couldn't select database"); $query = "SELECT * FROM users WHERE authToken = '{$authToken}'"; $result = mysql_query($query) or die("Error:Query Failed-1"); if (mysql_num_rows($result) == 1) { $row = mysql_fetch_array($result); $username = $row["email"]; $region = $row["region"]; } else { die("Error: Incorrect authToken"); } mysql_close($conn); $newFileName = md5($username . $deviceID) . "_"; $s3 = new AmazonS3(); $bucket = 'com.sanchitkarve.tb.usor'; $response = $s3->get_object_list($bucket); if (!empty($response)) { foreach ($response as $item) { if (startsWith($item, $newFileName)) { $r = $s3->delete_object($bucket, $item); } } echo "Success"; } else { echo "Success: No files found"; } } else { echo "Error: Not all parameters set."; }