protected function execute($arguments = array(), $options = array())
 {
     $this->configuration = ProjectConfiguration::getApplicationConfiguration($options['app'], $options['env'], true);
      if (!sfConfig::get('app_sf_amazon_plugin_access_key', false)) {
          throw new sfException('You have not set an Amazon access key');
      }
      if (!sfConfig::get('app_sf_amazon_plugin_secret_key', false)) {
          throw new sfException('You have not set an Amazon secret key');
      }
     $s3 = new AmazonS3(sfConfig::get('app_sf_amazon_plugin_access_key'), sfConfig::get('app_sf_amazon_plugin_secret_key'));
     $this->s3_response = $s3->create_bucket($arguments['bucket'], $options['region'], $options['acl']);
     if ($this->s3_response->isOk()) {
          $this->log('Bucket is being created...');
         /* Since AWS follows an "eventual consistency" model, sleep and poll
            until the bucket is available. */
         $exists = $s3->if_bucket_exists($arguments['bucket']);
         while (!$exists) {
             // Not yet? Sleep for 1 second, then check again
             sleep(1);
             $exists = $s3->if_bucket_exists($arguments['bucket']);
         }
         $this->logSection('Bucket+', sprintf('"%s" created successfully', $arguments['bucket']));
     } else {
         throw new sfException($this->s3_response->body->Message);
     }
 }
Example #2
 public function __construct($config)
 {
     $this->_s3 = new AmazonS3();
     $this->_bucket = $config['bucket'];
     if (!$this->_s3->if_bucket_exists($this->_bucket)) {
         throw new Exception("Amazon bucket " . $this->_bucket . " doesn't exist, exiting.");
     }
 }
Example #3
 public function init($myrole, $drivers)
 {
     $this->_out->logNotice(">>>init S3 driver as {$myrole}");
     $this->_asRemote = $myrole == Core_Engine::ROLE_REMOTE;
     // Amazon library SSL Connection Issues
     if (!defined('AWS_CERTIFICATE_AUTHORITY')) {
         define('AWS_CERTIFICATE_AUTHORITY', $this->_options['certificate_authority']);
     } else {
         $this->_out->logNotice("option 'certificate_authority' was already set, it can't be changed");
     }
     if ($this->_options['compatibility-test']) {
         // see lib/AWSSDKforPHP/_compatibility_test
         $this->_out->jobStart("executing Amazon SDK compatibility test");
         // their code shows notices
         error_reporting(E_ALL & ~E_NOTICE);
         include "lib/AWSSDKforPHP/_compatibility_test/sdk_compatibility_test_cli.php";
         $this->_out->stop("-- re-run without --");
     }
     // test parameters
     if (!isset($this->_options['key'], $this->_options['key']['access'])) {
         throw new Core_StopException("You have to define S3 option key.access.", "S3Init");
     }
     if (!isset($this->_options['key']['secret'])) {
         throw new Core_StopException("You have to define S3 option key.secret.", "S3Init");
     }
     if (!isset($this->_options['bucket'])) {
         throw new Core_StopException("You have to define S3 option bucket.", "S3Init");
     }
     if (!is_null($this->_options['multipart']['part-size']) && ($this->_options['multipart']['part-size'] < 5 || $this->_options['multipart']['part-size'] > 5120)) {
         throw new Core_StopException("multipart.part-size has to be in range from 5MB to 500MB. It is Amazon S3 restriction. Current value is {$this->_options['multipart']['part-size']}MB.", "S3Init");
     }
     $job = $this->_out->jobStart("handshaking with Amazon S3");
     // TODO we need better AmazonS3 error handling
     $this->_s3 = new AmazonS3(array('key' => $this->_options['key']['access'], 'secret' => $this->_options['key']['secret']));
     if (false == $this->_s3->if_bucket_exists($this->getBucket())) {
         $this->_out->jobEnd($job, "failed");
         throw new Core_StopException("S3 bucket not found: '{$this->getBucket()}' for access key '" . substr($this->_options['key']['access'], 0, 5) . "...'", "S3Init");
     }
     $this->_out->jobEnd($job, "authorized");
     // find out if versioning is enabled
     $versioning = $this->_s3->get_versioning_status($this->getBucket());
     if (!$versioning->isOK()) {
         throw new Core_StopException("Not possible to get versioning status of S3 bucket. (" . (string) $versioning->body->Code . ": " . (string) $versioning->body->Message . ")", "S3Init");
     }
     $this->_versioningEnabled = $versioning->body->Status == "Enabled";
     if (!$this->_versioningEnabled) {
         $priority = $this->_options['warn-versioning'] ? Output_Stack::WARNING : Output_Stack::DEBUG;
         $this->_out->log($priority, "Versioning not enabled for this S3 bucket, you will not be able to restore older versions of files.");
     }
     if (array_key_exists('defaultRedundancyStorage', $this->_options)) {
         if (is_string($this->_options['defaultRedundancyStorage'])) {
             $this->_defaultRedundancyStorage = constant("AmazonS3::" . $this->_options['defaultRedundancyStorage']);
         }
     }
     return true;
 }
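Example #3 only warns when versioning is off. If a tool should turn versioning on instead, SDK 1.x exposes enable_versioning(); the following is a minimal standalone sketch under that assumption (key, secret, and bucket name are placeholders):

<?php
// Sketch: enable bucket versioning when the status check reports it disabled.
// Assumes the AWS SDK for PHP 1.x (sdk.class.php) and AmazonS3::enable_versioning().
require_once 'sdk.class.php';

$s3 = new AmazonS3(array('key' => 'YOUR_AWS_ACCESS_KEY', 'secret' => 'YOUR_AWS_SECRET_KEY'));
$bucket = 'example-bucket';

$versioning = $s3->get_versioning_status($bucket);
if ($versioning->isOK() && (string) $versioning->body->Status !== 'Enabled') {
    $response = $s3->enable_versioning($bucket);
    if (!$response->isOK()) {
        // Surface the S3 error code and message, mirroring the handling above.
        throw new Exception((string) $response->body->Code . ': ' . (string) $response->body->Message);
    }
}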
Example #4
 /**
  * @param $job_object
  * @return bool
  */
 public function job_run_archive(&$job_object)
 {
     $job_object->substeps_todo = 2 + $job_object->backup_filesize;
     $job_object->log(sprintf(__('%d. Trying to send backup file to S3 Service&#160;&hellip;', 'backwpup'), $job_object->steps_data[$job_object->step_working]['STEP_TRY']), E_USER_NOTICE);
     try {
         $s3 = new AmazonS3(array('key' => $job_object->job['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($job_object->job['s3secretkey']), 'certificate_authority' => TRUE));
         $base_url = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']);
         if (stristr($base_url, 'amazonaws.com')) {
             $s3->set_region(str_replace(array('http://', 'https://'), '', $base_url));
         } else {
             $s3->set_hostname(str_replace(array('http://', 'https://'), '', $base_url));
             $s3->allow_hostname_override(FALSE);
             if (substr($base_url, -1) == '/') {
                 $s3->enable_path_style(TRUE);
             }
         }
         if (stristr($base_url, 'http://')) {
             $s3->disable_ssl();
         }
         if ($s3->if_bucket_exists($job_object->job['s3bucket'])) {
             $job_object->log(sprintf(__('Connected to S3 Bucket "%1$s" in %2$s', 'backwpup'), $job_object->job['s3bucket'], $base_url), E_USER_NOTICE);
         } else {
             $job_object->log(sprintf(__('S3 Bucket "%s" does not exist!', 'backwpup'), $job_object->job['s3bucket']), E_USER_ERROR);
             return TRUE;
         }
         //transfer file to S3
         $job_object->log(__('Starting upload to S3 Service&#160;&hellip;', 'backwpup'), E_USER_NOTICE);
         //Transfer Backup to S3
         if ($job_object->job['s3storageclass'] == 'REDUCED_REDUNDANCY') {
             //set reduced redundancy or not
             $storage = AmazonS3::STORAGE_REDUCED;
         } else {
             $storage = AmazonS3::STORAGE_STANDARD;
         }
         if (empty($job_object->job['s3ssencrypt'])) {
             $job_object->job['s3ssencrypt'] = NULL;
         }
         //set progress bar
         $s3->register_streaming_read_callback(array($job_object, 'curl_read_callback'));
         $result = $s3->create_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $job_object->backup_file, array('fileUpload' => $job_object->backup_folder . $job_object->backup_file, 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'encryption' => $job_object->job['s3ssencrypt']));
         if ($result->status >= 200 and $result->status < 300) {
             $job_object->substeps_done = 1 + $job_object->backup_filesize;
             $job_object->log(sprintf(__('Backup transferred to %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $job_object->backup_file), E_USER_NOTICE);
             if (!empty($job_object->job['jobid'])) {
                 BackWPup_Option::update($job_object->job['jobid'], 'lastbackupdownloadurl', network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $job_object->job['s3dir'] . $job_object->backup_file . '&jobid=' . $job_object->job['jobid']);
             }
         } else {
             $job_object->log(sprintf(__('Cannot transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result->status, $result->body), E_USER_ERROR);
         }
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), htmlentities($e->getMessage())), $e->getFile(), $e->getLine());
         return FALSE;
     }
     try {
         $backupfilelist = array();
         $filecounter = 0;
         $files = array();
         $objects = $s3->list_objects($job_object->job['s3bucket'], array('prefix' => $job_object->job['s3dir']));
         if (is_object($objects)) {
             foreach ($objects->body->Contents as $object) {
                 $file = basename((string) $object->Key);
                 $changetime = strtotime((string) $object->LastModified) + get_option('gmt_offset') * 3600;
                 if ($job_object->is_backup_archive($file)) {
                     $backupfilelist[$changetime] = $file;
                 }
                 $files[$filecounter]['folder'] = $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . dirname((string) $object->Key);
                 $files[$filecounter]['file'] = (string) $object->Key;
                 $files[$filecounter]['filename'] = basename($object->Key);
                 $files[$filecounter]['downloadurl'] = network_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . (string) $object->Key . '&jobid=' . $job_object->job['jobid'];
                 $files[$filecounter]['filesize'] = (int) $object->Size;
                 $files[$filecounter]['time'] = $changetime;
                 $filecounter++;
             }
         }
         if ($job_object->job['s3maxbackups'] > 0 && is_object($s3)) {
             //Delete old backups
             if (count($backupfilelist) > $job_object->job['s3maxbackups']) {
                 ksort($backupfilelist);
                 $numdeltefiles = 0;
                 while ($file = array_shift($backupfilelist)) {
                     if (count($backupfilelist) < $job_object->job['s3maxbackups']) {
                         break;
                     }
                     //delete files on S3
                     $delete_s3 = $s3->delete_object($job_object->job['s3bucket'], $job_object->job['s3dir'] . $file);
                     if ($delete_s3) {
                         foreach ($files as $key => $filedata) {
                             if ($filedata['file'] == $job_object->job['s3dir'] . $file) {
                                 unset($files[$key]);
                             }
                         }
                         $numdeltefiles++;
                     } else {
                         $job_object->log(sprintf(__('Cannot delete backup from %s.', 'backwpup'), $this->get_s3_base_url($job_object->job['s3region'], $job_object->job['s3base_url']) . '/' . $job_object->job['s3bucket'] . '/' . $job_object->job['s3dir'] . $file), E_USER_ERROR);
                     }
                 }
                 if ($numdeltefiles > 0) {
                     $job_object->log(sprintf(_n('One file deleted on S3 Bucket.', '%d files deleted on S3 Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                 }
             }
         }
         set_site_transient('backwpup_' . $job_object->job['jobid'] . '_s3', $files, 60 * 60 * 24 * 7);
     } catch (Exception $e) {
         $job_object->log(E_USER_ERROR, sprintf(__('S3 Service API: %s', 'backwpup'), htmlentities($e->getMessage())), $e->getFile(), $e->getLine());
         return FALSE;
     }
     $job_object->substeps_done = 2 + $job_object->backup_filesize;
     return TRUE;
 }
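The pruning loop in this example keys candidate backups by modification time, sorts ascending, and shifts the oldest entries off until the retention limit is respected. A standalone sketch of that keep-newest-N pattern (the function name is hypothetical):

<?php
// Keep only the newest $max backups: oldest timestamps are shifted off first.
function prune_backups(array $filesByTime, $max)
{
    ksort($filesByTime); // oldest modification time first
    $to_delete = array();
    while (count($filesByTime) > $max) {
        $to_delete[] = array_shift($filesByTime); // the caller deletes these remotely
    }
    return $to_delete;
}

// Example: keeps c.zip and b.zip, returns a.zip for deletion.
var_dump(prune_backups(array(100 => 'a.zip', 300 => 'c.zip', 200 => 'b.zip'), 2));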
Example #5
/*
 * What this script does:
 *
 * Adds caching headers to an existing object.
 */
require_once "sdk-1.5.7/sdk.class.php";
error_reporting(-1);
$config = parse_ini_file("aws.ini");
$awsKey = $config["aws.key"];
$awsSecret = $config["aws.secret"];
$bucket = "test.indigloo";
$name = "garbage_bin_wallpaper.jpg";
$options = array("key" => $awsKey, "secret" => $awsSecret, "default_cache_config" => '', "certificate_authority" => true);
$s3 = new AmazonS3($options);
$exists = $s3->if_bucket_exists($bucket);
if (!$exists) {
    printf("S3 bucket %s does not exists \n", $bucket);
    exit;
}
$mime = NULL;
$response = $s3->get_object_metadata($bucket, $name);
//get content-type of existing object
if ($response) {
    $mime = $response["ContentType"];
}
if (empty($mime)) {
    printf("No mime found for object \n");
    exit;
}
$source = array("bucket" => $bucket, "filename" => $name);
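The snippet stops after building the copy source. What presumably follows is a copy of the object onto itself with new headers, which is how SDK 1.x updates stored metadata in place; a hedged sketch of that step (the Cache-Control value is an assumption):

// Sketch of the likely continuation (not the original script's code):
// copy the object onto itself so S3 stores the replaced headers.
$dest = array("bucket" => $bucket, "filename" => $name);
$response = $s3->copy_object($source, $dest, array("headers" => array(
    "Content-Type" => $mime, // preserve the MIME type fetched above
    "Cache-Control" => "public, max-age=31536000", // hypothetical one-year policy
)));
if (!$response->isOK()) {
    printf("could not update caching headers for %s\n", $name);
}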
Example #6
File: dest_s3.php Project: hscale/webento
function dest_s3()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. Trying to send backup file to Amazon S3...', 'backwpup'), $WORKING['DEST_S3']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $s3 = new AmazonS3(array('key' => $STATIC['JOB']['awsAccessKey'], 'secret' => $STATIC['JOB']['awsSecretKey'], 'certificate_authority' => true));
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            trigger_error(sprintf(__('Connected to S3 Bucket: %s', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_NOTICE);
            //Transfer Backup to S3
            if ($STATIC['JOB']['awsrrs']) {
                //set reduced redundancy or not
                $storage = AmazonS3::STORAGE_REDUCED;
            } else {
                $storage = AmazonS3::STORAGE_STANDARD;
            }
            //set curl Progress bar
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Starting upload to Amazon S3...', 'backwpup'), E_USER_NOTICE);
            //transfer file to S3
            $result = $s3->create_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), $result["header"]["_info"]["url"]), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloads3&file=' . $STATIC['JOB']['awsdir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
                $WORKING['STEPSDONE'][] = 'DEST_S3';
                //set done
            } else {
                trigger_error(sprintf(__('Cannot transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('S3 Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            if ($STATIC['JOB']['awsmaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $s3->list_objects($STATIC['JOB']['awsBucket'], array('prefix' => $STATIC['JOB']['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['awsmaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($s3->delete_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $backupfilelist[$i])) {
                            //delete files on S3
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Cannot delete backup on S3://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['awsdir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on S3 Bucket', '%d files deleted on S3 Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Example #7
 /**
  * Check with S3 to confirm the currently selected bucket exists
  * 
  * @return bool
  */
 public function bucketExists()
 {
     return $this->s3->if_bucket_exists($this->bucket);
 }
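A minimal sketch of how such a wrapper might be wired up and used as a guard; the class name, constructor, and credentials here are hypothetical:

<?php
// Hypothetical wrapper class illustrating the guard pattern above.
require_once 'sdk.class.php';

class S3Storage
{
    private $s3;
    private $bucket;

    public function __construct($key, $secret, $bucket)
    {
        $this->s3 = new AmazonS3(array('key' => $key, 'secret' => $secret));
        $this->bucket = $bucket;
    }

    // Check with S3 to confirm the currently selected bucket exists
    public function bucketExists()
    {
        return $this->s3->if_bucket_exists($this->bucket);
    }
}

$storage = new S3Storage('YOUR_AWS_ACCESS_KEY', 'YOUR_AWS_SECRET_KEY', 'example-bucket');
if (!$storage->bucketExists()) {
    die('Bucket does not exist; refusing to continue.');
}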
Example #8
File: s3-bucket-copy.php Project: rjha/sc
            exit;
        }
    }
    $flag = $bor->IsTruncated;
    return $flag;
}
// start:script
$config = parse_ini_file("aws.ini");
$awsKey = $config["aws.key"];
$awsSecret = $config["aws.secret"];
//define:buckets
$source_bucket = "rjha";
$target_bucket = "test.indigloo";
$options = array("key" => $awsKey, "secret" => $awsSecret, "default_cache_config" => '', "certificate_authority" => true);
$s3 = new AmazonS3($options);
$exists = $s3->if_bucket_exists($source_bucket);
if (!$exists) {
    $message = sprintf("source bucket %s does not exists", $source_bucket);
    write_log($message);
    exit;
}
$exists = $s3->if_bucket_exists($target_bucket);
if (!$exists) {
    $message = sprintf("target bucket %s does not exists", $target_bucket);
    write_log($message);
    exit;
}
if (!file_exists("./aws.bucket.marker")) {
    // create marker file
    file_put_contents("./aws.bucket.marker", "");
}
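The marker file created above is how this copy script checkpoints its progress between runs. A hedged sketch of the paging loop such a marker typically drives, using SDK 1.x list_objects() with its 'marker' and 'max-keys' options (the helper name is hypothetical):

// Sketch: process one batch of keys, persisting the last key as the marker.
function copy_next_batch($s3, $source_bucket, $marker_file)
{
    $marker = trim(file_get_contents($marker_file));
    $response = $s3->list_objects($source_bucket, array(
        'marker' => $marker, // resume after the last key already processed
        'max-keys' => 100,   // small batches keep memory bounded
    ));
    $last_key = '';
    foreach ($response->body->Contents as $object) {
        $last_key = (string) $object->Key;
        // ... copy $last_key to the target bucket here ...
    }
    if ($last_key !== '') {
        file_put_contents($marker_file, $last_key); // checkpoint for the next run
    }
    return (string) $response->body->IsTruncated === 'true'; // more to fetch?
}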
Example #9
 public function removeFileFromApplicationBucket($filename, $prefix)
 {
     ProjectConfiguration::registerAws();
     $s3 = new AmazonS3();
     $bucket = ProjectConfiguration::getApplicationAmazonBucketName();
     if ($s3->if_bucket_exists($bucket)) {
         $response = $s3->delete_object($bucket, $prefix . '/' . $filename);
         if (!$response->isOK()) {
             throw new Exception("Error deleting file!");
         }
     } else {
         throw new Exception("Amazon bucket '{$bucket}' does not exist!");
     }
     return $response;
 }
Example #10
function dest_gstorage()
{
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. Trying to send backup to Google Storage...', 'backwpup'), $WORKING['DEST_GSTORAGE']['STEP_TRY']), E_USER_NOTICE);
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    need_free_memory(26214400 * 1.1);
    try {
        $gstorage = new AmazonS3(array('key' => $STATIC['JOB']['GStorageAccessKey'], 'secret' => $STATIC['JOB']['GStorageSecret'], 'certificate_authority' => true));
        //set up s3 for google
        $gstorage->set_hostname('storage.googleapis.com');
        $gstorage->allow_hostname_override(false);
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            trigger_error(sprintf(__('Connected to GStorage Bucket: %s', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_NOTICE);
            //set curl progress bar
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Starting upload to GStorage...', 'backwpup'), E_USER_NOTICE);
            //transfer file to GStorage
            $result = $gstorage->create_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => 'private', 'curlopts' => $curlops));
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'];
                $WORKING['STEPSDONE'][] = 'DEST_GSTORAGE';
                //set done
            } else {
                trigger_error(sprintf(__('Cannot transfer backup to GStorage! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('GStorage Bucket "%s" does not exist!', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    try {
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            if ($STATIC['JOB']['GStoragemaxbackups'] > 0) {
                //Delete old backups
                $backupfilelist = array();
                if (($contents = $gstorage->list_objects($STATIC['JOB']['GStorageBucket'], array('prefix' => $STATIC['JOB']['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['GStoragemaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($gstorage->delete_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i])) {
                            //delete files on GStorage
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Cannot delete backup on GStorage://%s', 'backwpup'), $STATIC['JOB']['GStorageBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on GStorage Bucket', '%d files deleted on GStorage Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
Example #11
 public function deleteEpisodeFileFromAmazon($filename = null, $bucket = null)
 {
     ProjectConfiguration::registerAws();
     $s3 = new AmazonS3();
     $bucket = is_null($bucket) ? $this->getSubreddit()->getBucketName() : $bucket;
     if (!$s3->if_bucket_exists($bucket)) {
         throw new Exception("Amazon bucket '{$bucket}' does not exist!");
     }
     $filename = is_null($filename) ? $this->getNiceFilename() : $filename;
     $response = $s3->delete_object($bucket, $filename);
     if (!$response->isOK()) {
         throw new Exception('Failed to remove file from Amazon!');
     }
 }
Example #12
        }
    }
    $flag = $bor->IsTruncated;
    return $flag;
}
// start:script
// define:variables
$config = parse_ini_file("aws.ini");
$awsKey = $config["aws.key"];
$awsSecret = $config["aws.secret"];
$source_bucket = "rjha";
$marker_file = "./aws.bucket.headers.marker";
$log_file = "./aws.bucket.headers.log";
$options = array("key" => $awsKey, "secret" => $awsSecret, "default_cache_config" => '', "certificate_authority" => true);
$s3 = new AmazonS3($options);
$exists = $s3->if_bucket_exists($source_bucket);
if (!$exists) {
    $message = sprintf("source bucket %s does not exists", $source_bucket);
    write_log($message);
    exit;
}
if (!file_exists($marker_file)) {
    // create new marker file
    file_put_contents($marker_file, "");
}
$fp_log = fopen($log_file, "a");
while (1) {
    sleep(1);
    $flag = update_object_headers($s3);
    $message = sprintf("more objects to fetch? [%s] \n", $flag);
    write_log($message);
Example #13
    $progress_bar->update(curl_getinfo($curl_handle, CURLINFO_SIZE_DOWNLOAD));
}
// Add some spacing above the progress bar.
echo PHP_EOL;
echo 'Downloading http://aws-sdk-for-php.s3.amazonaws.com/demo/big-buck-bunny.mp4' . PHP_EOL;
echo 'Writing to ' . realpath('./downloads') . '/big-buck-bunny.mp4' . PHP_EOL;
// Download a public object.
$response = $s3->get_object('aws-sdk-for-php', 'demo/big-buck-bunny.mp4', array('fileDownload' => './downloads/big-buck-bunny.mp4'));
// Add some spacing below the progress bar.
echo PHP_EOL;
/*%******************************************************************************************%*/
// UPLOAD SAMPLE FILE TO S3
$_100_percent = 0;
// Create a bucket to upload to
$bucket = 's3-progress-bar-' . strtolower($s3->key);
if (!$s3->if_bucket_exists($bucket)) {
    $response = $s3->create_bucket($bucket, AmazonS3::REGION_US_E1);
    if (!$response->isOK()) {
        die('Could not create `' . $bucket . '`.');
    }
}
// Instantiate a new progress bar.
// We won't know the max number of bytes until the download starts, so we'll handle that in our callback.
$progress_bar = new Console_ProgressBar('* %fraction% KB [%bar%] %percent%', '=>', ' ', 100, 1);
$progress_bar->UPDATED = false;
// Register a callback function to execute each time the local stream is read for upload.
$s3->register_streaming_read_callback('read_callback');
function read_callback($curl_handle, $file_handle, $length)
{
    // Import from global scope
    $progress_bar = $GLOBALS['progress_bar'];
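    // Hedged continuation sketch -- the original example is cut off here.
    // Mirroring the download callback fragment at the top of this example,
    // the body most likely feeds the cumulative upload byte count into the
    // PEAR Console_ProgressBar instance (update() is its real API).
    $progress_bar->update(curl_getinfo($curl_handle, CURLINFO_SIZE_UPLOAD) / 1024); // KB uploaded
}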
Example #14
 public function setLatinaPresenterDataFile($var_name = "presente_data", $market_id = 1)
 {
     try {
         $db = $this->Presenter->getDataSource();
         $query = $db->fetchAll("SELECT\n\t\t\t\t\t\tp.id as presenter_id\n\t\t\t\t\t\t, p.presenter_sequence_id as presenter_sequence_id\n\t\t\t\t\t\t, geo.lat as lat\n\t\t\t\t\t\t, geo.lng as lng\n\t\t\t\t\t\t, MAX(pt.presentertypes_id) as presntertype_id\n\t\t\t\t\t\t, s.abbrev as state\n\t\t\t\t\t\t, a.city as city\n\t\t\t\t\t\tFROM presenters p\n\t\t\t\t\t\tLEFT JOIN users u on u.id=p.user_id\n\t\t\t\t\t\tLEFT JOIN addresses a on a.user_id=u.id AND a.address_type_id = 1\n\t\t\t\t\t\tLEFT JOIN address_geocodes geo on geo.address_id=a.id\n\t\t\t\t\t\tLEFT JOIN states s on s.id = a.state_id\n\t\t\t\t\t\tLEFT JOIN presenter_types as pt on pt.presenter_id = p.id\n\t\t\t\t\t\tWHERE p.presenter_status_id = " . PresenterStatus::COMPLETE . "\n\t\t\t\t\t\t\tAND p.market_id = {$market_id}\n\t\t\t\t\t\t\t AND p.default_locale = 'es_US'\n\t\t\t\t\t\tGROUP BY p.id\n\t\t\t\t\t\tORDER BY a.id DESC, geo.id DESC");
         foreach ($query as $value) {
             $results[] = array('presenter_id' => $value['p']['presenter_id'], 'lat' => $value['geo']['lat'], 'lng' => $value['geo']['lng'], 'presentertype_id' => $value['0']['presntertype_id'], 'city' => $value['a']['city'], 'state' => $value['s']['state']);
         }
     } catch (Exception $e) {
         $this->out("Error getting list of Presenters.");
     }
     //create the file
     $string = 'var ' . $var_name . ' = {"code":200,"result":';
     $string .= json_encode($results);
     $string .= "};";
     if (YOUNIQUE_TESTSERVER === true) {
         $filename = "test_latina_presenter_data_{$market_id}";
     } else {
         $filename = "latina_presenter_data_{$market_id}";
     }
     try {
         $s3 = new AmazonS3(array("key" => "AKIAJVCBLQ3VQQS3DJHA", "secret" => "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"));
         $s3->disable_ssl_verification();
         $bucket = "younique-map-data";
         if ($s3->if_bucket_exists($bucket)) {
             $result = $s3->create_object($bucket, $filename, array('body' => $string, 'contentType' => 'text/plain', 'length' => strlen($string), 'acl' => AmazonS3::ACL_PUBLIC));
             if ($result->isOK()) {
                 $this->out("Presenter map updated for market " . $market_id);
             } else {
                 $this->out("AS3 error:" . var_export($result->body->Message, true));
             }
         } else {
             $this->out("AS3 error:" . "No bucket");
         }
     } catch (Exception $e) {
         $this->out("AS3 error:" . var_export($e->getMessage(), true));
     }
 }