Example #1
 /**
  * {@inheritdoc}
  */
 public function deliverFile($pathfile)
 {
     $this->initClient();
      $uploader = UploadBuilder::newInstance()
          ->setClient($this->client)
          ->setSource($pathfile)
          ->setBucket($this->bucketName)
          ->setKey($this->objectKey)
          ->setMinPartSize(self::$multipartUploadChunk * Size::MB)
          ->build();
     try {
         $uploader->upload();
         $this->delivered = true;
     } catch (MultipartUploadException $e) {
         $uploader->abort();
         throw new RuntimeException('File delivery failed', $e->getCode(), $e);
     } catch (AwsExceptionInterface $e) {
          throw new RuntimeException('An AmazonWebService exception occurred', $e->getCode(), $e);
      } catch (\Exception $e) {
          throw new RuntimeException('An unexpected error occurred', $e->getCode(), $e);
     }
     return $this->getId();
 }
Example #2
 protected function createTransferAction(\SplFileInfo $file)
 {
     // Open the file for reading
     $filename = $file->getPathName();
     if (!($resource = fopen($filename, 'r'))) {
         // @codeCoverageIgnoreStart
         throw new RuntimeException("Could not open {$filename} for reading");
         // @codeCoverageIgnoreEnd
     }
     $key = $this->options['source_converter']->convert($filename);
     $body = EntityBody::factory($resource);
     // Determine how the ACL should be applied
     if ($acl = $this->options['acl']) {
         $aclType = is_string($this->options['acl']) ? 'ACL' : 'ACP';
     } else {
         $acl = 'private';
         $aclType = 'ACL';
     }
     // Use a multi-part upload if the file is larger than the cutoff size and is a regular file
     if ($body->getWrapper() == 'plainfile' && $file->getSize() >= $this->options['multipart_upload_size']) {
          $builder = UploadBuilder::newInstance()
              ->setBucket($this->options['bucket'])
              ->setKey($key)
              ->setMinPartSize($this->options['multipart_upload_size'])
              ->setOption($aclType, $acl)
              ->setClient($this->options['client'])
              ->setSource($body)
              ->setConcurrency($this->options['concurrency']);
         $this->dispatch(self::BEFORE_MULTIPART_BUILD, array('builder' => $builder, 'file' => $file));
         return $builder->build();
     }
      return $this->options['client']->getCommand('PutObject', array(
          'Bucket' => $this->options['bucket'],
          'Key' => $key,
          'Body' => $body,
          $aclType => $acl,
      ));
 }
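A brief usage sketch (the caller, file path, and dispatching of the returned value are assumptions): createTransferAction() returns either a multipart transfer or a plain PutObject command, so a hypothetical caller could run it like this:

    // Hypothetical caller: execute whichever action createTransferAction() produced
    $action = $this->createTransferAction(new \SplFileInfo('/tmp/report.csv'));
    if ($action instanceof \Aws\Common\Model\MultipartUpload\TransferInterface) {
        $action->upload(); // large file: multipart transfer
    } else {
        $this->options['client']->execute($action); // small file: PutObject command
    }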
Example #3
 /**
  * @param File $f
  * @throws Exception
  */
 public function put(File $f)
 {
      $fp = fopen($f->getFullPath(), 'r');
      if (!$fp) {
          throw new Exception('Unable to open file: ' . $f->getFilename());
      }
      // The handle was only needed to verify the file is readable
      fclose($fp);
      $uploader = UploadBuilder::newInstance()
          ->setClient($this->client)
          ->setSource($f->getFullPath())
          ->setBucket($this->containerName)
          ->setKey($this->getRelativeLinkFor($f))
          ->build();
      try {
          $uploader->upload();
      } catch (MultipartUploadException $e) {
          $uploader->abort();
          // Re-throw so callers are notified, as documented by @throws
          throw new Exception('Multipart upload failed for ' . $f->getFilename(), 0, $e);
      }
 }
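A minimal usage sketch, assuming $storage is an already-configured instance of the class above (S3 client and container name wired in elsewhere); the file path is a placeholder:

    // Hypothetical usage: $storage is an instance of the class defining put() above
    $storage->put(new File('/var/data/archive.tar.gz'));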
Example #4
 public function writeStream($path, $stream)
 {
     /** @var \Aws\S3\Model\MultipartUpload\UploadBuilder $uploader */
     $uploadBuilder = \Aws\S3\Model\MultipartUpload\UploadBuilder::newInstance();
     $uploadBuilder->setClient($this->_client);
     $uploadBuilder->setSource($stream);
     $uploadBuilder->setBucket($this->_bucket);
     $uploadBuilder->setKey($this->_getAbsolutePath($path));
     /** @var \Aws\Common\Model\MultipartUpload\TransferInterface $uploader */
     $uploader = $uploadBuilder->build();
     try {
         $uploader->upload();
     } catch (\Aws\Common\Exception\MultipartUploadException $exception) {
         $uploader->abort();
         throw new CM_Exception('AWS S3 Upload to path failed', null, ['path' => $path, 'originalExceptionMessage' => $exception->getMessage()]);
     }
 }
Example #5
function uploadFileToS3($sourceFile, $folder = "")
{
    $resultado = array("res" => false);
    // Instantiate an S3 client
    $client = Aws::factory(getServerRoot() . '/modulos/aws/modelos/configurationFile.php')->get('s3');
    $bucket = getBucketName();
    $key = generateFileKey($sourceFile, $folder);
    while ($client->doesObjectExist($bucket, $key)) {
        //If that object already exists, generate another key
        //This is very unlikely given how keys are generated,
        //but it can happen
        $key = generateFileKey($sourceFile, $folder);
    }
    require_once 'funcionesPHP/funcionesParaArchivos.php';
    //If the file is larger than 10MB, use the
    //multipart upload function
    $megabytesLimit = 10 * 1048576;
    if (getFileSize($sourceFile) < $megabytesLimit) {
        $client->putObject(array('Bucket' => $bucket, 'Key' => $key, 'SourceFile' => $sourceFile, 'ACL' => 'public-read'));
        $resultado["res"] = true;
    } else {
        $uploader = UploadBuilder::newInstance()
            ->setClient($client)
            ->setSource($sourceFile)
            ->setBucket($bucket)
            ->setKey($key)
            ->setOption('ACL', 'public-read')
            ->build();
        try {
            $uploader->upload();
            $resultado["res"] = true;
        } catch (MultipartUploadException $e) {
            $uploader->abort();
            $resultado["res"] = false;
        }
    }
    if ($resultado['res']) {
        $resultado["bucket"] = $bucket;
        $resultado["key"] = $key;
        $prefijoLink = getPrefijoLink();
        $resultado["link"] = $prefijoLink . $bucket . "/" . $key;
    }
    return $resultado;
}
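A short usage sketch (source path and folder are placeholders); on success the returned array also carries the bucket, key, and public link:

    $resultado = uploadFileToS3('/tmp/video.mp4', 'videos');
    if ($resultado['res']) {
        echo 'Uploaded to ' . $resultado['link'];
    } else {
        echo 'Upload failed';
    }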
Example #6
 /**
  * Transport the file to a remote location.
  *
  * @param \Transit\File $file
  * @return string
  * @throws \Transit\Exception\TransportationException
  */
 public function transport(File $file)
 {
     $config = $this->_config;
     $key = ltrim($config['folder'], '/') . $file->basename();
     $response = null;
      // If larger than 100MB, split the upload into parts
     if ($file->size() >= 100 * Size::MB) {
          $uploader = UploadBuilder::newInstance()
              ->setClient($this->getClient())
              ->setSource($file->path())
              ->setBucket($config['bucket'])
              ->setKey($key)
              ->setMinPartSize(10 * Size::MB)
              ->build();
         try {
             $response = $uploader->upload();
         } catch (MultipartUploadException $e) {
             $uploader->abort();
         }
     } else {
          $response = $this->getClient()->putObject(array_filter(array(
              'Key' => $key,
              'Bucket' => $config['bucket'],
              'Body' => EntityBody::factory(fopen($file->path(), 'r')),
              'ACL' => $config['acl'],
              'ContentType' => $file->type(),
              'ServerSideEncryption' => $config['encryption'],
              'StorageClass' => $config['storage'],
              'Metadata' => $config['meta'],
          )));
     }
     // Return S3 URL if successful
     if ($response) {
         $file->delete();
         return sprintf('%s/%s/%s', S3Client::getEndpoint($this->getClient()->getDescription(), $config['region'], $config['scheme']), $config['bucket'], $key);
     }
     throw new TransportationException(sprintf('Failed to transport %s to Amazon S3', $file->basename()));
 }
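A hedged usage sketch; the construction of $transporter (credentials plus the bucket, folder, acl, region, and scheme config keys read above) is assumed to happen elsewhere, and the file path is a placeholder:

    // Hypothetical usage: $transporter wiring is assumed
    $url = $transporter->transport(new \Transit\File('/tmp/photo.jpg'));
    echo $url; // e.g. https://s3.amazonaws.com/my-bucket/uploads/photo.jpg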
Example #7
 public static function upload($bucket, $source, $target, $options = array())
 {
     $config = DiHandler::getConfig();
     $config_s3_buckets = $config->application->aws->services->s3->buckets;
     $bucket_info = null;
     if (property_exists($config_s3_buckets, $bucket)) {
         $bucket_info = $config_s3_buckets->{$bucket};
     } else {
         $bucket_info = $config_s3_buckets->tmp;
     }
     $s3 = DiHandler::getAwsS3();
      $uploader = UploadBuilder::newInstance()
          ->setClient($s3)
          ->setSource($source)
          ->setBucket($bucket_info->name)
          ->setKey($target)
          ->setOption('ACL', 'public-read')
          ->build();
     try {
         $result = $uploader->upload();
         if (isset($result['Location'])) {
             return str_replace('%2F', '/', $result['Location']);
         } else {
             return false;
         }
     } catch (MultipartUploadException $e) {
         $uploader->abort();
         return false;
     }
 }
Example #8
 /**
  * Upload a file, stream, or string to a bucket. If the upload size exceeds the specified threshold, the upload
  * will be performed using parallel multipart uploads.
  *
  * @param string $bucket  Bucket to upload the object to
  * @param string $key     Key of the object
  * @param mixed  $body    Object data to upload. Can be a Guzzle\Http\EntityBodyInterface, stream resource, or
  *                        string of data to upload.
  * @param string $acl     ACL to apply to the object
  * @param array  $options Custom options used when executing commands:
  *     - params: Custom parameters to use with the upload. The parameters must map to a PutObject
  *       or InitiateMultipartUpload operation parameters.
  *     - min_part_size: Minimum size to allow for each uploaded part when performing a multipart upload.
  *     - concurrency: Maximum number of concurrent multipart uploads.
  *     - before_upload: Callback to invoke before each multipart upload. The callback will receive a
  *       Guzzle\Common\Event object with context.
  *
  * @see Aws\S3\Model\MultipartUpload\UploadBuilder for more options and customization
  * @return \Guzzle\Service\Resource\Model Returns the modeled result of the performed operation
  */
 public function upload($bucket, $key, $body, $acl = 'private', array $options = array())
 {
     $body = EntityBody::factory($body);
      $options = Collection::fromConfig(array_change_key_case($options), array(
          'min_part_size' => AbstractMulti::MIN_PART_SIZE,
          'params' => array(),
          'concurrency' => $body->getWrapper() == 'plainfile' ? 3 : 1,
      ));
     if ($body->getSize() < $options['min_part_size']) {
         // Perform a simple PutObject operation
          return $this->putObject(array(
              'Bucket' => $bucket,
              'Key' => $key,
              'Body' => $body,
              'ACL' => $acl,
          ) + $options['params']);
     }
     // Perform a multipart upload if the file is large enough
      $transfer = UploadBuilder::newInstance()
          ->setBucket($bucket)
          ->setKey($key)
          ->setMinPartSize($options['min_part_size'])
          ->setConcurrency($options['concurrency'])
          ->setClient($this)
          ->setSource($body)
          ->setTransferOptions($options->toArray())
          ->addOptions($options['params'])
          ->setOption('ACL', $acl)
          ->build();
      // Attach the optional callback before the transfer runs
      if ($options['before_upload']) {
          $transfer->getEventDispatcher()->addListener(AbstractTransfer::BEFORE_PART_UPLOAD, $options['before_upload']);
      }
      return $transfer->upload();
 }
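A short usage sketch for this helper (bucket, keys, and paths are placeholders), showing how a small body falls through to PutObject while a larger stream is split according to the min_part_size and concurrency options documented above:

    // Small payload: handled by a single PutObject call
    $client->upload('my-bucket', 'notes.txt', 'hello world', 'private');

    // Large payload: split into concurrent multipart uploads
    $client->upload('my-bucket', 'backups/site.tar.gz', fopen('/tmp/site.tar.gz', 'r'), 'private', array(
        'min_part_size' => 10 * 1024 * 1024, // 10 MB parts
        'concurrency'   => 3,
        'params'        => array('Metadata' => array('Source' => 'example')),
    ));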
Example #9
 public function testMultipartUpload()
 {
     $this->client->waitUntil('bucket_exists', array('Bucket' => $this->bucket));
      self::log('Creating a 5MB object in /tmp/large-object.jpg');
     $handle = fopen('/tmp/large-object.jpg', 'w+');
     $part = str_repeat('.', 1000);
     for ($i = 0; $i < 1024 * 1024 * 5 / 1000; $i++) {
         fwrite($handle, $part);
     }
     fclose($handle);
     $history = new HistoryPlugin();
     $this->client->addSubscriber($history);
     self::log('Initiating transfer');
      $transfer = UploadBuilder::newInstance()
          ->setBucket($this->bucket)
          ->setKey('large_key')
          ->setSource(self::LARGE_OBJECT)
          ->calculateMd5(true)
          ->calculatePartMd5(true)
          ->setOption('ACL', 'public-read')
          ->setClient($this->client)
          ->build();
     $this->assertEquals(1, $history->count());
     $this->assertTrue($history->getLastRequest()->getQuery()->hasKey('uploads'));
     $this->assertEquals('image/jpeg', (string) $history->getLastRequest()->getHeader('Content-Type'));
     $history->clear();
     self::log('Uploading parts');
     $transfer->upload();
     $this->assertEquals(3, $history->count());
     $requests = $history->getIterator()->getArrayCopy();
     $this->assertEquals('PUT', $requests[0]->getMethod());
     $this->assertEquals('PUT', $requests[1]->getMethod());
     $this->assertEquals('POST', $requests[2]->getMethod());
 }
Example #10
 function _uploadFileCurl($fileSrcPath, $fileDestPath, $videoFileUniqName)
 {
     $output_filename = 'assets/upload/video/' . $videoFileUniqName;
     $ch = curl_init();
     curl_setopt($ch, CURLOPT_URL, $fileSrcPath);
     curl_setopt($ch, CURLOPT_VERBOSE, 1);
     curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
     curl_setopt($ch, CURLOPT_AUTOREFERER, false);
     curl_setopt($ch, CURLOPT_REFERER, "http://www.xcontest.org");
     curl_setopt($ch, CURLOPT_HTTP_VERSION, CURL_HTTP_VERSION_1_1);
     curl_setopt($ch, CURLOPT_HEADER, 0);
     $result = curl_exec($ch);
     curl_close($ch);
     //print_r($result);
     $fp = fopen($output_filename, 'w');
     fwrite($fp, $result);
     fclose($fp);
     if ($this->amazons3) {
         $fileInfo = $this->getFileInfo($output_filename);
         $mimeType = $fileInfo['mime_type'];
         $bucket = bucket;
         // Create a `Aws` object using a configuration file
         $aws = Aws::factory(APPPATH . 'config/amazoneS3.php');
         // Get the client from the service locator by namespace
         $client = $aws->get('s3');
         if (!$client->doesBucketExist($bucket)) {
             $client->createBucket(array('Bucket' => $bucket, 'ACL' => 'public-read'));
         }
          $uploader = UploadBuilder::newInstance()
              ->setClient($client)
              ->setSource($output_filename)
              ->setBucket($bucket)
              ->setKey($videoFileUniqName)
              ->setOption('ACL', 'public-read')
              ->setOption('ContentType', $mimeType)
              ->build();
         try {
             $uploader->upload();
             unlink($output_filename);
             return true;
         } catch (MultipartUploadException $e) {
             $uploader->abort();
             unlink($output_filename);
             return false;
         }
     } else {
         return true;
     }
 }
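A minimal sketch of the configuration file consumed by Aws::factory() above; the keys follow the SDK v2 config-file format and the credential values are placeholders:

    // config/amazoneS3.php (placeholder values)
    return array(
        'includes' => array('_aws'),
        'services' => array(
            'default_settings' => array(
                'params' => array(
                    'key'    => 'YOUR_ACCESS_KEY',
                    'secret' => 'YOUR_SECRET_KEY',
                    'region' => 'us-east-1',
                ),
            ),
        ),
    );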
Example #11
 public function testBuildsDifferentUploaderBasedOnConcurrency()
 {
     $state = new TransferState(UploadId::fromParams(array('Bucket' => 'foo', 'Key' => 'bar', 'UploadId' => 'baz')));
      $b = UploadBuilder::newInstance()
          ->setClient($this->getServiceBuilder()->get('s3'))
          ->setSource(EntityBody::factory(fopen(__FILE__, 'r')))
          ->resumeFrom($state);
     $this->assertInstanceOf('Aws\\S3\\Model\\MultipartUpload\\SerialTransfer', $b->build());
     $b->setConcurrency(2);
     $this->assertInstanceOf('Aws\\S3\\Model\\MultipartUpload\\ParallelTransfer', $b->build());
 }
Example #12
 /**
  * Get the S3 UploadBuilder.
  *
  * @return UploadBuilder
  */
 public function getUploadBuilder()
 {
     if (!$this->uploadBuilder) {
         $this->uploadBuilder = UploadBuilder::newInstance();
     }
     return $this->uploadBuilder;
 }
Example #13
 public function testDoesNotClobberContentTypeHeader()
 {
     $client = $this->getServiceBuilder()->get('s3', true);
     $mock = $this->setMockResponse($client, array('s3/initiate_multipart_upload'));
      $transfer = UploadBuilder::newInstance()
          ->setBucket('foo')
          ->setKey('bar')
          ->setClient($client)
          ->setSource(__FILE__)
          ->setHeaders(array('Content-Type' => 'x-foo'))
          ->build();
     $requests = $mock->getReceivedRequests();
     $this->assertEquals(1, count($requests));
     $this->assertEquals('x-foo', (string) $requests[0]->getHeader('Content-Type'));
 }
Example #14
     // Ex. php aisS3client.php upload:big aisavent-backups 5MB_file.txt 5MB.txt private
     // ----------------------------------------------------------------------
      $bucketname = $argv[2]; // Bucket name
      $filename = $argv[3]; // New file name (S3 key)
      $fileloc = $aws_default_uploadfrom . '/' . $argv[4]; // Local file to upload
      $fileacl = $argv[5]; // private | public-read | public-read-write | authenticated-read | bucket-owner-read | bucket-owner-full-control
      if (!$bucketname || !$filename || !$fileloc || !$fileacl) {
          echo "Dude! Your arguments don't seem to be OK!";
         die;
     }
     try {
          $uploader = UploadBuilder::newInstance()
              ->setClient($s3Client)
              ->setSource($fileloc)
              ->setBucket($bucketname)
              ->setKey($filename)
              ->setConcurrency(3)
              ->setOption('ACL', $fileacl)
              ->setOption('Metadata', array('Agent' => 'aisS3Client'))
              ->setOption('CacheControl', 'max-age=3600')
              ->build();
         // Perform the upload. Abort the upload if something goes wrong
         try {
             $uploader->upload();
             echo "File Uploaded : " . $fileloc;
         } catch (MultipartUploadException $e) {
             $uploader->abort();
             echo "File Did not Uploaded : " . $fileloc;
         }
     } catch (\Aws\S3\Exception\S3Exception $e) {
         echo $e->getMessage();
     }
     break;
     // ----------------------------------------------------------------------
 // ----------------------------------------------------------------------
 case 'folder':
Example #15
 function backup()
 {
      DHDO::logger('Beginning Backup.');
     global $wpdb;
      if (!is_dir(WP_CONTENT_DIR . '/upgrade/')) {
          DHDO::logger('Upgrade folder missing. This will cause serious issues with WP in general, so we will create it for you.');
          mkdir(WP_CONTENT_DIR . '/upgrade/');
      }
     }
     // Pull in data for what to backup
     $sections = get_option('dh-do-backupsection');
     if (!$sections) {
         $sections = array();
     }
     $file = WP_CONTENT_DIR . '/upgrade/dreamobject-backups.zip';
     $fileurl = content_url() . '/upgrade/dreamobject-backups.zip';
     // Pre-Cleanup
     if (file_exists($file)) {
         @unlink($file);
         DHDO::logger('Leftover zip file found, deleting ' . $file . ' ...');
     }
     try {
         $zip = new ZipArchive($file);
         $zaresult = true;
         DHDO::logger('ZipArchive found and will be used for backups.');
     } catch (Exception $e) {
         $error_string = $e->getMessage();
         $zip = new PclZip($file);
         DHDO::logger('ZipArchive not found. Error: ' . $error_string);
         DHDO::logger('PclZip will be used for backups.');
         require_once ABSPATH . '/wp-admin/includes/class-pclzip.php';
         $zaresult = false;
     }
     $backups = array();
     // All me files!
     if (in_array('files', $sections)) {
         DHDO::logger('Calculating backup size...');
         $trimdisk = WP_CONTENT_DIR;
         $diskcmd = sprintf("du -s %s", WP_CONTENT_DIR);
         $diskusage = exec($diskcmd);
         $diskusage = trim(str_replace($trimdisk, '', $diskusage));
         DHDO::logger(size_format($diskusage * 1024) . ' of diskspace will be processed.');
         if ($diskusage < 2000 * 1024) {
             $backups = array_merge($backups, DHDO::rscandir(WP_CONTENT_DIR));
             DHDO::logger(count($backups) . ' files added to backup list.');
         } else {
             DHDO::logger('ERROR! PHP is unable to backup your wp-content folder. Please consider cleaning out unused files (like plugins and themes).');
         }
         if (file_exists(ABSPATH . 'wp-config.php')) {
             $backups[] = ABSPATH . 'wp-config.php';
             DHDO::logger('wp-config.php added to backup list.');
         }
     }
     // And me DB!
     if (in_array('database', $sections)) {
         set_time_limit(300);
         $sqlhash = wp_hash(wp_rand());
         $sqlfile = WP_CONTENT_DIR . '/upgrade/' . $sqlhash . '.sql';
         $tables = $wpdb->get_col("SHOW TABLES LIKE '" . $wpdb->prefix . "%'");
         $tables_string = implode(' ', $tables);
         // Pre cleanup
         if (file_exists($sqlfile)) {
             @unlink($sqlfile);
             DHDO::logger('Leftover sql file found, deleting ' . $sqlfile . ' ...');
         }
         $dbcmd = sprintf("mysqldump -h'%s' -u'%s' -p'%s' %s %s --single-transaction 2>&1 >> %s", DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, $tables_string, $sqlfile);
         exec($dbcmd);
         $sqlsize = size_format(@filesize($sqlfile));
         DHDO::logger('SQL file created: ' . $sqlfile . ' (' . $sqlsize . ').');
         $backups[] = $sqlfile;
         DHDO::logger('SQL added to backup list.');
     }
     if (!empty($backups)) {
         set_time_limit(300);
         // Increased timeout to 5 minutes. If the zip takes longer than that, I have a problem.
          if (!$zaresult) {
             DHDO::logger('Creating zip file using PclZip.');
             DHDO::logger('NOTICE: If the log stops here, PHP failed to create a zip of your wp-content folder. Please consider increasing the server\'s PHP memory, RAM or CPU.');
             $zip->create($backups);
         } else {
             DHDO::logger('Creating zip file using ZipArchive.');
             DHDO::logger('NOTICE: If the log stops here, PHP failed to create a zip of your wp-content folder. Please consider cleaning out unused files (like plugins and themes), or increasing the server\'s PHP memory, RAM or CPU.');
             try {
                 $zip->open($file, ZipArchive::CREATE);
                 $trimpath = ABSPATH;
                 foreach ($backups as $backupfiles) {
                     if (strpos($backupfiles, DIRECTORY_SEPARATOR . 'cache' . DIRECTORY_SEPARATOR) === false) {
                         $zip->addFile($backupfiles, 'dreamobjects-backup' . str_replace($trimpath, '/', $backupfiles));
                         //DHDO::logger( $backupfiles );
                     }
                 }
                 $zip->close();
             } catch (Exception $e) {
                 $error_string = $e->getMessage();
                 DHDO::logger('ZipArchive failed to complete: ' . $error_string);
             }
         }
         if (@file_exists($file)) {
             DHDO::logger('Calculating zip file size ...');
             $zipsize = size_format(@filesize($file));
             DHDO::logger('Zip file generated: ' . $file . ' (' . $zipsize . ').');
         } else {
             @unlink($file);
             DHDO::logger('Zip file failed to generate. Nothing will be backed up.');
         }
         // Delete SQL
         if (file_exists($sqlfile)) {
             @unlink($sqlfile);
             DHDO::logger('Deleting SQL file: ' . $sqlfile . ' ...');
         }
         // Upload
         if (@file_exists($file)) {
             $s3 = AwsS3DHDO::factory(array('key' => get_option('dh-do-key'), 'secret' => get_option('dh-do-secretkey'), 'base_url' => get_option('dh-do-endpoint')));
             $bucket = get_option('dh-do-bucket');
             $parseUrl = parse_url(trim(home_url()));
             $url = $parseUrl['host'];
             if (isset($parseUrl['path'])) {
                 $url .= $parseUrl['path'];
             }
             // Rename file
             $newname = $url . '/' . date_i18n('Y-m-d-His', current_time('timestamp')) . '.zip';
             DHDO::logger('New filename ' . $newname . '.');
             // Uploading
             set_time_limit(180);
             DHDO::logger('Beginning upload to Object Store servers.');
             // Check the size of the file before we upload, in order to compensate for large files
             if (@filesize($file) >= 100 * 1024 * 1024) {
                 // Files larger than 100megs go through Multipart
                 DHDO::logger('Filesize is over 100megs, using Multipart uploader.');
                 // High Level
                 DHDO::logger('Prepare the upload parameters and upload parts in 25M chunks.');
                  $uploader = UploadBuilder::newInstance()
                      ->setClient($s3)
                      ->setSource($file)
                      ->setBucket($bucket)
                      ->setKey($newname)
                      ->setMinPartSize(25 * 1024 * 1024)
                      ->setOption('Metadata', array(
                          'UploadedBy' => 'DreamObjectsBackupPlugin',
                          'UploadedDate' => date_i18n('Y-m-d-His', current_time('timestamp')),
                      ))
                      ->setOption('ACL', 'private')
                      ->setConcurrency(3)
                      ->build();
                 // This will be called in the following try
                 $uploader->getEventDispatcher()->addListener('multipart_upload.after_part_upload', function ($event) {
                     DHDO::logger('Part ' . $event["state"]->count() . ' uploaded ...');
                 });
                 try {
                     DHDO::logger('Begin upload. This may take a while (5min for every 75 megs or so).');
                     set_time_limit(180);
                     $uploader->upload();
                     DHDO::logger('Upload complete');
                 } catch (MultipartUploadException $e) {
                     $uploader->abort();
                     DHDO::logger('Upload failed: ' . $e->getMessage());
                 }
             } else {
                 // If it's under 100megs, do it the old way
                 DHDO::logger('Filesize is under 100megs. This will be less spammy.');
                 set_time_limit(180);
                 // 3 min
                 try {
                      $result = $s3->putObject(array(
                          'Bucket' => $bucket,
                          'Key' => $newname,
                          'SourceFile' => $file,
                          'ContentType' => 'application/zip',
                          'ACL' => 'private',
                          'Metadata' => array(
                              'UploadedBy' => 'DreamObjectsBackupPlugin',
                              'UploadedDate' => date_i18n('Y-m-d-His', current_time('timestamp')),
                          ),
                      ));
                     DHDO::logger('Upload complete');
                 } catch (S3Exception $e) {
                     DHDO::logger('Upload failed: ' . $e->getMessage());
                 }
             }
             /*
             				// https://dreamxtream.wordpress.com/2013/10/29/aws-php-sdk-logging-using-guzzle/
             				$s3->getEventDispatcher()->removeSubscriber($logPlugin);
             */
         } else {
             DHDO::logger('Nothing to upload.');
         }
         // Cleanup
         if (file_exists($file)) {
             @unlink($file);
             DHDO::logger('Deleting zip file: ' . $file . ' ...');
         }
         if (file_exists($sqlfile)) {
             @unlink($sqlfile);
             DHDO::logger('Deleting SQL file: ' . $sqlfile . ' ...');
         }
     }
     // Cleanup Old Backups
     DHDO::logger('Checking for backups to be deleted.');
      if (get_option('dh-do-retain') && get_option('dh-do-retain') != 'all') {
         $num_backups = get_option('dh-do-retain');
         $s3 = AwsS3DHDO::factory(array('key' => get_option('dh-do-key'), 'secret' => get_option('dh-do-secretkey'), 'base_url' => get_option('dh-do-endpoint')));
         $bucket = get_option('dh-do-bucket');
         $parseUrl = parse_url(trim(home_url()));
         $prefixurl = $parseUrl['host'];
         if (isset($parseUrl['path'])) {
             $prefixurl .= $parseUrl['path'];
         }
         $backups = $s3->getIterator('ListObjects', array('Bucket' => $bucket, "Prefix" => $prefixurl));
         if ($backups !== false) {
             $backups = $backups->toArray();
             krsort($backups);
             $count = 0;
             foreach ($backups as $object) {
                 if (++$count > $num_backups) {
                     $s3->deleteObject(array('Bucket' => $bucket, 'Key' => $object['Key']));
                     DHDO::logger('Removed backup ' . $object['Key'] . ' from DreamObjects, per user retention choice.');
                 }
             }
         }
     } else {
          DHDO::logger('Per user retention choice, not deleting a single old backup.');
     }
     DHDO::logger('Backup Complete.');
     DHDO::logger('');
 }
Example #16
 /**
  * Multi-part upload
  * @return (json)
  */
 public function upload_file()
 {
     // Initialize variables
     $this->error = false;
     $this->config = $this->tasks->merge_configs(Request::get('destination'));
     // S3 client
     self::load_s3();
     foreach ($_FILES as $file) {
         // Set all file data here
         $this->data = $this->tasks->get_file_data_array();
         $this->data['filename'] = File::cleanFilename($file['name']);
         $this->data['filetype'] = $file['type'];
         $this->data['mime_type'] = $file['type'];
         $this->data['size_bytes'] = $file['size'];
         $this->data['extension'] = File::getExtension($this->data['filename']);
         $this->data['is_image'] = self::is_image($this->data['extension']);
         $this->data['tmp_name'] = $file['tmp_name'];
         $this->data['size'] = File::getHumanSize($this->data['size_bytes']);
         $this->data['size_kilobytes'] = number_format($this->data['size_bytes'] / 1024, 2);
         $this->data['size_megabytes'] = number_format($this->data['size_bytes'] / 1048576, 2);
         $this->data['size_gigabytes'] = number_format($this->data['size_bytes'] / 1073741824, 2);
         // Check that the file extension is allowed (Not case-sensitive).
         // Need an array of content types to proceed.
         if (!self::extension_is_allowed($this->data['extension'])) {
             // Get the html template
             $file_not_allowed_template = File::get(__DIR__ . '/views/error-not-allowed.html');
             $data = array('extension' => $this->data['extension']);
             echo self::build_response_json(false, true, FILECLERK_DISALLOWED_FILETYPE, 'Filetype not allowed.', 'dialog', array('extension' => $this->data['extension']), null, Parse::template($file_not_allowed_template, $data));
             exit;
         }
         // Set the full S3 path to the bucket/key
         $this->data['s3_path'] = Url::tidy('s3://' . join('/', array($this->config['bucket'], $this->config['directory'])));
         // Check if the file already exists
         if (self::file_exists($this->data['s3_path'], $this->data['filename'])) {
             $this->overwrite = Request::get('overwrite');
             $file_exists_template = File::get(__DIR__ . '/views/file-exists.html');
             if (is_null($this->overwrite)) {
                  $data = array('filename' => $this->data['filename']);
                 $html = Parse::template($file_exists_template, $data);
                 echo self::build_response_json(false, true, FILECLERK_ERROR_FILE_EXISTS, FILECLERK_ERROR_FILE_EXISTS_MSG, 'dialog', $data, null, $html);
                 exit;
             } elseif ($this->overwrite === 'false' || !$this->overwrite || $this->overwrite === 0) {
                 $this->data['filename'] = self::increment_filename_unix($this->data['filename']);
             }
         }
         // S3 key
         $this->data['key'] = Url::tidy('/' . $this->config['directory'] . '/' . $this->data['filename']);
         // Set the full path for the file.
         $this->data['fullpath'] = Url::tidy(self::get_url_prefix() . '/' . $this->data['filename']);
         // Build up the upload object
          $uploader = UploadBuilder::newInstance()
              ->setClient($this->client)
              ->setSource($this->data['tmp_name'])
              ->setBucket($this->config['bucket'])
              ->setKey($this->data['key'])
              ->setOption('CacheControl', 'max-age=3600')
              ->setOption('ACL', $this->config['permissions'] ? $this->config['permissions'] : CannedAcl::PUBLIC_READ)
              ->setOption('ContentType', $this->data['filetype'])
              ->setConcurrency(3)
              ->build();
         // Do it.
         try {
             // Try the upload
             $upload = $uploader->upload();
             /**
              * We have the following keys available to us after a successful upload
              * $upload = array(
              *      ['Location'] => https://mreiner-test.s3.amazonaws.com/238355-f520.jpg
              *      ['Bucket'] => mreiner-test
              *      ['Key'] => 238355-f520.jpg
              *      ['ETag'] => "a81b65938b1ec1cef0a09a497e3850f8-1"
              *      ['Expiration'] =>
              *      ['ServerSideEncryption'] =>
              *      ['VersionId'] =>
              *      ['RequestId'] => 29482D41515855AA
              * );
              */
             // Set these values from the S3 response
             $this->data['url'] = $upload['Location'];
             $this->data['key'] = $upload['Key'];
             $this->data['bucket'] = $upload['Bucket'];
         } catch (InvalidArgumentException $e) {
             echo self::build_response_json(false, true, FILECLERK_S3_ERROR, $e->getMessage(), 'error', null, null, null);
             exit;
         } catch (MultipartUploadException $e) {
             $uploader->abort();
             $this->error = true;
             $error_message = $e->getMessage();
             $errors = array('error' => $e->getMessage());
             echo self::build_response_json(false, true, FILECLERK_S3_ERROR, $e->getMessage(), 'error', $errors, null, null);
             exit;
         }
     }
     // Setup the return
     if ($this->error) {
          header('Content-Type: application/json');
         echo self::build_response_json(false, true, FILECLERK_FILE_UPLOAD_FAILED, $error_message);
         exit;
     } else {
         // Response
          header('Content-Type: application/json');
          echo self::build_response_json(true, false, FILECLERK_FILE_UPLOAD_SUCCESS, 'File ' . $this->data['filename'] . ' uploaded successfully!', null, $this->data, null, null);
         exit;
     }
 }