protected function createTransferAction(\SplFileInfo $file)
 {
     // Open the file for reading
     $filename = $file->getRealPath() ?: $file->getPathName();
     if (!($resource = fopen($filename, 'r'))) {
         // @codeCoverageIgnoreStart
         throw new RuntimeException('Could not open ' . $file->getPathname() . ' for reading');
         // @codeCoverageIgnoreEnd
     }
     $key = $this->options['source_converter']->convert($filename);
     $body = EntityBody::factory($resource);
     // Determine how the ACL should be applied
     if ($acl = $this->options['acl']) {
         $aclType = is_string($this->options['acl']) ? 'ACL' : 'ACP';
     } else {
         $acl = 'private';
         $aclType = 'ACL';
     }
     // Use a multi-part upload if the file is larger than the cutoff size and is a regular file
      if ($body->getWrapper() == 'plainfile' && $file->getSize() >= $this->options['multipart_upload_size']) {
          $builder = UploadBuilder::newInstance()
              ->setBucket($this->options['bucket'])
              ->setKey($key)
              ->setMinPartSize($this->options['multipart_upload_size'])
              ->setOption($aclType, $acl)
              ->setClient($this->options['client'])
              ->setSource($body)
              ->setConcurrency($this->options['concurrency']);
          $this->dispatch(self::BEFORE_MULTIPART_BUILD, array('builder' => $builder, 'file' => $file));
          return $builder->build();
      }
      return $this->options['client']->getCommand('PutObject', array(
          'Bucket' => $this->options['bucket'],
          'Key'    => $key,
          'Body'   => $body,
          $aclType => $acl,
      ));
 }
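Note that the two branches above return different kinds of objects: the multipart branch returns a built transfer object with its own upload() method, while the fallback returns a plain PutObject command. The following is a minimal caller sketch, not part of the original class: the method name transferFile is hypothetical, and the type check against Guzzle 3's CommandInterface is an assumption about how a caller might distinguish the two return values.

 // Hypothetical sketch (not in the original class): execute whatever
 // createTransferAction() returned.
 protected function transferFile(\SplFileInfo $file)
 {
     $action = $this->createTransferAction($file);

     if ($action instanceof \Guzzle\Service\Command\CommandInterface) {
         // Small file: a single PutObject request
         return $action->execute();
     }

     // Large file: a built multipart transfer with its own upload() method
     return $action->upload();
 }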
Example #2
 /**
  * Upload a file, stream, or string to a bucket. If the upload size exceeds the specified threshold, the upload
  * will be performed using parallel multipart uploads.
  *
  * @param string $bucket  Bucket to upload the object to
  * @param string $key     Key of the object
  * @param mixed  $body    Object data to upload. Can be a Guzzle\Http\EntityBodyInterface, stream resource, or
  *                        string of data to upload.
  * @param string $acl     ACL to apply to the object
  * @param array  $options Custom options used when executing commands:
  *     - params: Custom parameters to use with the upload. The parameters must map to PutObject
  *       or InitiateMultipartUpload operation parameters.
  *     - min_part_size: Minimum size to allow for each uploaded part when performing a multipart upload.
  *     - concurrency: Maximum number of concurrent multipart uploads.
  *     - before_upload: Callback to invoke before each multipart upload. The callback will receive a
  *       Guzzle\Common\Event object with context.
  *
  * @see Mss\S3\Model\MultipartUpload\UploadBuilder for more options and customization
  * @return \Guzzle\Service\Resource\Model Returns the modeled result of the performed operation
  */
 public function upload($bucket, $key, $body, $acl = 'private', array $options = array())
 {
     $body = EntityBody::factory($body);
      $options = Collection::fromConfig(array_change_key_case($options), array(
          'min_part_size' => AbstractMulti::MIN_PART_SIZE,
          'params'        => array(),
          'concurrency'   => $body->getWrapper() == 'plainfile' ? 3 : 1,
      ));
     if ($body->getSize() < $options['min_part_size']) {
         // Perform a simple PutObject operation
         return $this->putObject(array('Bucket' => $bucket, 'Key' => $key, 'Body' => $body, 'ACL' => $acl) + $options['params']);
     }
     // Perform a multipart upload if the file is large enough
      $transfer = UploadBuilder::newInstance()
          ->setBucket($bucket)
          ->setKey($key)
          ->setMinPartSize($options['min_part_size'])
          ->setConcurrency($options['concurrency'])
          ->setClient($this)
          ->setSource($body)
          ->setTransferOptions($options->toArray())
          ->addOptions($options['params'])
          ->setOption('ACL', $acl)
          ->build();
     if ($options['before_upload']) {
         $transfer->getEventDispatcher()->addListener(AbstractTransfer::BEFORE_PART_UPLOAD, $options['before_upload']);
     }
     return $transfer->upload();
 }
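As a usage illustration only: the bucket name, key, file path, option values, and the ContentType parameter below are made-up examples, and $client is assumed to be an instantiated client that exposes the upload() helper above. Parameter names inside 'params' are assumed to follow the PutObject / InitiateMultipartUpload operations, as the docblock describes.

 // Hypothetical usage sketch for the upload() helper above.
 $result = $client->upload(
     'my-bucket',                              // assumed bucket name
     'backups/archive.tar.gz',                 // assumed object key
     fopen('/tmp/archive.tar.gz', 'r'),        // stream resource; a string or EntityBody also works
     'public-read',
     array(
         'min_part_size' => 10 * 1024 * 1024,  // use 10 MB parts instead of the default minimum
         'concurrency'   => 3,                 // upload up to three parts in parallel
         'params'        => array('ContentType' => 'application/gzip'),
         'before_upload' => function ($event) {
             // invoked before each part upload with a Guzzle\Common\Event
         },
     )
 );
 // $result is a Guzzle\Service\Resource\Model describing the completed upload.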
 public function testMultipartUpload()
 {
     $this->client->waitUntil('bucket_exists', array('Bucket' => $this->bucket));
      self::log('Creating a ~5MB object in /tmp/large-object.jpg');
     $handle = fopen('/tmp/large-object.jpg', 'w+');
     $part = str_repeat('.', 1000);
     for ($i = 0; $i < 1024 * 1024 * 5 / 1000; $i++) {
         fwrite($handle, $part);
     }
     fclose($handle);
     $history = new HistoryPlugin();
     $this->client->addSubscriber($history);
     self::log('Initiating transfer');
      $transfer = UploadBuilder::newInstance()
          ->setBucket($this->bucket)
          ->setKey('large_key')
          ->setSource(self::LARGE_OBJECT)
          ->calculateMd5(true)
          ->calculatePartMd5(true)
          ->setOption('ACL', 'public-read')
          ->setClient($this->client)
          ->build();
     $this->assertEquals(1, $history->count());
     $this->assertTrue($history->getLastRequest()->getQuery()->hasKey('uploads'));
     $this->assertEquals('image/jpeg', (string) $history->getLastRequest()->getHeader('Content-Type'));
     $history->clear();
     self::log('Uploading parts');
     $transfer->upload();
     $this->assertEquals(3, $history->count());
     $requests = $history->getIterator()->getArrayCopy();
     $this->assertEquals('PUT', $requests[0]->getMethod());
     $this->assertEquals('PUT', $requests[1]->getMethod());
     $this->assertEquals('POST', $requests[2]->getMethod());
 }
 public function testDoesNotClobberContentTypeHeader()
 {
     $client = $this->getServiceBuilder()->get('s3', true);
     $mock = $this->setMockResponse($client, array('s3/initiate_multipart_upload'));
      $transfer = UploadBuilder::newInstance()
          ->setBucket('foo')
          ->setKey('bar')
          ->setClient($client)
          ->setSource(__FILE__)
          ->setHeaders(array('Content-Type' => 'x-foo'))
          ->build();
     $requests = $mock->getReceivedRequests();
     $this->assertEquals(1, count($requests));
     $this->assertEquals('x-foo', (string) $requests[0]->getHeader('Content-Type'));
 }