/**
 * {@inheritdoc}
 */
public function get($path)
{
    try {
        $model = $this->s3->getObject(['Bucket' => $this->bucket, 'Key' => $path]);

        return (string) $model->get('Body');
    } catch (S3Exception $e) {
        if ($e->getAwsErrorCode() == 'NoSuchKey') {
            throw Exception\NotFoundException::pathNotFound($path, $e);
        }

        throw Exception\StorageException::getError($path, $e);
    }
}
/**
 * Download the file at the given S3 path.
 *
 * @param string $path
 * @return string
 */
public function download($path)
{
    $result = $this->client->getObject(array('Bucket' => $this->bucket, 'Key' => $path));

    // Read the body stream in 1 KB chunks.
    $response = '';
    $result['Body']->rewind();
    while ($data = $result['Body']->read(1024)) {
        $response .= $data;
    }

    return $response;
}
/**
 * Download $remoteFile to $localFile only if the S3 copy is newer than the local copy.
 *
 * @param string $localFile
 * @param string $remoteFile
 * @param int    $perm
 * @return string|null Local path on success, null on download error
 */
function getFileIfNewest($localFile, $remoteFile, $perm = 0777)
{
    $this->lastRemoteFile = $remoteFile;
    $download = false;

    if (!file_exists($localFile)) {
        $download = true;
    } else {
        // Compare the remote LastModified date with the local mtime.
        $iterator = $this->s3Client->getIterator('ListObjects', array(
            'Bucket' => $this->bucket,
            'Prefix' => $remoteFile,
            'Delimiter' => '/',
        ));
        foreach ($iterator as $object) {
            $remoteDate = date("U", strtotime($object['LastModified']));
            $localDate = filemtime($localFile);
            if ($remoteDate > $localDate) {
                $download = true;
            }
            break;
        }
    }

    if ($download) {
        try {
            $result = $this->s3Client->getObject(array('Bucket' => $this->bucket, 'Key' => $remoteFile));
        } catch (\Exception $e) {
            error_log("Error recovering {$remoteFile} from S3: " . $e->getMessage());

            return null;
        }
        file_put_contents($localFile, $result['Body']);
        chmod($localFile, $perm);
        touch($localFile, strtotime($result['LastModified']));
    }

    return $localFile;
}
/**
 * {@inheritDoc}
 *
 * @link http://stackoverflow.com/questions/13686316/grabbing-contents-of-object-from-s3-via-php-sdk-2
 */
public function fetch($path)
{
    $result = $this->s3->getObject(array(
        'Bucket' => $this->getConfigRelativeKey('s3Bucket'),
        'Key' => $path,
    ));

    $body = $result->get('Body');
    $body->rewind();
    $content = $body->read($result['ContentLength']);

    return $content;
}
/**
 * {@inheritdoc}
 */
public function get($path)
{
    try {
        $model = $this->s3->getObject(['Bucket' => $this->bucket, 'Key' => $path]);

        return (string) $model->get('Body');
    } catch (NoSuchKeyException $e) {
        throw Exception\NotFoundException::pathNotFound($path, $e);
    } catch (AwsExceptionInterface $e) {
        throw Exception\StorageException::getError($path, $e);
    }
}
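// A minimal usage sketch (not from the original sources) showing how a caller might rely on the
// exception mapping in the get() adapters above. The $storage variable and the key are hypothetical.
try {
    $contents = $storage->get('reports/2023-01.csv');
    echo strlen($contents) . " bytes read\n";
} catch (Exception\NotFoundException $e) {
    // A missing key (NoSuchKey) surfaces as a not-found error rather than a generic S3 failure.
    echo "File not found\n";
} catch (Exception\StorageException $e) {
    // Any other S3 error (credentials, network, permissions) ends up here.
    echo "Storage error: " . $e->getMessage() . "\n";
}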
/**
 * @depends testHeadBucket
 */
public function testPutAndListObjects()
{
    $command = $this->client->getCommand('PutObject', array(
        'Bucket' => $this->bucket,
        'Key' => self::TEST_KEY,
        'ContentMD5' => true,
        'Body' => 'åbc 123',
        'ContentType' => 'application/foo',
        'ACP' => $this->acp,
        'Metadata' => array('test' => '123', 'abc' => '@pples'),
    ));

    self::log("Uploading an object");
    $result = $command->execute();
    // Make sure the expect header wasn't sent
    $this->assertNull($command->getRequest()->getHeader('Expect'));
    $this->assertInstanceOf('Guzzle\\Service\\Resource\\Model', $result);
    $this->assertNotEmpty($result['ETag']);
    $this->client->waitUntil('object_exists', $this->bucket . '/' . self::TEST_KEY);

    self::log("HEAD the object");
    $result = $this->client->headObject(array('Bucket' => $this->bucket, 'Key' => self::TEST_KEY));
    $this->assertEquals('application/foo', $result['ContentType']);
    $this->assertEquals('123', $result['Metadata']['test']);
    $this->assertEquals('@pples', $result['Metadata']['abc']);

    // Ensure the object was created correctly
    self::log("GETting the object");
    $result = $this->client->getObject(array('Bucket' => $this->bucket, 'Key' => self::TEST_KEY));
    $this->assertInstanceOf('Guzzle\\Service\\Resource\\Model', $result);
    $this->assertInstanceOf('Guzzle\\Http\\EntityBody', $result['Body']);
    $this->assertEquals('åbc 123', (string) $result['Body']);
    $this->assertEquals('application/foo', $result['ContentType']);
    $this->assertEquals('123', $result['Metadata']['test']);
    $this->assertEquals('@pples', $result['Metadata']['abc']);

    // Ensure the object was created and we can find it in the iterator
    self::log("Checking if the item is in the ListObjects results");
    $iterator = $this->client->getIterator('ListObjects', array('Bucket' => $this->bucket, 'Prefix' => self::TEST_KEY));
    $objects = $iterator->toArray();
    $this->assertEquals(1, count($objects));
    $this->assertEquals('foo', $objects[0]['Key']);
}
/**
 * Tells the driver to prepare a copy of the original file locally.
 *
 * @param FileModel $fileModel
 * @return File
 */
public function tempOriginal(FileModel $fileModel)
{
    // Create a local temp file (empty prefix; a null prefix is deprecated in recent PHP versions)
    $tempOriginalPath = tempnam(sys_get_temp_dir(), '');

    // Recreate original filename
    $originalPath = $this->nameGenerator->fileName($fileModel);

    // Download the file straight to disk
    $this->s3->getObject(array(
        'Bucket' => $this->awsBucket,
        'Key' => $originalPath,
        'SaveAs' => $tempOriginalPath,
    ));

    return new File($tempOriginalPath);
}
/**
 * Tells the driver to prepare a copy of the original image locally.
 *
 * @param Image $image
 * @return File
 */
public function tempOriginal(Image $image)
{
    // Create a local temp file (empty prefix; a null prefix is deprecated in recent PHP versions)
    $tempOriginalPath = tempnam(sys_get_temp_dir(), '');

    // Recreate original filename
    $originalPath = sprintf(
        '%s-%s.%s',
        $image->getKey(),
        $this->generateHash($image),
        Mime::getExtensionForMimeType($image->mime_type)
    );

    // Download the file straight to disk
    $this->s3->getObject(array(
        'Bucket' => $this->awsBucket,
        'Key' => $originalPath,
        'SaveAs' => $tempOriginalPath,
    ));

    return new File($tempOriginalPath);
}
/**
 * Returns the binary content of $filePath from DFS
 *
 * @param string $filePath local file path
 *
 * @return string|false file's content, or false on failure
 */
public function getContents($filePath)
{
    try {
        $object = $this->s3client->getObject(array('Bucket' => $this->bucket, 'Key' => $filePath));

        return (string) $object['Body'];
    } catch (S3Exception $e) {
        eZDebug::writeError($e->getMessage(), __METHOD__);

        return false;
    }
}
/**
 * @depends testPutAndListObjects
 * @dataProvider prefixKeyProvider
 */
public function testWorksWithPrefixKeys($key, $cleaned, $encoded)
{
    $this->client->waitUntil('bucket_exists', array('Bucket' => $this->bucket));
    $command = $this->client->getCommand('PutObject', array(
        'Bucket' => $this->bucket,
        'Key' => $key,
        'SourceFile' => __FILE__,
    ));
    $command->execute();

    // Ensure the path is correct
    $this->assertEquals($encoded, $command->getRequest()->getPath());
    // Ensure the key is not an array and is returned to its previous value
    $this->assertEquals($key, $command['Key']);

    $this->client->waitUntil('object_exists', array('Bucket' => $this->bucket, 'Key' => $key));
    $result = $this->client->getObject(array('Bucket' => $this->bucket, 'Key' => $key));
    $this->assertEquals(file_get_contents(__FILE__), (string) $result['Body']);

    // Test using path style hosting
    $command = $this->client->getCommand('DeleteObject', array(
        'Bucket' => $this->bucket,
        'Key' => $key,
        'PathStyle' => true,
    ));
    $command->execute();
    $this->assertEquals('/' . $this->bucket . $encoded, $command->getRequest()->getPath());
}
/**
 * Return a read-only stream resource for a file.
 *
 * @param string $file
 * @return resource|boolean The resource or false on failure
 * @throws \RuntimeException
 */
public function stream($file)
{
    if (null !== $this->filenameFilter) {
        $file = $this->filenameFilter->filter($file);
    }

    $params = array('Bucket' => $this->getBucket(), 'Key' => $file);

    try {
        $response = $this->s3Client->getObject($params);
    } catch (S3Exception $e) {
        if (!$this->getThrowExceptions()) {
            return false;
        }

        throw new \RuntimeException('Exception thrown by Aws\\S3\\S3Client: ' . $e->getMessage(), 0, $e);
    }

    $body = $response->get('Body');
    $body->rewind();

    return $body->getStream();
}
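// A minimal usage sketch (not from the original sources) for the stream() method above: because it
// returns a raw PHP stream resource, a caller can copy large objects to disk without buffering them
// fully in memory. $fileSystem, the key and the local path are hypothetical.
$source = $fileSystem->stream('backups/large-dump.sql.gz');
if ($source === false) {
    die("Could not open remote file\n");
}
$target = fopen('/tmp/large-dump.sql.gz', 'wb');
stream_copy_to_stream($source, $target);
fclose($source);
fclose($target);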
public function fopen($path, $mode)
{
    $path = $this->normalizePath($path);

    switch ($mode) {
        case 'r':
        case 'rb':
            // Read modes: download the object into a temp file and hand back a read handle.
            $tmpFile = \OC_Helper::tmpFile();
            self::$tmpFiles[$tmpFile] = $path;
            try {
                $result = $this->connection->getObject(array(
                    'Bucket' => $this->bucket,
                    'Key' => $this->cleanKey($path),
                    'SaveAs' => $tmpFile,
                ));
            } catch (S3Exception $e) {
                \OCP\Util::writeLog('files_external', $e->getMessage(), \OCP\Util::ERROR);
                return false;
            }
            return fopen($tmpFile, 'r');
        case 'w':
        case 'wb':
        case 'a':
        case 'ab':
        case 'r+':
        case 'w+':
        case 'wb+':
        case 'a+':
        case 'x':
        case 'x+':
        case 'c':
        case 'c+':
            // Write modes: work on a local temp file and upload it on close via the callback.
            if (strrpos($path, '.') !== false) {
                $ext = substr($path, strrpos($path, '.'));
            } else {
                $ext = '';
            }
            $tmpFile = \OC_Helper::tmpFile($ext);
            \OC\Files\Stream\Close::registerCallback($tmpFile, array($this, 'writeBack'));
            if ($this->file_exists($path)) {
                $source = $this->fopen($path, 'r');
                file_put_contents($tmpFile, $source);
            }
            self::$tmpFiles[$tmpFile] = $path;
            return fopen('close://' . $tmpFile, $mode);
    }

    return false;
}
/**
 * @param string $container
 * @param string $name
 * @param array  $params
 *
 * @throws DfException
 */
public function streamBlob($container, $name, $params = [])
{
    try {
        $this->checkConnection();

        /** @var \Aws\Result $result */
        $result = $this->blobConn->getObject(['Bucket' => $container, 'Key' => $name]);

        header('Last-Modified: ' . $result->get('LastModified'));
        header('Content-Type: ' . $result->get('ContentType'));
        header('Content-Length: ' . intval($result->get('ContentLength')));

        $disposition = isset($params['disposition']) && !empty($params['disposition']) ? $params['disposition'] : 'inline';
        header('Content-Disposition: ' . $disposition . '; filename="' . $name . '";');

        echo $result->get('Body');
    } catch (\Exception $ex) {
        if ('Resource could not be accessed.' == $ex->getMessage()) {
            $status_header = "HTTP/1.1 404 The specified file '{$name}' does not exist.";
            header($status_header);
            header('Content-Type: text/html');
        } else {
            throw new DfException('Failed to stream blob: ' . $ex->getMessage());
        }
    }
}
/**
 * @brief Download a file from S3 to $path
 *
 * @param IOInterface $io Composer IO for console output
 * @param string $key "path" to a file on S3
 * @param string $path Local path to save the file within app
 * @param string $env environment variable, one of 'dev', 'test' or 'prod'
 * @return void
 */
protected static function getFromS3(IOInterface $io, $key, $path, $env)
{
    // Pick the config bucket for the environment; an env variable overrides it.
    $bucket = 'keboola-configs';
    if ($env == 'test') {
        $bucket = 'keboola-configs-testing';
    } elseif ($env == 'dev') {
        $bucket = 'keboola-configs-devel';
    }
    if (getenv('KEBOOLA_SYRUP_CONFIGS_BUCKET')) {
        $bucket = getenv('KEBOOLA_SYRUP_CONFIGS_BUCKET');
    }

    $awsRegion = 'us-east-1';
    if (getenv('AWS_REGION')) {
        $awsRegion = getenv('AWS_REGION');
    }

    $client = new S3Client(array('version' => '2006-03-01', 'region' => $awsRegion));
    $client->getObject(array('Bucket' => $bucket, 'Key' => $key, 'SaveAs' => $path));

    $io->write("<info>File <comment>{$path}</comment> downloaded from S3 ({$bucket})</info>");
}
/**
 * Read a file
 *
 * @param string $path
 * @return array file metadata
 */
public function read($path)
{
    $options = $this->getOptions($path);
    $result = $this->client->getObject($options);

    return $this->normalizeObject($result->getAll(), $path);
}
/**
 * getObject
 *
 * @param string $key
 * @param array  $params
 *
 * @return \Guzzle\Service\Resource\Model
 */
public function getObject($key, array $params = array())
{
    $params['Key'] = $key;
    $params['Bucket'] = $this->name;

    return $this->client->getObject($params);
}
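// A short usage sketch (not from the original sources) for the bucket wrapper above: the Bucket is
// fixed to the wrapper's own name and any extra GetObject parameters are passed through. $bucket,
// the keys and the local path are hypothetical.
$model = $bucket->getObject('exports/latest.json');          // fetch into memory
$json = (string) $model['Body'];
$bucket->getObject('exports/latest.json', array('SaveAs' => '/tmp/latest.json')); // stream to disk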
public function localFile($identifier)
{
    $filePath = tempnam(sys_get_temp_dir(), Random::generate(12));
    $this->s3->getObject(array('Bucket' => $this->bucket, 'Key' => $identifier, 'SaveAs' => $filePath));

    return $filePath;
}
if (!empty($_POST['submit'])) {
    if (!empty($_FILES["uploadfile"])) {
        $filename = $_FILES["uploadfile"]["name"];
        $file = $_FILES["uploadfile"]["tmp_name"];
        $filetype = $_FILES["uploadfile"]["type"];
        $filesize = $_FILES["uploadfile"]["size"];
        $filedata = file_get_contents($file);
        $bucket = $_POST['bucket'];

        // Insert into Redis, base64-encoded
        $base64 = base64_encode(file_get_contents($file));
        $filekey = $filename . DATA_SEPARATOR . $filetype . DATA_SEPARATOR . $filesize . DATA_SEPARATOR . time() . DATA_SEPARATOR . $bucket;
        $redis->mset($filekey, $base64);

        // Create or update imagelist.txt
        if ($s3->doesObjectExist(S3_BUCKET, IMAGELIST_FILE)) {
            // Already exists: append the new key to the existing list
            $txtfile = $s3->getObject(['Bucket' => S3_BUCKET, 'Key' => IMAGELIST_FILE]);
            $txtbody = $txtfile['Body'] . $filekey . PHP_EOL;
            try {
                $s3->deleteObject(['Bucket' => S3_BUCKET, 'Key' => IMAGELIST_FILE]);
                $s3->putObject(['Bucket' => S3_BUCKET, 'Key' => IMAGELIST_FILE, 'Body' => $txtbody, 'ACL' => 'public-read-write']);
            } catch (Aws\Exception\S3Exception $e) {
                $message .= "There was an error deleting and creating imagelist.txt.\r\n";
            }
        } else {
            // Create imagelist.txt
            try {
                $s3->putObject(['Bucket' => S3_BUCKET, 'Key' => IMAGELIST_FILE, 'Body' => $filekey . PHP_EOL, 'ACL' => 'public-read-write']);
            } catch (Aws\Exception\S3Exception $e) {
                $message .= "There was an error creating imagelist.txt.\r\n";
            }
        }
/**
 * @param string $key Name of the resource on S3
 * @return string
 */
public function getContent($key)
{
    $result = $this->s3Client->getObject(array('Bucket' => $this->bucketName, 'Key' => $key));

    return (string) $result['Body'];
}
/**
 * @param array  $fileInfo    File info from Storage API
 * @param string $destination Destination file path
 */
protected function downloadFile($fileInfo, $destination)
{
    // Initialize S3Client with credentials from Storage API
    $s3Client = new S3Client(array(
        "credentials" => [
            "key" => $fileInfo["credentials"]["AccessKeyId"],
            "secret" => $fileInfo["credentials"]["SecretAccessKey"],
            "token" => $fileInfo["credentials"]["SessionToken"],
        ],
        "region" => 'us-east-1',
        'version' => 'latest',
    ));

    $fs = new Filesystem();
    if (!$fs->exists(dirname($destination))) {
        // Create the parent directory, not the destination file itself
        $fs->mkdir(dirname($destination));
    }

    // Non-sliced file: download straight to the destination path
    $s3Client->getObject(array(
        'Bucket' => $fileInfo["s3Path"]["bucket"],
        'Key' => $fileInfo["s3Path"]["key"],
        'SaveAs' => $destination,
    ));
}
public function get($key)
{
    $this->load();
    $result = $this->s3Client->getObject(["Bucket" => S3Cache::BUCKET, "Key" => $key]);

    return (string) $result['Body'];
}
} else { echo "Possible file upload attack!\n"; } echo 'Here is some more debugging info:'; print_r($_FILES); print "</pre>"; $s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-east-1']); $bucket = uniqid("php-ars-test-bucket-", false); # AWS PHP SDK version 3 create bucket $result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]); $s3->waitUntil('BucketExists', array('Bucket' => $bucket)); # PHP version 3 $result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Key' => "Hello" . $uploadfile, 'ContentType' => $_FILES['userfile']['tmp_name'], 'Body' => fopen($uploadfile, 'r+')]); $url = $result['ObjectURL']; echo $url; $result = $s3->getObject(array('Bucket' => $bucket, 'Key' => "Hello" . $uploadfile, 'ContentType' => $_FILES['userfile']['tmp_name'], 'SaveAs' => '/tmp/originalimage.jpg')); $image = new Imagick(glob('/tmp/originalimage.jpg')); $image->oilPaintImage(2); //Oilpaint image $image->setImageFormat("jpg"); $image->writeImages('/tmp/modifiedimage.jpg', true); $modifiedbucket = uniqid("modified-image-", false); $result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $modifiedbucket]); $resultrendered = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $modifiedbucket, 'Key' => "Hello" . $uploadfile, 'SourceFile' => "/tmp/modifiedimage.jpg", 'ContentType' => $_FILES['userfile']['tmp_name'], 'Body' => fopen("/tmp/modifiedimage.jpg", 'r+')]); unlink('/tmp/modifiedimage.jpg'); $finishedurl = $resultrendered['ObjectURL']; echo $finishedurl; $expiration = $s3->putBucketLifecycleConfiguration(['Bucket' => $bucket, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Date' => '2015-12-25'], 'Prefix' => ' ', 'Status' => 'Enabled']]]]); $expiration = $s3->putBucketLifecycleConfiguration(['Bucket' => $modifiedbucket, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Date' => '2015-12-25'], 'Prefix' => ' ', 'Status' => 'Enabled']]]]); $rds = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-east-1']); $result = $rds->describeDBInstances(['DBInstanceIdentifier' => 'mp1-db']);
public function getObject($path)
{
    $s3config = $this->app->config('amazonS3');
    $result = $this->client->getObject(array(
        'Bucket' => $s3config['bucket'],
        'Key' => S3Helpers::key($this->app->config('publicDir'), $path),
    ));

    return $result;
}
protected function fetchObject(S3Client $client, Bucket $bucket, $key, $store = true, $maxSize = null)
{
    try {
        // Make sure the file exists.
        $info = $client->headObject(array('Bucket' => $bucket->getName(), 'Key' => $key));
        $contentLength = $info->get('ContentLength');

        if ($contentLength == 0) {
            // This key is likely a directory.
            return false;
        }

        if ($maxSize !== null && $maxSize < $contentLength) {
            // The file is too large.
            return false;
        }
    } catch (\Exception $e) {
        return false;
    }

    $result = $client->getObject(array('Bucket' => $bucket->getName(), 'Key' => $key));
    $data = $result->get('Body');

    if ($store) {
        $this->writeToDrive($bucket->getSaveMethod(), $data, $bucket->getDestination(), $key);
        unset($result);
        unset($data);

        return true;
    } else {
        unset($result);

        return $data;
    }
}
/**
 * Get object from bucket.
 *
 * @param string $cacheKey the cache key.
 * @return mixed
 */
private function getObject($cacheKey)
{
    return $this->_client->getObject(['Bucket' => $this->bucket, 'Key' => $cacheKey]);
}
/**
 * Get object identified by bucket and key
 *
 * @param string $bucket
 * @param string $key
 * @param array  $params
 *
 * @return mixed
 */
public function getObject($bucket, $key, array $params = [])
{
    $params['Bucket'] = $bucket;
    $params['Key'] = $key;

    return $this->instance->getObject($params);
}
<?php
/*
 * This performs a pseudo ETL transform.
 */

// Include the composer autoloader
require_once 'vendor/autoload.php';

use Aws\S3\S3Client;

// Capture the post body
$postBody = file_get_contents('php://input');

// Decode the message
$job = json_decode($postBody);

// Connect to the client
$client = new S3Client(['version' => 'latest', 'region' => 'ap-southeast-2']);

try {
    // Read the bi_queue file
    $be_client = $job->{'Records'}[0]->{'s3'}->{'bucket'}->{'name'};
    $key = $job->{'Records'}[0]->{'s3'}->{'object'}->{'key'};
    $result_bi_queue = $client->getObject(array('Bucket' => $be_client, 'Key' => $key));

    // Read the mapping file
    $result_mapping = $client->getObject(array('Bucket' => "be-data", 'Key' => "etl_mappings/{$be_client}/{$be_client}_mapping.csv"));

    // Combine the body of the files
    $etl_body = $result_bi_queue['Body'] . $result_mapping['Body'];

    // Write the new file
    $result = $client->putObject(array('Bucket' => $be_client, 'Key' => "etl/{$be_client}_etl_xform.csv", 'Body' => $etl_body));
} catch (\Exception $error) {
    // Returning any non-200 HTTP code will tell the
    // SQS queue that the job failed and to re-queue it.
    http_response_code(500);
}
} ?> <table class="table"> <tr> <th>Bucket</th> <th>File Name</th> <th>Download Link</th> <th>Resized Image</th> </tr> <?php $result = $s3->listBuckets(); foreach ($result['Buckets'] as $bucket) { // Each Bucket value will contain a Name and CreationDate if ($s3->doesObjectExist($bucket['Name'], SMALLIMAGELIST_FILE)) { $txtfile = $s3->getObject(['Bucket' => $bucket['Name'], 'Key' => SMALLIMAGELIST_FILE]); $txtbody = $txtfile['Body']; $lines = explode(PHP_EOL, $txtbody); foreach ($lines as $key) { if (trim($key) != '') { $tag = preg_split("/######/", $key); echo '<tr>'; echo '<td>' . $bucket['Name'] . '</td>'; echo '<td>' . $tag[1] . '</td>'; echo '<td><a href="' . S3_PATH . $bucket['Name'] . '/' . $tag[1] . '">Click</a></td>'; echo '<td><a href="' . S3_PATH . $bucket['Name'] . '/' . $tag[2] . '"><img src="' . S3_PATH . $bucket['Name'] . '/' . $tag[2] . '"/></a></td>'; echo '</tr>'; } } } }