/**
 * Build a time-limited, signed Amazon S3 download URL.
 *
 * @param $key string AWS access key ID
 * @param $secret string AWS secret access key
 * @param $bucket string Bucket name
 * @param $file_name string File name (URI)
 * @return string The signed download URL, valid for 10 minutes
 */
public static function get_s3_url($key, $secret, $bucket, $file_name)
{
    $credentials = array('key' => $key, 'secret' => $secret);
    $client = Aws\S3\S3Client::factory($credentials);

    // Pre-signed URL expiring 10 minutes from generation time.
    return $client->getObjectUrl($bucket, $file_name, '+10 minutes');
}
/**
 * Upload the job's backup files to Amazon S3.
 *
 * Reads bucket/credentials from $this->params['access_details'] and the
 * file list from $this->params['files']. Logs progress through main::log().
 *
 * @return bool true when every upload succeeded, false on error (the
 *              error message is also recorded via setError()).
 */
public function run()
{
    include main::getPluginDir() . '/libs/classes/aws-autoloader.php';

    $ad = $this->params['access_details'];
    main::log(lang::get('Start copy files to Amazon S3', false));

    $files = $this->params['files'];
    $dir = isset($ad['dir']) ? $ad['dir'] : '/';
    $credentials = new Aws\Common\Credentials\Credentials($ad['AccessKeyId'], $ad['SecretAccessKey']);
    $client = Aws\S3\S3Client::factory(array('credentials' => $credentials));

    try {
        $n = count($files);
        for ($i = 0; $i < $n; $i++) {
            // Normalize slashes and strip any leading '/' from the key: a
            // leading slash makes S3 "succeed" but store the object under an
            // empty-name folder.
            $filePath = preg_replace('#[/\\\\]+#', '/', BACKUP_DIR . '/' . $dir . '/' . $files[$i]);
            $key = $dir ? $dir . '/' . basename($filePath) : basename($filePath);
            $key = ltrim(preg_replace('#[/\\\\]+#', '/', $key), '/');

            // FIX: open read-only ('r') instead of 'r+' — the upload only
            // reads the file, and 'r+' fails on files the PHP user cannot
            // write to.
            $putRes = $client->putObject(array(
                "Bucket" => $ad['bucket'],
                'Key'    => $key,
                'Body'   => fopen($filePath, 'r'),
            ));
            if (!empty($putRes['RequestId'])) {
                main::log(str_replace('%s', basename($filePath), lang::get("File(%s) Upload successfully to Amazon S3", false)));
            }
        }
        main::log(lang::get('End copy files to Amazon S3', false));
    } catch (S3Exception $e) {
        // FIX: catch the SDK-specific exception first. S3Exception extends
        // Exception, so with the original order this branch was unreachable.
        main::log('Error send to Amazon s3: ' . $e->getMessage());
        $this->setError($e->getMessage());
        return false;
    } catch (Exception $e) {
        main::log('Error send to Amazon s3: ' . $e->getMessage());
        $this->setError($e->getMessage());
        return false;
    }
    return true;
}
/**
 * Upload a local file to S3 under the "posts/" prefix.
 *
 * @param string $file Path to the local source file
 * @param string $key  Object name; stored as "posts/{$key}"
 * @return mixed The putObject result model from the SDK
 * @throws RuntimeException when the S3_BUCKET env var is not set
 */
public static function uploadToAmazon($file, $key)
{
    $client = Aws\S3\S3Client::factory([
        'region'  => 'us-west-2',
        'version' => '2006-03-01',
        // NOTE(review): TLS peer verification is disabled here — confirm this
        // is intentional; it exposes uploads to man-in-the-middle attacks.
        'http'    => ['verify' => false],
    ]);

    $bucket = getenv('S3_BUCKET');
    if (!$bucket) {
        // FIX: throw instead of die() — killing the whole process from a
        // library method prevents callers from handling or logging the error.
        throw new RuntimeException('No "S3_BUCKET" config var in found in env!');
    }

    $result = $client->putObject(array(
        'Bucket'     => $bucket,
        'Key'        => 'posts/' . $key,
        'SourceFile' => $file,
    ));
    return $result;
}
/**
 * Upload one file (from the command context) to Amazon S3, optionally
 * creating a "folder" placeholder object first.
 *
 * Context keys read: AccessKeyId, SecretAccessKey, bucket, dir, file.
 *
 * @param WPAdm_Command_Context $context
 * @return bool true on success, false on error (message set on context)
 */
public function execute(WPAdm_Command_Context $context)
{
    require_once WPAdm_Core::getPluginDir() . '/modules/aws-autoloader.php';

    $credentials = new Aws\Common\Credentials\Credentials($context->get('AccessKeyId'), $context->get('SecretAccessKey'));
    $client = Aws\S3\S3Client::factory(array('credentials' => $credentials));
    $dir = $context->get('dir') ? $context->get('dir') . "/" : '';

    try {
        if (!empty($dir)) {
            // A zero-byte object at the directory key makes the "folder"
            // visible in the S3 console.
            $logs = $client->putObject(array('Bucket' => $context->get('bucket'), 'Key' => $dir, 'Body' => ''));
            WPAdm_Core::log('Create folder ' . $dir);
        }

        // Normalize slashes and strip a leading '/': with a leading slash the
        // upload "succeeds" but the object lands under an empty-name folder.
        $filePath = preg_replace('#[/\\\\]+#', '/', $context->get('file'));
        $key = $dir ? $dir . '/' . basename($filePath) : basename($filePath);
        $key = ltrim(preg_replace('#[/\\\\]+#', '/', $key), '/');

        // FIX: open read-only ('r') instead of 'r+' — the upload only reads
        // the file, and 'r+' fails on files the PHP user cannot write to.
        $putRes = $client->putObject(array("Bucket" => $context->get('bucket'), 'Key' => $key, 'Body' => fopen($filePath, 'r')));
        if ((int) $putRes == 1) {
            WPAdm_Core::log("File({$key}) Upload successfully to Amazon S3");
        }
    } catch (S3Exception $e) {
        // FIX: catch the SDK-specific exception first. S3Exception extends
        // Exception, so with the original order this branch was unreachable.
        WPAdm_Core::log('error send file ' . $e->getMessage());
        $context->setError($e->getMessage());
        return false;
    } catch (Exception $e) {
        $context->setError($e->getMessage());
        return false;
    }
    return true;
}
/**
 * Lazily build and memoise the S3 client.
 *
 * NOTE(review): $bucket is only honoured on the FIRST call — once
 * $this->client is cached, later calls passing a different bucket silently
 * get the client built for the original one; confirm callers never rely on
 * per-call buckets. Also, 'Bucket' is not a documented S3Client::factory()
 * option — verify it has any effect at all.
 *
 * @param string $bucket Bucket name (defaults to AMAZON_S3_BUCKET_NAME)
 * @return Aws\S3\S3Client the cached client instance
 */
private function getClient($bucket = AMAZON_S3_BUCKET_NAME)
{
    if ($this->client === null) {
        $this->client = Aws\S3\S3Client::factory(array('key' => AMAZON_AWS_KEY, 'secret' => AMAZON_AWS_SECRET, 'Bucket' => $bucket));
    }
    return $this->client;
}
/**
 * Send the file to S3 for persistence.
 *
 * If we want to just store it locally to the server receiving the request,
 * simply use move_uploaded_file() to save the uploaded CSS and write
 * the report to the same location in a similar fashion.
 *
 * @param string $filePath
 * @param string $fileExtension
 * @return string HTML-escaped public object URL
 * @throws S3BucketNotFoundException when S3_BUCKET is not configured
 */
private function persistFileToS3($filePath, $fileExtension)
{
    // Fail fast if the bucket is missing from the environment.
    $bucketName = getenv('S3_BUCKET');
    if (!$bucketName) {
        throw new S3BucketNotFoundException('S3 persistent storage missing bucket in configuration.');
    }

    $client = Aws\S3\S3Client::factory();
    $objectName = $this->sessionUUID . '.' . $fileExtension;

    // Stream the file up with a public-read ACL and return its URL.
    $stream = fopen($filePath, 'rb');
    $result = $client->upload($bucketName, $objectName, $stream, 'public-read');

    return htmlspecialchars($result->get('ObjectURL'));
}
/**
 * Decode base64 image data and upload it to the configured S3 bucket.
 *
 * @param string $filename Object key to store under
 * @param string $fileData Base64 payload, optionally as a data URI
 *                         ("data:image/jpeg;base64,<payload>")
 * @param string &$message Receives the error message on failure
 * @return string|null Public object URL, or null on failure
 */
private function uploadToS3Bucket($filename, $fileData, &$message)
{
    $awsConfig = ConnectionManager::getDataSource('default')->config['aws_s3_config'];
    $client = Aws\S3\S3Client::factory($awsConfig['aws_config']);
    $bucket = $awsConfig['aws_bucket'];

    // FIX: in a data URI the base64 payload comes AFTER the comma; the
    // original decoded $data[0], which for a data URI is the
    // "data:image/jpeg;base64" header, not the image. Plain base64 input
    // (no comma) still decodes the whole string as before.
    $parts = explode(',', $fileData);
    $payload = count($parts) > 1 ? $parts[1] : $parts[0];

    try {
        $result = $client->putObject(array(
            'Bucket'      => $bucket,
            'Key'         => $filename,
            'Body'        => base64_decode($payload),
            'ContentType' => 'image/jpeg',
            'ACL'         => 'public-read',
        ));
        return $result['ObjectURL'];
    } catch (Exception $e) {
        $message = $e->getMessage();
        return null;
    }
}
/**
 * Build (and memoise per repository id) an S3 client configured from the
 * repository's options. Supports both v2 and v3 of the AWS SDK; the SDK
 * version comes from the plugin-wide SDK_VERSION setting.
 *
 * @param Repository $repoObject
 * @param boolean $registerStream
 * @return \AccessS3\S3Client
 */
protected static function getClientForRepository($repoObject, $registerStream = true)
{
    require_once "aws.phar";
    if (!isset(self::$clients[$repoObject->getId()])) {
        // Assemble factory options from repository configuration.
        $options = array('key' => $repoObject->getOption("API_KEY"), 'secret' => $repoObject->getOption("SECRET_KEY"));
        $signatureVersion = $repoObject->getOption("SIGNATURE_VERSION");
        if (!empty($signatureVersion)) {
            $options['signature'] = $signatureVersion;
        }
        $baseURL = $repoObject->getOption("STORAGE_URL");
        if (!empty($baseURL)) {
            $options["base_url"] = $baseURL;
        }
        $region = $repoObject->getOption("REGION");
        if (!empty($region)) {
            $options["region"] = $region;
        }
        $proxy = $repoObject->getOption("PROXY");
        if (!empty($proxy)) {
            $options['request.options'] = array('proxy' => $proxy);
        }
        $apiVersion = $repoObject->getOption("API_VERSION");
        if ($apiVersion === "") {
            $apiVersion = "latest";
        }
        // SDK_VERSION IS A GLOBAL PARAM
        ConfService::getConfStorageImpl()->_loadPluginConfig("access.s3", $globalOptions);
        $sdkVersion = $globalOptions["SDK_VERSION"];
        if ($sdkVersion !== "v2" && $sdkVersion !== "v3") {
            $sdkVersion = "v2";
        }
        if ($sdkVersion === "v3") {
            require_once __DIR__ . DIRECTORY_SEPARATOR . "class.pydioS3Client.php";
            $s3Client = new \AccessS3\S3Client(["version" => $apiVersion, "region" => $region, "credentials" => $options]);
            $s3Client->registerStreamWrapper($repoObject->getId());
        } else {
            $s3Client = Aws\S3\S3Client::factory($options);
            if ($repoObject->getOption("VHOST_NOT_SUPPORTED")) {
                // Force path-style bucket addressing when virtual-hosted
                // buckets are unsupported by the endpoint.
                require_once "ForcePathStyleListener.php";
                // FIX: the class is ForcePathStyleListener (matching the file
                // required above); the original referenced the non-existent
                // ForcePathStyleStyleListener and would fatal on this path.
                $s3Client->addSubscriber(new \Aws\S3\ForcePathStyleListener());
            }
            $s3Client->registerStreamWrapper();
        }
        self::$clients[$repoObject->getId()] = $s3Client;
    }
    return self::$clients[$repoObject->getId()];
}
/**
 * Lazily instantiate the requested AWS service client ('sqs' or 's3'),
 * point $this->client at it, and return $this for chaining. Unknown names
 * leave $this->client untouched.
 *
 * @param string $name 'sqs' or 's3'
 * @return Aws
 */
public function __get($name)
{
    if ($name === 'sqs') {
        if (empty($this->sqs_client)) {
            // Build the SQS client once from CI config and cache it.
            $sqsConfig = array(
                'key' => $this->CI->config->item('sqs_access_key_id'),
                'secret' => $this->CI->config->item('sqs_secret_key'),
                'region' => $this->CI->config->item('aws_region'),
            );
            $this->sqs_client = Aws\Sqs\SqsClient::factory($sqsConfig);
        }
        $this->client = $this->sqs_client;
    } elseif ($name === 's3') {
        if (empty($this->s3_client)) {
            // Build the S3 client once from CI config and cache it.
            $s3Config = array(
                'key' => $this->CI->config->item('s3_access_key_id'),
                'secret' => $this->CI->config->item('s3_secret_key'),
                'region' => $this->CI->config->item('aws_region'),
            );
            $this->s3_client = Aws\S3\S3Client::factory($s3Config);
        }
        $this->client = $this->s3_client;
    }
    return $this;
}
/**
 * Restore a backup from Amazon S3: download every object under the
 * backup-name prefix into BACKUP_DIR, hand off to $this->local() for the
 * actual restore, then remove the downloaded directory.
 *
 * Credentials come from the PREFIX_AS3B.'setting' option; errors are
 * reported through $this->setError().
 *
 * @return void
 */
private function s3()
{
    $amazon_option = get_option(PREFIX_AS3B . 'setting');
    if ($amazon_option) {
        require_once main::getPluginDir() . '/libs/classes/aws-autoloader.php';
        try {
            $dir = BACKUP_DIR . '/' . $this->params['name'];
            $credentials = new Aws\Common\Credentials\Credentials($amazon_option['access_key_id'], $amazon_option['secret_access_key']);
            $client = Aws\S3\S3Client::factory(array('credentials' => $credentials));
            main::log(lang::get("Get Files for Resore Backup", false));
            $keys = $client->listObjects(array('Bucket' => $amazon_option['bucket'], 'Prefix' => $this->params['name']))->getIterator();
            if (isset($keys['Contents'])) {
                $n = count($keys['Contents']);
                main::mkdir($dir);
                main::log(lang::get("Start Download files with Amazon S3", false));
                for ($i = 0; $i < $n; $i++) {
                    // Keys look like "<backupName>/<fileName>"; skip bare
                    // folder placeholders that have no file part.
                    $path = explode("/", $keys['Contents'][$i]['Key']);
                    if (isset($path[0]) && isset($path[1]) && !empty($path[1])) {
                        $result = $client->getObject(array('Bucket' => $amazon_option['bucket'], 'Key' => $keys['Contents'][$i]['Key'], 'SaveAs' => BACKUP_DIR . '/' . $keys['Contents'][$i]['Key']));
                        main::log(str_replace("%s", $keys['Contents'][$i]['Key'], lang::get("Download file - %s", false)));
                    }
                }
                main::log(lang::get("End downloads files with Amazon S3", false));
                $this->local();
                if (is_dir($dir)) {
                    main::remove($dir);
                }
            } else {
                $this->setError(lang::get("Error, in downloads with Amazon S3", false));
            }
        } catch (S3Exception $e) {
            // FIX: catch the SDK-specific exception first. S3Exception
            // extends Exception, so with the original order this branch was
            // unreachable.
            $this->setError($e->getMessage());
        } catch (Exception $e) {
            $this->setError($e->getMessage());
        }
    } else {
        $this->setError(lang::get('Error: Data is not exist for send backup files to Amazon S3. Please type your Data in the Settings form', false));
    }
}
/**
 * Delete a named backup either from the local BACKUP_DIR or from the
 * configured S3 bucket, then redirect back to the plugin admin page.
 *
 * Reads backup-name and backup-type from $_POST.
 *
 * @return void (always ends with a Location redirect)
 */
public static function delete_backup()
{
    if (isset($_POST['backup-name']) && isset($_POST['backup-type'])) {
        // SECURITY FIX: basename() strips path components, preventing
        // traversal like "../../wp-config.php" from reaching self::remove()
        // or the S3 prefix. Legitimate backup names are unaffected.
        $backupName = basename($_POST['backup-name']);
        if ($_POST['backup-type'] == 'local') {
            self::remove(BACKUP_DIR . "/" . $backupName);
        } elseif ($_POST['backup-type'] == 's3') {
            $amazon_option = get_option(PREFIX_AS3B . 'setting');
            if ($amazon_option) {
                require_once self::getPluginDir() . '/libs/classes/aws-autoloader.php';
                $credentials = new Aws\Common\Credentials\Credentials($amazon_option['access_key_id'], $amazon_option['secret_access_key']);
                $client = Aws\S3\S3Client::factory(array('credentials' => $credentials));
                try {
                    // Every object under the backup-name prefix belongs to
                    // this backup; delete them one by one.
                    $keys = $client->listObjects(array('Bucket' => $amazon_option['bucket'], 'Prefix' => $backupName))->getIterator();
                    if (isset($keys['Contents'])) {
                        $n = count($keys['Contents']);
                        for ($i = 0; $i < $n; $i++) {
                            $client->deleteObject(array('Bucket' => $amazon_option['bucket'], 'Key' => $keys['Contents'][$i]['Key']));
                        }
                    }
                } catch (S3Exception $e) {
                    // FIX: SDK-specific catch first; S3Exception extends
                    // Exception, so the original order made it unreachable.
                    self::setError($e->getMessage());
                } catch (Exception $e) {
                    self::setError($e->getMessage());
                }
            }
        }
    }
    Header("location: " . admin_url('admin.php?page=amazon-s3-backup'));
}
/**
 * List database backups stored in S3, grouped by backup name.
 *
 * Object keys are expected to look like "<backupName>/<fileName>"; only
 * keys under the "<project>-db" prefix are scanned. Each distinct backup
 * name becomes one row with its file list, total size, and file count.
 *
 * @param array $setting access_key_id, secret_access_key, bucket
 * @return array array('data' => rows, 'md5' => fingerprint of empty set);
 *               the same (possibly empty) structure is returned on S3 errors
 */
public static function getBackupsInAmazon($setting)
{
    require_once dirname(__FILE__) . '/modules/aws-autoloader.php';
    $credentials = new Aws\Common\Credentials\Credentials($setting['access_key_id'], $setting['secret_access_key']);
    $client = Aws\S3\S3Client::factory(array('credentials' => $credentials));
    // md5 of an empty dump acts as the "no data" fingerprint for callers.
    $data = array('data' => array(), 'md5' => md5(print_r(array(), 1)));
    try {
        $project = self::getNameProject();
        $keys = $client->listObjects(array('Bucket' => $setting['bucket'], 'Prefix' => $project . '-db'))->getIterator();
        if (isset($keys['Contents'])) {
            $n = count($keys['Contents']);
            $j = 0;
            // Maps backup name => row index in $data['data'].
            $backups = array();
            for ($i = 0; $i < $n; $i++) {
                if (isset($keys['Contents'][$i]['Key'])) {
                    $backup = explode('/', $keys['Contents'][$i]['Key']);
                    // Skip bare folder keys that have no file component.
                    if (isset($backup[0]) && isset($backup[1]) && !empty($backup[1])) {
                        if (!isset($backups[$backup[0]])) {
                            // First file seen for this backup: start a row.
                            $backups[$backup[0]] = $j;
                            $data['data'][$j]['name'] = $backup[0];
                            $data['data'][$j]['dt'] = parent::getDateInName($backup[0]);
                            $data['data'][$j]['size'] = $keys['Contents'][$i]['Size'];
                            $data['data'][$j]['files'] = $backup[1];
                            $data['data'][$j]['type'] = 's3';
                            $data['data'][$j]['count'] = 1;
                            $j++;
                        } else {
                            // Further files for a known backup: accumulate
                            // the file list, byte size, and count.
                            $data['data'][$backups[$backup[0]]]['files'] .= ',' . $backup[1];
                            $data['data'][$backups[$backup[0]]]['size'] += $keys['Contents'][$i]['Size'];
                            $data['data'][$backups[$backup[0]]]['count'] += 1;
                        }
                    }
                }
            }
        }
    } catch (\Aws\S3\Exception\S3Exception $e) {
        // On S3 errors, return the (so far empty) structure rather than fail.
        return $data;
    }
    return $data;
}
<?php
// Download the requested document from S3 to a local temp file, delete the
// S3 copy, and stream the file back to the client as a .docx attachment.
require '/app/vendor/autoload.php';

$s3 = Aws\S3\S3Client::factory();
$params = array('Bucket' => 'cpgrantsdocs', 'Key' => $_POST['id'], 'SaveAs' => 'localdoc.docx');
$result = $s3->getObject($params);
// The stored copy is one-shot: remove it once fetched.
$result = $s3->deleteObject(array('Bucket' => 'cpgrantsdocs', 'Key' => $_POST['id']));

// SECURITY FIX: strip CR/LF, quotes, and any path components from the
// user-supplied name before echoing it into a response header
// (header-injection / filename-spoofing protection).
$filename = basename(str_replace(array("\r", "\n", '"'), '', $_POST['filename']));
$file_url = 'localdoc.docx';
header('Content-Type: application/octet-stream');
header("Content-Transfer-Encoding: Binary");
header("Content-disposition: attachment; filename=" . $filename . ".docx");
readfile($file_url);
/**
 * Delete a board row and, if it references an uploaded file, the
 * corresponding S3 object.
 *
 * @param mixed $backend Ethna backend (provides the DB handle)
 * @param mixed $id Board row id
 * @return mixed Ethna error object on DB failure, null on success
 * @throws Exception when the S3 delete fails (propagates from the SDK)
 */
public function delete($backend, $id)
{
    $db =& $backend->getDB();
    if (Ethna::isError($db)) {
        return $db;
    }
    $list = $db->query("select * from board where id=?", array($id));
    if ($item = $list->fetchRow()) {
        $fileid = $item['fileid'];
        if ($fileid !== NULL && $fileid !== "") {
            // FIX: removed the no-op try { ... } catch (Exception $e) {
            // throw $e; } wrapper — it only rethrew, so behaviour is
            // identical with less noise. SDK errors still propagate.
            $s3 = Aws\S3\S3Client::factory(array('key' => SecretConfig::$config['AWS_ACCESS_KEY_ID'], 'secret' => SecretConfig::$config['AWS_SECRET_ACCESS_KEY'], 'region' => SecretConfig::$config['AWS_DEFAULT_REGION']));
            $result = $s3->deleteObject(array('Bucket' => SecretConfig::$config['AWS_BUCKET_NAME'], 'Key' => $fileid));
        }
    }
    $list = $db->query("DELETE FROM board WHERE id=?", array($id));
    if (Ethna::isError($list)) {
        return $list;
    }
    return null;
}
/**
 * Return the S3 URL of a user's icon, falling back to the shared
 * 'defaulticon' object when the user has not uploaded one.
 *
 * @param mixed $userid User id; icon is stored at "<userid>/icon"
 * @return string Object URL (unsigned)
 * @throws Exception when the S3 calls fail (propagates from the SDK)
 */
public function getIconUrl($userid)
{
    // FIX: removed the no-op try { ... } catch (Exception $e) { throw $e; }
    // wrapper — it only rethrew, so behaviour is identical with less noise.
    $s3 = Aws\S3\S3Client::factory(array('key' => SecretConfig::$config['AWS_ACCESS_KEY_ID'], 'secret' => SecretConfig::$config['AWS_SECRET_ACCESS_KEY'], 'region' => SecretConfig::$config['AWS_DEFAULT_REGION']));
    if ($s3->doesObjectExist(SecretConfig::$config['AWS_BUCKET_NAME'], $userid . '/icon')) {
        $res = $s3->getObjectUrl(SecretConfig::$config['AWS_BUCKET_NAME'], $userid . '/icon');
    } else {
        $res = $s3->getObjectUrl(SecretConfig::$config['AWS_BUCKET_NAME'], 'defaulticon');
    }
    return $res;
}
<?php
// Smoke-test the bundled AWS SDK phar: list buckets using the default
// credential chain, then print the SDK version.
require __DIR__ . '/artifacts/aws.phar';

$client = Aws\S3\S3Client::factory();
$client->listBuckets();

echo 'Version=' . Aws\Common\Aws::VERSION;
<?php require 'init.php.inc'; ?> <?php
// AJAX endpoint: validates the request (referer allow-list, logged-in user,
// field id) and, on POST, accepts an image upload destined for S3.
// NOTE: this chunk is truncated — the try block and the upload `if` continue
// beyond the visible source.
require '../vendor/autoload.php';
header('Content-Type: application/json; Charset=UTF-8');
try {
    // this will simply read AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY from env vars
    $s3 = Aws\S3\S3Client::factory(['signature' => 'v4', 'region' => 'eu-central-1']);
    $bucket = getenv('S3_BUCKET');
    if (!$bucket) {
        throw new Exception('No "S3_BUCKET" config var in found in env!');
    }
    // Referer allow-list: reject unless the referer mentions localhost, the
    // Heroku app, or family2family.eu ('&&' binds tighter than '||', so all
    // three strpos checks must fail for the request to be rejected).
    if (!isset($_SERVER['HTTP_REFERER']) || strpos($_SERVER['HTTP_REFERER'], 'localhost') === FALSE && strpos($_SERVER['HTTP_REFERER'], 'ftof.herokuapp.com') === FALSE && strpos($_SERVER['HTTP_REFERER'], 'family2family.eu') === FALSE) {
        throw new Exception("Technical error: not allowed to perform this action");
    }
    if (!$user->is_loggedin()) {
        throw new Exception("Technical error: unknown profile id");
    }
    $profile_id = intval($_SESSION['user_session']);
    if (!isset($_POST['id'])) {
        throw new Exception("Technical error: unknown field id");
    }
    $id = $_POST['id'];
    // Only proceed for a clean POSTed upload that PHP itself received.
    if ($_SERVER['REQUEST_METHOD'] == 'POST' && isset($_FILES['value']) && $_FILES['value']['error'] == UPLOAD_ERR_OK && is_uploaded_file($_FILES['value']['tmp_name'])) {
        $fileInfo = pathinfo($_FILES["value"]["name"]);
        // 2 MiB upload cap.
        $maxsize = 2097152;
        $acceptable = array('image/jpeg', 'image/jpg', 'image/gif', 'image/png');
/**
 * Emit (via sendSuccess) a signed URL for a presenter's SIRET document in
 * the younique-verification bucket.
 *
 * @param int|string $presenter_id Presenter whose document is requested
 * @param string $doc_name Document file name
 * @return void result is delivered through $this->sendSuccess()
 */
public function getSiretDocumentUrl($presenter_id, $doc_name)
{
    require APP . 'Vendor/autoload.php';

    // SECURITY FIX: AWS credentials were hard-coded here and are therefore
    // in source-control history — they MUST be rotated. Prefer environment
    // variables; the old literals remain only as a temporary fallback so
    // existing deployments keep working until configuration is migrated.
    $s3 = Aws\S3\S3Client::factory([
        'key' => getenv('AWS_ACCESS_KEY_ID') ?: 'AKIAISBR4E3CNF25XD4Q',
        'secret' => getenv('AWS_SECRET_ACCESS_KEY') ?: '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',
    ]);

    $key = $presenter_id . '/' . $doc_name;
    if (YOUNIQUE_TESTSERVER === true) {
        // Test-server uploads live under a dedicated prefix.
        $key = '0-devtesting/' . $key;
    }
    $key = 'France/' . $key;
    $bucket = 'younique-verification';

    // Signed URL valid for 20 minutes.
    $signedURL = $s3->getObjectUrl($bucket, $key, '+20 minutes');
    $this->sendSuccess($signedURL);
}
<?php
// Session-gated file manager page: uploads POSTed files to the
// 'bauboxbucket' S3 bucket (keeping a local copy under files/), lists the
// bucket, and handles ?delete=<key> requests.
session_start();
if (!isset($_SESSION['UserData']['Username'])) {
    header("location:login.php");
    // SECURITY FIX: without exit the rest of the page (upload/delete
    // handling) still executed for unauthenticated users despite the
    // redirect header.
    exit;
}
require 'lib/aws-autoloader.php';

$msg = "";
$s3 = Aws\S3\S3Client::factory(['credentials' => array('key' => '*********************', 'secret' => '*********************'), 'region' => 'eu-central-1', 'version' => 'latest']);

if (isset($_FILES['file'])) {
    $file = $_FILES['file'];
    // SECURITY FIX: basename() strips client-controlled path segments
    // ("../..") before the name is used for a local path and an S3 key.
    $name = basename($file['name']);
    $tmp_name = $file['tmp_name'];
    $tmp_file_path = "files/{$name}";
    move_uploaded_file($tmp_name, $tmp_file_path);
    $result = $s3->putObject(['Bucket' => 'bauboxbucket', 'Key' => $name, 'Body' => fopen($tmp_file_path, 'rb'), 'ACL' => 'public-read']);
    if ($result) {
        $msg = "<span><strong>Success!</strong> File uploaded succefuly</span>";
    } else {
        $msg = "<span style='color:red'>ERROR</span>";
    }
}

$iterator = $s3->getIterator('ListObjects', ['Bucket' => "bauboxbucket"]);

if (isset($_GET["delete"])) {
    // SECURITY FIX: sanitize the key; otherwise ?delete=../../<path> would
    // unlink arbitrary files writable by the web server.
    $key = basename($_GET["delete"]);
    $result = $s3->deleteObject(array('Bucket' => 'bauboxbucket', 'Key' => $key));
    if ($result) {
        $msg = "<span><strong></strong> File deleted succefuly</span>";
    }
    unlink("files/" . $key);
}
/**
 * Returns uploaded files to amazon s3
 *
 * @since 1.1
 * @uses \Aws\S3\S3Client
 * @see http://docs.aws.amazon.com/aws-sdk-php/latest/class-Aws.S3.S3Client.html#_listObjects
 * @param string $stacksFolder Optional sub-folder; when set, the raw key
 *                             list is returned without reformatting
 * @param boolean $match Show only backups files or show all files in bucket
 * @param integer $order ascending or descending
 * @return array Raw key list, or backup-info structure keyed by backup id
 */
public function getUploadedFiles($stacksFolder = '', $match = true, $order = amazonModelBup::ORDER_DESCENDING)
{
    $credentials = $this->getCredentials();
    $bucket = $this->getBucket();
    $files = array();
    // Only keys that look like backups ("backup_*") when $match is on.
    $pattern = '/(backup_.*)/';
    $client = Aws\S3\S3Client::factory(array('key' => $credentials['access'], 'secret' => $credentials['secret']));
    $filesIterator = $client->getIterator('ListObjects', array('Bucket' => $bucket, 'Prefix' => $this->getCurrentDomain() . '/' . $stacksFolder));
    foreach ($filesIterator as $storageFile) {
        if ($match === true) {
            if (preg_match($pattern, $storageFile['Key'])) {
                $files[] = $storageFile['Key'];
            }
        } else {
            $files[] = $storageFile['Key'];
        }
    }
    // FIX: the original used assignment (=) instead of comparison, so the
    // $order parameter was ignored and the list was ALWAYS sorted
    // descending. Keep amazonModelBup::ORDER_DESCENDING as the default.
    if ($order == amazonModelBup::ORDER_DESCENDING) {
        krsort($files);
    }
    if ($stacksFolder) {
        return $files;
    } else {
        // Formatting uploading data files for use their on backups page
        $newFiles = array();
        foreach ($files as $file) {
            $pathElementsCount = explode('/', $file);
            // 3+ path elements => filesystem backup split into stacks;
            // otherwise a single big archive.
            $oneFileBackup = count($pathElementsCount) > 2 ? false : true;
            $extension = pathinfo($file, PATHINFO_EXTENSION);
            // NOTE(review): '&&' binds tighter than '||', so any .sql file
            // takes the first branch regardless of $oneFileBackup — confirm
            // this precedence is intended.
            if ($extension === 'sql' || $extension === 'zip' && $oneFileBackup) {
                $backupInfo = $this->getBackupInfoByFilename($file);
            } else {
                $backupInfo = $this->getBackupInfoByFilename(pathinfo($file, PATHINFO_DIRNAME));
            }
            if (!empty($backupInfo['ext']) && $backupInfo['ext'] == 'sql') {
                $newFiles[$backupInfo['id']]['amazon']['sql']['file'] = $file;
                $newFiles[$backupInfo['id']]['amazon']['sql']['backupInfo'] = $backupInfo;
                $newFiles[$backupInfo['id']]['amazon']['sql']['backupInfo'] = dispatcherBup::applyFilters('addInfoIfEncryptedDb', $newFiles[$backupInfo['id']]['amazon']['sql']['backupInfo']);
            } elseif (!empty($backupInfo['ext']) && $backupInfo['ext'] == 'zip' && $oneFileBackup) {
                $newFiles[$backupInfo['id']]['amazon']['zip']['file'] = $file;
                $newFiles[$backupInfo['id']]['amazon']['zip']['backupInfo'] = $backupInfo;
            } else {
                $newFiles[$backupInfo['id']]['amazon']['zip']['file'] = pathinfo($file, PATHINFO_DIRNAME);
                $newFiles[$backupInfo['id']]['amazon']['zip']['backupInfo'] = $backupInfo;
            }
        }
        return $newFiles;
    }
}
/**
 * Returns a signed Amazon S3 download URL.
 *
 * Credentials are read from the wp-license-manager-settings option.
 *
 * @param $bucket string Bucket name
 * @param $file_name string File name (URI)
 * @return string The signed download URL, valid for 10 minutes
 */
public static function get_s3_url($bucket, $file_name)
{
    $settings = get_option('wp-license-manager-settings');
    $client = Aws\S3\S3Client::factory(array(
        'key' => $settings['aws_key'],
        'secret' => $settings['aws_secret'],
    ));

    // Pre-signed URL expiring 10 minutes from generation time.
    return $client->getObjectUrl($bucket, $file_name, '+10 minutes');
}
<?php
// Thin "s4" convenience layer over Aws\S3\S3Client built on the project's
// def()/def_memo()/def_accessor() registry: a lazily-created client plus
// put/get/delete/url helpers bound to a single configured bucket.
// NOTE: this chunk is truncated — the s4_put_file closure continues beyond
// the visible source.
def_accessor('s4_bucket');
def_accessor('s4_region', 'eu-west-1');
// Memoised client; credentials come from the SDK's default provider chain.
def_memo('s4', function () {
    return Aws\S3\S3Client::factory(array('region' => s4_region()));
});
def('s4_delete', function ($key) {
    return s4()->deleteObject(array('Bucket' => s4_bucket(), 'Key' => $key));
});
def('s4_url', function ($key) {
    // Plain (unsigned) object URL.
    return s4()->getObjectUrl(s4_bucket(), $key);
});
def('s4_put', function ($key, $body, $ct = null) {
    $t = array('Bucket' => s4_bucket(), 'Key' => $key, 'Body' => $body);
    if (!is_null($ct)) {
        $t['ContentType'] = $ct;
    }
    return s4()->putObject($t);
});
def('s4_get', function ($key) {
    $t = array('Bucket' => s4_bucket(), 'Key' => $key);
    $t = s4()->getObject($t);
    // Return just the body stream, not the whole result model.
    return $t['Body'];
});
// Like s4_put but streams from a file path via SourceFile.
def('s4_put_file', function ($key, $pth, $ct = null) {
    $t = array('Bucket' => s4_bucket(), 'Key' => $key, 'SourceFile' => $pth);
    if (!is_null($ct)) {
        $t['ContentType'] = $ct;
    }
    return s4()->putObject($t);
/**
 * Constructor - if you're not using the class statically
 *
 * @param string $accessKey Access key
 * @param string $secretKey Secret key
 * @param boolean $useSSL Enable SSL
 * @param string|boolean $sslCACert - certificate authority (true = bundled Guzzle version; false = no verify, 'system' = system version; otherwise, path)
 * @return void
 */
public function __construct($accessKey = null, $secretKey = null, $useSSL = true, $sslCACert = true, $endpoint = null)
{
    if ($accessKey !== null && $secretKey !== null) {
        $this->setAuth($accessKey, $secretKey);
    }
    $this->useSSL = $useSSL;
    $this->sslCACert = $sslCACert;

    $factoryOptions = array(
        'key' => $accessKey,
        'secret' => $secretKey,
        'scheme' => $useSSL ? 'https' : 'http',
    );

    if ($endpoint) {
        // A custom endpoint rules out signature v4, which requires a region
        // we don't necessarily know yet.
        $this->endpoint = $endpoint;
        $factoryOptions['endpoint'] = $endpoint;
    } else {
        // All regions support signature v4 and some (EU Central 1, China)
        // require it, so use it whenever we can name the region.
        $factoryOptions['signature'] = 'v4';
        $factoryOptions['region'] = $this->region;
    }

    if ($useSSL) {
        $factoryOptions['ssl.certificate_authority'] = $sslCACert;
    }

    $this->client = Aws\S3\S3Client::factory($factoryOptions);
}
/**
 * Lazily create and cache the S3 client, honouring explicit credentials,
 * an optional region (with v4 signing), WordPress proxy constants, and the
 * s3_uploads_s3_client_params filter.
 *
 * @return Aws\S3\S3Client
 */
public function s3()
{
    if (!empty($this->s3)) {
        return $this->s3;
    }

    $clientParams = array('version' => 'latest');

    if ($this->key && $this->secret) {
        $clientParams['credentials']['key'] = $this->key;
        $clientParams['credentials']['secret'] = $this->secret;
    }

    if ($this->region) {
        // A known region lets us opt into signature v4.
        $clientParams['signature'] = 'v4';
        $clientParams['region'] = $this->region;
    }

    // Route requests through the WordPress-configured proxy when present.
    if (defined('WP_PROXY_HOST') && defined('WP_PROXY_PORT')) {
        $proxyAuth = '';
        if (defined('WP_PROXY_USERNAME') && defined('WP_PROXY_PASSWORD')) {
            $proxyAuth = WP_PROXY_USERNAME . ':' . WP_PROXY_PASSWORD . '@';
        }
        $clientParams['request.options']['proxy'] = $proxyAuth . WP_PROXY_HOST . ':' . WP_PROXY_PORT;
    }

    // Allow site code to tweak the final parameter set.
    $clientParams = apply_filters('s3_uploads_s3_client_params', $clientParams);

    $this->s3 = Aws\S3\S3Client::factory($clientParams);
    return $this->s3;
}
/**
 * Sync a local directory into the site's S3 bucket under
 * "<FriendlyId>/<keyPrefix>", creating the bucket if needed and making
 * every uploaded object publicly readable.
 *
 * @param array $site Site record (uses 'Bucket' and 'FriendlyId')
 * @param string $local_dir Local directory to upload
 * @param string $keyPrefix Key prefix inside the site's folder
 * @return void
 */
public static function DeployDirectory($site, $local_dir, $keyPrefix)
{
    // Build the AWS client from the site-wide constants.
    $clientOptions = array('key' => S3_KEY, 'secret' => S3_SECRET, 'region' => S3_LOCATION);
    $client = Aws\S3\S3Client::factory($clientOptions);

    $bucket = $site['Bucket'];

    // Ensure the destination bucket exists before syncing.
    S3::CreateBucket($bucket);

    // Public-read objects, 20 concurrent transfers, verbose output.
    $uploadOptions = array('params' => array('ACL' => 'public-read'), 'concurrency' => 20, 'debug' => true);

    // sync folders, #ref: http://blogs.aws.amazon.com/php/post/Tx2W9JAA7RXVOXA/Syncing-Data-with-Amazon-S3
    $client->uploadDirectory($local_dir, $bucket, $site['FriendlyId'] . '/' . $keyPrefix, $uploadOptions);
}
/**
 * Echo (as JSON) the keys of every template object stored under the
 * user's prefix in the 'cpgrantstemplates' bucket.
 *
 * @param string $user User name; objects live under "<user>/"
 * @return void output is echoed, not returned
 */
function getTemplates($user)
{
    require '../vendor/autoload.php';

    $s3 = Aws\S3\S3Client::factory();
    $templateKeys = array();

    // Iterate the full listing (handles pagination transparently).
    $listing = $s3->getIterator('ListObjects', array('Bucket' => 'cpgrantstemplates', 'Prefix' => $user . '/'));
    foreach ($listing as $entry) {
        $templateKeys[] = $entry['Key'];
    }

    echo json_encode($templateKeys);
}
/**
 * Render the S3 bucket selector for the job-edit screen, either inline
 * (array $args) or as an AJAX response (reads sanitized $_POST fields).
 * Lists buckets with the supplied credentials and echoes a <select> (or a
 * free-text input when access is denied).
 *
 * @param string|array $args job settings, or '' when called via AJAX
 */
public function edit_ajax($args = '')
{
    $error = '';
    $buckets_list = array();
    if (is_array($args)) {
        $ajax = FALSE;
    } else {
        // AJAX path: enforce capability + nonce, then sanitize POST input.
        if (!current_user_can('backwpup_jobs_edit')) {
            wp_die(-1);
        }
        check_ajax_referer('backwpup_ajax_nonce');
        $args['s3accesskey'] = sanitize_text_field($_POST['s3accesskey']);
        $args['s3secretkey'] = sanitize_text_field($_POST['s3secretkey']);
        $args['s3bucketselected'] = sanitize_text_field($_POST['s3bucketselected']);
        $args['s3base_url'] = esc_url_raw($_POST['s3base_url']);
        $args['s3region'] = sanitize_text_field($_POST['s3region']);
        $ajax = TRUE;
    }
    echo '<span id="s3bucketerror" style="color:red;">';
    if (!empty($args['s3accesskey']) && !empty($args['s3secretkey'])) {
        try {
            $s3 = Aws\S3\S3Client::factory(array('key' => $args['s3accesskey'], 'secret' => BackWPup_Encryption::decrypt($args['s3secretkey']), 'region' => $args['s3region'], 'base_url' => $this->get_s3_base_url($args['s3region'], $args['s3base_url']), 'scheme' => 'https', 'ssl.certificate_authority' => BackWPup::get_plugin_data('cacert')));
            $buckets = $s3->listBuckets();
            if (!empty($buckets['Buckets'])) {
                $buckets_list = $buckets['Buckets'];
            }
            // FIX: the original tested $vaults['Marker'] — an undefined
            // variable (apparently copied from Glacier vault code) — so this
            // pagination loop could never run. Test the actual response.
            while (!empty($buckets['Marker'])) {
                $buckets = $s3->listBuckets(array('marker' => $buckets['Marker']));
                if (!empty($buckets['Buckets'])) {
                    $buckets_list = array_merge($buckets_list, $buckets['Buckets']);
                }
            }
        } catch (Exception $e) {
            $error = $e->getMessage();
        }
    }
    if (empty($args['s3accesskey'])) {
        _e('Missing access key!', 'backwpup');
    } elseif (empty($args['s3secretkey'])) {
        _e('Missing secret access key!', 'backwpup');
    } elseif (!empty($error) && $error == 'Access Denied') {
        // Credentials work but cannot list buckets: fall back to free text.
        echo '<input type="text" name="s3bucket" id="s3bucket" value="' . esc_attr($args['s3bucketselected']) . '" >';
    } elseif (!empty($error)) {
        echo esc_html($error);
    } elseif (!isset($buckets) || count($buckets['Buckets']) < 1) {
        _e('No bucket found!', 'backwpup');
    }
    echo '</span>';
    if (!empty($buckets_list)) {
        echo '<select name="s3bucket" id="s3bucket">';
        foreach ($buckets_list as $bucket) {
            echo "<option " . selected($args['s3bucketselected'], esc_attr($bucket['Name']), FALSE) . ">" . esc_attr($bucket['Name']) . "</option>";
        }
        echo '</select>';
    }
    if ($ajax) {
        die;
    }
}
<?php
// Smoke-test the bundled AWS SDK phar against a custom S3-compatible
// endpoint (mtmss.com) and print the SDK version. Bucket creation stays
// commented out as an optional manual check.
require __DIR__ . '/artifacts/aws.phar';

$factoryOptions = [
    'endpoint' => 'http://mtmss.com',
    'ssl.certificate_authority' => false,
    'key' => '*',
    'secret' => '*',
];
$client = Aws\S3\S3Client::factory($factoryOptions);

echo 'Version=' . Aws\Common\Aws::VERSION;
# $client->createBucket(array('Bucket' => 'mybucket-php1'));
<?php
// Smoke-test: construct an S3 client from the bundled phar with dummy
// credentials and print the SDK version (no requests are made).
require __DIR__ . '/artifacts/aws.phar';

$factoryOptions = ['key' => 'foo', 'secret' => 'bar', 'region' => 'us-east-1'];
$client = Aws\S3\S3Client::factory($factoryOptions);

echo 'Version=' . Aws\Common\Aws::VERSION;
/**
 * Create S3Client
 * All options to S3Client is supplied in the s3 option
 * See http://docs.amazonwebservices.com/aws-sdk-php-2/latest/class-Aws.S3.S3Client.html for list of S3Client options
 * See http://docs.amazonwebservices.com/general/latest/gr/rande.html#s3_region for list of region names
 * @return bool always true
 */
protected function init()
{
    // Everything the SDK needs is passed through verbatim from the 's3'
    // option block; the bucket is tracked separately.
    $clientOptions = $this->options['s3'];
    $this->s3 = Aws\S3\S3Client::factory($clientOptions);
    $this->bucket = $this->options['bucket'];
    return true;
}