Put an object
public static putObject ( mixed $input, string $bucket, string $uri, constant $acl = self::ACL_PRIVATE, array $metaHeaders = [], array $requestHeaders = [], constant $storageClass = self::STORAGE_CLASS_STANDARD, constant $serverSideEncryption = self::SSE_NONE ) : boolean
$input | mixed | Input data |
$bucket | string | Bucket name |
$uri | string | Object URI |
$acl | constant | ACL constant |
$metaHeaders | array | Array of x-amz-meta-* headers |
$requestHeaders | array | Array of request headers or content type as a string |
$storageClass | constant | Storage class constant |
$serverSideEncryption | constant | Server-side encryption |
return | boolean |
/**
 * Upload a file to S3 under contents/<model_name>/<model_id>/<model_id>_<type>.<ext>.
 *
 * @param string      $file_source Local path of the file to upload
 * @param mixed       $model_id    Model identifier used in the object key
 * @param string      $model_name  Model name used in the object key
 * @param string      $type        Type suffix used in the object key
 * @param string      $mime_type   Content-Type header for the stored object
 * @param string|null $file_ext    Explicit extension; defaults to the source file's extension
 *
 * @return mixed putObject() result (truthy on success)
 */
function UploadToCloud($file_source, $model_id, $model_name, $type, $mime_type, $file_ext = null) {
    App::import('Vendor', 'S3', array('file' => 'S3.php'));
    $s3 = new S3($this->awsAccessKey, $this->awsSecretKey, true, "s3-ap-southeast-1.amazonaws.com");
    $input = $s3->inputFile($file_source, false);
    // Fix: the two putObject() branches were identical except for the extension,
    // so resolve the extension once and make a single call.
    $ext = ($file_ext != null) ? $file_ext : pathinfo($file_source, PATHINFO_EXTENSION);
    $uri = "contents/" . $model_name . "/" . $model_id . "/" . $model_id . "_" . $type . "." . $ext;
    return $s3->putObject($input, $this->bucketName, $uri, S3::ACL_PUBLIC_READ, array(), array("Content-Type" => $mime_type));
}
/**
 * Write data to the configured Amazon S3 images bucket.
 *
 * @param string $path        Object path (short path) inside the bucket
 * @param string $data        Raw data to store
 * @param array  $httpHeaders Extra HTTP request headers OPTIONAL
 *
 * @return boolean TRUE on success, FALSE when the S3 client throws
 */
public function write($path, $data, array $httpHeaders = array())
{
    try {
        $bucket = \XLite\Core\Config::getInstance()->CDev->AmazonS3Images->bucket;
        $stored = $this->client->putObject($data, $bucket, $path, \S3::ACL_PUBLIC_READ, array(), $httpHeaders);
        return (bool) $stored;
    } catch (\S3Exception $e) {
        \XLite\Logger::getInstance()->registerException($e);
        return false;
    }
}
/**
 * Handle an image upload via the CodeIgniter upload library and push it to S3.
 *
 * Echoes a JSON response: {"responseStatus": "OK", "url": ...} on success,
 * or {"responseStatus": <error message>} on failure.
 *
 * @return void
 */
public function upload_image() {
    $config = array(
        'allowed_types' => 'jpg|jpeg|gif|png',
        'upload_path' => './temp',
        'max_size' => 3072,
        'overwrite' => true,
    );
    $this->load->library('upload', $config);
    $this->upload->overwrite = true;
    $response['responseStatus'] = "Not OK";
    if (!$this->upload->do_upload()) {
        $response['responseStatus'] = "Your image could not be uploaded";
    } else {
        $data = $this->upload->data();
        //instantiate the class
        $s3 = new S3(awsAccessKey, awsSecretKey);
        $ext = pathinfo($data['full_path'], PATHINFO_EXTENSION);
        $imgName = (string) time() . "." . $ext;
        // Fix: the original built $input with S3::inputFile() and then ignored
        // it, reading the entire file into memory with file_get_contents().
        // Use the input descriptor so the library streams the file instead.
        $input = S3::inputFile($data['full_path'], FALSE);
        if ($s3->putObject($input, "tlahui-content", $imgName, S3::ACL_PUBLIC_READ)) {
            $response['responseStatus'] = "OK";
            $response['url'] = "https://s3.amazonaws.com/tlahui-content/" . $imgName;
        } else {
            $response['responseStatus'] = "Your image could not be uploaded";
        }
        // The temp file is no longer needed whether or not the upload succeeded
        // (both branches previously unlinked it separately).
        unlink($data['full_path']);
    }
    echo json_encode($response);
}
/**
 * Create a square version of an image via ImageMagick and optionally push it to S3.
 *
 * Images where one dimension is more than twice the other are padded (white
 * background) to a square; otherwise they are centre-cropped. The result is
 * then resized to $writeSize x $writeSize.
 *
 * @param string $filename  Output file name
 * @param string $readPath  Path to the source image
 * @param string $writePath Sub-directory under sf_image_dir for the output
 * @param int    $writeSize Final square edge length in pixels
 * @param bool   $upload    When TRUE and S3 is enabled, upload the result
 *
 * @return string|false Saved file path on success, FALSE on any failure
 */
static function createSquareFile($filename, $readPath, $writePath, $writeSize = 300, $upload = true)
{
    # make sure all inputs are clean
    if (!self::areClean(array($filename, $readPath, $writePath, $writeSize))) {
        return false;
    }
    if (!self::imageMagickInstalled()) {
        return false;
    }
    if (!($size = getimagesize($readPath))) {
        return false;
    }
    $savePath = sfConfig::get('sf_image_dir') . DIRECTORY_SEPARATOR . $writePath . DIRECTORY_SEPARATOR . $filename;
    if ($size[0] > $size[1] * 2 || $size[1] > $size[0] * 2) {
        # pad to square if one dimension is more than twice the other dimension
        exec(sfConfig::get('app_imagemagick_binary_path') . " {$readPath} -virtual-pixel background -background white -set option:distort:viewport \"%[fx:max(w,h)]x%[fx:max(w,h)]-%[fx:max((h-w)/2,0)]-%[fx:max((w-h)/2,0)]\" -filter point -distort SRT 0 +repage {$savePath}");
    } else {
        # otherwise, crop to square
        exec(sfConfig::get('app_imagemagick_binary_path') . " {$readPath} -virtual-pixel edge -set option:distort:viewport \"%[fx:min(w,h)]x%[fx:min(w,h)]+%[fx:max((w-h)/2,0)]+%[fx:max((h-w)/2,0)]\" -filter point -distort SRT 0 +repage {$savePath}");
    }
    # resize
    exec(sfConfig::get('app_imagemagick_binary_path') . " {$savePath} -resize {$writeSize}x{$writeSize} {$savePath}");
    # if s3 enabled, save to s3
    if ($upload && sfConfig::get('app_amazon_enable_s3')) {
        $s3 = new S3(sfConfig::get('app_amazon_access_key'), sfConfig::get('app_amazon_secret_key'));
        $input = $s3->inputResource($f = fopen($savePath, "rb"), $s = filesize($savePath));
        $uri = self::generateS3path($writePath, $filename);
        if (!S3::putObject($input, sfConfig::get('app_amazon_s3_bucket'), $uri, S3::ACL_PUBLIC_READ)) {
            return false;
        }
    }
    return $savePath;
}
/**
 * Persist raw content either to S3 (when the global file handler is 's3')
 * or to a local ./filestore/ directory.
 *
 * @param string $content Raw data to store
 * @param string $key     Storage key / relative file path
 * @param string $sec     "public"/"public-read" for public access; anything else is private
 *
 * @return boolean TRUE on success, FALSE otherwise
 */
function storecontenttofile($content, $key, $sec = "private") {
    if ($GLOBALS['filehandertype'] == 's3') {
        // Normalise the ACL string to the two values the S3 backend understands.
        $acl = ($sec == "public" || $sec == "public-read") ? "public-read" : "private";
        $s3 = new S3($GLOBALS['AWSkey'], $GLOBALS['AWSsecret']);
        return $s3->putObject($content, $GLOBALS['AWSbucket'], $key, $acl) ? true : false;
    }
    // Local filesystem fallback under <parent-dir>/filestore/.
    $base = rtrim(dirname(dirname(__FILE__)), '/\\') . '/filestore/';
    $dir = $base . dirname($key);
    $fn = basename($key);
    if (!is_dir($dir)) {
        mkdir_recursive($dir);
    }
    $fh = @fopen($dir . '/' . $fn, 'wb');
    if (!$fh) {
        return false;
    }
    fwrite($fh, $content);
    fclose($fh);
    return true;
}
/**
 * Uploads a file to S3 and returns its public URL.
 *
 * NOTE(review): the bucket is read from 'modules/s3/bucket' but the folder
 * from 'module/s3/folder' (singular "module") — looks like a typo; confirm
 * against the actual configuration keys before changing.
 * NOTE(review): the handle opened with fopen() is handed to S3::inputResource()
 * and never explicitly fclose()d here — presumably closed by the S3 library.
 *
 * @param string      $fileName Local path of the file to upload
 * @param string|null $mimeType Unused in this body; kept for interface compatibility
 * @param string|null $baseName Object base name; defaults to basename($fileName)
 *
 * @return string Public http URL of the uploaded object (undefined when fopen fails)
 * @throws RuntimeException When the module is unconfigured or the upload fails
 */
public static function moveFileToS3($fileName, $mimeType = null, $baseName = null)
{
    self::checkEnv($ctx = Context::last());
    $conf = $ctx->config->get('modules/s3');
    $s3 = new S3($conf['accesskey'], $conf['secretkey']);
    if (!($bucketName = trim($ctx->config->get('modules/s3/bucket', 'files'), '/'))) {
        throw new RuntimeException(t('Модуль s3 не настроен (bucket).'));
    }
    if ($folderName = $ctx->config->get('module/s3/folder', 'files')) {
        $folderName .= '/';
    }
    /*
    if (!in_array($bucketName, $s3->listBuckets()))
        throw new RuntimeException(t('Нет такой папки: ' . $bucketName));
    */
    if ($f = fopen($fileName, 'rb')) {
        if (null === $baseName) {
            $baseName = basename($fileName);
        }
        if (!($r = S3::inputResource($f, filesize($fileName)))) {
            throw new RuntimeException(t('Не удалось создать ресурс из файла %filename.', array('%filename' => $fileName)));
        }
        if (!($response = S3::putObject($r, $bucketName, $folderName . $baseName, S3::ACL_PUBLIC_READ))) {
            throw new RuntimeException(t('Не удалось загрузить файл %filename в папку %bucket.', array('%filename' => $fileName, '%bucket' => $bucketName)));
        }
        $url = 'http://' . $bucketName . '.s3.amazonaws.com/' . $folderName . $baseName;
        Logger::log('S3: ' . $url);
        return $url;
    }
}
/**
 * Upload a local file to the backup bucket as a private object.
 *
 * @param string $localpath  Path to the local file
 * @param string $remotepath Destination object key in the bucket
 *
 * @return bool TRUE when the upload succeeds, FALSE otherwise
 */
function create($localpath, $remotepath) {
    $resolved = realpath($localpath);
    $uploaded = S3::putObject(S3::inputFile($resolved), $this->backupBucket, $remotepath, S3::ACL_PRIVATE);
    return $uploaded ? TRUE : FALSE;
}
/**
 * Resize an image into <folder>/<key>.jpg under IMAGES_DIR and, when the
 * storage strategy is S3, mirror the resized file to the S3 bucket under
 * the same relative path.
 *
 * @param string $original Path to the source image
 * @param mixed  $size     Target size passed through to resize_image()
 * @param string $key      Base name (without extension) for the output file
 * @param string $folder   Sub-folder under IMAGES_DIR
 */
function save_sized_image($original, $key, $size, $folder) {
    $relative = $folder . "/" . $key . ".jpg";
    $destination = IMAGES_DIR . "/" . $relative;
    resize_image($original, $size, $destination);
    if (STORAGE_STRATEGY == 's3') {
        S3::putObject(S3::inputFile($destination), S3_BUCKET, $relative, S3::ACL_PUBLIC_READ);
    }
}
/**
 * CRUD controller: CREATE
 *
 * Accepts an uploaded image in $_FILES['image'], validates format and size,
 * stores it under images/cms/, and mirrors it to the configured S3 bucket
 * when S3 support is enabled. Responds with JSON: {'link': <url>} on success
 * or {'msg': <error>} on failure.
 */
public function action_create()
{
    $this->auto_render = FALSE;
    $this->template = View::factory('js');
    if (!isset($_FILES['image'])) {
        $this->template->content = json_encode('KO');
        return;
    }
    $image = $_FILES['image'];
    // Bootstrap the S3 client only when the feature is switched on.
    if (core::config('image.aws_s3_active')) {
        require_once Kohana::find_file('vendor', 'amazon-s3-php-class/S3', 'php');
        $s3 = new S3(core::config('image.aws_access_key'), core::config('image.aws_secret_key'));
    }
    if (!Upload::valid($image) or !Upload::not_empty($image) or !Upload::type($image, explode(',', core::config('image.allowed_formats'))) or !Upload::size($image, core::config('image.max_image_size') . 'M')) {
        if (Upload::not_empty($image) and !Upload::type($image, explode(',', core::config('image.allowed_formats')))) {
            $this->template->content = json_encode(array('msg' => $image['name'] . ' ' . sprintf(__('Is not valid format, please use one of this formats "%s"'), core::config('image.allowed_formats'))));
            return;
        }
        if (!Upload::size($image, core::config('image.max_image_size') . 'M')) {
            $this->template->content = json_encode(array('msg' => $image['name'] . ' ' . sprintf(__('Is not of valid size. Size is limited to %s MB per image'), core::config('image.max_image_size'))));
            return;
        }
        $this->template->content = json_encode(array('msg' => $image['name'] . ' ' . __('Image is not valid. Please try again.')));
        return;
    } elseif ($image != NULL) {
        // saving/uploading img file to dir.
        $path = 'images/cms/';
        $root = DOCROOT . $path; //root folder
        $image_name = URL::title(pathinfo($image['name'], PATHINFO_FILENAME));
        $image_name = Text::limit_chars(URL::title(pathinfo($image['name'], PATHINFO_FILENAME)), 200);
        $image_name = time() . '.' . $image_name;
        // if folder does not exist, try to make it
        if (!file_exists($root) and !@mkdir($root, 0775, true)) {
            // mkdir not successful ?
            $this->template->content = json_encode(array('msg' => __('Image folder is missing and cannot be created with mkdir. 
Please correct to be able to upload images.')));
            return; // exit function
        }
        // save file to root folder, file, name, dir
        if ($file = Upload::save($image, $image_name, $root)) {
            // put image to Amazon S3
            if (core::config('image.aws_s3_active')) {
                $s3->putObject($s3->inputFile($file), core::config('image.aws_s3_bucket'), $path . $image_name, S3::ACL_PUBLIC_READ);
            }
            $this->template->content = json_encode(array('link' => Core::config('general.base_url') . $path . $image_name));
            return;
        } else {
            $this->template->content = json_encode(array('msg' => $image['name'] . ' ' . __('Image file could not been saved.')));
            return;
        }
        $this->template->content = json_encode(array('msg' => $image['name'] . ' ' . __('Image is not valid. Please try again.')));
    }
}
/**
 * Put an object into an S3 bucket.
 *
 * When an "expires" interval is configured in the settings, a matching
 * Cache-Control: max-age header is attached to the stored object.
 *
 * @param $filePath    Local file path ('' / empty uploads an empty object)
 * @param $bucket      Target bucket name
 * @param $uriPath     Destination key in the bucket
 * @param $permissions ACL to apply
 *
 * @return bool
 */
protected function putObject($filePath, $bucket, $uriPath, $permissions)
{
    $object = empty($filePath) ? '' : array('file' => $filePath);
    $headers = array();
    if (!empty($object) && !empty($this->getSettings()->expires) && DateTimeHelper::isValidIntervalString($this->getSettings()->expires)) {
        // Translate the configured interval into a max-age in seconds.
        $expires = new DateTime();
        $now = new DateTime();
        $expires->modify('+' . $this->getSettings()->expires);
        $maxAge = $expires->format('U') - $now->format('U');
        $headers['Cache-Control'] = 'max-age=' . $maxAge . ', must-revalidate';
    }
    return $this->_s3->putObject($object, $bucket, $uriPath, $permissions, array(), $headers);
}
/**
 * Upload a local file to a Google Cloud Storage bucket through the
 * S3-compatible XML API endpoint (storage.googleapis.com).
 *
 * NOTE(review): credentials are hard-coded in source — they should be moved
 * to configuration/secret storage and the exposed key pair rotated.
 *
 * @param string $file       Local file to upload
 * @param string $bucket     Destination bucket
 * @param string $remoteFile Destination object name
 *
 * @return bool TRUE when the upload succeeds, FALSE otherwise
 */
function gsUpload($file, $bucket, $remoteFile) {
    $key = 'GOOGT4X7CFTWS2VWN2HT';
    $secret = 'SEWZTyKZH6dNbjbT2CHg5Q5pUh5Y5+iinj0yBFB4';
    $server = 'storage.googleapis.com';
    $s3 = new S3($key, $secret, false, $server);
    $uploaded = $s3->putObject($s3->inputFile($file, false), $bucket, $remoteFile, S3::ACL_PUBLIC_READ, array(), array());
    return $uploaded ? true : false;
}
/**
 * Upload an image from sf_image_dir/<type>/<filename> to the configured S3 bucket.
 *
 * @param string $type        Image type (sub-directory under sf_image_dir)
 * @param string $filename    File name to upload
 * @param bool   $check_first When TRUE, skip the upload if the object already exists
 * @param bool   $debug       When TRUE, print a message on upload failure
 */
function uploadFile($type, $filename, $check_first = true, $debug = false) {
    $localPath = sfConfig::get('sf_image_dir') . DIRECTORY_SEPARATOR . $type . DIRECTORY_SEPARATOR . $filename;
    $uri = ImageTable::generateS3path($type, $filename);
    // Fix: check for an existing object *before* opening the local file so the
    // handle is not leaked on the early return (the original opened it first).
    if ($check_first && $this->s3->getObjectInfo(sfConfig::get('app_amazon_s3_bucket'), $uri) !== false) {
        return;
    }
    $f = fopen($localPath, "rb");
    $input = $this->s3->inputResource($f, filesize($localPath));
    // Use the instance client consistently (the original mixed $this->s3 with
    // static S3:: calls).
    if ($this->s3->putObject($input, sfConfig::get('app_amazon_s3_bucket'), $uri, S3::ACL_PUBLIC_READ)) {
        print "UPLOADED: " . $uri . "\n";
    } else {
        if ($debug) {
            print "Couldn't upload image to S3: " . $uri . "\n";
        }
    }
}
/**
 * Upload a temporary reference file to the configured S3 bucket.
 *
 * @param string $source    Source identifier used to resolve the tmp path and the S3 key
 * @param bool   $overwrite When FALSE, skip objects already present in the bucket
 * @param bool   $debug     When TRUE, print a message on upload failure
 * @param bool   $localdir  Passed through to getTmpPath()
 */
function uploadTmpFile($source, $overwrite = false, $debug = false, $localdir = false) {
    $filePath = $this->getTmpPath($source, $localdir);
    $url = ReferenceTable::generateS3path($source);
    // Fix: check for an existing object *before* opening the local file so the
    // handle is not leaked on the early return (the original opened it first).
    if (!$overwrite && $this->s3->getObjectInfo(sfConfig::get('app_amazon_s3_bucket'), $url) !== false) {
        print "ALREADY UPLOADED: " . $url . "\n";
        return;
    }
    $f = fopen($filePath, "r");
    $input = $this->s3->inputResource($f, filesize($filePath));
    // Use the instance client consistently (the original mixed $this->s3 with
    // static S3:: calls).
    if ($this->s3->putObject($input, sfConfig::get('app_amazon_s3_bucket'), $url, S3::ACL_PUBLIC_READ)) {
        print "UPLOADED: " . $url . "\n";
    } else {
        if ($debug) {
            print "Couldn't upload reference to S3: " . $url . "\n";
        }
    }
}
/**
 * Mirror a docroot-relative file to the S3 mirror bucket.
 *
 * The object is stored under the path without its leading slash, so the file
 * becomes reachable at either of:
 *   http://MIRROR_S3_BUCKET.s3.amazonaws.com/put/the/filename/here.jpg
 *   http://s3.amazonaws.com/MIRROR_S3_BUCKET/put/the/filename/here.jpg
 *
 * @param string $filename Docroot-relative path (leading slash expected)
 */
function putFile($filename) {
    $s3svc = new S3();
    // Removing the first slash is important - otherwise the URL is different.
    // Fix: eregi_replace() was removed in PHP 7; preg_replace() is the
    // drop-in replacement for this pattern.
    $aws_filename = preg_replace('#^/#', '', $filename);
    $filename = $_SERVER['DOCUMENT_ROOT'] . $filename;
    $mime_type = NFilesystem::getMimeType($filename);
    // Read the file into memory (replaces the manual fopen/fread/fclose trio).
    $contents = file_get_contents($filename);
    $s3svc->putBucket(MIRROR_S3_BUCKET);
    $out = $s3svc->putObject($aws_filename, $contents, MIRROR_S3_BUCKET, 'public-read', $mime_type);
    unset($s3svc);
}
/**
 * Store a blob in the configured AWS bucket under a hashed, prefixed name.
 *
 * @param string $prefix    Key prefix inside the bucket
 * @param string $name      Original file name (hashed to build the store name)
 * @param string $sBlobData Raw blob contents
 * @param array  $headers   Extra request headers for the upload
 *
 * @return string The generated store name (S3 object key)
 */
function persist($prefix, $name, $sBlobData, $headers = array()) {
    //create a unique name for s3 store
    $storeName = $prefix . \com\indigloo\media\FileStore::getHashedName($name);
    $config = Config::getInstance();
    $bucket = $config->get_value("aws.bucket");
    $awsKey = $config->get_value("aws.key");
    $awsSecret = $config->get_value("aws.secret");
    if ($config->is_debug()) {
        $logger = Logger::getInstance();
        $logger->debug(" s3 bucket is => {$bucket}");
        $logger->debug(" original name => {$name}");
        $logger->debug(" file path is => {$storeName} ");
    }
    $s3 = new \S3($awsKey, $awsSecret, false);
    //$input, $bucket, $uri, $acl , $metaHeaders, $requestHeaders
    $s3->putObject($sBlobData, $bucket, $storeName, \S3::ACL_PUBLIC_READ, array(), $headers);
    return $storeName;
}
/**
 * Move a local file to S3 (bucket AWS_BUCKET . <s3 path>) and delete the
 * local copy on success.
 *
 * @param string $strPath     Local directory containing the file
 * @param string $strFileName File name to move
 * @param string $strType     MIME type for the stored object
 * @param string $strS3Path   Bucket-suffix path on S3
 *
 * @return bool TRUE when the file existed and was uploaded, FALSE otherwise
 */
function MoveToS3($strPath, $strFileName, $strType, $strS3Path) {
    // Fix: rtrim() returns the trimmed string; the original discarded the
    // result, so trailing slashes were never actually stripped.
    $strPath = rtrim($strPath, '/');
    $strS3Path = rtrim($strS3Path, '/');
    if (!file_exists($strPath . '/' . $strFileName)) {
        return false;
    }
    require_once __DOCROOT__ . __PHP_ASSETS__ . '/s3.class.php';
    $objS3 = new S3();
    $objS3->putBucket(AWS_BUCKET);
    $contents = file_get_contents($strPath . '/' . $strFileName);
    $objS3->putObject($strFileName, $contents, AWS_BUCKET . $strS3Path, 'public-read', $strType);
    unlink($strPath . '/' . $strFileName);
    unset($objS3);
    return true;
}
/**
 * Copy a local asset file to S3 unless an object of the same name already
 * exists in the bucket. Progress is echoed as HTML lines.
 *
 * @param string $file File name relative to ASSETS_PATH (also used as the S3 key)
 */
private function S3copy($file) {
    $s3 = new S3($this->AccessKey, $this->SecretKey);
    // Fix: the original listed the entire bucket (which the S3 API caps at
    // 1000 keys per request, giving false negatives on large buckets) just to
    // test a single key; getObjectInfo() checks that one object directly.
    if ($s3->getObjectInfo($this->AWSFolder, $file) !== false) {
        echo $file . " already in S3<br>" . "\r\n";
        return;
    }
    $put = $s3->putObject($s3->inputFile(ASSETS_PATH . DIRECTORY_SEPARATOR . $file), $this->AWSFolder, $file, S3::ACL_PRIVATE);
    if ($put) {
        echo $file . " transferred to S3<br>" . "\r\n";
    } else {
        echo $file . " unable to be transferred to S3<br>" . "\r\n";
    }
}
/**
 * Receive an uploaded file, push it to the configured S3 bucket under a
 * timestamped per-user key, record it in the files table, and emit the JS
 * snippet that refreshes the client-side file listing.
 *
 * @param string $path Route segment (unused in this body)
 */
function upload($path)
{
    $access = DB::get()->assoc("SELECT name, value FROM options WHERE grouping = 'Amazon Web Services'");
    $s3 = new S3($access['AWS Access Key ID'], $access['AWS Secret Access Key']);
    $bucketname = $access['S3 Bucket Name'];
    $filename = $_FILES['uploaded']['name'];
    $s3filename = $this->_safestring(Auth::user()->username) . '/' . date('YmdHis') . '/' . $filename;
    preg_match('%\\.(\\w+)$%', $filename, $matches);
    $filetype = $matches[1];
    // Force download semantics on the stored object via Content-Disposition.
    $s3->putObject(S3::inputFile($_FILES['uploaded']['tmp_name']), $bucketname, $s3filename, S3::ACL_PUBLIC_READ, array(), array("Content-Type" => "application/octet-stream", "Content-Disposition" => "attachment; filename=" . urlencode($filename) . ';'));
    //echo "Put {$filename} to {$bucketname} at {$s3filename}\n";
    $url = "http://{$bucketname}.s3.amazonaws.com/{$s3filename}";
    DB::get()->query("INSERT INTO files (user_id, filename, filesize, filetype, url) VALUES (:user_id, :filename, :filesize, :filetype, :url);", array('user_id' => Auth::user_id(), 'filename' => $filename, 'filesize' => $_FILES['uploaded']['size'], 'filetype' => $filetype, 'url' => $url));
    $filenumber = DB::get()->lastInsertId();
    echo <<<RELOAD_FILES
atbottom = isVisible(\$('#notices tr:last-child'));
\$('#filelisting').load('/files/filelist', function(){
\t\$('body').css('margin-bottom', \$('#command').height() + 15);
\tdo_scroll();
});
send('/file {$filenumber}');
RELOAD_FILES;
}
/**
 * Handle the upload/removal lifecycle of a file/image form field.
 *
 * Normalises API-mode uploads into the dataForm $_FILES layout, honours the
 * delete checkbox, saves a new upload via wp_upload_bits(), optionally
 * prepares a WP media-library attachment, resizes images, and optionally
 * mirrors the file to S3 (returning the S3 key instead of the local URL).
 *
 * @param string $Field     Field name
 * @param mixed  $Input     Submitted value (returned unchanged for multi fields)
 * @param string $FieldType 'multi', 'image', or another file type
 * @param array  $Config    Field/form configuration
 * @param array  $predata   Previously stored values keyed by field
 *
 * @return mixed New file URL/S3 key, previous value, or '' after deletion
 */
function file_handleInput($Field, $Input, $FieldType, $Config, $predata)
{
    // API mode delivers files under their own key; normalise into the
    // dataForm layout the rest of this function expects.
    if (strtolower($Config['Content']['_ViewMode']) == 'api') {
        if (!empty($_FILES[$Field]['size'])) {
            $_FILES['dataForm']['name'][$Config['ID']][$Field] = $_FILES[$Field]['name'];
            $_FILES['dataForm']['size'][$Config['ID']][$Field] = $_FILES[$Field]['size'];
            $_FILES['dataForm']['tmp_name'][$Config['ID']][$Field] = $_FILES[$Field]['tmp_name'];
        }
    }
    if ($FieldType == 'multi') {
        return $Input;
    }
    // Delete request: remove the stored file (path portion before any '?').
    if (!empty($_POST['deleteImage'][$Field])) {
        $FileInfo = explode('?', $predata[$Field]);
        if (file_exists($FileInfo[0])) {
            unlink($FileInfo[0]);
        }
        return '';
    }
    // No new upload: keep whatever was stored before.
    if (empty($_FILES['dataForm']['name'][$Config['ID']][$Field])) {
        return $predata[$Field];
    }
    // Create Directorys
    if (!empty($_FILES['dataForm']['size'][$Config['ID']][$Field])) {
        $path = wp_upload_dir();
        // set filename and paths
        $Ext = pathinfo($_FILES['dataForm']['name'][$Config['ID']][$Field]);
        $newFileName = uniqid($Config['ID'] . '_') . '.' . $Ext['extension'];
        $newLoc = $path['path'] . '/' . $newFileName;
        //$urlLoc = $path['url'].'/'.$newFileName;
        $GLOBALS['UploadedFile'][$Field] = $newLoc;
        $upload = wp_upload_bits($_FILES['dataForm']['name'][$Config['ID']][$Field], null, file_get_contents($_FILES['dataForm']['tmp_name'][$Config['ID']][$Field]));
        if (!empty($Config['Content']['_filesToLibrary'])) {
            global $user_ID;
            $type = wp_check_filetype($upload['file']);
            $new_post = array('post_title' => $Ext['filename'], 'post_status' => 'inherit', 'post_date' => date('Y-m-d H:i:s'), 'post_author' => $user_ID, 'post_type' => 'attachment', 'post_mime_type' => $type['type'], 'guid' => $upload['url']);
            // This should never be set as it would then overwrite an existing attachment.
            if (isset($attachment['ID'])) {
                unset($attachment['ID']);
            }
            // Save the data
            //$id = wp_insert_attachment($new_post, $upload['file']);
            //if ( !is_wp_error($id) ) {
            //    if(!function_exists('wp_generate_attachment_metadata')){
            //        require_once('includes/image.php');
            //    }
            //    wp_update_attachment_metadata( $id, wp_generate_attachment_metadata( $id, $upload['file'] ) );
            //}
        }
        //move_uploaded_file($_FILES['dataForm']['tmp_name'][$Config['ID']][$Field], $newLoc);
        //return $newLoc;
        if ($FieldType == 'image') {
            // 'auto' means unconstrained; image_resize() treats 0 as "no limit".
            $imageWidth = $Config['Content']['_ImageSizeX'][$Field] == 'auto' ? '0' : $Config['Content']['_ImageSizeX'][$Field];
            $imageHeight = $Config['Content']['_ImageSizeY'][$Field] == 'auto' ? '0' : $Config['Content']['_ImageSizeY'][$Field];
            $iconWidth = $Config['Content']['_IconSizeX'][$Field] == 'auto' ? '0' : $Config['Content']['_IconSizeX'][$Field];
            $iconHeight = $Config['Content']['_IconSizeY'][$Field] == 'auto' ? '0' : $Config['Content']['_IconSizeY'][$Field];
            // crunch sizes
            $image = image_resize($upload['file'], $imageWidth, $imageHeight, true);
            $icon = image_resize($upload['file'], $iconWidth, $iconHeight, true);
        }
        // Optional S3 mirror: upload under Y/m/<name>, delete the local copy,
        // and return the S3 key instead of the local URL.
        if (!empty($Config['Content']['_enableS3'][$Field]) && !empty($Config['Content']['_AWSAccessKey'][$Field]) && !empty($Config['Content']['_AWSSecretKey'][$Field])) {
            include_once DB_TOOLKIT . 'data_form/fieldtypes/file/s3.php';
            $s3 = new S3($Config['Content']['_AWSAccessKey'][$Field], $Config['Content']['_AWSSecretKey'][$Field]);
            $input = $s3->inputFile($upload['file']);
            $fileName = date('Y') . '/' . date('m') . '/' . $newFileName;
            if ($s3->putObject($input, $Config['Content']['_AWSBucket'][$Field], $fileName, 'public-read')) {
                unlink($upload['file']);
                return $fileName;
            }
        }
        return $upload['url'];
    }
}
/**
 * Saves an ad image (and its thumbnail) to disk and optionally to Amazon S3.
 *
 * The original is resized to the configured max dimensions (with optional
 * watermark), a thumbnail is generated and cropped to the configured size,
 * the temp file is removed, and has_images is incremented and persisted.
 *
 * @param string  $file Path to the uploaded temp image
 * @param integer $num  Number of the image (used in the file names)
 * @return bool success?
 */
public function save_image_file($file, $num = 0)
{
    if (core::config('image.aws_s3_active')) {
        require_once Kohana::find_file('vendor', 'amazon-s3-php-class/S3', 'php');
        $s3 = new S3(core::config('image.aws_access_key'), core::config('image.aws_secret_key'));
    }
    $path = $this->image_path();
    if ($path === FALSE) {
        Alert::set(Alert::ERROR, 'model\\ad.php:save_image(): ' . __('Image folder is missing and cannot be created with mkdir. Please correct to be able to upload images.'));
        return FALSE;
    }
    $directory = DOCROOT . $path;
    $image_quality = core::config('image.quality');
    $width = core::config('image.width');
    $width_thumb = core::config('image.width_thumb');
    $height_thumb = core::config('image.height_thumb');
    $height = core::config('image.height');
    if (!is_numeric($height)) {
        // when installing this field is empty, to avoid crash we check here
        $height = NULL;
    }
    if (!is_numeric($height_thumb)) {
        $height_thumb = NULL;
    }
    $filename_thumb = 'thumb_' . $this->seotitle . '_' . $num . '.jpg';
    $filename_original = $this->seotitle . '_' . $num . '.jpg';
    /* WATERMARK: compute overlay object and position when enabled */
    if (core::config('image.watermark') == TRUE and is_readable(core::config('image.watermark_path'))) {
        $mark = Image::factory(core::config('image.watermark_path')); // watermark image object
        $size_watermark = getimagesize(core::config('image.watermark_path')); // size of watermark
        if (core::config('image.watermark_position') == 0) {
            // centred
            $wm_left_x = $width / 2 - $size_watermark[0] / 2; // x axis , from left
            $wm_top_y = $height / 2 - $size_watermark[1] / 2; // y axis , from top
        } elseif (core::config('image.watermark_position') == 1) {
            // bottom centre
            $wm_left_x = $width / 2 - $size_watermark[0] / 2;
            $wm_top_y = $height - 10;
        } elseif (core::config('image.watermark_position') == 2) {
            // top centre
            $wm_left_x = $width / 2 - $size_watermark[0] / 2;
            $wm_top_y = 10;
        }
    }
    /* end WATERMARK variables */
    //if original image is bigger that our constants we resize
    try {
        $image_size_orig = getimagesize($file);
    } catch (Exception $e) {
        return FALSE;
    }
    if ($image_size_orig[0] > $width || $image_size_orig[1] > $height) {
        if (core::config('image.watermark') and is_readable(core::config('image.watermark_path'))) {
            Image::factory($file)->orientate()->resize($width, $height, Image::AUTO)->watermark($mark, $wm_left_x, $wm_top_y)->save($directory . $filename_original, $image_quality);
        } else {
            Image::factory($file)->orientate()->resize($width, $height, Image::AUTO)->save($directory . $filename_original, $image_quality);
        }
    } else {
        if (core::config('image.watermark') and is_readable(core::config('image.watermark_path'))) {
            Image::factory($file)->orientate()->watermark($mark, $wm_left_x, $wm_top_y)->save($directory . $filename_original, $image_quality);
        } else {
            Image::factory($file)->orientate()->save($directory . $filename_original, $image_quality);
        }
    }
    //creating the thumb and resizing using the the biggest side INVERSE
    Image::factory($file)->orientate()->resize($width_thumb, $height_thumb, Image::INVERSE)->save($directory . $filename_thumb, $image_quality);
    //check if the height or width of the thumb is bigger than default then crop
    if ($height_thumb !== NULL) {
        $image_size_orig = getimagesize($directory . $filename_thumb);
        if ($image_size_orig[1] > $height_thumb || $image_size_orig[0] > $width_thumb) {
            Image::factory($directory . $filename_thumb)->crop($width_thumb, $height_thumb)->save($directory . $filename_thumb);
        }
    }
    // put image and thumb to Amazon S3
    if (core::config('image.aws_s3_active')) {
        $s3->putObject($s3->inputFile($directory . $filename_original), core::config('image.aws_s3_bucket'), $path . $filename_original, S3::ACL_PUBLIC_READ);
        $s3->putObject($s3->inputFile($directory . $filename_thumb), core::config('image.aws_s3_bucket'), $path . $filename_thumb, S3::ACL_PUBLIC_READ);
    }
    // Delete the temporary file
    @unlink($file);
    $this->has_images++;
    try {
        $this->save();
        return TRUE;
    } catch (Exception $e) {
        return FALSE;
    }
}
/**
 * save_image — validate an uploaded product image, store a resized original
 * plus thumbnail under the product's image path, and optionally mirror both
 * files to Amazon S3.
 *
 * @param array $image Uploaded image entry ($_FILES-style array)
 * @return bool|void TRUE when saved; on validation failure an Alert is set
 *                   and its return value is propagated
 */
public function save_image($image)
{
    if (core::config('image.aws_s3_active')) {
        require_once Kohana::find_file('vendor', 'amazon-s3-php-class/S3', 'php');
        $s3 = new S3(core::config('image.aws_access_key'), core::config('image.aws_secret_key'));
    }
    if (!Upload::valid($image) or !Upload::not_empty($image) or !Upload::type($image, explode(',', core::config('image.allowed_formats'))) or !Upload::size($image, core::config('image.max_image_size') . 'M')) {
        if (Upload::not_empty($image) && !Upload::type($image, explode(',', core::config('image.allowed_formats')))) {
            return Alert::set(Alert::ALERT, $image['name'] . ': ' . sprintf(__('This uploaded image is not of a valid format. Please use one of these formats: %s'), core::config('image.allowed_formats')));
        }
        if (!Upload::size($image, core::config('image.max_image_size') . 'M')) {
            return Alert::set(Alert::ALERT, $image['name'] . ': ' . sprintf(__("This uploaded image exceeds the allowable limit. Uploaded images cannot be larger than %s MB per image"), core::config('image.max_image_size')));
        }
    }
    if ($image !== NULL) {
        $id = $this->id_product;
        $seotitle = $this->seotitle;
        $obj_product = new self($id);
        if ($obj_product->loaded()) {
            $created = $obj_product->created;
        } else {
            $created = NULL;
        }
        $path = $this->image_path($id, $created);
        $directory = DOCROOT . $path;
        $image_quality = core::config('image.quality');
        $width = core::config('image.width');
        $width_thumb = core::config('image.width_thumb');
        $height_thumb = core::config('image.height_thumb');
        $height = core::config('image.height');
        if (!is_numeric($height)) {
            // when installing this field is empty, to avoid crash we check here
            $height = NULL;
        }
        if (!is_numeric($height_thumb)) {
            $height_thumb = NULL;
        }
        // how many files are saved
        $counter = $this->has_images > 0 ? $this->has_images + 1 : 1;
        if ($file = Upload::save($image, NULL, $directory)) {
            $filename_thumb = 'thumb_' . $seotitle . '_' . $counter . '.jpg';
            $filename_original = $seotitle . '_' . $counter . '.jpg';
            //if original image is bigger that our constants we resize
            $image_size_orig = getimagesize($file);
            if ($image_size_orig[0] > $width || $image_size_orig[1] > $height) {
                Image::factory($file)->orientate()->resize($width, $height, Image::AUTO)->save($directory . $filename_original, $image_quality);
            } else {
                Image::factory($file)->orientate()->save($directory . $filename_original, $image_quality);
            }
            //creating the thumb and resizing using the the biggest side INVERSE
            Image::factory($directory . $filename_original)->resize($width_thumb, $height_thumb, Image::INVERSE)->save($directory . $filename_thumb, $image_quality);
            //check if the height or width of the thumb is bigger than default then crop
            if ($height_thumb !== NULL) {
                $image_size_orig = getimagesize($directory . $filename_thumb);
                if ($image_size_orig[1] > $height_thumb || $image_size_orig[0] > $width_thumb) {
                    Image::factory($directory . $filename_thumb)->crop($width_thumb, $height_thumb)->save($directory . $filename_thumb);
                }
            }
            if (core::config('image.aws_s3_active')) {
                // put image to Amazon S3
                $s3->putObject($s3->inputFile($directory . $filename_original), core::config('image.aws_s3_bucket'), $path . $filename_original, S3::ACL_PUBLIC_READ);
                // put thumb to Amazon S3
                $s3->putObject($s3->inputFile($directory . $filename_thumb), core::config('image.aws_s3_bucket'), $path . $filename_thumb, S3::ACL_PUBLIC_READ);
            }
            // Delete the temporary file
            @unlink($file);
            return TRUE;
        }
    }
}
/**
 * Publish to AWS S3 bucket
 *
 * Recursively walks the archive directory and uploads every regular file to
 * the given bucket with a public-read ACL, preserving relative paths.
 *
 * @param string $bucket                Target S3 bucket
 * @param string $aws_access_key_id     AWS access key ID
 * @param string $aws_secret_access_key AWS secret access key
 *
 * @return boolean|WP_Error TRUE on success, WP_Error on the first failed upload
 */
public function publish_to_s3($bucket, $aws_access_key_id, $aws_secret_access_key) {
    $directory_iterator = new RecursiveDirectoryIterator($this->archive_dir, RecursiveDirectoryIterator::SKIP_DOTS);
    $recursive_iterator = new RecursiveIteratorIterator($directory_iterator, RecursiveIteratorIterator::SELF_FIRST);
    // Make the S3 library throw instead of returning false so failures are catchable.
    S3::$useExceptions = true;
    $s3 = new S3($aws_access_key_id, $aws_secret_access_key, false, 's3-eu-west-1.amazonaws.com');
    foreach ($recursive_iterator as $item) {
        if (!$item->isDir()) {
            $path = $recursive_iterator->getSubPathName();
            try {
                $s3->putObject(S3::inputFile($item->getRealPath()), $bucket, $path, S3::ACL_PUBLIC_READ);
            } catch (Exception $err) {
                // Fix: "catch (any $err)" is not valid PHP, and the sprintf()
                // arguments were misaligned (the format string has two %s but
                // only $path was supplied, with $err passed into __()).
                return new WP_Error('cannot_publish_to_s3', sprintf(__("Could not publish file to S3: %s: %s", $this->slug), $path, $err->getMessage()));
            }
        }
    }
    return true;
}
/**
 * Fetch/normalise an asset, write it atomically into the cache directory,
 * and optionally push the cached file to an S3 bucket with long-lived
 * cache headers.
 *
 * @param array  $params       src / full_src / cache_src / s3bucket /
 *                             s3assetname / cssfile_content entries
 * @param string $content_type Content-Type to set on the S3 object
 *
 * @return bool|void FALSE when the temp file cannot be written
 */
function cache_me($params, $content_type) {
    global $PREFS, $TMPL;
    // Set URL constants
    if (is_readable($params['full_src']) || strstr($params['full_src'], 'http://') || strstr($params['full_src'], 'https://')) {
        $original = file_get_contents($params['full_src']);
    } else {
        $file_url = "http";
        if (isset($_SERVER["HTTPS"]) && $_SERVER["HTTPS"] == "on") {
            $file_url .= "s";
        }
        $file_url .= "://www." . $_SERVER['HTTP_HOST'] . $params['src'];
        // Fix: the fetched contents were discarded, leaving $original
        // undefined when this branch ran, so fwrite() below wrote nothing.
        $original = file_get_contents($file_url);
    }
    $filename = basename($params['src']);
    $cache_structure = pathinfo($params['cache_src']);
    $cache_dirname = $cache_structure['dirname'];
    $cache_basename = $cache_structure['basename'];
    $cache_filename = $cache_structure['filename'];
    // Create Cache Dir if it does not exist
    if (!is_dir($cache_dirname)) {
        if (!mkdir($cache_dirname, 0777, true)) {
            error_log("I did not write the cache dir");
        }
    }
    if (!defined("FILE_PUT_CONTENTS_ATOMIC_TEMP")) {
        define("FILE_PUT_CONTENTS_ATOMIC_TEMP", $cache_dirname);
    }
    if (!defined("FILE_PUT_CONTENTS_ATOMIC_MODE")) {
        define("FILE_PUT_CONTENTS_ATOMIC_MODE", 0777);
    }
    if (!defined("FILE_PUT_CONTENTS_ATOMIC_OWN")) {
        define("FILE_PUT_CONTENTS_ATOMIC_OWN", 'deploy');
    }
    // Write to a temp file first, then rename over the target (atomic-ish write).
    $temp = tempnam(FILE_PUT_CONTENTS_ATOMIC_TEMP, 'temp');
    if (!($f = @fopen($temp, 'wb'))) {
        $temp = FILE_PUT_CONTENTS_ATOMIC_TEMP . DIRECTORY_SEPARATOR . uniqid('temp');
        if (!($f = @fopen($temp, 'wb'))) {
            trigger_error("file_put_contents_atomic() : error writing temporary file '{$temp}'", E_USER_WARNING);
            return false;
        }
    }
    // Check to see if its a parsed CSS file, if so write the parsed data
    if (!empty($params['cssfile_content'])) {
        fwrite($f, $params['cssfile_content']);
    } else {
        fwrite($f, $original);
    }
    fclose($f);
    if (!@rename($temp, $params['cache_src'])) {
        @unlink($params['cache_src']);
        @rename($temp, $params['cache_src']);
    }
    //AWS access info - Make sure to add this to your config.php file
    $s3assetsConfig = $PREFS->core_ini['s3assets'];
    if (isset($s3assetsConfig['user'])) {
        if (!defined("FILE_PUT_CONTENTS_ATOMIC_OWN")) {
            define("FILE_PUT_CONTENTS_ATOMIC_OWN", $s3assetsConfig['user']);
        }
        chown($params['cache_src'], FILE_PUT_CONTENTS_ATOMIC_OWN);
    }
    chmod($params['cache_src'], FILE_PUT_CONTENTS_ATOMIC_MODE);
    // Initiate S3 class and upload the file
    if ($params['s3bucket'] != "") {
        if (!class_exists('S3')) {
            require_once 'pi.s3assets/S3.php';
        }
        $awsAccessKey = $s3assetsConfig['awsAccessKey'];
        $awsSecretKey = $s3assetsConfig['awsSecretKey'];
        if (!defined('awsAccessKey')) {
            define('awsAccessKey', $awsAccessKey);
        }
        if (!defined('awsSecretKey')) {
            define('awsSecretKey', $awsSecretKey);
        }
        $s3 = new S3(awsAccessKey, awsSecretKey, false);
        if (isset($params['s3assetname'])) {
            $src = preg_replace("/^\\//", "", $params['s3assetname']);
        } else {
            $src = preg_replace("/^\\//", "", $params['src']);
        }
        S3::putObject(S3::inputFile($params['cache_src']), $params['s3bucket'], $src, S3::ACL_PUBLIC_READ, array(), array("Content-Type" => $content_type, "Cache-Control" => "max-age=315360000", "Expires" => gmdate("D, d M Y H:i:s T", strtotime("+5 years"))));
    }
}
} file_put_contents($tmpfile, $data); } else { $tmpfile = $_FILES['file']['tmp_name']; } $v->filesize = filesize($tmpfile); $v->signature = sign_file($tmpfile, $app->sparkle_pkey); if (!$v->url) { $object = strtolower(preg_replace('/[^a-zA-Z0-9]/', '', $app->name)) . "_" . $v->version_number . "." . substr($_FILES['file']['name'], -3); if ($app->s3bucket && $app->s3path) { $v->url = slash($app->s3path) . $object; $info = parse_url($app->s3path); $object = slash($info['path']) . $object; chmod($tmpfile, 0755); $s3 = new S3($app->s3key, $app->s3pkey); $s3->putObject($app->s3bucket, $object, $tmpfile, true); } else { die("Configure your Amazon S3 account or modify version-new.php file."); /* $v->url = '/Users/dirk/work/wordpress/shine/' . $object; copy($_FILES['file']['tmp_name'], '/Users/dirk/work/wordpress/shine/' . $object); */ } } else { // Cleanup download unlink($tmpfile); } $v->insert(); redirect('versions.php?id=' . $app->id); } else { $version_number = $_POST['version_number'];
/**
 * Verify Amazon S3 credentials by uploading (and then deleting) a tiny test file.
 *
 * Creates the bucket if it does not already exist. Note: $ssl is a required
 * parameter that follows an optional one; the signature is kept unchanged
 * for backward compatibility with existing callers.
 *
 * @param string $accesskey AWS access key ID
 * @param string $secretkey AWS secret access key
 * @param string $bucket    Bucket name to test against
 * @param string $directory Optional prefix inside the bucket, without trailing slash
 * @param string $ssl       '1' to use SSL; any other value disables it
 * @return boolean|string   TRUE on full success, otherwise an error message string
 */
function test_s3($accesskey, $secretkey, $bucket, $directory = '', $ssl)
{
    if (empty($accesskey) || empty($secretkey) || empty($bucket)) {
        return 'Missing one or more required fields.';
    }
    require_once dirname(__FILE__) . '/lib/s3/s3.php';
    $s3 = new S3($accesskey, $secretkey);
    if ($ssl != '1') {
        S3::$useSSL = false;
    }
    // getBucketLocation() returning FALSE is a cheap way to see if the bucket exists.
    if ($s3->getBucketLocation($bucket) === false) {
        $s3->putBucket($bucket, S3::ACL_PUBLIC_READ);
    }
    if (!empty($directory)) {
        $directory = $directory . '/';
    }
    // Single key used for BOTH upload and cleanup.
    // BUG FIX: the delete previously used "$directory . '/backupbuddy.txt'",
    // which never matched the uploaded key (leading or doubled slash), so the
    // temp file was never removed and the function always reported
    // "Partial success" even when the account was fine.
    $testKey = $directory . 'backupbuddy.txt';
    if (!$s3->putObject('Upload test for BackupBuddy for Amazon S3', $bucket, $testKey, S3::ACL_PRIVATE)) {
        return 'Unable to upload. Verify your keys, bucket name, and account permissions.';
    }
    if (!S3::deleteObject($bucket, $testKey)) {
        return 'Partial success. Could not delete temp file.';
    }
    return true; // Success!
}
/**
 * Store a local file in the Zotero S3 bucket and register it.
 *
 * @param Zotero_StorageFileInfo $info        Storage metadata (hash, zip flag, filename)
 * @param string                 $file        Absolute path of the local file to upload
 * @param string                 $contentType MIME type sent as the object's Content-Type header
 * @return mixed Result of self::addFile() on success, FALSE when the S3 upload fails
 * @throws Exception When $file does not exist on disk
 */
public static function uploadFile(Zotero_StorageFileInfo $info, $file, $contentType) {
    Zotero_S3::requireLibrary();
    S3::setAuth(Z_CONFIG::$S3_ACCESS_KEY, Z_CONFIG::$S3_SECRET_KEY);

    if (!file_exists($file)) {
        throw new Exception("File '{$file}' does not exist");
    }

    // Object key: hash/zip-based prefix followed by the original filename.
    $objectKey = self::getPathPrefix($info->hash, $info->zip) . $info->filename;
    $uploaded = S3::putObject(
        S3::inputFile($file),
        Z_CONFIG::$S3_BUCKET,
        $objectKey,
        S3::ACL_PRIVATE,
        array(),
        array("Content-Type" => $contentType)
    );

    return $uploaded ? self::addFile($info) : false;
}
/**
 * push a file to a location on S3 based on a path from the local server that this plugin
 * is running on.
 * @param string $filePathToUpload - absolute path to local file that needs to be uploaded
 * @param string $locationOnS3 - path the file should have on S3 relative to the current bucket
 * @param string $permission - the access permissions the file should have, defaults to Public Read Acess
 * @param string $mimeType - set the mime type of the object in S3, defaults to autodetect
 * @return mixed - returns an array with details of the uploaded file on S3 for success, FALSE on failure
 */
public function putObject($filePathToUpload, $locationOnS3, $permission = self::ACL_PUBLIC_READ, $mimeType = null)
{
    // S3::putObject() accepts its request-headers argument either as an array
    // or as a bare content-type string; NULL (what was previously passed when
    // $mimeType was omitted) is not a documented value, so fall back to the
    // library default of an empty array to let S3 autodetect the type.
    $requestHeaders = ($mimeType !== null) ? $mimeType : array();

    // BUG FIX: the upload result was previously ignored, so this method could
    // never return FALSE as its documented contract promises.
    $uploaded = S3::putObject(
        S3::inputFile($filePathToUpload),
        $this->bucket,
        $locationOnS3,
        $permission,
        array(),
        $requestHeaders
    );
    if ($uploaded === false) {
        return false;
    }

    $info = $this->getObjectInfo($locationOnS3);
    return array(
        'name' => basename($locationOnS3),
        'url'  => $this->buildUrlToFile($locationOnS3),
        'size' => $info['size'],
    );
}
/**
 * Upload (or delete) the icon image for a category.
 *
 * Reads the uploaded file from $_FILES['category_icon'], validates it
 * (valid upload, non-empty, allowed format, size limit), saves it as
 * <seoname>.png under images/categories/ and, when S3 is enabled in config,
 * also pushes it to the configured S3 bucket. Every outcome ends in a
 * redirect back to the category update page (or the index page when no
 * file was posted at all).
 */
public function action_icon()
{
    //get icon
    if (isset($_FILES['category_icon'])) {
        $icon = $_FILES['category_icon'];
    } else {
        // Nothing was posted: bounce back to the listing.
        $this->redirect(Route::get($this->_route_name)->uri(array('controller' => Request::current()->controller(), 'action' => 'index')));
    }
    $category = new Model_Category($this->request->param('id'));
    // Lazily load the S3 client only when S3 storage is switched on in config.
    if (core::config('image.aws_s3_active')) {
        require_once Kohana::find_file('vendor', 'amazon-s3-php-class/S3', 'php');
        $s3 = new S3(core::config('image.aws_access_key'), core::config('image.aws_secret_key'));
    }
    // Deletion requested via the icon_delete form field.
    if (core::post('icon_delete') and $category->delete_icon() == TRUE) {
        Alert::set(Alert::SUCCESS, __('Icon deleted.'));
        $this->redirect(Route::get($this->_route_name)->uri(array('controller' => Request::current()->controller(), 'action' => 'update', 'id' => $category->id_category)));
    }
    // end of icon delete
    // Validation: real upload, non-empty, allowed format, within configured size.
    if (!Upload::valid($icon) or !Upload::not_empty($icon) or !Upload::type($icon, explode(',', core::config('image.allowed_formats'))) or !Upload::size($icon, core::config('image.max_image_size') . 'M')) {
        if (Upload::not_empty($icon) && !Upload::type($icon, explode(',', core::config('image.allowed_formats')))) {
            Alert::set(Alert::ALERT, $icon['name'] . ' ' . sprintf(__('Is not valid format, please use one of this formats "%s"'), core::config('image.allowed_formats')));
            $this->redirect(Route::get($this->_route_name)->uri(array('controller' => Request::current()->controller(), 'action' => 'update', 'id' => $category->id_category)));
        }
        if (!Upload::size($icon, core::config('image.max_image_size') . 'M')) {
            Alert::set(Alert::ALERT, $icon['name'] . ' ' . sprintf(__('Is not of valid size. Size is limited to %s MB per image'), core::config('image.max_image_size')));
            $this->redirect(Route::get($this->_route_name)->uri(array('controller' => Request::current()->controller(), 'action' => 'update', 'id' => $category->id_category)));
        }
        // Generic fallback message. NOTE: the translation string contains a
        // literal line break, preserved byte-for-byte below.
        Alert::set(Alert::ALERT, $icon['name'] . ' ' . __('Image is not valid. 
Please try again.'));
        $this->redirect(Route::get($this->_route_name)->uri(array('controller' => Request::current()->controller(), 'action' => 'update', 'id' => $category->id_category)));
    } else {
        if ($icon != NULL) {
            // saving/uploading img file to dir.
            $path = 'images/categories/';
            $root = DOCROOT . $path; //root folder
            $icon_name = $category->seoname . '.png';
            // if folder does not exist, try to make it
            if (!file_exists($root) and !@mkdir($root, 0775, true)) {
                // mkdir not successful ?
                Alert::set(Alert::ERROR, __('Image folder is missing and cannot be created with mkdir. Please correct to be able to upload images.'));
                return; // exit function
            }
            // save file to root folder, file, name, dir
            if ($file = Upload::save($icon, $icon_name, $root)) {
                // put icon to Amazon S3
                if (core::config('image.aws_s3_active')) {
                    $s3->putObject($s3->inputFile($file), core::config('image.aws_s3_bucket'), $path . $icon_name, S3::ACL_PUBLIC_READ);
                }
                // update category info
                $category->has_image = 1;
                $category->last_modified = Date::unix2mysql();
                $category->save();
                Alert::set(Alert::SUCCESS, $icon['name'] . ' ' . __('Icon is uploaded.'));
            } else {
                Alert::set(Alert::ERROR, $icon['name'] . ' ' . __('Icon file could not been saved.'));
            }
            $this->redirect(Route::get($this->_route_name)->uri(array('controller' => Request::current()->controller(), 'action' => 'update', 'id' => $category->id_category)));
        }
    }
}
/**
 * Method to upload file to S3.
 * This method also deletes the old files from S3.
 *
 * Credentials default to the behavior-level keys but can be overridden
 * per field via the 's3_access_key' / 's3_secret_key' settings.
 *
 * @param object $model Object of current model
 * @return boolean TRUE when every file was uploaded, FALSE on the first failure
 */
function __uploadToS3(&$model) {
    App::import('Vendor', 'S3', array('file' => 'S3.php'));

    // Run a loop on all files to be uploaded to S3
    foreach ($this->files as $field => $file) {
        // Hoist the per-field settings lookup used throughout the iteration.
        $fieldSettings = $this->settings[$model->name][$field];

        // Default credentials, overridable per field/file.
        $accessKey = $this->__accessKey;
        $secretKey = $this->__secretKey;
        if (!empty($fieldSettings['s3_access_key']) && !empty($fieldSettings['s3_secret_key'])) {
            $accessKey = $fieldSettings['s3_access_key'];
            $secretKey = $fieldSettings['s3_secret_key'];
        }

        // Instantiate the client with whichever credentials apply to this field.
        $aws = new S3($accessKey, $secretKey);

        // If there is an old file to be removed, delete it first.
        if (!empty($file['old_filename'])) {
            $aws->deleteObject($fieldSettings['s3_bucket'], $file['old_filename']);
        }

        // BUG FIX: fopen() was previously passed to inputResource() unchecked;
        // a missing/unreadable temp file now fails cleanly with a model error
        // instead of emitting a warning and handing FALSE to the S3 library.
        $handle = fopen($file['tmp_name'], 'rb');
        if ($handle === false) {
            $model->invalidate($fieldSettings['formfield'], 's3_upload_error');
            return false;
        }

        // Put the object on S3.
        $isUploaded = $aws->putObject(
            $aws->inputResource($handle, filesize($file['tmp_name'])),
            $fieldSettings['s3_bucket'],
            $file['name'],
            $fieldSettings['s3_acl'],
            $fieldSettings['s3_meta_headers'],
            $fieldSettings['s3_request_headers']
        );

        // If S3 upload failed then set the model error (strict check replaces '==').
        if ($isUploaded === false) {
            $model->invalidate($fieldSettings['formfield'], 's3_upload_error');
            return false;
        }

        // Set the field values to be saved in table.
        $model->data[$model->name][$field] = $file['name'];
    }
    return true;
}
/**
 * Upload a single file (plus, if needed, its containing "folder" marker) to Amazon S3.
 *
 * Reads 'AccessKeyId', 'SecretAccessKey', 'bucket', 'dir' and 'file' from the
 * command context. On failure the exception message is stored on the context.
 *
 * @param WPAdm_Command_Context $context Command parameters / error sink
 * @return boolean TRUE on success, FALSE when the upload threw an exception
 */
public function execute(WPAdm_Command_Context $context)
{
    require_once WPAdm_Core::getPluginDir() . '/modules/S3.php';

    $s3 = new S3($context->get('AccessKeyId'), $context->get('SecretAccessKey'));

    $dir = $context->get('dir') ? $context->get('dir') : '';
    if ($dir) {
        // S3 has no real directories: write a placeholder object whose key
        // ends in "/" so the prefix shows up as a folder in listings.
        $logs = $s3->putObject($dir, $context->get('bucket'), $dir . "/", S3::ACL_PUBLIC_READ);
        WPAdm_Core::log('create folder logs ' . serialize($logs));
    }
    try {
        $filePath = preg_replace('#[/\\\\]+#', '/', $context->get('file'));
        $key = $dir ? $dir . '/' . basename($filePath) : basename($filePath);
        // A key with a leading '/' makes the request report success without
        // actually storing the object, so strip it.
        $key = ltrim(preg_replace('#[/\\\\]+#', '/', $key), '/');
        $putRes = $s3->putObjectFile($filePath, $context->get('bucket'), $key, S3::ACL_PUBLIC_READ_WRITE);
        WPAdm_Core::log('putObjectFile ' . $filePath . ' == ' . $context->get('bucket') . " == " . $key . ' == ' . (int) $putRes);
    } catch (S3Exception $e) {
        // BUG FIX: this catch was previously listed AFTER catch(Exception);
        // since S3Exception extends Exception, it was unreachable dead code.
        // The more specific handler must come first so S3 errors are logged.
        WPAdm_Core::log('error send file ' . $e->getMessage());
        $context->setError($e->getMessage());
        return false;
    } catch (Exception $e) {
        $context->setError($e->getMessage());
        return false;
    }
    return true;
}