/**
 * Uploads a local file to the configured S3 bucket with public-read ACL.
 *
 * @param string      $file local path of the file to upload
 * @param string|null $name object key; when empty it is derived from $file
 *                          relative to PHPFOX_DIR, with forward slashes
 * @return mixed result of the underlying putObjectFile() call
 */
public function put($file, $name = null)
{
    if (empty($name)) {
        // Build the object key from the path relative to the install dir.
        $relative = str_replace(PHPFOX_DIR, '', $file);
        $name = str_replace("\\", '/', $relative);
    }

    return $this->_obj->putObjectFile($file, $this->_bucket, $name, \S3::ACL_PUBLIC_READ);
}
/**
 * Handles a resume upload posted via the "submitbtn" form and pushes the
 * file to the 'sublite-resumes' S3 bucket with a timestamp-based name.
 *
 * @return string|null public S3 URL on success, a user-facing error message
 *                     on failure, or null when the form was not submitted
 *                     or the 'userFile' field is absent.
 */
function upload()
{
    $error = "test";
    if (isset($_POST['submitbtn'])) {
        if (array_key_exists('userFile', $_FILES)) {
            if ($_FILES['userFile']['error'] === UPLOAD_ERR_OK) {
                $filename = $_FILES["userFile"]["name"];
                // FIX: lowercase the extension so "PDF" is accepted like "pdf".
                $ext = strtolower(pathinfo($filename, PATHINFO_EXTENSION));
                $allowed = array("doc", "docx", "rtf", "pdf", "txt", "odf");
                $fname = $_FILES["userFile"]["tmp_name"];
                // Make sure extension matches (strict comparison).
                if (in_array($ext, $allowed, true)) {
                    if ($_FILES['userFile']['size'] < 2097152) { // 2 MB cap
                        $bucket = 'sublite-resumes'; // Can use existing configs when merging with sublite
                        // SECURITY NOTE(review): AWS credentials are hard-coded in
                        // source; move them to configuration and rotate the keys.
                        $s3 = new S3("AKIAI7IVRJCSAWFTTS7Q", "B0qzRQJ1KlLy+STC2BspwT9oZONjt+U6sRNqaRr5");
                        $s3->putBucket($bucket, S3::ACL_PUBLIC_READ);
                        $actual_image_name = time() . '.' . $ext;
                        if ($s3->putObjectFile($fname, $bucket, $actual_image_name, S3::ACL_PUBLIC_READ)) {
                            return 'http://' . $bucket . '.s3.amazonaws.com/' . $actual_image_name;
                        }
                        return "An unknown error occurred during upload!";
                    } else {
                        $error = "Max file size exceeded!";
                    }
                } else {
                    $error = "Bad file extension!";
                }
            } elseif ($_FILES['userFile']['error'] === UPLOAD_ERR_FORM_SIZE) {
                $error = "Max file size exceeded!";
            } elseif ($_FILES['userFile']['error'] === UPLOAD_ERR_NO_FILE) {
                $error = "You must choose a file!";
            } else {
                $error = "An unknown error occurred during upload!";
            }
            return $error;
        }
    }
}
/**
 * Deletes this campaign: cascades to its house ads, removes its image from
 * S3 (when cURL is available), then runs the parent delete and invalidates
 * the cache entry.
 */
public function delete()
{
    // Cascade-delete any house ads attached to this record first.
    $appHouseAds = AppHouseAdUtil::getAppHouseAdsByCid($this->id);
    foreach ($appHouseAds as $appHouseAd) {
        $appHouseAd->delete();
    }
    if (!empty($this->imageLink)) {
        // The S3 client needs cURL. FIX: the original also called @dl(),
        // which is deprecated/removed in modern PHP; since dl() always fails
        // when the extension is already loaded, the original condition was
        // equivalent to this plain extension check.
        if (extension_loaded('curl')) {
            $s3 = new S3(HouseAd::$HOUSEAD_AWS_KEY, HouseAd::$HOUSEAD_AWS_SECRET);
            $s3->deleteObject(HouseAd::$HOUSEAD_BUCKET, basename($this->imageLink));
        }
    }
    parent::delete();
    CacheUtil::invalidateCustom($this->id);
}
/**
 * Scans the wikiphoto S3 bucket for document files (extensions listed in
 * self::$docExts), pulls each matching file down for processing, then
 * refreshes the display names.
 */
private function listS3Docs()
{
    $s3 = new S3(WH_AWS_WIKIPHOTO_ACCESS_KEY, WH_AWS_WIKIPHOTO_SECRET_KEY);
    $bucket_name = self::AWS_BUCKET;
    // List the entire bucket: no prefix, marker, key limit, or delimiter.
    // FIX: the original declared these as named locals and also assigned
    // $prefix/$files inside the loop without ever using them; the dead
    // locals are removed.
    $buckets = $s3->getBucket($bucket_name, null, null, null, null, false);
    if (!self::$quiet) {
        print "number of buckets: " . count($buckets) . "\n";
    }
    foreach ($buckets as $path => $details) {
        // Match keys of the form: doc_folder/doc_file.ending
        if (!preg_match('@^(.*)/(.*)\\.(.*)$@i', $path, $m)) {
            continue;
        }
        list(, $doc_folder, $doc_file, $ending) = $m;
        // Validate extension.
        if (!in_array($ending, self::$docExts)) {
            continue;
        }
        // NOTE(review): $err/$stageDir are collected but never inspected here.
        list($err, $stageDir) = self::pullFiles($s3, $doc_folder, $doc_file, $ending);
    }
    // Now process the display names.
    self::processDisplayNames($s3);
}
/**
 * Accepts an image upload (jpg/jpeg/gif/png, max 3 MB) via the CodeIgniter
 * upload library, pushes it to the "tlahui-content" S3 bucket under a
 * timestamp-based name, removes the local temp file, and echoes a JSON
 * response with the status and (on success) the public URL.
 */
public function upload_image()
{
    $config = array(
        'allowed_types' => 'jpg|jpeg|gif|png',
        'upload_path' => './temp',
        'max_size' => 3072,
        'overwrite' => true,
    );
    $this->load->library('upload', $config);
    $this->upload->overwrite = true;
    $response['responseStatus'] = "Not OK";
    if (!$this->upload->do_upload()) {
        $response['responseStatus'] = "Your image could not be uploaded";
    } else {
        $data = $this->upload->data();
        // Instantiate the S3 client.
        $s3 = new S3(awsAccessKey, awsSecretKey);
        $ext = pathinfo($data['full_path'], PATHINFO_EXTENSION);
        $imgName = (string) time() . "." . $ext;
        // FIX: the original also built an S3::inputFile() handle that was
        // never used (the upload passes raw bytes); the dead call is removed.
        if ($s3->putObject(file_get_contents($data['full_path']), "tlahui-content", $imgName, S3::ACL_PUBLIC_READ)) {
            $response['responseStatus'] = "OK";
            $response['url'] = "https://s3.amazonaws.com/tlahui-content/" . $imgName;
        } else {
            $response['responseStatus'] = "Your image could not be uploaded";
        }
        // The local temp file is no longer needed either way.
        unlink($data['full_path']);
    }
    echo json_encode($response);
}
/**
 * Walks image objects in the wikiphoto S3 bucket and pulls matching user
 * uploads — keys shaped like username/1234.zip or username/1234/&lt;file&gt; —
 * down for processing via pullFiles().
 */
private function listS3Images()
{
    $s3 = new S3(WH_AWS_WIKIPHOTO_ACCESS_KEY, WH_AWS_WIKIPHOTO_SECRET_KEY);
    $bucketName = self::AWS_BUCKET;
    // Listing parameters: no prefix/marker/delimiter, capped at one key.
    $listPrefix = null;
    $listMarker = null;
    $listMaxKeys = 1;
    $listDelimiter = null;
    $listCommonPrefixes = false;
    $objects = $s3->getBucket($bucketName, $listPrefix, $listMarker, $listMaxKeys, $listDelimiter, $listCommonPrefixes);
    print "number of buckets: " . count($objects) . "\n";
    foreach ($objects as $key => $details) {
        // Match keys of the form: username/(1234.zip or 1234/*.jpg)
        if (!preg_match('@^([a-z][-._0-9a-z]{0,30})/([0-9]+)(\\.zip|/.+)$@i', $key, $match)) {
            continue;
        }
        list(, $user, $id, $ending) = $match;
        $id = intval($id);
        if (!$id) {
            continue;
        }
        $prefix = $user . '/' . $id;
        $files = array($ending);
        // NOTE(review): $err/$stageDir are collected but not inspected here.
        list($err, $stageDir) = self::pullFiles($id, $s3, $prefix, $files);
    }
}
/**
 * Gravity Forms hook: copies a form-uploaded file from the local uploads
 * directory to S3 with public-read ACL.
 *
 * @param array $entry submitted entry; GFORM_UPLOAD_FIELD_ID holds the file URL
 * @param mixed $form  form object (unused, required by the hook signature)
 * @return bool|void true on success; returns nothing when no file was
 *                   uploaded; dies with a message when the S3 upload fails
 */
function grav_submit_to_s3($entry, $form)
{
    // No file? No problem.
    if (empty($entry[GFORM_UPLOAD_FIELD_ID])) {
        return;
    }
    $gfs3 = new S3(awsAccessKey, awsSecretKey);
    // URL of the uploaded file.
    $file_url = $entry[GFORM_UPLOAD_FIELD_ID];
    // Original client-side filename. FIX: strip any path components
    // unconditionally — the old `if (is_dir($file_name))` guard tested a
    // filename against the local filesystem, which never made sense and
    // could let a client-supplied path through un-basenamed.
    $file_name = basename($_FILES['input_' . GFORM_UPLOAD_FIELD_ID]['name']);
    // Ensure the bucket is there.
    $gfs3->putBucket(BUCKET_NAME, S3::ACL_AUTHENTICATED_READ);
    // Resolve the local path of the already-uploaded file from its URL.
    $url_parts = parse_url($file_url);
    $full_path = $_SERVER['DOCUMENT_ROOT'] . substr($url_parts['path'], 1);
    // Full object key on S3.
    $filename_to_s3 = UPLOAD_PATH . sanitize_file_name($file_name);
    if ($gfs3->putObjectFile($full_path, BUCKET_NAME, $filename_to_s3, S3::ACL_PUBLIC_READ)) {
        return true; // upload success
    }
    wp_die('It looks like something went wrong while uploading your file. Please try again in a few moments.');
}
/**
 * Get S3 bucket contents (from cache if possible).
 *
 * @return array|null bucket listing keyed by object name, or null when the
 *                    bucket could not be accessed
 */
private function getBucketContents()
{
    $cacheFile = $this->cacheDir . '/s3browser-' . $this->s3Bucket;
    $contents = null;
    // Get from cache if still fresh. FIX: use filemtime() (last content
    // write) instead of filectime() — ctime also changes on metadata
    // operations (chmod/chown/rename) and is not "time written" on POSIX,
    // so it is the wrong basis for a cache-age check.
    if ($this->cacheDuration && file_exists($cacheFile)) {
        $cacheAge = time() - filemtime($cacheFile);
        if ($cacheAge < $this->cacheDuration) {
            $contents = unserialize(file_get_contents($cacheFile));
        }
    }
    // Hit S3 if we didn't have anything cached.
    if (!$contents) {
        $s3 = new S3($this->s3AccessKey, $this->s3SecretKey, $this->s3useSSL, $this->s3endPoint);
        $contents = $s3->getBucket($this->s3Bucket);
        // We weren't able to access the bucket.
        if (!is_array($contents)) {
            return null;
        }
        // Save if caching is enabled.
        if ($this->cacheDuration) {
            file_put_contents($cacheFile, serialize($contents));
        }
    }
    return $contents;
}
/**
 * Uploads a local file to an S3 bucket under $s3Path/$filename with
 * public-read access.
 *
 * @param string $localFilepath local path of the file to push
 * @param string $s3Path        target directory inside the bucket
 *                              (trailing slash added when missing)
 * @param string $filename      object name to save as — callers should pass
 *                              something unique per upload
 * @param string $bucket        target bucket name
 * @return int 1 on success, 0 on failure
 */
function upload($localFilepath = 'path/to/file.jpg', $s3Path = 'tmp/', $filename = 'filename.jpg', $bucket = 'idc_files')
{
    // Import vendor file and instantiate the S3 class.
    App::import('Vendor', 'S3', array('file' => 'S3' . DS . 's3.php'));
    $s3 = new S3(AWS_ACCESS_KEY, AWS_SECRET_KEY); // defined in bootstrap.php
    // Ensure the target path ends with a slash.
    if (substr($s3Path, -1, 1) != '/') {
        $s3Path .= '/';
    }
    // Intended object key — buckets are like the C: drive.
    $intendedPath = $s3Path . $filename;
    // Put our file (also with public read access).
    // FIX: the trailing `exit;` after both return paths was unreachable
    // dead code and has been removed.
    if ($s3->putObjectFile($localFilepath, $bucket, $intendedPath, S3::ACL_PUBLIC_READ)) {
        return 1;
    }
    return 0;
}
/**
 * Crops or pads an image to a square, resizes it, saves it under the
 * configured image dir, and optionally mirrors it to S3.
 *
 * @param string $filename  output file name
 * @param string $readPath  source image path
 * @param string $writePath sub-directory (under sf_image_dir) to write into
 * @param int    $writeSize final square edge length in pixels
 * @param bool   $upload    also push the result to S3 when enabled in config
 * @return string|false the saved local path, or false on any failure
 */
static function createSquareFile($filename, $readPath, $writePath, $writeSize = 300, $upload = true)
{
    // Make sure all inputs are clean.
    if (!self::areClean(array($filename, $readPath, $writePath, $writeSize))) {
        return false;
    }
    if (!self::imageMagickInstalled()) {
        return false;
    }
    if (!($size = getimagesize($readPath))) {
        return false;
    }
    $savePath = sfConfig::get('sf_image_dir') . DIRECTORY_SEPARATOR . $writePath . DIRECTORY_SEPARATOR . $filename;
    // SECURITY/robustness FIX: shell-escape the paths. Beyond areClean(),
    // unescaped paths containing spaces or shell metacharacters would break
    // (or abuse) the ImageMagick command line built below.
    $src = escapeshellarg($readPath);
    $dst = escapeshellarg($savePath);
    if ($size[0] > $size[1] * 2 || $size[1] > $size[0] * 2) {
        // Pad to square if one dimension is more than twice the other.
        exec(sfConfig::get('app_imagemagick_binary_path') . " {$src} -virtual-pixel background -background white -set option:distort:viewport \"%[fx:max(w,h)]x%[fx:max(w,h)]-%[fx:max((h-w)/2,0)]-%[fx:max((w-h)/2,0)]\" -filter point -distort SRT 0 +repage {$dst}");
    } else {
        // Otherwise, crop to square.
        exec(sfConfig::get('app_imagemagick_binary_path') . " {$src} -virtual-pixel edge -set option:distort:viewport \"%[fx:min(w,h)]x%[fx:min(w,h)]+%[fx:max((w-h)/2,0)]+%[fx:max((h-w)/2,0)]\" -filter point -distort SRT 0 +repage {$dst}");
    }
    // Resize to the requested square size (coerce to int for the command).
    exec(sfConfig::get('app_imagemagick_binary_path') . " {$dst} -resize " . (int) $writeSize . "x" . (int) $writeSize . " {$dst}");
    // If S3 is enabled, mirror the result there.
    if ($upload && sfConfig::get('app_amazon_enable_s3')) {
        $s3 = new S3(sfConfig::get('app_amazon_access_key'), sfConfig::get('app_amazon_secret_key'));
        $input = $s3->inputResource($f = fopen($savePath, "rb"), $s = filesize($savePath));
        $uri = self::generateS3path($writePath, $filename);
        if (!S3::putObject($input, sfConfig::get('app_amazon_s3_bucket'), $uri, S3::ACL_PUBLIC_READ)) {
            return false;
        }
    }
    return $savePath;
}
/**
 * Demo action: loads a remote test image, rotates it 50 degrees, writes
 * the result to S3 via plainWrite(), and dumps the result for debugging.
 */
public function image()
{
    $picture = new Image('http://sinastorage.com/sandbox/test.jpg');
    $picture->rotate(50);
    $storage = new S3();
    $writeResult = $storage->plainWrite(
        'test.jpg',
        $picture->getContent(),
        $picture->getSize(),
        $picture->getMimeType()
    );
    Common::debug($writeResult);
}
/**
 * Deletes a mirrored file from the S3 bucket.
 *
 * @param string $filename object path/key, possibly with a leading slash
 * @return void
 */
function deleteFile($filename)
{
    $s3svc = new S3();
    // Removing the first slash is important - otherwise the URL is different.
    // FIX: eregi_replace() was removed in PHP 7; preg_replace does the same.
    $aws_filename = preg_replace('@^/@', '', $filename);
    // FIX: S3::deleteObject() takes ($bucket, $uri) — the original call had
    // the arguments reversed, so deletes targeted the wrong bucket/key.
    $s3svc->deleteObject(MIRROR_S3_BUCKET, $aws_filename);
    unset($s3svc);
}
/**
 * Returns a fresh S3 instance built from the given configuration.
 *
 * @param array $cfg expects 'accesskey' and 'secretkey'; 'endpoint' optional
 * @return S3
 */
private function _getS3($cfg)
{
    $client = new S3($cfg['accesskey'], $cfg['secretkey']);
    if (!empty($cfg['endpoint'])) {
        $client->setEndpoint($cfg['endpoint']);
    }
    return $client;
}
// Maintenance job: copies every image referenced by a main-namespace wiki
// page from its current S3 location into $this->AWS_S3_BUCKET, paging
// through the image/imagelinks/page join 10k rows at a time.
public function execute()
{
    // NOTE(review): placeholder credentials/bucket — must be replaced with
    // real values (or pulled from config) before this job can run.
    $this->AWS_ACCESS_KEY = 'YOUR_AWS_ACCESS_KEY';
    $this->AWS_SECRET_KEY = 'YOUR_AWS_SECRET_KEY';
    $this->AWS_S3_BUCKET = 'YOUR_AWS_S3_BUCKET';
    $this->AWS_S3_PUBLIC = true;
    $this->AWS_S3_SSL = false;
    $s3 = new S3();
    $s3->setAuth($this->AWS_ACCESS_KEY, $this->AWS_SECRET_KEY);
    $s3->useSSL = $this->AWS_S3_SSL;
    // In my situation the images were in two different S3 buckets already. It will search these locations to try and find it.
    // Your images are probably local already, so you may need to modify the code further down to work with local directories.
    //$s3Buckets = array(...);
    // NOTE(review): $s3Buckets is iterated below but its initialisation is
    // commented out above — as written the copy loop never executes; the
    // source bucket list must be restored before running.
    $dbw = wfGetDB(DB_MASTER);
    $counter = 0;
    $iIncrement = 10000;
    // Page through the join in fixed-size chunks until a query fails.
    for ($i = 0;; $i += $iIncrement) {
        $res = $dbw->select(array('image', 'imagelinks', 'page'), array('image.img_name', 'image.img_path', 'page.page_title'), 'image.img_name = imagelinks.il_to and imagelinks.il_from = page.page_id and page.page_namespace = 0 limit ' . $i . ', ' . $iIncrement, array());
        if (!$res) {
            // NOTE(review): single-quoted string — the escape sequence is
            // printed literally rather than as a newline.
            echo 'No for rows.\\n';
            exit;
        }
        $logoPath = '';
        foreach ($res as $row) {
            echo "counter:{$counter}\n";
            echo "i:{$i}\n";
            ++$counter;
            // Skip rows missing either the image name or its source path.
            if (!$row->img_name || !$row->img_path) {
                continue;
            }
            echo 'img_name:' . $row->img_name . "\n";
            echo 'img_path:' . $row->img_path . "\n";
            echo 'page_title:' . $row->page_title . "\n";
            $file = wfFindFile($row->img_name, array());
            if ($file) {
                // Destination key: the file URL with the bucket prefix stripped.
                $path = $file->getFullUrl();
                $path = str_replace('http://s3.amazonaws.com/' . $this->AWS_S3_BUCKET . '/', '', $path);
                echo "path:{$path}\n";
                // If you have images that are already stored locally, you will need to modify this section. Instead of an S3::copyObject you
                // may need to use the S3::putObject method to upload your local copy.
                foreach ($s3Buckets as $s3Bucket) {
                    // Try each candidate source bucket until a copy succeeds.
                    if ($s3->copyObject($s3Bucket, $row->img_path, $this->AWS_S3_BUCKET, $path, $this->AWS_S3_PUBLIC ? S3::ACL_PUBLIC_READ : S3::ACL_PRIVATE)) {
                        echo 'SUCCESS:' . $row->img_name . "\n";
                        break;
                    } else {
                        echo 'ERROR1:' . $row->img_name . "\n";
                    }
                }
            } else {
                echo 'ERROR2:' . $row->img_name . "\n";
            }
            echo "\n";
        }
    }
}
/**
 * Downloads a backup file from S3 into the local BackupBuddy backups
 * directory, randomizing the name when a same-named file already exists.
 *
 * @param string $s3file    object file name on S3
 * @param string $accesskey AWS access key
 * @param string $secretkey AWS secret key
 * @param string $bucket    S3 bucket name
 * @param string $directory key prefix inside the bucket
 */
function process_s3_copy($s3file, $accesskey, $secretkey, $bucket, $directory)
{
    pb_backupbuddy::set_greedy_script_limits();
    require_once pb_backupbuddy::plugin_path() . '/lib/s3/s3.php';
    $client = new S3($accesskey, $secretkey);
    $localPath = ABSPATH . 'wp-content/uploads/backupbuddy_backups/' . $s3file;
    // Avoid clobbering an existing backup: give the copy a randomized name.
    if (file_exists($localPath)) {
        $localPath = str_replace('backup-', 'backup_copy_' . pb_backupbuddy::random_string(5) . '-', $localPath);
    }
    $client->getObject($bucket, $directory . $s3file, $localPath);
}
/**
 * Refreshes a user's profile picture: downloads their Facebook profile
 * image, uploads it to S3 under profile.images/<userId>/pi_<userId>.png
 * with public-read ACL, and persists the resulting URL on the user record.
 * Falls back to USER_DEFAULT_IMAGE_URL when the download yields nothing.
 *
 * @param mixed $userId id of the game user to update
 * @return FunctionResult success flag; on failure, result holds an error
 *                        trace or the JSON-encoded S3 response
 */
public static function updateUserImage($userId)
{
    $result = new FunctionResult();
    $result->success = false;
    if (!empty($userId)) {
        $user = GameUsers::getGameUserById($userId);
        if (!empty($user)) {
            $tmpImgPath = UserProfileImageUtils::downloadFBProfileImage($user->facebookId);
            $profileImageUrl = null;
            if (!empty($tmpImgPath)) {
                try {
                    $s3 = new S3(AWS_API_KEY, AWS_SECRET_KEY);
                    $s3->setEndpoint(AWS_S3_ENDPOINT);
                    $imageName = "pi_" . $userId . ".png";
                    $s3Name = "profile.images/" . $userId . "/" . $imageName;
                    $res = $s3->putObjectFile($tmpImgPath, AWS_S3_BUCKET, $s3Name, S3::ACL_PUBLIC_READ);
                    if ($res) {
                        $profileImageUrl = 'http://' . AWS_S3_BUCKET . '.s3.amazonaws.com/' . $s3Name;
                        // Best-effort cleanup of the downloaded temp file.
                        try {
                            unlink($tmpImgPath);
                        } catch (Exception $exc) {
                            error_log($exc->getTraceAsString());
                        }
                    } else {
                        // Upload refused: keep the raw response for the caller.
                        $result->result = json_encode($res);
                    }
                } catch (Exception $exc) {
                    $result->result = $exc->getTraceAsString();
                }
            } else {
                // No Facebook image available — use the default avatar.
                $profileImageUrl = USER_DEFAULT_IMAGE_URL;
            }
            if (!empty($profileImageUrl)) {
                $user->setProfilePicture($profileImageUrl);
                try {
                    $user->updateToDatabase(DBUtils::getConnection());
                    $result->success = true;
                    $result->result = null;
                } catch (Exception $exc) {
                    $result->result = $exc->getTraceAsString();
                }
            }
        } else {
            $result->result = "user not found";
        }
    } else {
        $result->result = "user id empty";
    }
    return $result;
}
/**
 * Uploads a local file to a Google Cloud Storage bucket through the
 * S3-compatible XML API (storage.googleapis.com) with public-read ACL.
 *
 * SECURITY NOTE(review): these interoperability credentials are hard-coded
 * in source; move them to configuration and rotate the keys.
 *
 * @param string $file       local file path
 * @param string $bucket     target bucket
 * @param string $remoteFile object key to store under
 * @return bool true on success, false otherwise
 */
function gsUpload($file, $bucket, $remoteFile)
{
    $key = 'GOOGT4X7CFTWS2VWN2HT';
    $secret = 'SEWZTyKZH6dNbjbT2CHg5Q5pUh5Y5+iinj0yBFB4';
    $server = 'storage.googleapis.com';
    $s3 = new S3($key, $secret, false, $server);
    // No extra meta or request headers are needed for a plain upload.
    $metaHeaders = array();
    $requestHeaders = array();
    $uploaded = $s3->putObject(
        $s3->inputFile($file, false),
        $bucket,
        $remoteFile,
        S3::ACL_PUBLIC_READ,
        $metaHeaders,
        $requestHeaders
    );
    return $uploaded ? true : false;
}
/**
 * Uploads a file to the S3 storage bucket.
 *
 * @param mixed  $media_file_or_id entity or integer id
 * @param string $filepath         local path or http(s) URL of the source
 * @param string $custom_filename  optional name to store the file under
 * @param mixed  $media_work       entity (optional)
 * @param string $extra_dir        extra directory component (optional)
 * @return mixed stored location ('https://s3.amazonaws.com/BUCKET_NAME/...rest of path')
 *               or false on failure
 */
public static function store_media($media_file_or_id, $filepath, $custom_filename = '', $media_work = null, $extra_dir = '')
{
    $client = self::_get_client();
    if ($client) {
        $filename = $custom_filename ? $custom_filename : basename($filepath);
        if ($storage_path = parent::get_path($media_file_or_id, $filename, $media_work, $extra_dir)) {
            // Use the custom filename instead of the one generated by get_path().
            if ($custom_filename) {
                $storage_path = str_replace(basename($storage_path), $custom_filename, $storage_path);
            }
            if (strpos($filepath, "http") === 0) {
                // Source is already remote — just report where it lives.
                return S3_BASE_URL . S3_BUCKET_NAME . '/' . $storage_path;
            }
            // Stream the local file up as an S3 input resource.
            $fp = fopen($filepath, 'r');
            $filesize = filesize($filepath);
            $input = S3::inputResource($fp, $filesize);
            if ($client->putObject($input, S3_BUCKET_NAME, $storage_path, S3::ACL_PUBLIC_READ, array(), self::_get_content_type($media_file_or_id, $filename))) {
                return S3_BASE_URL . S3_BUCKET_NAME . '/' . $storage_path;
            }
            trigger_error('Upload to S3 failed.');
        } else {
            trigger_error('An invalid storage path was created.');
        }
    } else {
        trigger_error('Could not create client object for interacting with S3 service.');
    }
    return false;
}
/**
 * Uploads a local file to S3 and returns its public URL.
 *
 * @param string      $fileName local path of the file to upload
 * @param string|null $mimeType unused here (kept for interface compatibility)
 * @param string|null $baseName object base name; defaults to basename($fileName)
 * @return string|void public http URL of the uploaded object; returns
 *                     nothing when the file cannot be opened
 * @throws RuntimeException when config is incomplete or the upload fails
 */
public static function moveFileToS3($fileName, $mimeType = null, $baseName = null)
{
    self::checkEnv($ctx = Context::last());
    $conf = $ctx->config->get('modules/s3');
    $s3 = new S3($conf['accesskey'], $conf['secretkey']);
    if (!($bucketName = trim($ctx->config->get('modules/s3/bucket', 'files'), '/'))) {
        throw new RuntimeException(t('Модуль s3 не настроен (bucket).'));
    }
    // FIX: the config path was 'module/s3/folder' (singular "module"),
    // inconsistent with every other 'modules/s3/...' key used above, so a
    // configured folder was silently ignored and the default used instead.
    if ($folderName = $ctx->config->get('modules/s3/folder', 'files')) {
        $folderName .= '/';
    }
    if ($f = fopen($fileName, 'rb')) {
        if (null === $baseName) {
            $baseName = basename($fileName);
        }
        if (!($r = S3::inputResource($f, filesize($fileName)))) {
            throw new RuntimeException(t('Не удалось создать ресурс из файла %filename.', array('%filename' => $fileName)));
        }
        if (!($response = S3::putObject($r, $bucketName, $folderName . $baseName, S3::ACL_PUBLIC_READ))) {
            throw new RuntimeException(t('Не удалось загрузить файл %filename в папку %bucket.', array('%filename' => $fileName, '%bucket' => $bucketName)));
        }
        // Object was stored with public-read ACL, so the virtual-host URL works.
        $url = 'http://' . $bucketName . '.s3.amazonaws.com/' . $folderName . $baseName;
        Logger::log('S3: ' . $url);
        return $url;
    }
}
/**
 * Pulls the AWS credentials from the CodeIgniter config and hands them
 * to the parent S3 constructor with SSL enabled.
 */
public function __construct()
{
    $ci =& get_instance();
    parent::__construct(
        $ci->config->item('awsAccessKey'),
        $ci->config->item('awsSecretKey'),
        true
    );
}
/**
 * Execute the controller: receives an uploaded file, stores it in the
 * "windspeaker" S3 bucket with public-read ACL, and responds with the
 * public URL as JSON. On any failure, responds with a JSON error instead.
 *
 * @return mixed Return executed result.
 *
 * @throws \LogicException
 * @throws \RuntimeException
 */
public function execute()
{
    $files = $this->input->files;
    $field = $this->input->get('field', 'file');
    $type = $this->input->get('type', 'post');
    try {
        $src = $files->getByPath($field . '.tmp_name', null, InputFilter::STRING);
        $name = $files->getByPath($field . '.name', null, InputFilter::STRING);
        if (!$src) {
            throw new \Exception('File not upload');
        }
        $dest = $this->getDest($name, $type);
        $s3 = new \S3($this->app->get('amazon.access_key'), $this->app->get('amazon.secret_key'));
        $uploaded = $s3::putObject(\S3::inputFile($src, false), 'windspeaker', $dest, \S3::ACL_PUBLIC_READ);
        if (!$uploaded) {
            throw new \Exception('Upload fail.');
        }
    } catch (\Exception $e) {
        // Report the failure as JSON and stop.
        $errorResponse = new Response();
        $errorResponse->setBody(json_encode(['error' => $e->getMessage()]));
        $errorResponse->setMimeType('text/json');
        $errorResponse->respond();
        exit;
    }
    // Success: both keys point at the public object URL.
    $return = new Registry();
    $return['filename'] = 'https://windspeaker.s3.amazonaws.com/' . $dest;
    $return['file'] = 'https://windspeaker.s3.amazonaws.com/' . $dest;
    $response = new Response();
    $response->setBody((string) $return);
    $response->setMimeType('text/json');
    $response->respond();
    exit;
}
/**
 * Creates bucket
 *
 * @param string $container_id
 * @param string $error receives a human-readable message on failure
 * @return boolean true when the bucket was created
 */
function create_container(&$container_id, &$error)
{
    if (!$this->_init($error)) {
        return false;
    }
    $this->_set_error_handler();
    // Listing buckets both validates credentials and lets us detect clashes.
    $buckets = @$this->_s3->listBuckets();
    if ($buckets === false) {
        $error = sprintf('Unable to list buckets (%s).', $this->_get_last_error());
        $this->_restore_error_handler();
        return false;
    }
    if (in_array($this->_config['bucket'], (array) $buckets)) {
        $error = sprintf('Bucket already exists: %s.', $this->_config['bucket']);
        $this->_restore_error_handler();
        return false;
    }
    // Fall back to a private ACL / US location when not configured.
    if (empty($this->_config['bucket_acl'])) {
        $this->_config['bucket_acl'] = S3::ACL_PRIVATE;
    }
    if (!isset($this->_config['bucket_location'])) {
        $this->_config['bucket_location'] = S3::LOCATION_US;
    }
    $created = @$this->_s3->putBucket($this->_config['bucket'], $this->_config['bucket_acl'], $this->_config['bucket_location']);
    if (!$created) {
        $error = sprintf('Unable to create bucket: %s (%s).', $this->_config['bucket'], $this->_get_last_error());
        $this->_restore_error_handler();
        return false;
    }
    $this->_restore_error_handler();
    return true;
}
/**
 * Uploads the configured file to the Amazon S3 bucket with public-read ACL.
 *
 * @return bool true on success
 * @throws Teamlab_Batch_Exception           when the S3 client cannot be created
 * @throws Teamlab_Batch_Transport_Exception when the upload fails
 */
public function upload()
{
    if (!empty($this->_upload_dir)) {
        // FIX: the original appended `$this->_bucket_name . '/' . $dir` via
        // `.=`, which doubled the bucket name (e.g. "bucketbucket/dir");
        // only the upload dir should be appended.
        $this->_bucket_name .= '/' . $this->_upload_dir;
    }
    // Create the Amazon S3 client instance.
    $s3 = new S3($this->_access_key, $this->_secret_key);
    if (!$s3) {
        throw new Teamlab_Batch_Exception(sprintf("AWS接続に失敗しました。: %s", $this->_bucket_name));
    }
    // Upload the file.
    if ($s3->putObjectFile($this->_upload_file, $this->_bucket_name, baseName($this->_upload_file), S3::ACL_PUBLIC_READ)) {
        return true;
    }
    throw new Teamlab_Batch_Transport_Exception(sprintf("ファイルのアップロードに失敗しました。サーバ情報:%s ファイル名:%s", $this->_bucket_name, baseName($this->_upload_file)));
}
/**
 * Pushes a batch of uploaded photo files to S3 under
 * <directory>/photo/<id>/<key>_<file_name> with public-read ACL.
 *
 * NOTE(review): $config['upload_path'] and $directory are read below but
 * are never defined in this method — both are undefined here, so the local
 * source path and the remote key are built incorrectly. Confirm where
 * these values were meant to come from before relying on this method.
 *
 * @param mixed $id    record id used in the remote key
 * @param array $files map of key => upload-info array containing 'file_name'
 * @throws Exception when the 's3' config item is missing
 */
protected function _upToS3($id, array $files)
{
    $s3 = $this->config->item('s3');
    if (!$s3) {
        throw new Exception("Error Processing Request", 1);
    }
    $this->load->library('S3');
    $s3 = new S3();
    $bucket = $this->config->item('bucket');
    foreach ($files as $key => $value) {
        if ($s3->putObjectFile($config['upload_path'] . DIRECTORY_SEPARATOR . $key . '_' . $value['file_name'], $bucket, $directory . DIRECTORY_SEPARATOR . 'photo' . DIRECTORY_SEPARATOR . $id . DIRECTORY_SEPARATOR . $key . '_' . $value['file_name'], S3::ACL_PUBLIC_READ)) {
            //echo "We successfully uploaded your file.";
        } else {
            //echo "Something went wrong while uploading your file... sorry.";
        }
    }
}
/**
 * Serves a stored file: looks it up by the ?file= query id, builds a
 * one-hour authenticated S3 URL for it, and redirects the browser there
 * as an attachment download. Does nothing when the id is unknown.
 *
 * @param string $path route path parameter (unused)
 */
function get($path)
{
    $fileId = intval($_GET['file']);
    $record = DB::get()->row('SELECT * FROM files WHERE id = :id', array('id' => $fileId));
    if ($record) {
        header('Content-Type: application/octet-stream');
        header('Content-Disposition: attachment; filename=' . urlencode($record->filename) . ';');
        // Recover the bucket and object key from the stored object URL.
        $parts = parse_url($record->url);
        $bucket = str_replace('.s3.amazonaws.com', '', $parts['host']);
        $access = DB::get()->assoc("SELECT name, value FROM options WHERE grouping = 'Amazon Web Services'");
        $s3 = new S3($access['AWS Access Key ID'], $access['AWS Secret Access Key']);
        // Redirect to a 3600-second signed URL instead of proxying the bytes.
        $signedUrl = $s3->getAuthenticatedURL($bucket, trim($parts['path'], '/'), 3600);
        header('location: ' . $signedUrl);
        exit;
    }
}
/**
 * upload
 *
 * Handles an editor file upload: validates the extension against the
 * plugin's allow-list, stages the file under /tmp/ak-upload, pushes it to
 * the configured S3 bucket with public-read ACL, and responds with the
 * public URL as JSON (or a JSON error on any failure).
 *
 * @param \JInput $input
 */
public static function upload(\JInput $input)
{
    try {
        $editorPlugin = \JPluginHelper::getPlugin('editors', 'akmarkdown');
        if (!$editorPlugin) {
            throw new \Exception('Editor Akmarkdown not exists');
        }
        $params = new Registry($editorPlugin->params);
        $files = $input->files;
        $field = $input->get('field', 'file');
        $type = $input->get('type', 'post');
        $allows = $params->get('Upload_AllowExtension', '');
        $allows = array_map('strtolower', array_map('trim', explode(',', $allows)));
        $file = $files->getVar($field);
        $src = $file['tmp_name'];
        $name = $file['name'];
        $tmp = new \SplFileInfo(JPATH_ROOT . '/tmp/ak-upload/' . $name);
        if (empty($file['tmp_name'])) {
            throw new \Exception('File not upload');
        }
        // FIX: lowercase the extension before comparing — the allow-list is
        // lowercased above, so e.g. "JPG" was previously rejected.
        $ext = strtolower(pathinfo($name, PATHINFO_EXTENSION));
        if (!in_array($ext, $allows, true)) {
            // FIX: message typo ("now allowed" -> "not allowed").
            throw new \Exception('File extension not allowed.');
        }
        // Move the file to the tmp staging area.
        if (!is_dir($tmp->getPath())) {
            \JFolder::create($tmp->getPath());
        }
        if (is_file($tmp->getPathname())) {
            \JFile::delete($tmp->getPathname());
        }
        \JFile::upload($src, $tmp->getPathname());
        $src = $tmp;
        $dest = static::getDest($name, $params->get('Upload_S3_Subfolder', 'ak-upload'));
        $s3 = new \S3($params->get('Upload_S3_Key'), $params->get('Upload_S3_SecretKey'));
        $bucket = $params->get('Upload_S3_Bucket');
        $result = $s3::putObject(\S3::inputFile($src->getPathname(), false), $bucket, $dest, \S3::ACL_PUBLIC_READ);
        // Always clean up the staged copy, success or not.
        if (is_file($tmp->getPathname())) {
            \JFile::delete($tmp->getPathname());
        }
        if (!$result) {
            throw new \Exception('Upload fail.');
        }
    } catch (\Exception $e) {
        $response = new Response();
        $response->setBody(json_encode(['error' => $e->getMessage()]));
        $response->setMimeType('text/json');
        $response->respond();
        exit;
    }
    $return = new \JRegistry();
    $return['filename'] = 'https://' . $bucket . '.s3.amazonaws.com/' . $dest;
    $return['file'] = 'https://' . $bucket . '.s3.amazonaws.com/' . $dest;
    $response = new Response();
    $response->setBody((string) $return);
    $response->setMimeType('text/json');
    $response->respond();
}
/**
 * Shimmie event hook: when an image is deleted locally, removes its full
 * image and thumbnail copies from the configured S3 bucket.
 *
 * @param ImageDeletionEvent $event carries the image being deleted
 */
public function onImageDeletion(ImageDeletionEvent $event)
{
    global $config;
    $access = $config->get_string("amazon_s3_access");
    $secret = $config->get_string("amazon_s3_secret");
    $bucket = $config->get_string("amazon_s3_bucket");
    if (!empty($bucket)) {
        log_debug("amazon_s3", "Deleting Image #" . $event->image->id . " from S3");
        $s3 = new S3($access, $secret);
        // FIX: PHP string concatenation is '.', not '+'. The original '+'
        // performed numeric addition on the strings, producing bogus object
        // keys (and a TypeError on PHP 8), so the objects were never deleted.
        $s3->deleteObject($bucket, "images/" . $event->image->hash);
        $s3->deleteObject($bucket, "thumbs/" . $event->image->hash);
    }
}
/**
 * Fetches the complete object listing of the configured S3 video bucket.
 *
 * @param array|null $pluginSettings plugin settings; requires the amazon_*
 *        keys (access key, secret key, url, video bucket, optional prefix)
 * @return array|false bucket contents keyed by object name, or FALSE when
 *         settings are missing or the listing is empty/unavailable
 */
function s3_video_get_all_existing_video($pluginSettings = NULL)
{
    if (!$pluginSettings) {
        return FALSE;
    }
    $s3Access = new S3($pluginSettings['amazon_access_key'], $pluginSettings['amazon_secret_access_key'], NULL, $pluginSettings['amazon_url']);
    // Honour an optional key prefix to narrow the listing.
    if (!empty($pluginSettings['amazon_prefix'])) {
        $bucketContents = $s3Access->getBucket($pluginSettings['amazon_video_bucket'], $pluginSettings['amazon_prefix']);
    } else {
        $bucketContents = $s3Access->getBucket($pluginSettings['amazon_video_bucket']);
    }
    if (is_array($bucketContents) && !empty($bucketContents)) {
        return $bucketContents;
    }
    // FIX: the original fell off the end here, implicitly returning NULL
    // while the guard above returns FALSE; return FALSE consistently.
    return FALSE;
}
/**
 * Builds a short-lived (60 s) authenticated download URL for the KRS PDF
 * matching the requested id, rewrites it to the tiktalik storage mirror
 * host, and hands it to the view (serialized as 'url'; false when the URL
 * could not be built).
 */
public function odpisy()
{
    $id = (int) @$this->request->params['id'];
    App::uses('S3', 'Vendor');
    $S3 = new S3(S3_LOGIN, S3_SECRET, null, S3_ENDPOINT);
    $bucket = 'resources';
    $file = 'KRS/' . $id . '.pdf';
    // 60-second signed URL for the object.
    // FIX: the dead `$url = false` pre-assignment and the never-read
    // `$success` flag from the original were removed.
    $url = $S3->getAuthenticatedURL($bucket, $file, 60);
    if ($url) {
        // Swap the AWS host for the tiktalik storage mirror.
        $url = str_replace('s3.amazonaws.com/' . $bucket, $bucket . '.sds.tiktalik.com', $url);
    }
    $this->set('url', $url);
    $this->set('_serialize', array('url'));
}
/**
 * Fetches an object from the S3 bucket (ap-southeast-1 endpoint).
 *
 * FIX: the original fetched the object and then discarded it (the cache-
 * writing code was commented out and $path_tmp was never used); the dead
 * code is removed and the response is returned so callers can use it —
 * backward compatible, since the original returned nothing.
 *
 * @param string $uri object key to retrieve
 * @return mixed the S3 response object on success, false otherwise
 */
function getFromCloud($uri)
{
    App::import('Vendor', 'S3', array('file' => 'S3.php'));
    $s3 = new S3($this->awsAccessKey, $this->awsSecretKey, true, "s3-ap-southeast-1.amazonaws.com");
    if (($info = $s3->getObject($this->bucketName, $uri)) !== false) {
        return $info;
    }
    return false;
}