Put an object from a file (legacy function)
public static putObjectFile ( string $file, string $bucket, string $uri, constant $acl = self::ACL_PRIVATE, array $metaHeaders = [], string $contentType = null ) : boolean
$file | string | Input file path |
$bucket | string | Bucket name |
$uri | string | Object URI |
$acl | constant | ACL constant |
$metaHeaders | array | Array of x-amz-meta-* headers |
$contentType | string | Content type |
Returns | boolean |
/**
 * Uploads a local file to Amazon S3 (CodeIgniter helper).
 *
 * @param string       $file_name File name (no path component)
 * @param string       $path      Local directory; falls back to the configured csv_upload_path
 * @param string|false $folder    Optional S3 key prefix
 * @return bool false when the S3 upload threw an exception; true otherwise
 *              (including when the local file does not exist — preserved
 *              legacy behaviour)
 */
protected function _upload_to_amazon($file_name, $path = '', $folder = false)
{
    $this->load->library('S3');
    $bucket_name = $this->config->item('s3_bucket_name');
    if ($path == '') {
        $path = $this->config->item('csv_upload_path');
    }
    $folder_name = '';
    if ($folder) {
        $folder_name = $folder;
    }
    $s3 = new S3($this->config->item('s3_access_key'), $this->config->item('s3_secret_key'));
    echo "in _upload_to_amazon(): \n" . " URL: http://{$bucket_name}/{$folder_name}{$file_name}\n";
    if (file_exists($path . $file_name)) {
        echo "final file size: " . filesize($path . $file_name) . "\n";
        try {
            $s3->putObjectFile($path . $file_name, $bucket_name, $folder_name . $file_name, S3::ACL_PUBLIC_READ);
            unset($s3);
        } catch (Exception $e) {
            echo '_upload_to_amazon exception: ', $e->getMessage(), "\n{$file_name}\n";
            // BUG FIX: CodeIgniter's log_message() takes (level, message) only;
            // the extra arguments were silently dropped, so the exception text
            // never reached the log. Build a single message string instead.
            log_message('error', '_upload_to_amazon exception: ' . $e->getMessage() . "\n{$file_name}");
            unset($s3);
            return false;
        }
    }
    //exit;
    return true;
}
/**
 * Pushes a local file to the configured S3 bucket with public-read ACL.
 *
 * @param string      $file Absolute local file path
 * @param string|null $name Object key; when empty it is derived from $file by
 *                          stripping PHPFOX_DIR and normalising backslashes
 * @return mixed Result of S3::putObjectFile()
 */
public function put($file, $name = null)
{
    if (empty($name)) {
        // Derive a bucket-relative key from the local path.
        $relative = str_replace(PHPFOX_DIR, '', $file);
        $name = str_replace("\\", '/', $relative);
    }

    return $this->_obj->putObjectFile($file, $this->_bucket, $name, \S3::ACL_PUBLIC_READ);
}
/**
 * Handles a resume-upload form POST and pushes the file to S3.
 *
 * Flow: validates the extension (doc/docx/rtf/pdf/txt/odf) and size (<2MB),
 * ensures the 'sublite-resumes' bucket exists, uploads the temp file under a
 * timestamp-based name, and returns the public S3 URL on success or an error
 * message string on failure.
 *
 * SECURITY NOTE(review): an AWS access key and secret are hard-coded below
 * and committed to source control — rotate them immediately and load them
 * from configuration instead.
 *
 * NOTE(review): when $_POST['submitbtn'] is unset or 'userFile' is missing
 * from $_FILES, execution falls off the end and the function returns null
 * (not an error string) — confirm callers handle that.
 *
 * @return string|null Public S3 URL on success, error message on failure,
 *                     null when the form was not submitted.
 */
function upload() { $error = "test"; if (isset($_POST['submitbtn'])) { if (array_key_exists('userFile', $_FILES)) { if ($_FILES['userFile']['error'] === UPLOAD_ERR_OK) { $filename = $_FILES["userFile"]["name"]; $ext = pathinfo($filename, PATHINFO_EXTENSION); $allowed = array("doc", "docx", "rtf", "pdf", "txt", "odf"); $fname = $_FILES["userFile"]["tmp_name"]; // Make sure extension matches if (in_array($ext, $allowed)) { if ($_FILES['userFile']['size'] < 2097152) { $bucket = 'sublite-resumes'; //Can use existing configs when merging with sublite $s3 = new S3("AKIAI7IVRJCSAWFTTS7Q", "B0qzRQJ1KlLy+STC2BspwT9oZONjt+U6sRNqaRr5"); $s3->putBucket($bucket, S3::ACL_PUBLIC_READ); $actual_image_name = time() . '.' . $ext; if ($s3->putObjectFile($fname, $bucket, $actual_image_name, S3::ACL_PUBLIC_READ)) { $image = 'http://' . $bucket . '.s3.amazonaws.com/' . $actual_image_name; return $image; } else { return "An unknown error occurred during upload!"; } /* // File validated; Upload the file! !!!Need to upload to S3!!! $uploaddir = 'resumes/'; $uploadfile = basename($_FILES['userFile']['name']); if (move_uploaded_file($_FILES['userFile']['tmp_name'], $uploaddir.$uploadfile)) { return "File is valid, and was successfully uploaded.\n"; } else { return "An unknown error occurred during upload!"; } */ } else { $error = "Max file size exceeded!"; } } else { $error = "Bad file extension!"; } } else { if ($_FILES['userFile']['error'] === UPLOAD_ERR_FORM_SIZE) { $error = "Max file size exceeded!"; } else { if ($_FILES['userFile']['error'] === UPLOAD_ERR_NO_FILE) { $error = "You must choose a file!"; } else { $error = "An unknown error occurred during upload!"; } } } return $error; } } }
/**
 * Moves a file from the local filesystem to the S3 bucket configured in
 * tracmor_configuration.inc.php.
 *
 * @param string $strPath     Local directory containing the file
 * @param string $strFileName File name
 * @param string $strType     MIME type of the file (currently unused by this method)
 * @param string $strS3Path   Path to S3 folder including leading slash — '/attachments' for example
 * @return bool true when the file existed and the upload succeeded
 */
public static function MoveToS3($strPath, $strFileName, $strType, $strS3Path)
{
    $strPath = rtrim($strPath, '/');
    $strS3Path = rtrim($strS3Path, '/');
    if (file_exists($strPath . '/' . $strFileName)) {
        require_once __DOCROOT__ . __PHP_ASSETS__ . '/S3.php';
        $objS3 = new S3(AWS_ACCESS_KEY, AWS_SECRET_KEY);
        // Put the file in S3
        $blnUploaded = $objS3->putObjectFile($strPath . '/' . $strFileName, AWS_BUCKET, ltrim(AWS_PATH, '/') . $strS3Path . '/' . $strFileName, S3::ACL_PUBLIC_READ);
        unset($objS3);
        // BUG FIX: previously the local file was deleted and true returned even
        // when the upload failed, silently losing the file. Only remove the
        // temporary copy once S3 has accepted the object.
        if (!$blnUploaded) {
            return false;
        }
        // Delete the temporary file from web server
        unlink($strPath . '/' . $strFileName);
        return true;
    } else {
        return false;
    }
}
/**
 * Uploads files to S3 (the legacy docblock said "FTP"; the body calls the
 * S3 client).
 *
 * For each local_path => remote_path pair: skips missing source files;
 * unless $force_rewrite, compares the local md5 with the stored S3 object
 * hash and skips identical objects (reported as W3_CDN_RESULT_ERROR
 * 'Object already exists' — preserved legacy behaviour); otherwise puts the
 * object with public-read ACL. S3 client errors are '@'-suppressed.
 *
 * @param array   $files         Map of local path => remote path
 * @param array   $results       Output (by reference): per-file result records
 * @param boolean $force_rewrite Upload even when hashes match
 * @return int|false Count of uploaded files, or false when _init() fails.
 *                   NOTE(review): legacy docblock said boolean, but the
 *                   success path returns an integer count.
 */
function upload($files, &$results, $force_rewrite = false) { $count = 0; $error = null; if (!$this->_init($error)) { $results = $this->get_results($files, W3_CDN_RESULT_HALT, $error); return false; } foreach ($files as $local_path => $remote_path) { if (!file_exists($local_path)) { $results[] = $this->get_result($local_path, $remote_path, W3_CDN_RESULT_ERROR, 'Source file not found'); continue; } if (!$force_rewrite) { $info = @$this->_s3->getObjectInfo($this->_config['bucket'], $remote_path); if ($info) { $hash = @md5_file($local_path); $s3_hash = isset($info['hash']) ? $info['hash'] : ''; if ($hash === $s3_hash) { $results[] = $this->get_result($local_path, $remote_path, W3_CDN_RESULT_ERROR, 'Object already exists'); continue; } } } $result = @$this->_s3->putObjectFile($local_path, $this->_config['bucket'], $remote_path, S3::ACL_PUBLIC_READ); $results[] = $this->get_result($local_path, $remote_path, $result ? W3_CDN_RESULT_OK : W3_CDN_RESULT_ERROR, $result ? 'OK' : 'Unable to put object'); if ($result) { $count++; } } return $count; }
/**
 * Event handler: mirrors a newly-added image and its thumbnail to the
 * configured Amazon S3 bucket with public-read ACL. No-op when no bucket
 * is configured.
 *
 * @param ImageAdditionEvent $event
 */
public function onImageAddition(ImageAdditionEvent $event)
{
    global $config;

    $access = $config->get_string("amazon_s3_access");
    $secret = $config->get_string("amazon_s3_secret");
    $bucket = $config->get_string("amazon_s3_bucket");
    if (empty($bucket)) {
        return;
    }

    $image = $event->image;
    log_debug("amazon_s3", "Mirroring Image #" . $image->id . " to S3 #{$bucket}");

    $client = new S3($access, $secret);
    $client->putBucket($bucket, S3::ACL_PUBLIC_READ);

    // Thumbnail copy — always served as JPEG.
    $client->putObjectFile(
        warehouse_path("thumbs", $image->hash),
        $bucket,
        'thumbs/' . $image->hash,
        S3::ACL_PUBLIC_READ,
        array(),
        array(
            "Content-Type" => "image/jpeg",
            "Content-Disposition" => "inline; filename=image-" . $image->id . ".jpg",
        )
    );

    // Full-size copy — preserves the original mime type and extension.
    $client->putObjectFile(
        warehouse_path("images", $image->hash),
        $bucket,
        'images/' . $image->hash,
        S3::ACL_PUBLIC_READ,
        array(),
        array(
            "Content-Type" => $image->get_mime_type(),
            "Content-Disposition" => "inline; filename=image-" . $image->id . "." . $image->ext,
        )
    );
}
/**
 * Uploads a local file to the configured S3 bucket and records its S3
 * metadata (hash/size/type/bucket/path/add_date) on this model.
 *
 * @param string $file Local file path
 * @param string $path Destination key within the bucket
 * @param string $acl  ACL to apply (default S3::ACL_PUBLIC_READ)
 * @param bool   $save Persist the model after setting metadata
 * @return bool true when the upload succeeded, false otherwise
 */
public function uploadFile($file, $path, $acl = S3::ACL_PUBLIC_READ, $save = true)
{
    //is it a real file?
    if (file_exists($file)) {
        //do the actual upload.
        $s3 = new S3(AMAZON_AWS_KEY, AMAZON_AWS_SECRET);
        // BUG FIX: the $acl parameter was accepted but ignored — the ACL was
        // hard-coded to S3::ACL_PUBLIC_READ. Honour the caller's choice; the
        // default is unchanged so existing callers behave identically.
        $result = $s3->putObjectFile($file, AMAZON_S3_BUCKET_NAME, $path, $acl);
        //echo "Uploading {$file} to " . AMAZON_S3_BUCKET_NAME . ":{$path}\n";
        //get our info for saving
        $info = $s3->getObjectInfo(AMAZON_S3_BUCKET_NAME, $path, true);
        //save our info.
        $this->set('hash', $info['hash']);
        $this->set('size', $info['size']);
        $this->set('type', $info['type']);
        $this->set('bucket', AMAZON_S3_BUCKET_NAME);
        $this->set('path', $path);
        $this->set('add_date', date("Y-m-d H:i:s"));
        //for non db accessible scripts.
        if ($save) {
            $this->save();
        }
        //yay!
        if ($result !== false) {
            return true;
        }
    } else {
        echo "No file at {$path} or {$file}\n";
    }
    //fail!
    return false;
}
/**
 * Uploads a local file to an S3 bucket under $s3Path/$filename.
 *
 * @param string $localFilepath Local source file
 * @param string $s3Path        Destination key prefix (trailing slash added if missing)
 * @param string $filename      Destination file name — expected to be unique per call
 * @param string $bucket        Bucket name
 * @return int 1 on success, 0 on failure
 */
function upload($localFilepath = 'path/to/file.jpg', $s3Path = 'tmp/', $filename = 'filename.jpg', $bucket = 'idc_files')
{
    // Import Vendor file
    App::import('Vendor', 'S3', array('file' => 'S3' . DS . 's3.php'));
    // Instantiate the S3 class (keys defined in bootstrap.php)
    $s3 = new S3(AWS_ACCESS_KEY, AWS_SECRET_KEY);
    // Ensure the prefix ends with a slash
    if (substr($s3Path, -1, 1) != '/') {
        $s3Path .= '/';
    }
    // Intended key within the bucket — buckets are like the C: drive
    $intendedPath = $s3Path . $filename;
    // Put our file (also with public read access)
    if ($s3->putObjectFile($localFilepath, $bucket, $intendedPath, S3::ACL_PUBLIC_READ)) {
        //echo "S3::putObjectFile(): File copied to {$bucket}/".baseName($uploadFile).PHP_EOL;
        return 1;
    }
    // FIX: removed the unreachable `exit;` that followed both return paths.
    return 0;
}
/**
 * Uploads single file to S3.
 *
 * Skips missing source files; unless $force_rewrite, compares the local md5
 * with the stored S3 object hash and short-circuits with 'Object up-to-date.'
 * when they match; otherwise puts the object with public-read ACL and the
 * per-file headers from _get_headers(). S3 calls are wrapped in the class's
 * error-handler push/pop and '@'-suppressed, with the last error surfaced
 * via _get_last_error() in the failure message.
 *
 * @param array   $file          CDN file array with 'local_path' and 'remote_path'
 * @param boolean $force_rewrite Upload even when hashes match
 * @return array Result record from _get_result()
 */
function _upload($file, $force_rewrite = false) { $local_path = $file['local_path']; $remote_path = $file['remote_path']; if (!file_exists($local_path)) { return $this->_get_result($local_path, $remote_path, W3TC_CDN_RESULT_ERROR, 'Source file not found.'); } if (!$force_rewrite) { $this->_set_error_handler(); $info = @$this->_s3->getObjectInfo($this->_config['bucket'], $remote_path); $this->_restore_error_handler(); if ($info) { $hash = @md5_file($local_path); $s3_hash = isset($info['hash']) ? $info['hash'] : ''; if ($hash === $s3_hash) { return $this->_get_result($local_path, $remote_path, W3TC_CDN_RESULT_OK, 'Object up-to-date.'); } } } $headers = $this->_get_headers($file); $this->_set_error_handler(); $result = @$this->_s3->putObjectFile($local_path, $this->_config['bucket'], $remote_path, S3::ACL_PUBLIC_READ, array(), $headers); $this->_restore_error_handler(); if ($result) { return $this->_get_result($local_path, $remote_path, W3TC_CDN_RESULT_OK, 'OK'); } return $this->_get_result($local_path, $remote_path, W3TC_CDN_RESULT_ERROR, sprintf('Unable to put object (%s).', $this->_get_last_error())); }
/**
 * Gravity Forms hook: pushes the uploaded form file to S3.
 *
 * Resolves the uploaded file's local path from its URL and DOCUMENT_ROOT,
 * ensures the bucket exists, and uploads under UPLOAD_PATH with a sanitised
 * file name. Returns true on success; dies via wp_die() on failure. Does
 * nothing when the upload field is empty.
 *
 * NOTE(review): the bucket is created with ACL_AUTHENTICATED_READ but the
 * object itself is uploaded with ACL_PUBLIC_READ — confirm which visibility
 * is intended.
 * NOTE(review): `is_dir($file_name)` tests a bare client file name against
 * the server filesystem; it looks like `basename()` was meant to run
 * unconditionally — verify.
 *
 * @param array $entry Gravity Forms entry (indexed by field id)
 * @param array $form  Gravity Forms form definition (unused here)
 * @return true|void
 */
function grav_submit_to_s3($entry, $form) { // no file? no problem. if (empty($entry[GFORM_UPLOAD_FIELD_ID])) { return; } $gfs3 = new S3(awsAccessKey, awsSecretKey); // url of uploaded file $file_url = $entry[GFORM_UPLOAD_FIELD_ID]; // filename of uploaded file $file_name = $_FILES['input_' . GFORM_UPLOAD_FIELD_ID]['name']; // ensure bucket is there $gfs3->putBucket(BUCKET_NAME, S3::ACL_AUTHENTICATED_READ); // clean up filename, split into parts $url_parts = parse_url($file_url); $full_path = $_SERVER['DOCUMENT_ROOT'] . substr($url_parts['path'], 1); if (is_dir($file_name)) { $file_name = basename($file_name); } // this is the full path to the file on S3 $filename_to_s3 = UPLOAD_PATH . sanitize_file_name($file_name); if ($gfs3->putObjectFile($full_path, BUCKET_NAME, $filename_to_s3, S3::ACL_PUBLIC_READ)) { return true; // upload success } else { wp_die('It looks like something went wrong while uploading your file. Please try again in a few moments.'); } }
/**
 * Copy
 *
 * Uploads a local file to the configured Amazon S3 images bucket with
 * public-read access. Any S3 exception is logged and swallowed, yielding a
 * false result.
 *
 * @param string $from        Full path
 * @param string $to          Short path
 * @param array  $httpHeaders HTTP headers OPTIONAL
 *
 * @return boolean
 */
public function copy($from, $to, array $httpHeaders = array())
{
    if (!\Includes\Utils\FileManager::isExists($from)) {
        return false;
    }

    try {
        $bucket = \XLite\Core\Config::getInstance()->CDev->AmazonS3Images->bucket;

        return $this->client->putObjectFile($from, $bucket, $to, \S3::ACL_PUBLIC_READ, array(), $httpHeaders);
    } catch (\S3Exception $e) {
        \XLite\Logger::getInstance()->registerException($e);

        return false;
    }
}
/**
 * Transfer an object to the S3 storage bucket.
 *
 * Returns the public S3 URL on success, or the original path when S3 is
 * not configured or the upload fails.
 *
 * @access public
 * @param string $path
 * @return string
 */
public function transfer($path) {
    if ($this->s3 === null) {
        return $path;
    }

    $bucket = $this->s3->bucket;
    $key = $this->s3->path . basename($path);

    $uploaded = $this->s3->putObjectFile($this->uploader->formatPath($path), $bucket, $key, S3::ACL_PUBLIC_READ);
    if (!$uploaded) {
        return $path;
    }

    // Record the successful transfer for later bookkeeping.
    $this->s3->uploads[] = $path;

    return sprintf('http://%s.%s/%s', $bucket, self::AS3_DOMAIN, $key);
}
/**
 * Refreshes a user's profile image: downloads the Facebook profile picture
 * to a temp file, mirrors it to S3 as profile.images/{userId}/pi_{userId}.png
 * (public read), falls back to USER_DEFAULT_IMAGE_URL when the download
 * failed, and persists the resulting URL on the user record.
 *
 * The temp file is deleted after a successful upload (best effort; unlink
 * failures are only error_log'd). On any failure ->success stays false and
 * ->result carries a diagnostic (stack trace, raw S3 response, or a short
 * message for missing user/id).
 *
 * @param mixed $userId
 * @return FunctionResult ->success boolean plus ->result diagnostic payload
 */
public static function updateUserImage($userId) { $result = new FunctionResult(); $result->success = false; if (!empty($userId)) { $user = GameUsers::getGameUserById($userId); if (!empty($user)) { $tmpImgPath = UserProfileImageUtils::downloadFBProfileImage($user->facebookId); $profileImageUrl = null; if (!empty($tmpImgPath)) { try { $s3 = new S3(AWS_API_KEY, AWS_SECRET_KEY); $s3->setEndpoint(AWS_S3_ENDPOINT); $imageName = "pi_" . $userId . ".png"; $s3Name = "profile.images/" . $userId . "/" . $imageName; $res = $s3->putObjectFile($tmpImgPath, AWS_S3_BUCKET, $s3Name, S3::ACL_PUBLIC_READ); if ($res) { $profileImageUrl = 'http://' . AWS_S3_BUCKET . '.s3.amazonaws.com/' . $s3Name; try { unlink($tmpImgPath); } catch (Exception $exc) { error_log($exc->getTraceAsString()); } } else { $result->result = json_encode($res); } } catch (Exception $exc) { $result->result = $exc->getTraceAsString(); } } else { $profileImageUrl = USER_DEFAULT_IMAGE_URL; } if (!empty($profileImageUrl)) { $user->setProfilePicture($profileImageUrl); try { $user->updateToDatabase(DBUtils::getConnection()); $result->success = true; $result->result = null; } catch (Exception $exc) { $result->result = $exc->getTraceAsString(); } } } else { $result->result = "user not found"; } } else { $result->result = "user id empty"; } return $result; }
/**
 * Uploads $this->_upload_file (keyed by its base name) to the configured
 * S3 bucket with public-read ACL.
 *
 * @return bool true on success
 * @throws Teamlab_Batch_Exception           When the S3 client could not be created
 * @throws Teamlab_Batch_Transport_Exception When the upload fails
 */
public function upload()
{
    if (!empty($this->_upload_dir)) {
        // BUG FIX: the original `.=` appended "$bucket . '/' . $dir" onto the
        // existing bucket name, producing "bucketbucket/dir". Append only the
        // directory suffix.
        $this->_bucket_name .= '/' . $this->_upload_dir;
    }
    // Create the Amazon S3 client instance.
    $s3 = new S3($this->_access_key, $this->_secret_key);
    if (!$s3) {
        throw new Teamlab_Batch_Exception(sprintf("AWS接続に失敗しました。: %s", $this->_bucket_name));
    }
    // Upload the file.
    if ($s3->putObjectFile($this->_upload_file, $this->_bucket_name, baseName($this->_upload_file), S3::ACL_PUBLIC_READ)) {
        return true;
    }

    throw new Teamlab_Batch_Transport_Exception(sprintf("ファイルのアップロードに失敗しました。サーバ情報:%s ファイル名:%s", $this->_bucket_name, baseName($this->_upload_file)));
}
/**
 * Uploads a batch of photo files for record $id to S3 under
 * {$directory}/photo/{$id}/{$key}_{file_name} with public-read ACL
 * (CodeIgniter).
 *
 * NOTE(review): $config['upload_path'] and $directory are never defined in
 * this method — as written both are undefined at runtime (they look like
 * they should come from $this->config->item(...)). Verify before relying on
 * this code path.
 * NOTE(review): $s3 first holds the 's3' config item (used only as an
 * enabled-flag check) and is then overwritten by the S3 library instance.
 * NOTE(review): both branches of the per-file result check are empty
 * (commented-out echoes), so upload failures are silently ignored.
 *
 * @param mixed $id    Record id used in the destination key
 * @param array $files Upload-library result arrays keyed by field name
 * @throws Exception when the 's3' config item is missing
 */
protected function _upToS3($id, array $files) { $s3 = $this->config->item('s3'); if (!$s3) { throw new Exception("Error Processing Request", 1); } $this->load->library('S3'); $s3 = new S3(); $bucket = $this->config->item('bucket'); foreach ($files as $key => $value) { if ($s3->putObjectFile($config['upload_path'] . DIRECTORY_SEPARATOR . $key . '_' . $value['file_name'], $bucket, $directory . DIRECTORY_SEPARATOR . 'photo' . DIRECTORY_SEPARATOR . $id . DIRECTORY_SEPARATOR . $key . '_' . $value['file_name'], S3::ACL_PUBLIC_READ)) { //echo "We successfully uploaded your file."; } else { //echo "Something went wrong while uploading your file... sorry."; } } }
/**
 * Transfer an object to the S3 storage bucket.
 *
 * Builds the object key from the optional configured path prefix plus the
 * file's base name, then returns the URL produced from the configured
 * format template on success, or the original path otherwise.
 *
 * @access public
 * @param string $path
 * @return string
 */
public function transfer($path) {
    if ($this->s3 === null) {
        return $path;
    }

    $bucket = $this->s3->bucket;
    $host = empty($this->s3->host) ? self::AS3_DOMAIN : $this->s3->host;

    // Prefix the key with the configured folder, when one is set.
    $key = basename($path);
    if (!empty($this->s3->path)) {
        $key = $this->s3->path . '/' . $key;
    }

    if (!$this->s3->putObjectFile($this->uploader->formatPath($path), $bucket, $key, S3::ACL_PUBLIC_READ)) {
        return $path;
    }

    $this->s3->uploads[] = $path;

    return String::insert($this->s3->format, array('bucket' => $bucket, 'path' => $key, 'host' => $host), array('before' => '{', 'after' => '}'));
}
/**
 * Uploads a media file to the site's S3 bucket under "{$type}/{$name}" with
 * public-read access and a ten-day cache lifetime.
 *
 * @param string       $file Local file path
 * @param string       $type Key prefix; 'notype' when empty
 * @param string|false $name Object name; defaults to basename($file)
 * @return bool true on success; logs to syslog and returns false otherwise
 */
public static function put($file, $type, $name = false)
{
    global $globals;

    if (!$name) {
        $name = baseName($file);
    }
    if (empty($type)) {
        $type = 'notype';
    }

    $uri = "{$type}/{$name}";
    S3::setAuth($globals['Amazon_access_key'], $globals['Amazon_secret_key']);

    // 864000 seconds = 10 days of client/proxy caching.
    $requestHeaders = array(
        "Cache-Control" => "max-age=864000",
        "Expires" => gmdate("D, d M Y H:i:s T", time() + 864000),
    );

    if (S3::putObjectFile($file, $globals['Amazon_S3_media_bucket'], $uri, S3::ACL_PUBLIC_READ, array(), $requestHeaders)) {
        //syslog(LOG_NOTICE, "Meneame, uploaded $uri to S3");
        return true;
    }

    syslog(LOG_NOTICE, "Meneame, failed to upload {$uri} to S3");
    return false;
}
/**
 * Yii action: validates an uploaded image (~1MB limit; jpg/png/gif decided
 * by finfo-sniffed MIME, not the client-supplied value) and stores it in
 * the "mbtx" bucket under userid_{id}/{original name}. Responds with JSON
 * (send_err() on failure, {"MSG":"OK"} on success).
 *
 * SECURITY NOTE(review): AWS credentials are hard-coded below and committed
 * to source control — rotate them and load from configuration instead.
 *
 * NOTE(review): putObjectFile() is called without an ACL argument, so the
 * S3 class default (private) applies — confirm that is intended.
 */
public function actionUpload() { $user = Yii::app()->user->data(); $s3 = new S3('AKIAIRCVNJWPLDLX2PJA', 'Bo3cbCSVmsTyxrlsg1R0nVrkLuHXpPv9aYUhjBff'); header('Content-Type: application/json'); try { if (!isset($_FILES['files']['error']) || is_array($_FILES['files']['error'])) { $this->send_err('Error undefined', 500); } // Check $_FILES['files']['error'] value. switch ($_FILES['files']['error']) { case UPLOAD_ERR_OK: break; case UPLOAD_ERR_NO_FILE: $this->send_err('No file sent.', 500); case UPLOAD_ERR_INI_SIZE: case UPLOAD_ERR_FORM_SIZE: $this->send_err('Exceeded filesize limit.', 500); default: $this->send_err('Unknown errors.', 500); } // You should also check filesize here. if ($_FILES['files']['size'] > 1000000) { $this->send_err('Exceeded filesize limit.', 501); } // DO NOT TRUST $_FILES['files']['mime'] VALUE !! // Check MIME Type by yourself. $finfo = new finfo(FILEINFO_MIME_TYPE); if (false === ($ext = array_search($finfo->file($_FILES['files']['tmp_name']), array('jpg' => 'image/jpeg', 'png' => 'image/png', 'gif' => 'image/gif'), true))) { $this->send_err('Invalid file format.', 502); } $uploadFile = $_FILES['files']['tmp_name']; $fname = sprintf('userid_%s/%s', $user->id, $_FILES['files']['name']); if (!$s3->putObjectFile($uploadFile, "mbtx", $fname)) { $this->send_err('Failed to move uploaded file.', 503); } else { echo '{"MSG": "OK", "Code": "200"}'; Yii::app()->end(); } } catch (RuntimeException $e) { $this->send_err('Error' . $e->getMessage(), 500); } }
/**
 * wp-db-backup hook: after a database backup completes, optionally pushes
 * the backup file ($args[1]) to the configured Amazon S3 bucket, appending
 * a human-readable status note to $args[2].
 *
 * Requires the destination-s3 option to be enabled and bucket/key/secret
 * options to be set; defines the awsAccessKey/awsSecretKey constants on
 * first use, checks the CURL extension, and verifies the bucket exists via
 * listBuckets() before uploading. All exceptions are swallowed after an
 * error_log entry.
 *
 * SECURITY NOTE(review): the backup is uploaded with ACL_PUBLIC_READ, which
 * makes the database dump world-readable — confirm this is intended.
 *
 * @param array $args By reference: [0] backup label, [1] local file path,
 *                    [2] status message (appended to)
 */
public static function wp_db_backup_completed(&$args) { $destination_s3 = get_option('wp_db_backup_destination_s3'); if (isset($destination_s3) && $destination_s3 == 1 && get_option('wpdb_dest_amazon_s3_bucket') && get_option('wpdb_dest_amazon_s3_bucket_key') && get_option('wpdb_dest_amazon_s3_bucket_secret')) { try { if (!class_exists('S3')) { require_once 'S3.php'; } // AWS access info if (!defined('awsAccessKey')) { define('awsAccessKey', get_option('wpdb_dest_amazon_s3_bucket_key')); } if (!defined('awsSecretKey')) { define('awsSecretKey', get_option('wpdb_dest_amazon_s3_bucket_secret')); } // Check for CURL if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) { error_log("ERROR: CURL extension not loaded"); } $s3 = new S3(awsAccessKey, awsSecretKey); $bucketName = get_option('wpdb_dest_amazon_s3_bucket'); $result = $s3->listBuckets(); if (get_option('wpdb_dest_amazon_s3_bucket')) { if (in_array(get_option('wpdb_dest_amazon_s3_bucket'), $result)) { if ($s3->putObjectFile($args[1], $bucketName, baseName($args[1]), S3::ACL_PUBLIC_READ)) { error_log("S3::{$args['0']} upload in bucket {$bucketName}"); $args[2] = $args[2] . '<br> Upload Database Backup on s3 bucket ' . $bucketName; } else { error_log("S3::Failed to upload {$args['0']}"); $args[2] = $args[2] . '<br>Failed to upload Database Backup on s3 bucket ' . $bucketName; } } else { error_log("Invalid bucket name or AWS details"); $args[2] = $args[2] . '<br>Invalid bucket name or AWS details'; } } } catch (Exception $e) { // echo ($e->getMessage()); error_log("Invalid AWS details"); } } }
/**
 * Saves the current image locally, then mirrors it to the 'screenbin' S3
 * bucket with public-read access, creating the bucket if it is missing.
 *
 * NOTE(review): the credential arguments below are placeholder literal
 * strings, and the local copy is not deleted despite the legacy comment.
 *
 * @param string $source_filename Local path the image is written to and read from
 * @param string $dest_filename   Destination S3 key
 * @param int    $image_type      One of the IMAGETYPE_* constants
 * @param int    $compression     JPEG quality passed to save()
 * @param mixed  $permissions     Local file permissions passed to save()
 * @return bool true when the S3 upload succeeded
 */
function upload_to_amazon($source_filename, $dest_filename, $image_type = IMAGETYPE_JPEG, $compression = 75, $permissions = null)
{
    // Write the image to the local filesystem first.
    $this->save($source_filename, $image_type, $compression, $permissions);

    $s3 = new S3('AMAZON_ACCESS_TOKEN', 'AMAZON_SECRET_TOKEN');
    $bucket = 'screenbin';

    // Create the bucket on first use.
    $existing = $s3->listBuckets();
    if (!in_array($bucket, $existing)) {
        $s3->putBucket($bucket, S3::ACL_PUBLIC_READ);
    }

    return $s3->putObjectFile($source_filename, $bucket, $dest_filename, S3::ACL_PUBLIC_READ) ? true : false;
}
/**
 * AJAX crop handler: crops a previously-uploaded profile image to a 120px
 * square thumbnail using the x/y/width/height posted by the cropper UI,
 * writes it to DIR_USER_PHOTOS in the original format, optionally mirrors
 * it to S3 under DIR_USER_PHOTOS_THUMB with a private ACL, echoes the
 * thumbnail file name (or 'file not set') and exits.
 *
 * NOTE(review): in the first image-type switch, the `default:` branch
 * assigns $src while all other branches (and the code below) use $myImage —
 * an unrecognised type would leave $myImage undefined. Align the variable
 * names when touching this code.
 * NOTE(review): $valid_exts, $max_file_size, $nw/$nh and the commented-out
 * blocks appear to be leftovers from earlier revisions and are unused.
 */
function done_cropimage() { $this->layout = 'ajax'; if (!empty($this->params['data']['width']) && !empty($this->params['data']['height'])) { $valid_exts = array('jpeg', 'jpg', 'png', 'gif'); $max_file_size = 100 * 1024; #200kb $nw = $nh = 100; # image with & height $imgName = HTTP_ROOT . 'files/profile/' . $this->params['data']['imgName']; $imgthumbSrc = ""; if (isset($imgName)) { # grab data form post request $x = (int) $this->params['data']['x-cord']; $y = (int) $this->params['data']['y-cord']; $w = (int) $this->params['data']['width']; $h = (int) $this->params['data']['height']; if (USE_S3) { $imgSrc = $this->Format->generateTemporaryURL(DIR_USER_PHOTOS_S3_TEMP . $this->params['data']['imgName']); } else { $imgSrc = HTTP_ROOT . 'files/profile/orig/' . $this->params['data']['imgName']; } //getting the image dimensions list($width, $height) = getimagesize($imgSrc); //saving the image into memory (for manipulation with GD Library) $type = exif_imagetype($imgSrc); switch ($type) { case 1: $myImage = imagecreatefromgif($imgSrc); break; case 2: $myImage = imagecreatefromjpeg($imgSrc); break; case 3: $myImage = imagecreatefrompng($imgSrc); break; case 6: $myImage = imagecreatefromwbmp($imgSrc); break; default: $src = imagecreatefromjpeg($imgSrc); break; } // calculating the part of the image to use for thumbnail /*if ($width > $height) { $y = 0; $x = ($width - $height) / 2; $smallestSide = $height; } else { $x = 0; $y = 0; // $y = ($height - $width) / 2; $smallestSide = $width; }*/ // copying the part into thumbnail $thumbSize = 120; $thumb = imagecreatetruecolor($thumbSize, $thumbSize); imagecopyresampled($thumb, $myImage, 0, 0, $x, $y, $thumbSize, $thumbSize, $w, $h); /*# read image binary data $data = file_get_contents($imgName); # create v image form binary data $vImg = imagecreatefromstring($data); $dstImg = imagecreatetruecolor($nw, $nh); # copy image imagecopyresampled($dstImg, $vImg, 0, 0, $x, $y, $nw, $nh, $w, $h); # save image imagejpeg($dstImg, $path);*/ 
//Imagejpeg($thumb, $path); $imgthumbNm = $this->params['data']['imgName']; //time()."_".$this->params['data']['imgName']; $imgthumbSrc = DIR_USER_PHOTOS . $imgthumbNm; switch ($type) { case 1: imagegif($thumb, $imgthumbSrc); break; case 2: imagejpeg($thumb, $imgthumbSrc); break; case 3: imagepng($thumb, $imgthumbSrc); break; case 6: imagewbmp($thumb, $imgthumbSrc); break; default: imagejpeg($thumb, $imgthumbSrc); break; } //echo "<img src='$path' />"; if (USE_S3) { $s3 = new S3(awsAccessKey, awsSecretKey); $s3->putObjectFile(DIR_USER_PHOTOS . $imgthumbNm, BUCKET_NAME, DIR_USER_PHOTOS_THUMB . $imgthumbNm, S3::ACL_PRIVATE); } echo $imgthumbNm; } else { echo 'file not set'; } } exit; }
if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) { exit("\nERROR: CURL extension not loaded\n\n"); } // Pointless without your keys! if (awsAccessKey == 'change-this' || awsSecretKey == 'change-this') { exit("\nERROR: AWS access information required\n\nPlease edit the following lines in this file:\n\n" . "define('awsAccessKey', 'change-me');\ndefine('awsSecretKey', 'change-me');\n\n"); } // Instantiate the class $s3 = new S3(awsAccessKey, awsSecretKey); // List your buckets: echo "S3::listBuckets(): " . print_r($s3->listBuckets(), 1) . "\n"; // Create a bucket with public read access if ($s3->putBucket($bucketName, S3::ACL_PUBLIC_READ)) { echo "Created bucket {$bucketName}" . PHP_EOL; // Put our file (also with public read access) if ($s3->putObjectFile($uploadFile, $bucketName, baseName($uploadFile), S3::ACL_PUBLIC_READ)) { echo "S3::putObjectFile(): File copied to {$bucketName}/" . baseName($uploadFile) . PHP_EOL; // Get the contents of our bucket $contents = $s3->getBucket($bucketName); echo "S3::getBucket(): Files in bucket {$bucketName}: " . print_r($contents, 1); // Get object info $info = $s3->getObjectInfo($bucketName, baseName($uploadFile)); echo "S3::getObjectInfo(): Info for {$bucketName}/" . baseName($uploadFile) . ': ' . print_r($info, 1); // You can also fetch the object into memory // var_dump("S3::getObject() to memory", $s3->getObject($bucketName, baseName($uploadFile))); // Or save it into a file (write stream) // var_dump("S3::getObject() to savefile.txt", $s3->getObject($bucketName, baseName($uploadFile), 'savefile.txt')); // Or write it to a resource (write stream) // var_dump("S3::getObject() to resource", $s3->getObject($bucketName, baseName($uploadFile), fopen('savefile.txt', 'wb'))); // Get the access control policy for a bucket: // $acp = $s3->getAccessControlPolicy($bucketName);
/**
 * Magento: for each unicom_invoice row not yet marked pdf_complete, logs in
 * to the fulfilment site with cURL (session cookie jar), fetches the invoice
 * output, renders it to a PDF via Zend_Pdf, uploads it to the
 * 'zoffio.invoices' bucket under Y/m/d/<name>.pdf (public read), deletes the
 * local copy, and flags the row complete when state_complete is 'Yes'.
 *
 * NOTE(review): the credentials and part of the cURL setup below have been
 * masked ('******') during sanitisation, which leaves this method
 * syntactically broken as committed — restore the real statements from
 * configuration, not from source history.
 * NOTE(review): $s3 is used ($s3->putObjectFile) but never instantiated in
 * this method; its construction was presumably lost with the masked block.
 * NOTE(review): the awsAccessKey/awsSecretKey constants defined from store
 * config are never referenced afterwards in this method.
 */
public function unipdfimport() { $s3Accesskey = Mage::getStoreConfig('imagecdn/amazons3/access_key_id'); $s3Secretkey = Mage::getStoreConfig('imagecdn/amazons3/secret_access_key'); // AWS access info if (!defined('awsAccessKey')) { define('awsAccessKey', $s3Accesskey); } if (!defined('awsSecretKey')) { define('awsSecretKey', $s3Secretkey); } // Check for CURL if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) { exit("\nERROR: CURL extension not loaded\n\n"); } $resource = Mage::getSingleton('core/resource'); $readConnection = $resource->getConnection('core_read'); $writeConnection = $resource->getConnection('core_write'); $query = 'SELECT * FROM `unicom_invoice` WHERE pdf_complete = "No"'; $orders = $readConnection->fetchAll($query); foreach ($orders as $order) { if ($order['ship_code']) { $del = 'DEL'; $locpos = strpos($order['ship_code'], $del); if ($locpos === false) { $username = '******'; $password = '******'; } else { $username = '******'; $password = '******'; } $cookie = '/tmp/curl-session'; $postdata = "j_username="******"&j_password="******"{$username}:{$password}"); curl_setopt($ch, CURLOPT_COOKIEFILE, $cookie); curl_setopt($ch, CURLOPT_COOKIEJAR, $cookie); curl_setopt($ch, CURLOPT_POSTFIELDS, $postdata); /*$agent = $_SERVER["HTTP_USER_AGENT"]; curl_setopt ($ch, CURLOPT_USERAGENT, $agent);*/ $output = curl_exec($ch); $info = curl_getinfo($ch); curl_close($ch); if ($output) { $pdf = new Zend_Pdf(); $pdf = Zend_Pdf::parse($output); $fs_path = Mage::getBaseDir() . '/media/sales/order/pdf/' . $order['ship_code'] . 
'.pdf'; $pdf->save($fs_path); $uploadFile = $fs_path; // File to upload, we'll use the S3 class since it exists $bucketName = 'zoffio.invoices'; // Temporary bucket $morders = Mage::getModel('sales/order')->getCollection()->addAttributeToFilter('increment_id', $order['order_id']); foreach ($morders as $morder) { $orderDate = $morder->getCreatedAt(); } $year = date('Y', strtotime($orderDate)); $month = date('m', strtotime($orderDate)); $day = date('d', strtotime($orderDate)); $pdfFile = $year . "/" . $month . "/" . $day . "/" . baseName($uploadFile); // Put our file (also with public read access) $s3->putObjectFile($uploadFile, $bucketName, $pdfFile, S3::ACL_PUBLIC_READ); unlink($uploadFile); if ($order['state_complete'] == 'Yes') { $insertquery = "UPDATE `unicom_invoice` SET pdf_complete = 'Yes' WHERE ship_code = '" . $order['ship_code'] . "'"; $writeConnection->query($insertquery); } } } } }
/**
 * Saves an existing uploaded image under a new version key.
 *
 * Rejects when a prior error exists, when $key/$version are empty, when no
 * versions are configured, or when $key already exists. Two drivers:
 *  - 'local': finds the first readable existing-version file under FCPATH,
 *    ensures the destination directory exists and is writable, runs the
 *    ImageUtility transform via _utility(), and returns the new path
 *    segments.
 *  - 's3': downloads the first version's object to a temp file, transforms
 *    it, re-uploads with public-read ACL and 5-year cache headers, removes
 *    both temp files, and returns the new path segments.
 * When debug is off, failures return an empty array/string instead of the
 * error() payload.
 *
 * NOTE(review): `array_shift(array_keys($versions))` passes a function
 * result by reference — an E_NOTICE/E_STRICT on modern PHP; confirm the
 * target PHP version.
 *
 * @param string $key     New version key; must not collide with existing keys
 * @param mixed  $version Transform spec handed to _utility()
 * @return array|string New path segments on success; empty array/string or
 *                      error() payload on failure
 */
public function save_as($key, $version) { if ($this->error) { return $this->getDebug() ? call_user_func_array('error', $this->error) : array(); } if (!($key && $version)) { return $this->getDebug() ? error('OrmImageUploader 錯誤!', '參數錯誤,請檢查 save_as 函式參數!', '請程式設計者確認狀況!') : array(); } if (!($versions = ($versions = $this->getVersions()) ? $versions : $this->configs['default_version'])) { return $this->getDebug() ? error('OrmImageUploader 錯誤!', 'Versions 格式錯誤,請檢查 getVersions () 或者 default_version!', '預設值 default_version 請檢查 config/system/orm_uploader.php 設定檔!') : array(); } if (in_array($key, $keys = array_keys($versions))) { return $this->getDebug() ? error('OrmImageUploader 錯誤!', '已經有相符合的 key 名稱,key:' . $key, '目前的 key 有:' . implode(', ', $keys)) : array(); } switch ($this->getDriver()) { case 'local': foreach ($versions as $ori_key => $ori_version) { if (is_readable(FCPATH . implode(DIRECTORY_SEPARATOR, $ori_path = array_merge($this->getBaseDirectory(), $this->getSavePath(), array($ori_key . $this->configs['separate_symbol'] . ($name = $this->getValue())))))) { break; } } if (!$ori_path) { return $this->getDebug() ? error('OrmImageUploader 錯誤!', '沒有任何的檔案可以被使用!', '請確認 getVersions () 函式內有存在的檔案可被另存!', '請程式設計者確認狀況!') : array(); } if (!file_exists(FCPATH . implode(DIRECTORY_SEPARATOR, $path = array_merge($this->getBaseDirectory(), $this->getSavePath())))) { $oldmask = umask(0); @mkdir(FCPATH . implode(DIRECTORY_SEPARATOR, $path), 0777, true); umask($oldmask); } if (!is_writable(FCPATH . implode(DIRECTORY_SEPARATOR, $path))) { return $this->getDebug() ? error('OrmImageUploader 錯誤!', '資料夾不能儲存!路徑:' . $path, '請程式設計者確認狀況!') : ''; } try { $image = ImageUtility::create(FCPATH . implode(DIRECTORY_SEPARATOR, $ori_path), null); $path = array_merge($path, array($key . $this->configs['separate_symbol'] . $name)); if ($this->_utility($image, FCPATH . 
implode(DIRECTORY_SEPARATOR, $path), $key, $version)) { return $path; } else { return array(); } } catch (Exception $e) { return $this->getDebug() ? call_user_func_array('error', $e->getMessages()) : ''; } break; case 's3': if (!@S3::getObject($this->getS3Bucket(), implode(DIRECTORY_SEPARATOR, array_merge($path = array_merge($this->getBaseDirectory(), $this->getSavePath()), array($fileName = array_shift(array_keys($versions)) . $this->configs['separate_symbol'] . ($name = $this->getValue())))), FCPATH . implode(DIRECTORY_SEPARATOR, $fileName = array_merge($this->getTempDirectory(), array($fileName))))) { return $this->getDebug() ? error('OrmImageUploader 錯誤!', '沒有任何的檔案可以被使用!', '請確認 getVersions () 函式內有存在的檔案可被另存!', '請程式設計者確認狀況!') : array(); } try { $image = ImageUtility::create($fileName = FCPATH . implode(DIRECTORY_SEPARATOR, $fileName), null); $newPath = array_merge($path, array($newName = $key . $this->configs['separate_symbol'] . $name)); if ($this->_utility($image, FCPATH . implode(DIRECTORY_SEPARATOR, $newFileName = array_merge($this->getTempDirectory(), array($newName))), $key, $version) && S3::putObjectFile($newFileName = FCPATH . implode(DIRECTORY_SEPARATOR, $newFileName), $this->getS3Bucket(), implode(DIRECTORY_SEPARATOR, $newPath), S3::ACL_PUBLIC_READ, array(), array('Cache-Control' => 'max-age=315360000', 'Expires' => gmdate('D, d M Y H:i:s T', strtotime('+5 years')))) && @unlink($newFileName) && @unlink($fileName)) { return $newPath; } else { return array(); } } catch (Exception $e) { return $this->getDebug() ? call_user_func_array('error', $e->getMessages()) : ''; } break; } return $this->getDebug() ? error('OrmImageUploader 錯誤!', '未知的 driver,系統尚未支援 ' . $this->getDriver() . ' 的空間!', '請檢查 config/system/orm_uploader.php 設定檔!') : array(); }
} $c->title = $_POST['c_title']; $c->url = $_POST['c_url']; $c->seo_title = $_POST['c_seo_title']; $c->height = $_POST['height']; $c->seo_description = $_POST['c_seo_description']; $c->seo_keywords = $_POST['c_seo_keywords']; $c->short_description = $_POST['c_sdesc']; $c->description = $_POST['c_desc']; $c->more_description = $_POST['c_mdesc']; $c->save('display_order'); if (!empty($_FILES['background']['name'])) { $finfo = getimagesize($_FILES['background']['tmp_name']); if (!isset($finfo['mime'])) { Messages::addError('Background image file: Invalid file type'); } else { $result = S3::putObjectFile($_FILES['background']['tmp_name'], $config->amazon->S3Bucket, $config->amazon->folders['site']['examples']['backgrounds'] . '/' . $c->id, S3::ACL_PUBLIC_READ, array(), $finfo['mime']); if (!$result) { Messages::addError('Amazon image upload failed'); jump('?page=examples'); } } } jump('?page=examples'); } if (isset($_GET['todo']) && $_GET['todo'] == 'edit') { $c = new ExampleCategories(isset($_GET['id']) ? $_GET['id'] : null); $smarty->assign('c', $c); $smarty->assign('edit', 1); $smarty->assign('backgroundImage', $c->getBackgroundLink()); }
/**
 * Uploads a file to the 'studiomaiocchistaging' bucket on the
 * seewebstorage.it S3-compatible endpoint, keyed by $hash. No ACL argument
 * is passed, so the S3 class default applies.
 *
 * SECURITY NOTE(review): account credentials are hard-coded here — move
 * them to configuration and rotate them.
 *
 * @param string $file Local file path
 * @param string $hash Destination object key
 * @return mixed Result of S3::putObjectFile()
 */
public function put_file($file, $hash)
{
    $client = new S3('csub001050', 'studiomaiocchi', true, 'seewebstorage.it');

    return $client->putObjectFile($file, 'studiomaiocchistaging', $hash);
}
/**
 * Moves a just-uploaded temp file into its final location and maintains the
 * upload column around the move: the column is first cleared ('') and only
 * set to the final name when the move/upload succeeded.
 *
 * Drivers: 'local' renames the temp file under FCPATH; 's3' uploads it with
 * public-read ACL and 5-year cache headers, then best-effort deletes the
 * temp file. Unknown drivers (and failures, when debug is off) yield
 * error payloads / false / empty array as shown below.
 *
 * @param string $temp      Temp file path
 * @param array  $save_path Destination path segments
 * @param string $ori_name  Final file name
 * @return mixed truthy on success; error() payload, false, or array() on failure
 */
protected function moveFileAndUploadColumn($temp, $save_path, $ori_name) { if ($this->error) { return $this->getDebug() ? call_user_func_array('error', $this->error) : array(); } switch ($this->getDriver()) { case 'local': if ($this->uploadColumnAndUpload('') && @rename($temp, $save_path = FCPATH . implode(DIRECTORY_SEPARATOR, $save_path) . DIRECTORY_SEPARATOR . $ori_name)) { return $this->uploadColumnAndUpload($ori_name); } else { return $this->getDebug() ? error('OrmUploader 錯誤!', '搬移預設位置時發生錯誤!', 'temp:' . $temp, 'save_path:' . $save_path, 'name:' . $ori_name, '請程式設計者確認狀況!') : false; } break; case 's3': if ($this->uploadColumnAndUpload('') && S3::putObjectFile($temp, $this->getS3Bucket(), implode(DIRECTORY_SEPARATOR, $save_path) . DIRECTORY_SEPARATOR . $ori_name, S3::ACL_PUBLIC_READ, array(), array('Cache-Control' => 'max-age=315360000', 'Expires' => gmdate('D, d M Y H:i:s T', strtotime('+5 years'))))) { return $this->uploadColumnAndUpload($ori_name) && @unlink($temp); } else { return $this->getDebug() ? error('OrmUploader 錯誤!', '搬移預設位置時發生錯誤!', 'temp:' . $temp, 'save_path:' . $save_path, 'name:' . $ori_name, '請程式設計者確認狀況!') : false; } break; } return $this->getDebug() ? error('OrmUploader 錯誤!', '未知的 driver,系統尚未支援 ' . $this->getDriver() . ' 的空間!', '請檢查 config/system/orm_uploader.php 設定檔!') : array(); }
/**
 * Validate, resize/pad and upload a user photo to the private S3 bucket.
 *
 * Images wider than 800px are resampled down to 800px wide (JPEG output);
 * images smaller than 200px in either dimension are padded to 200x200 on a
 * white background via ImageMagick `convert` before upload.
 *
 * @param string $tmp_name Uploaded temp file path ($_FILES[...]['tmp_name']).
 * @param string $name     Original client file name (used for the extension).
 * @param int    $size     File size in bytes (kept for interface compatibility; unused).
 * @param string $path     Local working directory for the intermediate file.
 * @param mixed  $count    Salt mixed into the generated (md5) file name.
 * @param string $type     'profile_img' stores under files/photos/, anything
 *                         else under files/company/.
 * @return string|bool Generated file name on success, the literal string
 *                     "ext" on a disallowed extension, false on failure.
 */
function uploadPhoto($tmp_name, $name, $size, $path, $count, $type)
{
    if (!$name) {
        return false;
    }
    $ext = substr(strrchr(strtolower($name), '.'), 1);
    if (!in_array($ext, array('gif', 'jpg', 'jpeg', 'png'), true)) {
        return 'ext';
    }
    list($width, $height) = getimagesize($tmp_name);
    $newname = md5(time() . $count) . '.' . $ext;
    $targetpath = $path . $newname;
    try {
        if ($width > 800) {
            // BUGFIX: the original tested an undefined $extname here, so every
            // image (including PNG/GIF) fell through to imagecreatefromjpeg.
            // The dead 'bmp' branch is gone: bmp never passes the extension check.
            if ($ext == 'png') {
                $src = imagecreatefrompng($tmp_name);
            } elseif ($ext == 'gif') {
                $src = imagecreatefromgif($tmp_name);
            } else {
                $src = imagecreatefromjpeg($tmp_name);
            }
            // Scale down to 800px wide, preserving the aspect ratio.
            $newwidth = 800;
            $newheight = (int) round($height / $width * $newwidth);
            $tmp = imagecreatetruecolor($newwidth, $newheight);
            imagecopyresampled($tmp, $src, 0, 0, 0, 0, $newwidth, $newheight, $width, $height);
            imagejpeg($tmp, $targetpath, 100);
            imagedestroy($src);
            imagedestroy($tmp);
        } else {
            move_uploaded_file($tmp_name, $targetpath);
        }
        // BUGFIX: pad small images BEFORE uploading/deleting the local copy;
        // the original ran `convert` after unlink(), always on a missing file.
        // Paths are shell-escaped even though they are md5-derived.
        if ($width < 200 || $height < 200) {
            exec('convert ' . escapeshellarg($targetpath)
                . ' -background white -gravity center -extent 200x200 '
                . escapeshellarg($targetpath));
        }
        $s3 = new S3(awsAccessKey, awsSecretKey);
        $s3->putBucket(BUCKET_NAME, S3::ACL_PRIVATE);
        $folder_orig_Name = ($type == 'profile_img')
            ? 'files/photos/' . trim($newname)
            : 'files/company/' . trim($newname);
        $s3->putObjectFile($targetpath, BUCKET_NAME, $folder_orig_Name, S3::ACL_PRIVATE);
        unlink($targetpath);
    } catch (Exception $e) {
        return false;
    }
    return $newname;
}
/**
 * Upload a single backup file to S3, optionally under a "directory" prefix.
 *
 * Reads from the context: AccessKeyId, SecretAccessKey, bucket, dir (optional
 * prefix) and file (local path). On failure the exception message is stored
 * on the context via setError().
 *
 * @param WPAdm_Command_Context $context Command parameters and error sink.
 * @return bool True on success, false when the upload threw.
 */
public function execute(WPAdm_Command_Context $context)
{
    require_once WPAdm_Core::getPluginDir() . '/modules/S3.php';
    $s3 = new S3($context->get('AccessKeyId'), $context->get('SecretAccessKey'));
    $dir = $context->get('dir') ? $context->get('dir') : '';
    if ($dir) {
        // Create a pseudo-folder key ("dir/") so the backup appears as a
        // directory in the S3 console; the object body is just the dir name.
        $logs = $s3->putObject($dir, $context->get('bucket'), $dir . "/", S3::ACL_PUBLIC_READ);
        WPAdm_Core::log('create folder logs ' . serialize($logs));
    }
    try {
        // Normalize any mix of forward/back slashes to single '/' separators.
        $filePath = preg_replace('#[/\\\\]+#', '/', $context->get('file'));
        $key = $dir ? $dir . '/' . basename($filePath) : basename($filePath);
        $key = ltrim(preg_replace('#[/\\\\]+#', '/', $key), '/'); //if first will be '/', file not will be uploaded, but result will be ok
        $putRes = $s3->putObjectFile($filePath, $context->get('bucket'), $key, S3::ACL_PUBLIC_READ_WRITE);
        WPAdm_Core::log('putObjectFile ' . $filePath . ' == ' . $context->get('bucket') . " == " . $key . ' == ' . (int) $putRes);
    } catch (S3Exception $e) {
        // BUGFIX: this catch must come before the generic Exception handler —
        // in the original order it was unreachable whenever S3Exception
        // extends Exception, so the S3-specific log line never ran.
        WPAdm_Core::log('error send file ' . $e->getMessage());
        $context->setError($e->getMessage());
        return false;
    } catch (Exception $e) {
        $context->setError($e->getMessage());
        return false;
    }
    return true;
}
/**
 * Push the finished backup archive to every configured storage server.
 *
 * Iterates $this->b['storage_servers'] and dispatches on each server's type:
 *  - 'local': copies the temp archive with the `cp` binary (copy() avoided:
 *    PHP historically could not handle files > 2GB, see inline comment),
 *    then runs maintenance() on the directory.
 *  - 'email': mails a summary of the backup; the archive is attached only
 *    when its base64-encoded size fits both the 25MB hard SMTP cap and the
 *    configured per-server soft limit ($s['maxsize']).
 *  - 'ftp':   uploads over FTP (passive/active per $s['transfer']), creating
 *    the remote directory structure on demand, then maintenance().
 *  - 'awss3': uploads to S3 via the bundled \S3 class, creating the bucket
 *    first when it does not exist, then maintenance().
 *  - 'ssh':   `mkdir -p` on the remote host over ssh, then scp the archive
 *    (IPv6 hosts are wrapped in [] for scp only), then maintenance().
 *
 * Errors are stored in $this->b['error'] and logged via backup_log().
 * backup__() performs variable substitution on configured values.
 *
 * NOTE(review): in the 'local' branch, unset($cmd, $error) runs BEFORE
 * $error is interpolated into the failure message — and $error is exec()'s
 * output *array* anyway — so the logged message never contains the real cp
 * output. Fix would be to json_encode($error) and unset afterwards.
 * NOTE(review): $cmd is reused across loop iterations ('local' appends with
 * $cmd[] while 'ssh'/'ftp' branches assign strings); with certain server
 * orderings $cmd[] could operate on a string. TODO confirm and initialize
 * $cmd = array() at the top of the 'local' branch.
 */
function store_backup() { foreach ($this->b['storage_servers'] as $s) { $s = $this->s[$s]; switch ($s['type']) { case 'local': $path = backup__($s['path']) . '/' . $this->b['_dirname']; //ensure directory structure if (!is_dir($path)) { mkdir($path, 0755, true); } //would rather use the native copy() here, but by defualt //php doesnt support files > 2GB //see here for a posible solution: //http://ca3.php.net/manual/en/function.fopen.php#37791 $cmd[] = fpbx_which('cp'); $cmd[] = $this->b['_tmpfile']; $cmd[] = $path . '/' . $this->b['_file'] . '.tgz'; exec(implode(' ', $cmd), $error, $status); unset($cmd, $error); if ($status !== 0) { $this->b['error'] = 'Error copying ' . $this->b['_tmpfile'] . ' to ' . $path . '/' . $this->b['_file'] . '.tgz: ' . $error; backup_log($this->b['error']); } //run maintenance on the directory $this->maintenance($s['type'], $s); break; case 'email': //TODO: set agent to something informative, including fpbx & backup versions $email_options = array('useragent' => 'freepbx', 'protocol' => 'mail'); $email = new \CI_Email(); $from = $this->amp_conf['AMPBACKUPEMAILFROM'] ? $this->amp_conf['AMPBACKUPEMAILFROM'] : '*****@*****.**'; $msg[] = _('Name') . ': ' . $this->b['name']; $msg[] = _('Created') . ': ' . date('r', $this->b['_ctime']); $msg[] = _('Files') . ': ' . $this->manifest['file_count']; $msg[] = _('Mysql Db\'s') . ': ' . $this->manifest['mysql_count']; $msg[] = _('astDb\'s') . ': ' . $this->manifest['astdb_count']; $email->from($from); $email->to(backup__($s['addr'])); $email->subject(_('Backup') . ' ' . $this->b['name']); $body = implode("\n", $msg); // If the backup file is more than 25MB, yell $encodedsize = ceil(filesize($this->b['_tmpfile']) / 3) * 4; if ($encodedsize > 26214400) { $email->subject(_('Backup ERROR (exceeded SMTP limits)') . ' ' . $this->b['name']); $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the maximum SMTP limits of 25MB. It was not attempted to be sent. 
Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n"); } elseif ($encodedsize > $s['maxsize']) { $email->subject(_('Backup ERROR (exceeded soft limit)') . ' ' . $this->b['name']); $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the soft limit set in SMTP configuration (%s bytes). It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n"); } else { $email->message($body); $email->attach($this->b['_tmpfile']); } $email->send(); unset($msg); break; case 'ftp': //subsitute variables if nesesary $s['host'] = backup__($s['host']); $s['port'] = backup__($s['port']); $s['user'] = backup__($s['user']); $s['password'] = backup__($s['password']); $s['path'] = backup__($s['path']); $ftp = @ftp_connect($s['host'], $s['port']); if ($ftp === false) { $this->b['error'] = _("Error connecting to the FTP Server... Check your host name or DNS"); backup_log($this->b['error']); return $ftp; } if (ftp_login($ftp, $s['user'], $s['password'])) { //chose pasive/active transfer mode ftp_pasv($ftp, $s['transfer'] == 'passive'); //switch to directory. If we fail, build directory structure and try again if (!@ftp_chdir($ftp, $s['path'] . '/' . $this->b['_dirname'])) { //ensure directory structure @ftp_mkdir($ftp, $s['path']); @ftp_mkdir($ftp, $s['path'] . '/' . $this->b['_dirname']); ftp_chdir($ftp, $s['path'] . '/' . $this->b['_dirname']); } //copy file ftp_put($ftp, $this->b['_file'] . 
'.tgz', $this->b['_tmpfile'], FTP_BINARY); //run maintenance on the directory $this->maintenance($s['type'], $s, $ftp); //release handel ftp_close($ftp); } else { $this->b['error'] = _("Error connecting to the FTP Server..."); backup_log($this->b['error']); } break; case 'awss3': //subsitute variables if nesesary $s['bucket'] = backup__($s['bucket']); $s['awsaccesskey'] = backup__($s['awsaccesskey']); $s['awssecret'] = backup__($s['awssecret']); $awss3 = new \S3($s['awsaccesskey'], $s['awssecret']); // Does this bucket already exist? $buckets = $awss3->listBuckets(); if (!in_array($s['bucket'], $buckets)) { // Create the bucket $awss3->putBucket($s['bucket'], \S3::ACL_PUBLIC_READ); } //copy file if ($awss3->putObjectFile($this->b['_tmpfile'], $s['bucket'], $this->b['name'] . "/" . $this->b['_file'] . '.tgz', \S3::ACL_PUBLIC_READ)) { dbug('S3 successfully uploaded your backup file.'); } else { dbug('S3 failed to accept your backup file'); } //run maintenance on the directory $this->maintenance($s['type'], $s, $awss3); break; case 'ssh': //subsitute variables if nesesary $s['path'] = backup__($s['path']); $s['user'] = backup__($s['user']); $s['host'] = backup__($s['host']); $destdir = $s['path'] . '/' . $this->b['_dirname']; //ensure directory structure $cmd = fpbx_which('ssh') . ' -o StrictHostKeyChecking=no -i '; $cmd .= $s['key'] . " -l " . $s['user'] . ' ' . $s['host'] . ' -p ' . $s['port']; $cmd .= " 'mkdir -p {$destdir}'"; exec($cmd, $output, $ret); if ($ret !== 0) { backup_log("SSH Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}"); } $output = null; //put file // Note that SCP (*unlike SSH*) needs IPv6 addresses in ['s. Consistancy is awesome. if (filter_var($s['host'], \FILTER_VALIDATE_IP, \FILTER_FLAG_IPV6)) { $scphost = "[" . $s['host'] . "]"; } else { $scphost = $s['host']; } $cmd = fpbx_which('scp') . ' -o StrictHostKeyChecking=no -i ' . $s['key'] . ' -P ' . $s['port']; $cmd .= " " . $this->b['_tmpfile'] . " " . $s['user'] . 
"@{$scphost}:{$destdir}"; exec($cmd, $output, $ret); if ($ret !== 0) { backup_log("SCP Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}"); } //run maintenance on the directory $this->maintenance($s['type'], $s); break; } } }