If maxKeys is null, this method will loop through truncated result sets until the full listing has been retrieved
public static getBucket( string $bucket, string $prefix = null, string $marker = null, string $maxKeys = null, string $delimiter = null, boolean $returnCommonPrefixes = false ) : array | false
$bucket | string | Bucket name |
$prefix | string | Prefix |
$marker | string | Marker (last file listed) |
$maxKeys | string | Max keys (maximum number of keys to return) |
$delimiter | string | Delimiter |
$returnCommonPrefixes | boolean | Set to true to return CommonPrefixes |
return | array | false | Array of bucket contents on success, false on failure |
/**
 * Lists image objects in the wikiphoto S3 bucket and, for each key that
 * matches "username/1234.zip" or "username/1234/<file>", pulls the file
 * into a local staging directory via self::pullFiles().
 *
 * NOTE(review): $maxKeys is hard-coded to 1 (the null value is commented
 * out), so only a single key is listed per call — confirm this is
 * intentional and not a debugging leftover.
 */
private function listS3Images() {
    $s3 = new S3(WH_AWS_WIKIPHOTO_ACCESS_KEY, WH_AWS_WIKIPHOTO_SECRET_KEY);
    $bucket_name = self::AWS_BUCKET;
    $prefix = null;
    $marker = null;
    //$marker = 'paupau/257175/Make Chocolate-1.JPG';
    $maxKeys = 1;
    //$maxKeys = null;
    $delimiter = null;
    $returnCommonPrefixes = false;
    // One entry per S3 object, keyed by object path.
    $buckets = $s3->getBucket($bucket_name, $prefix, $marker, $maxKeys, $delimiter, $returnCommonPrefixes);
    print "number of buckets: " . count($buckets) . "\n";
    foreach ($buckets as $path => $details) {
        // match string: username/(1234.zip or 1234/*.jpg)
        if (!preg_match('@^([a-z][-._0-9a-z]{0,30})/([0-9]+)(\\.zip|/.+)$@i', $path, $m)) {
            continue;
        }
        list(, $user, $id, $ending) = $m;
        $id = intval($id);
        if (!$id) {
            continue;
        }
        /* if (in_array($user, self::$excludeUsers) // don't process anything in excluded people || preg_match('@^[0-9]+$@', $user)) // don't allow usernames that are all digits { continue; } */
        $prefix = $user . '/' . $id;
        $files = array($ending);
        // Stage the matched file(s) locally; $err/$stageDir are unused here.
        list($err, $stageDir) = self::pullFiles($id, $s3, $prefix, $files);
    }
}
/**
 * List objects in the configured S3 media bucket.
 *
 * @param string|null $pattern Optional key prefix used to filter the listing.
 * @return array|false Listing as returned by S3::getBucket().
 */
public static function ls($pattern = null)
{
    global $globals;

    S3::setAuth($globals['Amazon_access_key'], $globals['Amazon_secret_key']);

    return S3::getBucket($globals['Amazon_S3_media_bucket'], $pattern);
}
/**
 * Return the S3 bucket listing, served from the local cache file when it
 * is still fresh, otherwise fetched from S3 and re-cached.
 *
 * @return array|null Bucket contents, or null when the bucket is unreachable.
 */
private function getBucketContents() {
    $cachePath = $this->cacheDir . '/s3browser-' . $this->s3Bucket;
    $listing = null;

    // Serve from cache while it is younger than the configured duration.
    if ($this->cacheDuration && file_exists($cachePath)) {
        $age = time() - filectime($cachePath);
        if ($age < $this->cacheDuration) {
            $listing = unserialize(file_get_contents($cachePath));
        }
    }

    if ($listing) {
        return $listing;
    }

    // Cache miss (or stale/unreadable cache): ask S3 directly.
    $client = new S3($this->s3AccessKey, $this->s3SecretKey, $this->s3useSSL, $this->s3endPoint);
    $listing = $client->getBucket($this->s3Bucket);

    // We weren't able to access the bucket.
    if (!is_array($listing)) {
        return null;
    }

    // Persist the fresh listing if caching is enabled.
    if ($this->cacheDuration) {
        file_put_contents($cachePath, serialize($listing));
    }

    return $listing;
}
/**
 * Walks every object in the wikiphoto S3 bucket and, for each key shaped
 * like "folder/file.ext" with a whitelisted document extension, pulls the
 * file via self::pullFiles(). Display names are refreshed afterwards via
 * self::processDisplayNames().
 *
 * Fix: removed the dead $prefix/$files locals — they were computed but
 * never used, since pullFiles() here takes ($s3, $folder, $file, $ext).
 */
private function listS3Docs() {
    $s3 = new S3(WH_AWS_WIKIPHOTO_ACCESS_KEY, WH_AWS_WIKIPHOTO_SECRET_KEY);
    $bucket_name = self::AWS_BUCKET;
    $prefix = null;
    $marker = null;
    $maxKeys = null;
    $delimiter = null;
    $returnCommonPrefixes = false;
    $buckets = $s3->getBucket($bucket_name, $prefix, $marker, $maxKeys, $delimiter, $returnCommonPrefixes);
    if (!self::$quiet) {
        print "number of buckets: " . count($buckets) . "\n";
    }
    foreach ($buckets as $path => $details) {
        // match string: doc_folder/doc_file.ending
        if (!preg_match('@^(.*)/(.*)\\.(.*)$@i', $path, $m)) {
            continue;
        }
        list(, $doc_folder, $doc_file, $ending) = $m;
        // validate extension
        if (!in_array($ending, self::$docExts)) {
            continue;
        }
        // Stage the document locally; $err/$stageDir are unused here.
        list($err, $stageDir) = self::pullFiles($s3, $doc_folder, $doc_file, $ending);
    }
    // now process the display names
    self::processDisplayNames($s3);
}
/**
 * @inheritDoc BaseAssetSourceType::deleteSourceFolder()
 *
 * Deletes every S3 object under the folder's path prefix.
 *
 * @param AssetFolderModel $parentFolder
 * @param $folderName
 *
 * @return bool
 */
protected function deleteSourceFolder(AssetFolderModel $parentFolder, $folderName)
{
    $this->_prepareForRequests();
    $bucket = $this->getSettings()->bucket;
    $objectsToDelete = $this->_s3->getBucket($bucket, $this->_getPathPrefix() . $parentFolder->path . $folderName);

    // Fix: getBucket() returns false on failure; iterating over false
    // raised a warning while the method still claimed success.
    if (is_array($objectsToDelete)) {
        foreach ($objectsToDelete as $uri) {
            // Best-effort delete; individual failures are ignored.
            @$this->_s3->deleteObject($bucket, $uri['name']);
        }
    }

    return true;
}
/**
 * Rotate S3 backups: delete every backup object older than one year.
 *
 * Backups are grouped chronologically (group -> year -> month -> day ->
 * hour) and any month/day/hour bucket whose latest possible timestamp
 * falls before the cutoff is expired wholesale via getChildren().
 */
function rotate() {
    // Fix: '1- Year' is not a valid strtotime() expression (it evaluates
    // to false); the intended cutoff is one year in the past.
    $maxAge = strtotime('-1 year');
    $s3 = S3::getBucket($this->backupBucket);
    // Organize Backups Chronologically
    $backups = array(); // fix: was used without initialization
    foreach ($s3 as $s3_filename => $data) {
        $filename = substr($s3_filename, 0, strpos($s3_filename, '.'));
        // Group name = filename minus its 25-character timestamp suffix.
        $backupgroup = substr($filename, 0, strlen($filename) - 25);
        $timestamp = $data['time'];
        $backups[$backupgroup][date('Y', $timestamp)][date('n', $timestamp)][date('j', $timestamp)][date('G', $timestamp)][] = $s3_filename;
    }
    // Get Expired Children of Each Backup Group
    $expired = array();
    $success = array();
    // Separate Out File Types (Currently Unused)
    foreach ($backups as $backupgroup => $data) {
        // Year
        foreach ($data as $year => $ydata) {
            // Month
            foreach ($ydata as $month => $mdata) {
                $maxTime = mktime(23, 59, 59, $month, cal_days_in_month(CAL_GREGORIAN, $month, $year), $year);
                if ($maxTime < $maxAge) {
                    $expired = array_merge($expired, $this->getChildren($mdata));
                } else {
                    foreach ($mdata as $day => $ddata) {
                        $maxTime = mktime(23, 59, 59, $month, $day, $year);
                        if ($maxTime < $maxAge) {
                            $expired = array_merge($expired, $this->getChildren($ddata));
                        } else {
                            foreach ($ddata as $hour => $hdata) {
                                $maxTime = mktime($hour, 59, 59, $month, $day, $year);
                                if ($maxTime < $maxAge) {
                                    $expired = array_merge($expired, $this->getChildren($hdata));
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    foreach ($expired as $object) {
        $thisSuccess = $this->deleteObject($this->backupBucket, $object);
        if ($thisSuccess) {
            echo "Deleted ";
            $success[] = $object;
        } else {
            echo "ERROR: Could Not Delete ";
        }
        echo $object . "\n";
    }
    echo "\n" . count($success) . " Backups Deleted";
}
/**
 * Check - path is directory or not
 *
 * Probes the key itself first: some tools create S3 "directories" as
 * objects of type binary/octet-stream. When no such object exists, the
 * path is treated as a directory if any keys are listed under it.
 *
 * @param string $path Short path
 *
 * @return boolean
 */
public function isDir($path)
{
    $result = false;
    try {
        $result = $this->client->getObjectInfo(\XLite\Core\Config::getInstance()->CDev->AmazonS3Images->bucket, $path);
        if (is_array($result)) {
            // Placeholder objects for folders carry this content type.
            $result = $result['type'] == 'binary/octet-stream';
        } else {
            // No object at this key: a non-empty listing under the path
            // (cast to bool) means it behaves like a directory.
            $result = (bool) $this->client->getBucket(\XLite\Core\Config::getInstance()->CDev->AmazonS3Images->bucket, $path);
        }
    } catch (\S3Exception $e) {
        // Any S3 error is logged and reported as "not a directory".
        $result = false;
        \XLite\Logger::getInstance()->registerException($e);
    }
    return $result;
}
/**
 * Upload $file from ASSETS_PATH to the configured S3 bucket unless an
 * object with the same name already exists there. Progress is echoed as
 * HTML lines.
 *
 * @param string $file File name relative to ASSETS_PATH.
 */
private function S3copy($file) {
    $client = new S3($this->AccessKey, $this->SecretKey);

    // Skip the upload when the bucket already holds this file name.
    $alreadyThere = false;
    foreach ($client->getBucket($this->AWSFolder) as $object) {
        if ($object['name'] === $file) {
            $alreadyThere = true;
        }
    }

    if ($alreadyThere) {
        echo $file . " already in S3<br>" . "\r\n";
        return;
    }

    $uploaded = $client->putObject($client->inputFile(ASSETS_PATH . DIRECTORY_SEPARATOR . $file), $this->AWSFolder, $file, S3::ACL_PRIVATE);
    if ($uploaded) {
        echo $file . " transferred to S3<br>" . "\r\n";
    } else {
        echo $file . " unable to be transferred to S3<br>" . "\r\n";
    }
}
/**
 * Render the add/edit form for an S3-hosted video (YSS WordPress
 * plugin). With no $id, every object in every bucket of the configured
 * S3 account is offered in a select box; with an $id the stored
 * bucket/path is shown read-only. Also renders the membership-type
 * checkboxes and the post/page restriction multi-select.
 *
 * @param int|false $id Video record id to edit, or false to add a new one.
 */
function yss_s3_edit($id = false) {
    global $wpdb, $yss_post_assoc;
    $checked = array();
    $s3file = yss_get($id);
    // All published posts/pages, for the "restrict by post" selector.
    $sql = 'SELECT ID, post_title FROM ' . $wpdb->posts . ' WHERE post_status = "publish" AND post_type IN ("page","post") ORDER BY post_title';
    $posts = $wpdb->get_results($sql);
    if ($id) {
        // NOTE(review): $id is interpolated into SQL unescaped — assumed
        // numeric; confirm callers sanitize it.
        $sql = 'SELECT post_id FROM ' . $yss_post_assoc . ' WHERE s3_id = ' . $id;
        $results = $wpdb->get_results($sql);
        foreach ($results as $result) {
            $checked[] = $result->post_id;
        }
    }
    echo ym_start_box($id ? 'Edit Video' : 'Add Video');
    if (!$id) {
        require_once YSS_CLASSES_DIR . 'S3.php';
        $s3 = new S3();
        $s3->setAuth(get_option('yss_user_key'), get_option('yss_secret_key'));
    }
    echo ' <table class="widefat form-table" style="width: 100%;" cellspacing="10"> <tr valign="top"> <td> ' . __('S3 Bucket/file', "ym") . ' </td> <td>';
    if (!$id) {
        echo ' <select name="s3_file_select"> ';
        // Every object in every bucket becomes an <option>; the one that
        // matches the stored bucket/path is pre-selected.
        foreach ($s3->listBuckets() as $bucket) {
            $thisbucket = $s3->getBucket($bucket);
            foreach ($thisbucket as $file) {
                echo '<option ';
                if ($s3file->bucket . '/' . $s3file->resource_path == $bucket . '/' . $file['name']) {
                    echo 'selected="selected"';
                }
                echo '>' . $bucket . '/' . $file['name'] . '</option>';
            }
        }
        echo ' </select> ';
    } else {
        echo $s3file->bucket . '/' . $s3file->resource_path;
        echo '<input type="hidden" name="s3_file_select" value="' . $s3file->bucket . '/' . $s3file->resource_path . '" />';
    }
    echo ' </td> </tr> <tr valign="top"> <td> ' . __('Your Members Package Types access', "ym") . ' <div style="font-size: 10px; color: gray; margin-top: 10px;">Your videos can be protected by account type here. 
If none of the boxes are checked then it will fall back to the next section (post protection)</div> </td><td>';
    echo ' <div>';
    if ($data = get_option('ym_account_types')) {
        $types = $data->types;
        $ac_checked = array();
        // account_types is stored as a '||'-delimited string.
        if ($selected = @$s3file->account_types) {
            $ac_checked = explode('||', $selected);
        }
        foreach ((array) $types as $type) {
            $checked_string = '';
            if (in_array($type, $ac_checked)) {
                $checked_string = 'checked="checked"';
            }
            echo ' <div class="ym_setting_list_item"> <label> <input type="checkbox" class="checkbox" name="account_types[]" value="' . $type . '" ' . $checked_string . ' /> ' . __($type) . ' </label> </div>';
        }
    } else {
        echo '<div>The system is unable to find any YM account types. Is there a problem with the install?</div>';
    }
    echo '</div>';
    echo ' </td> </tr> <tr valign="top"> <td> ' . __('Restrict access by post/page?', "ym") . ' <input type="checkbox" name="memberonly" ' . (@$s3file->members ? "checked='checked'" : '') . ' /> (Check to activate) <div style="font-size: 10px; color: gray; margin-top: 10px;">If the above account type check fails or you choose not to use it then you can optionally use this section. This will check access against a number of posts or pages and if at least one has access then the video will be shown.<br /><br />If the restrict access checkbox is unticked then YSS will assume that the video should remain unprotected (if you are not using the account type protection)</div> </td> <td> <br /><select name="link_to_post_id[]" multiple size=10 style="height: 250px; width: 450px;">';
    foreach ($posts as $row) {
        $selected = in_array($row->ID, $checked) ? 'selected="selected"' : '';
        echo '<option value="' . $row->ID . '" ' . $selected . ' >' . $row->post_title . '</option>';
    }
    echo ' </select> </td> </tr>';
    echo ' </table> <p class="submit"> <div style="float: right;"> <input type="submit" class="button" name="submit_edit_s3" value="' . __('Save', 'yss') . '" /> </div> <input type="submit" value="' . __('Back', 'yss') . 
'" /> <div class="ym_clear"> </div> </p> <input type="hidden" name="task" value="save" /> <input type="hidden" name="s3s_id" value="' . @$s3file->id . '" /> ';
    echo ym_end_box();
}
$objMossoAuth->authenticate(); // Let's get a connection to CloudFiles $objMosso = new CF_Connection($objMossoAuth); echo "Listing buckets from your Amazon S3\n"; $awsBucketList = $objS3->listBuckets(); echo str_replace('Array', 'Amazon S3 Buckets', print_r($awsBucketList, true)) . "\n"; foreach ($awsBucketList as $awsBucketName) { if (in_array($awsBucketName, $awsExcludeBuckets)) { echo "---> Bucket {$awsBucketName} will be excluded\n"; continue; } $mossoContainerName = $prefixToAddToContainers . $awsBucketName; // TODO: check if Bucket is CDN enabled // Get objects echo "Listing objects in Bucket {$awsBucketName} \n"; $awsObjectList = $objS3->getBucket($awsBucketName); // Create this bucket as a Container on MOSSO echo "Creating Container {$mossoContainerName} in Cloud Files\n"; $objMossoContainer = $objMosso->create_container($mossoContainerName); echo "Processing objects in Bucket {$awsBucketName} \n"; foreach ($awsObjectList as $awsObjectInfo) { // Check if Object is in ignore list if (in_array($awsObjectInfo["name"], $awsExcludeObjects[$awsBucketName])) { echo "---> Object {$awsObjectInfo["name"]} will be excluded\n"; continue; } //$awsObjectInfo = $objS3->getObjectInfo($awsBucketName, $awsObjectName); echo str_replace('Array', $awsObjectInfo["name"], print_r($awsObjectInfo, true)); // TODO: Get Metadata and convert them to Mosso // Check if it's a folder if (strstr($awsObjectInfo["name"], '_$folder$')) {
// CLI deploy-script section: run banner, S3 helper init, remote listing.
echo ' ' . color('◎ 執行開始 ◎', 'P') . "\n";
echo str_repeat('-', 80) . "\n";
// // ========================================================================
// // ========================================================================
// // ========================================================================
// Initialize the S3 helper with the account credentials.
echo ' ➜ ' . color('初始化 S3 工具', 'g');
include_once 'libs/s3.php';
S3::init($access, $secret);
echo ' - ' . color('初始化成功!', 'C') . "\n";
echo str_repeat('-', 80) . "\n";
// // ========================================================================
// // ========================================================================
// // ========================================================================
// List every object in the bucket, keeping only keys under the NAME/ prefix.
echo ' ➜ ' . color('列出 S3 上所有檔案', 'g');
try {
    $s3_files = array_filter(S3::getBucket($bucket), function ($s3_file) {
        return preg_match('/^' . NAME . '\\//', $s3_file['name']);
    });
    echo color('(' . ($c = count($s3_files)) . ')', 'g') . ' - 100% - ' . color('取得檔案成功!', 'C') . "\n";
    echo str_repeat('-', 80) . "\n";
} catch (Exception $e) {
    // Listing failed — report and abort the whole run.
    echo ' - ' . color('取得檔案失敗!', 'R') . "\n";
    exit;
}
// // ========================================================================
// // ========================================================================
// // ========================================================================
// Reset counters for the local-file scan that follows this chunk.
$i = 0;
$c = 5;
$local_files = array();
echo ' ➜ ' . color('列出即將上傳所有檔案', 'g');
/**
 * Build an index of the bucket's JPEG files keyed by numeric id and
 * "a"/"b" variant: [id][variant] => ['size' => ..., 'time' => ...].
 *
 * Only keys matching ".../a/123.jpg" or ".../b/123.jpg" are included.
 *
 * @return array
 */
function file_listing() {
    require_once "S3.php";
    global $aws_access, $aws_secret;
    global $s3_bucket, $s3_path;

    $client = new S3($aws_access, $aws_secret);
    $index = array();
    foreach ($client->getBucket($s3_bucket, $s3_path) as $entry) {
        if (!preg_match('@^.*/([ab])/([0-9]+)\\.jpg$@i', $entry['name'], $parts)) {
            continue;
        }
        $index[$parts[2]][$parts[1]] = array('size' => $entry['size'], 'time' => $entry['time']);
    }
    return $index;
}
/**
 * Gallery3 task: full re-sync of all items to the aws_s3 bucket.
 *
 * On the first sync (module var "synced" is false) the target prefix on
 * S3 is emptied first (first 20% of the progress bar), then every item
 * except the root album is re-uploaded (remaining 80%).
 *
 * @param Task_Model $task progress/state is persisted on this task
 */
static function sync($task) {
    aws_s3::log("Amazon S3 Re-sync task started..");
    batch::start();
    $items = ORM::factory("item")->find_all();
    $task->set("total_count", count($items));
    $task->set("completed", 0);
    if (!module::get_var("aws_s3", "synced", false)) {
        aws_s3::log("Emptying contents of bucket");
        $task->status = "Emptying contents of bucket";
        $task->save();
        require_once MODPATH . "aws_s3/lib/s3.php";
        $s3 = new S3(module::get_var("aws_s3", "access_key"), module::get_var("aws_s3", "secret_key"));
        $bucket = module::get_var("aws_s3", "bucket_name");
        $resource = aws_s3::get_resource_url("");
        // Listing is processed in reverse key order.
        $stuff = array_reverse(S3::getBucket($bucket, $resource));
        $i = 0;
        foreach ($stuff as $uri => $item) {
            $i++;
            aws_s3::log("Removing " . $uri . " from S3");
            S3::deleteObject($bucket, $uri);
            $task->percent_complete = round(20 * ($i / count($stuff)));
            $task->save();
        }
    }
    $task->percent_complete = 20;
    aws_s3::log("Commencing upload tasks");
    $task->state = "Commencing upload...";
    $task->save();
    $completed = $task->get("completed", 0);
    $items = ORM::factory("item")->find_all();
    foreach ($items as $item) {
        try {
            // Item id 1 is the root album and is never uploaded.
            if ($item->id > 1) {
                aws_s3::upload_item($item, aws_s3::get_upload_flags());
            }
        } catch (Exception $err) {
            // NOTE(review): upload failures are silently swallowed and
            // still counted as completed — confirm best-effort intent.
        }
        $completed++;
        $task->set("completed", $completed);
        $task->percent_complete = round(80 * ($completed / $task->get("total_count"))) + 20;
        $task->status = $completed . " of " . $task->get("total_count") . " uploaded.";
        $task->save();
    }
    $task->percent_complete = 100;
    $task->state = "success";
    $task->done = true;
    aws_s3::log("Sync task completed successfully");
    $task->status = "Sync task completed successfully";
    module::set_var("aws_s3", "synced", true);
    site_status::clear("aws_s3_not_synced");
    batch::stop();
    $task->save();
}
<th>Actions</th> </tr> </thead> <tfoot> <tr class="thead"> <th scope="col" class="check-column"><input type="checkbox" class="check-all-entries" /></th> <th>Backup File</th> <th>Last Modified ↓</th> <th>File Size</th> <th>Actions</th> </tr> </tfoot> <tbody> <?php // List s3 backups $results = $s3->getBucket($aws_bucket); if (empty($results)) { echo '<tr><td colspan="5" style="text-align: center;"><i>You have not created any S3 backups yet.</i></td></tr>'; } else { $file_count = 0; foreach ((array) $results as $rekey => $reval) { // check if file is backup $pos = strpos($rekey, $aws_directory . 'backup-'); if ($pos !== FALSE) { $file_count++; ?> <tr class="entry-row alternate"> <th scope="row" class="check-column"><input type="checkbox" name="files[]" class="entries" value="<?php echo $rekey; ?> " /></th>
/**
 * Fetch the full object listing of the staging bucket.
 *
 * NOTE(review): the access key, secret and bucket name are hard-coded
 * here; consider moving them to configuration.
 *
 * @return array|false Listing as returned by S3::getBucket().
 */
public function get_contents() {
    $client = new S3('csub001050', 'studiomaiocchi', true, 'seewebstorage.it');
    return $client->getBucket('studiomaiocchistaging');
}
$website = ''; $dimensions = ''; $redownloadImages = false; $overwriteThumbs = true; $recalculateThumbs = false; $compareThumbsBySize = false; //Prepare parameters $dimensions = parseDimensions($dimensions, $website); //Prepare values $imgPrefix = $website . '/full/'; $imgPrefixLen = strlen($imgPrefix); $imgPathLen = $imgPrefixLen + 4; //Initialise S3 object $s3 = new S3($accessKey, $secretKey, true); //Get list of images from the bucket $images = $s3->getBucket($bucket, $imgPrefix); $imgNumber = 1; $totalImgs = count($images); echo 'Number of images: ', $totalImgs, "\n"; if (!$totalImgs) { exit('No images in the bucket'); } $_reuploadThumbs = $overwriteThumbs || $compareThumbsBySize; $images = array_keys($images); foreach ($images as $image) { echo $image, ' (', $imgNumber++, ' of ', $totalImgs, ')', "\n"; if (!file_exists($image) || $redownloadImages) { //Create directories for the image createDirectory(substr($image, 0, $imgPathLen)); for ($i = 1; $i <= MAX_TRIES; $i++) { if ($s3->getObject($bucket, $image, $image) !== false) {
/**
 * Deletes objects in the S3 temp folder whose creation day (formatted
 * as e.g. "March_01st_2024") equals the day 24 hours ago. Any error is
 * silently swallowed.
 *
 * @return bool (documented, but no path currently returns a value —
 *              see the NOTE on the die below)
 */
function removeTempFilesFromS3() {
    try {
        $s3 = new S3(awsAccessKey, awsSecretKey);
        // Get the contents of our bucket
        $contents = $s3->getBucket(BUCKET_NAME, DIR_CASE_FILES_S3_FOLDER_TEMP);
        // Calendar-day string for "now minus 24h".
        $date_before_24hr = date('F_dS_Y', strtotime('-24 hours', time()));
        if (isset($contents) && $contents != '') {
            foreach ($contents as $file) {
                $fname = $file['name'];
                $content1s = $s3->getObjectInfo(BUCKET_NAME, $fname);
                if ($content1s) {
                    $file_created_date = date('F_dS_Y', $content1s['time']);
                    // Only files created exactly on that calendar day are removed.
                    if ($date_before_24hr == $file_created_date) {
                        $s3->deleteObject(BUCKET_NAME, $fname);
                    }
                }
            }
            // NOTE(review): this die terminates the whole request after any
            // non-empty listing, so the @return bool contract is never
            // honored — looks like a debugging leftover; confirm before
            // removing.
            die;
        }
    } catch (Exception $e) {
        //print $e->getMessage();exit;
    }
}
/**
 * Thin instance wrapper around S3::getBucket().
 *
 * Generalized: the optional $delimiter and $returnCommonPrefixes
 * arguments of the underlying static call are now passed through.
 * Existing callers are unaffected by the new trailing defaults.
 *
 * @param string      $bucket               Bucket name
 * @param string|null $prefix               Key prefix filter
 * @param string|null $marker               Start listing after this key
 * @param int|null    $maxKeys              Max keys to return (null = all)
 * @param string|null $delimiter            Delimiter for grouping keys
 * @param bool        $returnCommonPrefixes Whether to return CommonPrefixes
 * @return array|false
 */
public function getBucket($bucket, $prefix = null, $marker = null, $maxKeys = null, $delimiter = null, $returnCommonPrefixes = false) {
    return S3::getBucket($bucket, $prefix, $marker, $maxKeys, $delimiter, $returnCommonPrefixes);
}
/**
 * Finds the latest backup archive file in the Amazon S3 bucket, saves
 * it into the local backup directory and returns the local path.
 *
 * @return string|null Local path of the downloaded archive, or NULL when
 *                     the bucket holds no matching archives.
 * @access private
 * @see executeRestore()
 */
private function getBackupFileFromS3() {
    require_once 'S3.php';

    // Fresh S3 client from the stored credentials (no SSL).
    $s3 = new S3($this->s3Config['accessKey'], $this->s3Config['secretKey'], false);
    $archives = $s3->getBucket($this->s3Config['bucketName'], $this->databaseVars['database_name'] . "_backup");

    if (empty($archives)) {
        return NULL;
    }

    // The listing is key-ordered; the last key is the newest archive.
    $keys = array_keys($archives);
    $keyname = end($keys);

    // Save object to a file.
    $localPath = $this->backupDir . '/S3_' . $keyname;
    $s3->getObject($this->s3Config['bucketName'], $keyname, $localPath);

    return $localPath;
}
/**
 * Push a finished backup archive to the configured remote target.
 *
 * Supported $config['backup_type'] values: 'amazon' (S3), 'ftp'
 * (FTP/FTPS) and 'tarsnap'. The file is expected to already exist at
 * SITE_PATH/data/backups/$filename.
 *
 * @param string $filename Backup file name (no path).
 */
public function upload_remote_backup($filename) {
    // Init
    global $config;
    $file_path = SITE_PATH . '/data/backups/' . $filename;
    // Amazon S3
    if ($config['backup_type'] == 'amazon') {
        // Set variables
        $bucket_name = 'synala';
        // Init client
        include_once SITE_PATH . '/data/lib/S3.php';
        $s3_client = new S3($config['backup_amazon_access_key'], $config['backup_amazon_secret_key']);
        // Create subject, if needed
        $buckets = $s3_client->listBuckets();
        if (!in_array($bucket_name, $buckets)) {
            $s3_client->putBucket($bucket_name, S3::ACL_PRIVATE);
        }
        // NOTE(review): $s3_files is never used below — this listing looks
        // like dead code (one extra S3 round-trip per backup); confirm.
        $s3_files_tmp = $s3_client->getBucket($bucket_name);
        $s3_files = array_keys($s3_files_tmp);
        // Upload backup file
        $s3_client->putObjectFile($file_path, $bucket_name, $filename);
        // Remote FTP
    } elseif ($config['backup_type'] == 'ftp') {
        // NOTE(review): ftp_ssl_connect() on port 22 targets the SSH port;
        // FTPS normally uses 21/990 — confirm the server setup.
        if ($config['backup_ftp_type'] == 'ftps') {
            $ftp_client = ftp_ssl_connect($config['backup_ftp_host'], 22, 360);
        } else {
            $ftp_client = ftp_connect($config['backup_ftp_host'], $config['backup_ftp_port']);
        }
        ftp_login($ftp_client, $config['backup_ftp_username'], $config['backup_ftp_password']);
        // Set transfer mode
        //$is_passive = $config['remote_backup_ftp_mode'] == 'passive' ? true : false;
        //ftp_pasv($ftp_client, $is_passive);
        // Upload file
        //if ($config['remote_backup_ftp_dir'] != '') { $filename = $config['remote_backup_ftp_dir'] . '/' . $filename; }
        @ftp_put($ftp_client, $filename, SITE_PATH . "/data/backups/{$filename}", FTP_BINARY);
        ftp_close($ftp_client);
        // Tarsnap
    } elseif ($config['backup_type'] == 'tarsnap') {
        system($config['backup_tarsnap_location'] . " -cf {$config['backup_tarsnap_archive']} " . SITE_PATH);
    }
    // Delete local file, if needed
    //if ($config['remote_backup_retain_local'] != 1 && is_file($file_path)) {
    //    @unlink($file_path);
    //}
}
/**
 * list the contents of a bucket. catches exceptions as
 * a lack of bucket / bucket contents and returns an empty array
 * @return array
 */
public function listBucketContents() {
    try {
        $contents = S3::getBucket($this->bucket);
        // Fix: S3::getBucket() signals failure by returning false rather
        // than (only) throwing; normalize that to the documented array.
        if (!is_array($contents)) {
            $contents = array();
        }
    } catch (Exception $e) {
        $contents = array();
    }
    return $contents;
}
/**
 * Purge S3 files that belong to no item and were deleted more than a
 * month ago, together with their storageFiles rows.
 *
 * NOTE: intentionally disabled — storage is now sharded, so the throw on
 * the first line makes everything below unreachable (kept for reference).
 *
 * @return int Number of purged files (currently never reached).
 * @throws Exception Always ("Now sharded").
 */
public static function purgeUnusedFiles() {
    throw new Exception("Now sharded");
    self::requireLibrary();
    // Get all used files and files that were last deleted more than a month ago
    $sql = "SELECT MD5(CONCAT(hash, filename, zip)) AS file FROM storageFiles\n\t\t\t\t\tJOIN storageFileItems USING (storageFileID)\n\t\t\t\tUNION\n\t\t\t\tSELECT MD5(CONCAT(hash, filename, zip)) AS file FROM storageFiles\n\t\t\t\t\tWHERE lastDeleted > NOW() - INTERVAL 1 MONTH";
    $files = Zotero_DB::columnQuery($sql);
    S3::setAuth(Z_CONFIG::$S3_ACCESS_KEY, Z_CONFIG::$S3_SECRET_KEY);
    $s3Files = S3::getBucket(Z_CONFIG::$S3_BUCKET);
    $toPurge = array();
    foreach ($s3Files as $s3File) {
        // Keys look like "<32-char hash>/[c/]<filename>"; "c/" marks a
        // compressed file.
        preg_match('/^([0-9a-g]{32})\\/(c\\/)?(.+)$/', $s3File['name'], $matches);
        if (!$matches) {
            throw new Exception("Invalid filename '" . $s3File['name'] . "'");
        }
        $zip = $matches[2] ? '1' : '0';
        // Compressed file
        $hash = md5($matches[1] . $matches[3] . $zip);
        if (!in_array($hash, $files)) {
            $toPurge[] = array('hash' => $matches[1], 'filename' => $matches[3], 'zip' => $zip);
        }
    }
    Zotero_DB::beginTransaction();
    foreach ($toPurge as $info) {
        S3::deleteObject(Z_CONFIG::$S3_BUCKET, self::getPathPrefix($info['hash'], $info['zip']) . $info['filename']);
        $sql = "DELETE FROM storageFiles WHERE hash=? AND filename=? AND zip=?";
        Zotero_DB::query($sql, array($info['hash'], $info['filename'], $info['zip']));
        // TODO: maybe check to make sure associated files haven't just been created?
    }
    Zotero_DB::commit();
    return sizeOf($toPurge);
}
/**
 * [MM2015] Builds a file list for a directory from up to 3 sources:
 * local PATH, URL and S3 (Simple Storage Server), merged with S3 taking
 * priority, then PATH, then URL. S3 access is configured through the
 * Options passed in $options.
 *
 * @mvc @model
 *
 * @param array   $options    option values (OSmedia_* keys, extract()ed)
 * @param array   $search_ext extensions to keep (null = keep all)
 * @param boolean $sanitize   strip extensions and de-duplicate entries
 * @return array ['file_list' => [file => base_path],
 *                'monitor_server' => per-source status ok/error/unset]
 */
protected static function OSmedia_dir_list($options, $search_ext = null, $sanitize = true) {
    $s3_list = array();
    $path_list = array();
    $url_list = array();
    $result = array();
    $result['file_list'] = array();
    $result['monitor_server'] = array();
    // NOTE(review): extract() injects the OSmedia_* option keys as locals.
    extract($options);
    $dir = $OSmedia_path;
    $url = $OSmedia_url;
    /* if ( !empty($option['basic']['OSmedia_url']) && file_exists($url) ){ $url = $option['basic']['OSmedia_url']; if ( !$fp = fopen($url, 'r') ) trigger_error("Unable to open URL ($url)", E_USER_ERROR); // echo '<pre>'; print_r( stream_get_meta_data($fp) ); echo '</pre>'; fclose($fp); } */
    $s3_url = $OSmedia_s3server . $OSmedia_s3bucket . '/' . $OSmedia_s3dir;
    // $result = array();
    /////////////////////////////////// S3 -> cURL
    if (isset($OSmedia_s3enable) && $OSmedia_s3enable) {
        $s3 = new S3($OSmedia_s3access, $OSmedia_s3secret);
        $s3_array = $s3->getBucket($OSmedia_s3bucket);
        if ($s3_array) {
            $monitor_server['s3'] = 'ok';
        } elseif (!$s3_url) {
            $monitor_server['s3'] = 'unset';
        } else {
            $monitor_server['s3'] = 'error';
        }
        if ($s3_array) {
            // array in the form [file]=>path_file
            foreach ($s3_array as $kk => $vv) {
                $file = str_replace($OSmedia_s3dir, '', $vv['name']); // strip the s3 directory from the file name
                // $file = preg_replace( '/\.[^.]+$/', '', $file ); // SANITIZE
                if ($file) {
                    $s3_list[$file] = $s3_url;
                }
            }
        }
    } else {
        $monitor_server['s3'] = 'unset';
    }
    ///////////////////////////////////// PATH videofile -> scandir
    if ($dir) {
        if (file_exists($dir)) {
            $cdir_path = scandir($dir);
            // array in the form [file]=>path_file
            if (is_array($cdir_path)) {
                foreach ($cdir_path as $key => $value) {
                    // $value = preg_replace( '/\.[^.]+$/', '', $value ); //
                    if ($value != '.' && $value != '..') {
                        $path_list[$value] = ''; // $dir /*. $value*/;
                    }
                }
                $monitor_server['path'] = 'ok';
            }
        } else {
            $monitor_server['path'] = 'error';
        }
    } else {
        $monitor_server['path'] = 'unset';
    }
    ///////////////////////////// URL (media server) -> fopen
    if ($url) {
        // input URL
        if (OSmedia_isValidUrl($url)) {
            $res = array();
            $content = '';
            $fp_load = fopen($url, "rb");
            if ($fp_load) {
                while (!feof($fp_load)) {
                    $content .= fgets($fp_load, 8192);
                }
            }
            fclose($fp_load);
            // Scrape href targets out of the directory-index HTML.
            preg_match_all("/(a href\\=\")([^\\?\"]*)(\")/i", $content, $res);
            // array in the form [file]=>path_file without ext // TO VERIFY !!!!!
            foreach ($res[2] as $val) {
                // $value = preg_replace( '/\.[^.]+$/', '', $value );
                if ($val != '.' && $val != '..') {
                    $url_list[$val] = $url;
                }
            }
            $monitor_server['url'] = 'ok';
        } else {
            $monitor_server['url'] = 'error';
        }
    } else {
        $monitor_server['url'] = 'unset';
    }
    // merge the 3 results
    $file_list = $s3_list;
    if (!empty($path_list)) {
        foreach ($path_list as $k2 => $v2) {
            $file_list[$k2] = $v2;
        }
    }
    if (!empty($url_list)) {
        foreach ($url_list as $k3 => $v3) {
            $file_list[$k3] = $v3;
        }
    }
    // SANITIZE
    // filter the results dropping files with unwanted extensions, then the
    // extensions themselves, then duplicates: for duplicate keys the first
    // insertion wins, i.e. in order S3 -> PATH -> URL
    if (!empty($file_list) && $sanitize) {
        foreach ($file_list as $key => $value) {
            // if (!in_array($key, array(".",".."))) {
            // if (is_dir($dir . DIRECTORY_SEPARATOR . $key)) {
            // $result[$key] = self::OSmedia_dir_list ( $dir, $search_ext ); // Recursion
            // }else{
            if (!empty($search_ext) && is_array($search_ext)) {
                foreach ($search_ext as $ext) {
                    if (strpos($key, '.' . $ext) !== false) {
                        // strip the extension
                        $key = preg_replace('/\\.[^.]+$/', '', $key);
                        // since files differing only by extension share a key,
                        // they are inserted only once into $result
                        $result['file_list'][$key] = $value;
                    }
                }
            }
            // }
            // }
        }
        // if ($sanitize) $result = array_unique ($result); //
    }
    $result['monitor_server'] = $monitor_server;
    // echo 'ARRAY_S3: <pre>'; print_r($monitor_server); echo '</pre>'; // MONITOR
    // echo 'ARRAY_PATH: <pre>'; print_r($path_list); echo '</pre>'; // MONITOR
    // echo 'ARRAY_URL: <pre>'; print_r($url_list); echo '</pre>'; // MONITOR
    // echo 'RESULT: <pre>'; print_r($result); echo '</pre>'; // MONITOR
    return $result;
}
// Pointless without your keys! if (awsAccessKey == 'change-this' || awsSecretKey == 'change-this') { exit("\nERROR: AWS access information required\n\nPlease edit the following lines in this file:\n\n" . "define('awsAccessKey', 'change-me');\ndefine('awsSecretKey', 'change-me');\n\n"); } // Instantiate the class $s3 = new S3(awsAccessKey, awsSecretKey); // List your buckets: echo "S3::listBuckets(): " . print_r($s3->listBuckets(), 1) . "\n"; // Create a bucket with public read access if ($s3->putBucket($bucketName, S3::ACL_PUBLIC_READ)) { echo "Created bucket {$bucketName}" . PHP_EOL; // Put our file (also with public read access) if ($s3->putObjectFile($uploadFile, $bucketName, baseName($uploadFile), S3::ACL_PUBLIC_READ)) { echo "S3::putObjectFile(): File copied to {$bucketName}/" . baseName($uploadFile) . PHP_EOL; // Get the contents of our bucket $contents = $s3->getBucket($bucketName); echo "S3::getBucket(): Files in bucket {$bucketName}: " . print_r($contents, 1); // Get object info $info = $s3->getObjectInfo($bucketName, baseName($uploadFile)); echo "S3::getObjectInfo(): Info for {$bucketName}/" . baseName($uploadFile) . ': ' . print_r($info, 1); // You can also fetch the object into memory // var_dump("S3::getObject() to memory", $s3->getObject($bucketName, baseName($uploadFile))); // Or save it into a file (write stream) // var_dump("S3::getObject() to savefile.txt", $s3->getObject($bucketName, baseName($uploadFile), 'savefile.txt')); // Or write it to a resource (write stream) // var_dump("S3::getObject() to resource", $s3->getObject($bucketName, baseName($uploadFile), fopen('savefile.txt', 'wb'))); // Get the access control policy for a bucket: // $acp = $s3->getAccessControlPolicy($bucketName); // echo "S3::getAccessControlPolicy(): {$bucketName}: ".print_r($acp, 1); // Update an access control policy ($acp should be the same as the data returned by S3::getAccessControlPolicy()) // $s3->setAccessControlPolicy($bucketName, '', $acp);
if ($res === false) { echo "<p>Query failed: ({$query}) : " . mysql_error() . "</p>"; } echo "Added imas_log table<br/>"; } if ($last < 67) { $query = 'ALTER TABLE `imas_users` ADD `hasuserimg` TINYINT( 1 ) UNSIGNED NOT NULL DEFAULT \'0\''; $res = mysql_query($query); if ($res === false) { echo "<p>Query failed: ({$query}) : " . mysql_error() . "</p>"; } $hasimg = array(); if (isset($GLOBALS['CFG']['GEN']['AWSforcoursefiles']) && $GLOBALS['CFG']['GEN']['AWSforcoursefiles'] == true) { require "includes/filehandler.php"; $s3 = new S3($GLOBALS['AWSkey'], $GLOBALS['AWSsecret']); $arr = $s3->getBucket($GLOBALS['AWSbucket'], "cfiles/"); if ($arr != false) { foreach ($arr as $k => $v) { if (substr(basename($arr[$k]['name']), 0, 10) == 'userimg_sm') { $hasimg[] = substr(basename($arr[$k]['name']), 10, -4); } } } } else { $curdir = rtrim(dirname(__FILE__), '/\\'); $galleryPath = "{$curdir}/course/files"; if ($handle = @opendir($galleryPath)) { while (false !== ($file = readdir($handle))) { if ($file != "." && $file != ".." && !is_dir($file)) { if (substr(basename($file), 0, 10) == 'userimg_sm') { $hasimg[] = substr(basename($file), 10, -4);
<!-- 預覽縮圖 div --> <div class="image-proview" id="image-proview-layer"> <img id="reg_pic" src="upload_photo.PNG"/> </div> <!-- end 預覽縮圖 div --> <!-- 上傳表單,選擇檔案後 onchange 會改變預覽圖案 --> <form action="" method="post" enctype="multipart/form-data" name="form1" id="form1"> <input class="but" id="theFile" name="theFile" type="file" onchange="ImagesProview(this)" /> <input name="Submit" type="submit" value="上傳"> </form> <!-- end 上傳表單 --> <br> <?php /* 用 foreach 把 S3 所有的圖讀出來顯示,若為資料夾就掉過 */ // Get the contents of our bucket $contents = $s3->getBucket('nccus3'); foreach ($contents as $file) { $fname = $file['name']; $value = $redis->get($fname); $num = strrpos($fname, "/"); // if $file is a directory path if ($num === false) { $furl = "http://nccus3.s3.amazonaws.com/" . $fname; echo "<a href=\"image_cache.php?fn={$fname}\" alt=\"{$fname}\"><img id=\"thumb\" src=\"{$furl}\" /></a>"; } } /* end 讀圖 */ ?> <!-- javascript 縮圖程式 --> <script type="text/javascript"> var isIE=function() {
<?php
###########################################################
# Reinit
#
# Empties the configured S3 bucket, logging each deletion.
###########################################################
require_once SCRIPT_DIR_PATH . '/S3.php';
log_message("[32mRemoving all files in the S3 bucket...[37m");
$s3 = new S3(S3_ACCESS_KEY, S3_SECRET_KEY);
$contents = $s3->getBucket(S3_BUCKET_NAME);
foreach ($contents as $file) {
    $msg = '';
    if ($s3->deleteObject(S3_BUCKET_NAME, $file['name'])) {
        $msg = '\\033[32m' . $file['name'] . ' was successfully deleted.\\033[37m';
    } else {
        // NOTE(review): $errorHappened is set here but never read in this
        // script chunk — presumably checked later. Also, the '\033'
        // sequences sit inside single quotes, so they are logged literally
        // rather than as ANSI color codes — confirm intent.
        $errorHappened = TRUE;
        $msg = '\\033[32m' . $file['name'] . ' was NOT deleted.\\033[37m';
    }
    log_message($msg);
}
log_message("[32mDone.[37m");
<?php
// Emits an XML listing of every object in the bucket, each with a
// short-lived (60s) signed download URL.
$contentEncoding = "UTF-8";
//include the S3 stuff
require_once 'myS3.php';
//instantiate the class
$s3 = new S3();
//create a new bucket (this will be ignored if bucket is already there)
$myBucketName = "robs-1st-bucket";
// Get the contents of our bucket
$bucket_contents = $s3->getBucket($myBucketName);
// Disable any caching in the browser.
header("Cache-Control: no-cache, no-store, max-age=0, must-revalidate");
// Fix: the date was sent as a bare (invalid) header line; it is an
// expiry date in the past and needs the "Expires:" header name.
header("Expires: Sat, 1 Jan 2000 00:00:00 GMT");
header("Pragma: no-cache");
header("Content-encoding: " . $contentEncoding);
header("Content-type: text/xml");
echo "<?xml version='1.0' encoding='" . $contentEncoding . "'?>";
echo "<images>";
foreach ($bucket_contents as $file) {
    $fname = $file['name'];
    $furl = S3::getAuthenticatedURL($myBucketName, $fname, 60, false, true);
    //$furl = "http://$myBucketName.s3.amazonaws.com/$fname";
    //output a link to each file
    echo "<image filename='{$fname}' url='{$furl}'/>";
}
echo "</images>";
/**
 * Handle an HTTP POST that deletes a database record (and, when the
 * model stores file metadata in an AWS-hosted collection, the associated
 * S3 object as well).
 *
 * Flow: verify the submitted etag resolves to a metadata entry, check
 * the requester owns the entry (or has superuser rights), delete the S3
 * object when the collection lives on AWS, then delete the record.
 * Errors abort via trigger_error(E_USER_ERROR).
 *
 * @param object $req request wrapper; reads $req->params['entry']['etag'],
 *                    $req->id and $req->resource.
 */
function delete_from_post(&$req) {
    trigger_before('delete_from_post', $this, $req);
    global $db;
    if ($this->has_metadata && !isset($req->params['entry']['etag'])) {
        trigger_error("Sorry, the etag was not submitted with the database entry", E_USER_ERROR);
    }
    $fields = $this->fields_from_request($req);
    // Resolve the record id either from the metadata entry or the request.
    if ($this->has_metadata) {
        $atomentry = $db->models['entries']->find_by('etag', $req->params['entry']['etag']);
        $recid = $atomentry->attributes['record_id'];
    } else {
        $recid = $req->id;
    }
    $rec = $this->find($recid);
    if ($this->has_metadata) {
        // Ownership check: the poster must own the entry or be superuser.
        $Person =& $db->model('Person');
        $Group =& $db->model('Group');
        $p = $Person->find(get_person_id());
        if (!($p->id == $atomentry->attributes['person_id']) && !$this->can_superuser($req->resource)) {
            trigger_error("Sorry, your id does not match the owner of the database entry", E_USER_ERROR);
        }
    }
    $coll = environment('collection_cache');
    if ($this->has_metadata && isset($coll[$req->resource]) && $coll[$req->resource]['location'] == 'aws') {
        // Reconstruct the S3 key: <prefix><table><pk>.<ext>
        $ext = extension_for($atomentry->content_type);
        $pkname = $rec->primary_key;
        global $prefix;
        $aws_file = $prefix . $rec->table . $rec->{$pkname} . "." . $ext;
        lib_include('S3');
        $s3 = new S3(environment('awsAccessKey'), environment('awsSecretKey'));
        if (!$s3) {
            trigger_error('Sorry, there was a problem connecting to Amazon Web Services', E_USER_ERROR);
        }
        // Only attempt the delete when the bucket and the object are reachable.
        if ($s3->getBucket(environment('awsBucket')) && $s3->getObject(environment('awsBucket'), urlencode($aws_file))) {
            $result = $s3->deleteObject(environment('awsBucket'), urlencode($aws_file));
            if (!$result) {
                trigger_error('Sorry, there was a problem deleting the file from Amazon Web Services', E_USER_ERROR);
            }
        }
    }
    $result = $db->delete_record($rec);
    trigger_after('delete_from_post', $this, $req);
}
/**
 * Fetch the listing of the configured S3 video bucket, optionally
 * restricted to the configured key prefix.
 *
 * Fix: the function now returns FALSE on an empty or failed listing on
 * every path; previously it fell off the end and implicitly returned
 * null (both are falsy, so truthiness-checking callers are unaffected).
 *
 * @param array|null $pluginSettings Plugin settings (amazon_* keys).
 * @return array|false Non-empty listing, or FALSE when unavailable.
 */
function s3_video_get_all_existing_video($pluginSettings = NULL) {
    if (!$pluginSettings) {
        return FALSE;
    }
    $s3Access = new S3($pluginSettings['amazon_access_key'], $pluginSettings['amazon_secret_access_key'], NULL, $pluginSettings['amazon_url']);
    if (!empty($pluginSettings['amazon_prefix'])) {
        $bucketContents = $s3Access->getBucket($pluginSettings['amazon_video_bucket'], $pluginSettings['amazon_prefix']);
    } else {
        $bucketContents = $s3Access->getBucket($pluginSettings['amazon_video_bucket']);
    }
    if (is_array($bucketContents) && !empty($bucketContents)) {
        return $bucketContents;
    }
    return FALSE;
}