Get a list of buckets
public static listBuckets ( boolean $detailed = false ) : array | false
$detailed | boolean | Returns detailed bucket list when true |
return | array | false | Bucket list on success; false on failure |
/**
 * Creates the configured S3 bucket if it does not already exist.
 *
 * Verifies credentials by listing buckets first, then creates the bucket
 * with the configured (or default private/US) ACL and location.
 *
 * @param string $container_id Output parameter kept for interface compatibility.
 * @param string $error        Populated with a human-readable message on failure.
 * @return boolean true on success, false on failure (with $error set).
 */
function create_container(&$container_id, &$error) {
    if (!$this->_init($error)) {
        return false;
    }

    $this->_set_error_handler();

    // listBuckets() returns false on failure; @ suppresses SDK warnings
    // because errors are collected via the custom error handler instead.
    $buckets = @$this->_s3->listBuckets();

    if ($buckets === false) {
        $error = sprintf('Unable to list buckets (%s).', $this->_get_last_error());
        $this->_restore_error_handler();
        return false;
    }

    // Strict comparison: bucket names are strings, avoid loose-typing surprises.
    if (in_array($this->_config['bucket'], (array) $buckets, true)) {
        $error = sprintf('Bucket already exists: %s.', $this->_config['bucket']);
        $this->_restore_error_handler();
        return false;
    }

    // Fall back to a private ACL / US location when not configured.
    if (empty($this->_config['bucket_acl'])) {
        $this->_config['bucket_acl'] = S3::ACL_PRIVATE;
    }

    if (!isset($this->_config['bucket_location'])) {
        $this->_config['bucket_location'] = S3::LOCATION_US;
    }

    if (!@$this->_s3->putBucket($this->_config['bucket'], $this->_config['bucket_acl'], $this->_config['bucket_location'])) {
        $error = sprintf('Unable to create bucket: %s (%s).', $this->_config['bucket'], $this->_get_last_error());
        $this->_restore_error_handler();
        return false;
    }

    $this->_restore_error_handler();
    return true;
}
/**
 * Get bucket list with credentials.
 *
 * @param string $keyId  AWS access key id.
 * @param string $secret AWS secret access key.
 *
 * @throws Exception When the credentials are rejected by the target host.
 * @return array List of ['bucket', 'location', 'url_prefix'] entries.
 */
public static function getBucketList($keyId, $secret) {
    $s3 = new \S3($keyId, $secret);

    // listBuckets() returns false on failure. The previous empty($buckets)
    // check also rejected valid accounts that simply own no buckets yet,
    // so test strictly against false.
    $buckets = @$s3->listBuckets();

    if ($buckets === false) {
        throw new Exception(Craft::t("Credentials rejected by target host."));
    }

    $bucketList = array();

    foreach ($buckets as $bucket) {
        $location = $s3->getBucketLocation($bucket);

        $bucketList[] = array(
            'bucket' => $bucket,
            'location' => $location,
            'url_prefix' => 'http://' . static::getEndpointByLocation($location) . '/' . $bucket . '/',
        );
    }

    return $bucketList;
}
/**
 * Retrieves a list of buckets
 *
 * Authenticates with the stored key/secret, then returns one stdClass per
 * bucket carrying its name, raw location and display location title.
 *
 * @since 4.0
 * @access public
 * @return array|false Array of bucket objects, or false when listing fails.
 */
public function getBuckets() {
    S3::setAuth($this->key, $this->secret);

    $data = S3::listBuckets();

    if (!$data) {
        return false;
    }

    $result = array();

    foreach ($data as $name) {
        $entry = new stdClass();
        $entry->title = $name;

        // Resolve the region for this bucket and its display title.
        $region = S3::getBucketLocation($name);
        $entry->locationTitle = $this->getLocationTitle($region);
        $entry->location = $region;

        $result[] = $entry;
    }

    return $result;
}
/**
 * Uploads a completed database backup file to the configured Amazon S3 bucket.
 *
 * Runs only when the S3 destination is enabled and bucket/key/secret options
 * are all present. Appends a status message to $args[2].
 *
 * @param array $args [0] => backup label, [1] => backup file path,
 *                    [2] => status message (appended to in place).
 * @return void
 */
public static function wp_db_backup_completed(&$args) {
    $destination_s3 = get_option('wp_db_backup_destination_s3');

    if (isset($destination_s3) && $destination_s3 == 1 && get_option('wpdb_dest_amazon_s3_bucket') && get_option('wpdb_dest_amazon_s3_bucket_key') && get_option('wpdb_dest_amazon_s3_bucket_secret')) {
        try {
            if (!class_exists('S3')) {
                require_once 'S3.php';
            }

            // AWS access info
            if (!defined('awsAccessKey')) {
                define('awsAccessKey', get_option('wpdb_dest_amazon_s3_bucket_key'));
            }
            if (!defined('awsSecretKey')) {
                define('awsSecretKey', get_option('wpdb_dest_amazon_s3_bucket_secret'));
            }

            // Check for CURL
            if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) {
                error_log("ERROR: CURL extension not loaded");
            }

            $s3 = new S3(awsAccessKey, awsSecretKey);
            $bucketName = get_option('wpdb_dest_amazon_s3_bucket');

            // listBuckets() returns false on failure; guard before in_array()
            // (in_array() on a non-array is a TypeError on PHP 8, which would
            // escape the catch (Exception) block below as a fatal error).
            $result = $s3->listBuckets();

            if (get_option('wpdb_dest_amazon_s3_bucket')) {
                if (is_array($result) && in_array(get_option('wpdb_dest_amazon_s3_bucket'), $result, true)) {
                    if ($s3->putObjectFile($args[1], $bucketName, baseName($args[1]), S3::ACL_PUBLIC_READ)) {
                        error_log("S3::{$args['0']} upload in bucket {$bucketName}");
                        $args[2] = $args[2] . '<br> Upload Database Backup on s3 bucket ' . $bucketName;
                    } else {
                        error_log("S3::Failed to upload {$args['0']}");
                        $args[2] = $args[2] . '<br>Failed to upload Database Backup on s3 bucket ' . $bucketName;
                    }
                } else {
                    error_log("Invalid bucket name or AWS details");
                    $args[2] = $args[2] . '<br>Invalid bucket name or AWS details';
                }
            }
        } catch (Exception $e) {
            // Credential/connection problems raised by the SDK surface here.
            // echo ($e->getMessage());
            error_log("Invalid AWS details");
        }
    }
}
/**
 * Creates bucket
 *
 * Verifies the configured bucket does not already exist, then creates it
 * with a public-read ACL.
 *
 * @param string $error Populated with an error message on failure.
 * @return boolean true on success, false otherwise.
 */
function create_bucket(&$error) {
    if (!$this->_init($error)) {
        return false;
    }

    // listBuckets() returns false on failure. The previous !$buckets check
    // also failed for valid accounts that simply own no buckets yet, so
    // test strictly against false (consistent with create_container()).
    $buckets = @$this->_s3->listBuckets();

    if ($buckets === false) {
        $error = 'Unable to list buckets (check your credentials).';
        return false;
    }

    if (in_array($this->_config['bucket'], (array) $buckets, true)) {
        $error = sprintf('Bucket already exists: %s.', $this->_config['bucket']);
        return false;
    }

    if (!@$this->_s3->putBucket($this->_config['bucket'], S3::ACL_PUBLIC_READ)) {
        $error = sprintf('Unable to create bucket: %s.', $this->_config['bucket']);
        return false;
    }

    return true;
}
/**
 * Saves the image locally and uploads the saved copy to Amazon S3.
 *
 * @param string   $source_filename Local path the image is saved to and read from.
 * @param string   $dest_filename   Object key to store in the bucket.
 * @param int      $image_type      One of the IMAGETYPE_* constants.
 * @param int      $compression     JPEG quality (0-100).
 * @param int|null $permissions     Optional chmod mode for the local file.
 * @return boolean true when the upload succeeded, false otherwise.
 */
function upload_to_amazon($source_filename, $dest_filename, $image_type = IMAGETYPE_JPEG, $compression = 75, $permissions = null) {
    // Create local instance of image
    $this->save($source_filename, $image_type, $compression, $permissions);

    // Begin s3 sequence
    // NOTE(review): credentials are hard-coded placeholders; move to config.
    $s3 = new S3('AMAZON_ACCESS_TOKEN', 'AMAZON_SECRET_TOKEN');

    // Name each bucket off the domain
    $bucket = 'screenbin';

    // Make sure the bucket is there. listBuckets() can return false on
    // failure; passing false to in_array() is a TypeError on PHP 8, so
    // guard with is_array() and attempt creation when the check fails.
    $buckets = $s3->listBuckets();
    if (!is_array($buckets) || !in_array($bucket, $buckets, true)) {
        $s3->putBucket($bucket, S3::ACL_PUBLIC_READ);
    }

    // Upload to s3
    if ($s3->putObjectFile($source_filename, $bucket, $dest_filename, S3::ACL_PUBLIC_READ)) {
        // Delete local version of the file
        return true;
    } else {
        return false;
    }
}
/**
 * Get bucket list with credentials.
 *
 * @param $keyId
 * @param $secret
 *
 * @throws Exception
 * @return array
 */
public static function getBucketList($keyId, $secret) {
    $s3 = new \S3($keyId, $secret);
    $s3->setExceptions(true);

    try {
        $buckets = $s3->listBuckets();
    } catch (\Exception $exception) {
        // Re-throw a proper Craft Exception
        throw new Exception($exception->getMessage());
    }

    $bucketList = array();

    foreach ($buckets as $bucket) {
        try {
            $location = $s3->getBucketLocation($bucket);
        } catch (\Exception $exception) {
            // Skip buckets whose location cannot be resolved.
            continue;
        }

        $bucketList[] = array(
            'bucket' => $bucket,
            'location' => $location,
            'url_prefix' => 'http://' . static::getEndpointByLocation($location) . '/' . $bucket . '/',
        );
    }

    return $bucketList;
}
/**
 * Uploads the site's CSS/JS/image assets to the configured Amazon S3 bucket.
 *
 * Reads credentials and sync options from persisted settings, verifies the
 * bucket exists, scans the site root for matching files (skipping the admin
 * folder and files that vanished), then uploads them while reporting
 * progress via amazon_set_progress().
 *
 * @throws Exception When the configured bucket does not exist.
 * @return void
 */
public function amazon_upload() {
    $persistence = $this->getPersistence();
    $data = $persistence['Nitro']['CDNAmazon'];
    $this->amazon_set_progress('Initializing connection...', 0, 0, true);
    if (!class_exists('S3')) {
        require_once DIR_SYSTEM . 'nitro/lib/S3.php';
    }
    $s3 = new S3($data['AccessKeyID'], $data['SecretAccessKey']);
    $buckets = $s3->listBuckets();
    if (is_array($buckets) && in_array($data['Bucket'], $buckets)) {
        $this->loadConfig();
        $this->loadCore();
        // The connection is successful. We can now start to upload :)
        // clearAmazonPersistence();
        $this->amazon_set_progress('Scanning files...');
        $files = array();
        $site_root = dirname(DIR_SYSTEM) . '/';
        if (!empty($data['SyncCSS'])) {
            $files = array_merge($files, $this->list_files_with_ext($site_root, 'css'));
        }
        if (!empty($data['SyncJavaScript'])) {
            $files = array_merge($files, $this->list_files_with_ext($site_root, 'js'));
        }
        if (!empty($data['SyncImages'])) {
            $files = array_merge($files, $this->list_files_with_ext($site_root, array('png', 'jpg', 'jpeg', 'gif', 'tiff', 'bmp')));
        }
        $all_size = 0;
        $admin_folder_parts = array_filter(explode('/', DIR_APPLICATION));
        $admin_folder = array_pop($admin_folder_parts) . '/';
        $site_root = dirname(DIR_SYSTEM) . '/';
        // Total size is accumulated so upload progress can be reported.
        clearstatcache(true);
        foreach ($files as $i => $file) {
            $destination = substr($file, strlen($site_root));
            // If in admin folder, omit
            if (stripos($destination, $admin_folder) === 0) {
                unset($files[$i]);
                continue;
            }
            if (file_exists($file) && is_file($file)) {
                $all_size += filesize($file);
            } else {
                unset($files[$i]);
            }
        }
        $this->amazon_set_progress('Starting upload...', 0, $all_size);
        $this->amazon_upload_files($s3, $data['Bucket'], $files);
        $this->amazon_set_progress('Task finished!', 'success');
        // Session was closed during the long-running upload; reopen it.
        if ($this->session_closed) {
            session_start();
            $this->session_closed = false;
        }
    } else {
        throw new Exception('The specified bucket does not exist. Please create it.');
    }
}
// NOTE(review): fragment of the S3 example script. The two if-blocks opened
// near the end continue past this excerpt, so braces remain open here.
// Check if our upload file exists
if (!file_exists($uploadFile) || !is_file($uploadFile)) {
    exit("\nERROR: No such file: {$uploadFile}\n\n");
}
// Check for CURL
if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) {
    exit("\nERROR: CURL extension not loaded\n\n");
}
// Pointless without your keys!
if (awsAccessKey == 'change-this' || awsSecretKey == 'change-this') {
    exit("\nERROR: AWS access information required\n\nPlease edit the following lines in this file:\n\n" . "define('awsAccessKey', 'change-me');\ndefine('awsSecretKey', 'change-me');\n\n");
}
// Instantiate the class
$s3 = new S3(awsAccessKey, awsSecretKey);
// List your buckets:
echo "S3::listBuckets(): " . print_r($s3->listBuckets(), 1) . "\n";
// Create a bucket with public read access
if ($s3->putBucket($bucketName, S3::ACL_PUBLIC_READ)) {
    echo "Created bucket {$bucketName}" . PHP_EOL;
    // Put our file (also with public read access)
    if ($s3->putObjectFile($uploadFile, $bucketName, baseName($uploadFile), S3::ACL_PUBLIC_READ)) {
        echo "S3::putObjectFile(): File copied to {$bucketName}/" . baseName($uploadFile) . PHP_EOL;
        // Get the contents of our bucket
        $contents = $s3->getBucket($bucketName);
        echo "S3::getBucket(): Files in bucket {$bucketName}: " . print_r($contents, 1);
        // Get object info
        $info = $s3->getObjectInfo($bucketName, baseName($uploadFile));
        echo "S3::getObjectInfo(): Info for {$bucketName}/" . baseName($uploadFile) . ': ' . print_r($info, 1);
        // You can also fetch the object into memory
        // var_dump("S3::getObject() to memory", $s3->getObject($bucketName, baseName($uploadFile)));
        // Or save it into a file (write stream)
// NOTE(review): fragment — the foreach/else blocks opened below are closed
// past this excerpt, so braces remain open here.
defined('ABSPATH') or die("Direct access to the script does not allowed");
?> <?php
$awsAccessKey = get_option('s3_secure_url_aws_access_key');
$awsSecretKey = get_option('s3_secure_url_aws_secret_key');
if (!$awsAccessKey || !$awsSecretKey) {
    die('Please enter your Amazon S3 credentials on the <a href="' . admin_url('options-general.php?page=' . $this->plugin_slug) . '">options page</a>');
} else {
    require dirname(dirname(dirname(dirname(__FILE__)))) . '/includes/S3.php';
    $s3Files = array(); // Store bucket names and bucket files in array
    $AwsS3Client = new S3($awsAccessKey, $awsSecretKey);
    // Get all buckets
    $buckets = @$AwsS3Client->listBuckets();
    if (is_array($buckets)) {
        foreach ($buckets as $bucket) {
            // Get all objects in bucket
            $bucketFiles = $AwsS3Client->getBucket($bucket);
            if (is_array($bucketFiles)) {
                foreach ($bucketFiles as $filename => $fileinfo) {
                    // Get detailed info about object
                    $info = $AwsS3Client->getObjectInfo($bucket, $filename);
                    if (is_array($info)) {
                        //If object is not a folder and have a size>0 then add it to $s3Files array
                        if ($info['size'] > 0 && $info['type'] != 'binary/octet-stream') {
                            $s3Files[$bucket][] = $filename;
                        }
                    }
                }
/**
 * Lists all buckets on the Seeweb storage endpoint.
 *
 * Credentials default to the previously hard-coded account so existing
 * callers keep working; pass explicit values to reuse for other accounts.
 *
 * @param string  $access_key Storage access key.
 * @param string  $secret_key Storage secret key.
 * @param boolean $use_ssl    Whether to connect over SSL.
 * @param string  $endpoint   Storage endpoint hostname.
 * @return array|false Bucket name list, or false on failure.
 */
public function list_buckets($access_key = 'csub001050', $secret_key = 'studiomaiocchi', $use_ssl = true, $endpoint = 'seewebstorage.it') {
    // NOTE(review): default credentials are committed in source; move them
    // to configuration and rotate the secret.
    $s3 = new S3($access_key, $secret_key, $use_ssl, $endpoint);
    $res = $s3->listBuckets();
    return $res;
}
/**
 * Tests the posted S3 connection settings by attempting to list buckets.
 *
 * Reads access_key, secret_key and bucket from $_POST via importGPC().
 *
 * @return boolean true when the credentials can list buckets, false otherwise.
 */
function testConfig() {
    // Test S3 connection info
    @($access_key = DevblocksPlatform::importGPC($_POST['access_key'], 'string', ''));
    @($secret_key = DevblocksPlatform::importGPC($_POST['secret_key'], 'string', ''));
    @($bucket = DevblocksPlatform::importGPC($_POST['bucket'], 'string', ''));

    try {
        $s3 = new S3($access_key, $secret_key);
        // A falsy bucket list means the credentials did not work.
        if (@(!$s3->listBuckets())) {
            return false;
        }
    } catch (Exception $e) {
        return false;
    }

    return true;
}
/**
 * Renders the add/edit form for an S3-hosted video.
 *
 * With no $id, authenticates against S3 and offers every object in every
 * bucket as a selectable source (can be slow on large accounts); with an
 * $id, shows the stored bucket/path read-only. Also renders account-type
 * and post/page protection controls.
 *
 * @param int|false $id Video record id to edit, or false to add a new one.
 * @return void Emits HTML directly.
 */
function yss_s3_edit($id = false) {
    global $wpdb, $yss_post_assoc;
    $checked = array();
    $s3file = yss_get($id);
    // All published posts/pages, used for the post-protection multi-select.
    $sql = 'SELECT ID, post_title FROM ' . $wpdb->posts . ' WHERE post_status = "publish" AND post_type IN ("page","post") ORDER BY post_title';
    $posts = $wpdb->get_results($sql);
    if ($id) {
        // Posts already associated with this video.
        $sql = 'SELECT post_id FROM ' . $yss_post_assoc . ' WHERE s3_id = ' . $id;
        $results = $wpdb->get_results($sql);
        foreach ($results as $result) {
            $checked[] = $result->post_id;
        }
    }
    echo ym_start_box($id ? 'Edit Video' : 'Add Video');
    if (!$id) {
        require_once YSS_CLASSES_DIR . 'S3.php';
        $s3 = new S3();
        $s3->setAuth(get_option('yss_user_key'), get_option('yss_secret_key'));
    }
    echo ' <table class="widefat form-table" style="width: 100%;" cellspacing="10"> <tr valign="top"> <td> ' . __('S3 Bucket/file', "ym") . ' </td> <td>';
    if (!$id) {
        echo ' <select name="s3_file_select"> ';
        // Every file in every bucket becomes a candidate option.
        foreach ($s3->listBuckets() as $bucket) {
            $thisbucket = $s3->getBucket($bucket);
            foreach ($thisbucket as $file) {
                echo '<option ';
                if ($s3file->bucket . '/' . $s3file->resource_path == $bucket . '/' . $file['name']) {
                    echo 'selected="selected"';
                }
                echo '>' . $bucket . '/' . $file['name'] . '</option>';
            }
        }
        echo ' </select> ';
    } else {
        echo $s3file->bucket . '/' . $s3file->resource_path;
        echo '<input type="hidden" name="s3_file_select" value="' . $s3file->bucket . '/' . $s3file->resource_path . '" />';
    }
    echo ' </td> </tr> <tr valign="top"> <td> ' . __('Your Members Package Types access', "ym") . ' <div style="font-size: 10px; color: gray; margin-top: 10px;">Your videos can be protected by account type here. 
If none of the boxes are checked then it will fall back to the next section (post protection)</div> </td><td>';
    echo ' <div>';
    if ($data = get_option('ym_account_types')) {
        $types = $data->types;
        $ac_checked = array();
        // Stored as a '||'-delimited list of account type names.
        if ($selected = @$s3file->account_types) {
            $ac_checked = explode('||', $selected);
        }
        foreach ((array) $types as $type) {
            $checked_string = '';
            if (in_array($type, $ac_checked)) {
                $checked_string = 'checked="checked"';
            }
            echo ' <div class="ym_setting_list_item"> <label> <input type="checkbox" class="checkbox" name="account_types[]" value="' . $type . '" ' . $checked_string . ' /> ' . __($type) . ' </label> </div>';
        }
    } else {
        echo '<div>The system is unable to find any YM account types. Is there a problem with the install?</div>';
    }
    echo '</div>';
    echo ' </td> </tr> <tr valign="top"> <td> ' . __('Restrict access by post/page?', "ym") . ' <input type="checkbox" name="memberonly" ' . (@$s3file->members ? "checked='checked'" : '') . ' /> (Check to activate) <div style="font-size: 10px; color: gray; margin-top: 10px;">If the above account type check fails or you choose not to use it then you can optionally use this section. This will check access against a number of posts or pages and if at least one has access then the video will be shown.<br /><br />If the restrict access checkbox is unticked then YSS will assume that the video should remain unprotected (if you are not using the account type protection)</div> </td> <td> <br /><select name="link_to_post_id[]" multiple size=10 style="height: 250px; width: 450px;">';
    foreach ($posts as $row) {
        $selected = in_array($row->ID, $checked) ? 'selected="selected"' : '';
        echo '<option value="' . $row->ID . '" ' . $selected . ' >' . $row->post_title . '</option>';
    }
    echo ' </select> </td> </tr>';
    echo ' </table> <p class="submit"> <div style="float: right;"> <input type="submit" class="button" name="submit_edit_s3" value="' . __('Save', 'yss') . '" /> </div> <input type="submit" value="' . __('Back', 'yss') . '" /> <div class="ym_clear"> </div> </p> <input type="hidden" name="task" value="save" /> <input type="hidden" name="s3s_id" value="' . @$s3file->id . '" /> ';
    echo ym_end_box();
}
/**
 * Copies the finished backup archive to every configured storage server.
 *
 * Supported destination types: 'local' path, 'email' attachment, 'ftp',
 * 'awss3' (Amazon S3) and 'ssh' (SCP). Failures are recorded in
 * $this->b['error'] and logged via backup_log(); FTP failures return early,
 * other types continue with the next server. After a successful store,
 * maintenance() is run for the destination.
 *
 * NOTE(review): order-dependent logic left byte-identical; comments only.
 */
function store_backup() {
    foreach ($this->b['storage_servers'] as $s) {
        $s = $this->s[$s];
        switch ($s['type']) {
            case 'local':
                $path = backup__($s['path']) . '/' . $this->b['_dirname'];
                //ensure directory structure
                if (!is_dir($path)) {
                    mkdir($path, 0755, true);
                }
                //would rather use the native copy() here, but by defualt
                //php doesnt support files > 2GB
                //see here for a posible solution:
                //http://ca3.php.net/manual/en/function.fopen.php#37791
                $cmd[] = fpbx_which('cp');
                $cmd[] = $this->b['_tmpfile'];
                $cmd[] = $path . '/' . $this->b['_file'] . '.tgz';
                exec(implode(' ', $cmd), $error, $status);
                unset($cmd, $error);
                if ($status !== 0) {
                    $this->b['error'] = 'Error copying ' . $this->b['_tmpfile'] . ' to ' . $path . '/' . $this->b['_file'] . '.tgz: ' . $error;
                    backup_log($this->b['error']);
                }
                //run maintenance on the directory
                $this->maintenance($s['type'], $s);
                break;
            case 'email':
                //TODO: set agent to something informative, including fpbx & backup versions
                $email_options = array('useragent' => 'freepbx', 'protocol' => 'mail');
                $email = new \CI_Email();
                //Generic email
                $from = '*****@*****.**';
                //If we have sysadmin and "from is set"
                if (function_exists('sysadmin_get_storage_email')) {
                    $emails = sysadmin_get_storage_email();
                    //Check that what we got back above is a email address
                    if (!empty($emails['fromemail']) && filter_var($emails['fromemail'], FILTER_VALIDATE_EMAIL)) {
                        $from = $emails['fromemail'];
                    }
                }
                //If the user set an email in advanced settings it wins, otherwise take whatever won above.
                $from = filter_var($this->amp_conf['AMPBACKUPEMAILFROM'], FILTER_VALIDATE_EMAIL) ? $this->amp_conf['AMPBACKUPEMAILFROM'] : $from;
                $msg[] = _('Name') . ': ' . $this->b['name'];
                $msg[] = _('Created') . ': ' . date('r', $this->b['_ctime']);
                $msg[] = _('Files') . ': ' . $this->manifest['file_count'];
                $msg[] = _('Mysql Db\'s') . ': ' . $this->manifest['mysql_count'];
                $msg[] = _('astDb\'s') . ': ' . $this->manifest['astdb_count'];
                $email->from($from);
                $email->to(backup__($s['addr']));
                $email->subject($this->amp_conf['FREEPBX_SYSTEM_IDENT'] . ' ' . _('Backup') . ' ' . $this->b['name']);
                $body = implode("\n", $msg);
                // If the backup file is more than 25MB, yell
                // (base64 encoding inflates the payload by ~4/3).
                $encodedsize = ceil(filesize($this->b['_tmpfile']) / 3) * 4;
                if ($encodedsize > 26214400) {
                    $email->subject($this->amp_conf['FREEPBX_SYSTEM_IDENT'] . ' ' . _('Backup ERROR (exceeded SMTP limits)') . ' ' . $this->b['name']);
                    $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the maximum SMTP limits of 25MB. It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n");
                } elseif ($encodedsize > $s['maxsize']) {
                    $email->subject($this->amp_conf['FREEPBX_SYSTEM_IDENT'] . ' ' . _('Backup ERROR (exceeded soft limit)') . ' ' . $this->b['name']);
                    $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the soft limit set in SMTP configuration (%s bytes). It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n");
                } else {
                    $email->message($body);
                    $email->attach($this->b['_tmpfile']);
                }
                $email->send();
                unset($msg);
                break;
            case 'ftp':
                //subsitute variables if nesesary
                $s['host'] = backup__($s['host']);
                $s['port'] = backup__($s['port']);
                $s['user'] = backup__($s['user']);
                $s['password'] = backup__($s['password']);
                $s['path'] = trim(backup__($s['path']), '/');
                $fstype = isset($s['fstype']) ? $s['fstype'] : 'auto';
                $path = $s['path'] . '/' . $this->b['_dirname'];
                $connection = new Connection($s['host'], $s['user'], $s['password'], $s['port'], 90, $s['transfer'] == 'passive');
                try {
                    $connection->open();
                } catch (\Exception $e) {
                    $this->b['error'] = $e->getMessage();
                    backup_log($this->b['error']);
                    return;
                }
                $wrapper = new FTPWrapper($connection);
                $permFactory = new PermissionsFactory();
                // Pick a filesystem flavour; 'auto' probes the server type.
                switch ($fstype) {
                    case 'auto':
                        $ftptype = $wrapper->systype();
                        if (strtolower($ftptype) == "unix") {
                            $fsFactory = new FilesystemFactory($permFactory);
                        } else {
                            $fsFactory = new WindowsFilesystemFactory();
                        }
                        break;
                    case 'unix':
                        $fsFactory = new FilesystemFactory($permFactory);
                        break;
                    case 'windows':
                        $fsFactory = new WindowsFilesystemFactory();
                        break;
                }
                $manager = new FTPFilesystemManager($wrapper, $fsFactory);
                $dlVoter = new DownloaderVoter();
                $ulVoter = new UploaderVoter();
                $ulVoter->addDefaultFTPUploaders($wrapper);
                $crVoter = new CreatorVoter();
                $crVoter->addDefaultFTPCreators($wrapper, $manager);
                $deVoter = new DeleterVoter();
                $deVoter->addDefaultFTPDeleters($wrapper, $manager);
                $ftp = new FTP($manager, $dlVoter, $ulVoter, $crVoter, $deVoter);
                if (!$ftp) {
                    $this->b['error'] = _("Error creating the FTP object");
                    backup_log($this->b['error']);
                    return;
                }
                if (!$ftp->directoryExists(new Directory($path))) {
                    backup_log(sprintf(_("Creating directory '%s'"), $path));
                    try {
                        $ftp->create(new Directory($path), array(FTP::RECURSIVE => true));
                    } catch (\Exception $e) {
                        $this->b['error'] = sprintf(_("Directory '%s' did not exist and we could not create it"), $path);
                        backup_log($this->b['error']);
                        backup_log($e->getMessage());
                        return;
                    }
                }
                try {
                    backup_log(_("Saving file to remote ftp"));
                    $ftp->upload(new File($path . '/' . $this->b['_file'] . '.tgz'), $this->b['_tmpfile']);
                } catch (\Exception $e) {
                    $this->b['error'] = _("Unable to upload file to the remote server");
                    backup_log($this->b['error']);
                    backup_log($e->getMessage());
                    return;
                }
                //run maintenance on the directory
                $this->maintenance($s['type'], $path, $ftp);
                break;
            case 'awss3':
                //subsitute variables if nesesary
                $s['bucket'] = backup__($s['bucket']);
                $s['awsaccesskey'] = backup__($s['awsaccesskey']);
                $s['awssecret'] = backup__($s['awssecret']);
                $awss3 = new \S3($s['awsaccesskey'], $s['awssecret']);
                // Does this bucket already exist?
                $buckets = $awss3->listBuckets();
                if (!in_array($s['bucket'], $buckets)) {
                    // Create the bucket
                    $awss3->putBucket($s['bucket'], \S3::ACL_PUBLIC_READ);
                }
                //copy file
                if ($awss3->putObjectFile($this->b['_tmpfile'], $s['bucket'], $this->b['name'] . "/" . $this->b['_file'] . '.tgz', \S3::ACL_PUBLIC_READ)) {
                    dbug('S3 successfully uploaded your backup file.');
                } else {
                    dbug('S3 failed to accept your backup file');
                }
                //run maintenance on the directory
                $this->maintenance($s['type'], $s, $awss3);
                break;
            case 'ssh':
                //subsitute variables if nesesary
                $s['path'] = backup__($s['path']);
                $s['user'] = backup__($s['user']);
                $s['host'] = backup__($s['host']);
                $destdir = $s['path'] . '/' . $this->b['_dirname'];
                //ensure directory structure
                $cmd = fpbx_which('ssh') . ' -o StrictHostKeyChecking=no -i ';
                $cmd .= $s['key'] . " -l " . $s['user'] . ' ' . $s['host'] . ' -p ' . $s['port'];
                $cmd .= " 'mkdir -p {$destdir}'";
                exec($cmd, $output, $ret);
                if ($ret !== 0) {
                    backup_log("SSH Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}");
                }
                $output = null;
                //put file
                // Note that SCP (*unlike SSH*) needs IPv6 addresses in ['s. Consistancy is awesome.
                if (filter_var($s['host'], \FILTER_VALIDATE_IP, \FILTER_FLAG_IPV6)) {
                    $scphost = "[" . $s['host'] . "]";
                } else {
                    $scphost = $s['host'];
                }
                $cmd = fpbx_which('scp') . ' -o StrictHostKeyChecking=no -i ' . $s['key'] . ' -P ' . $s['port'];
                $cmd .= " " . $this->b['_tmpfile'] . " " . $s['user'] . "@{$scphost}:{$destdir}";
                exec($cmd, $output, $ret);
                if ($ret !== 0) {
                    backup_log("SCP Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}");
                }
                //run maintenance on the directory
                $this->maintenance($s['type'], $s);
                break;
        }
    }
}
/**
 * Uploads a local backup file to the configured remote destination.
 *
 * Destination is chosen by $config['backup_type']: 'amazon' (S3), 'ftp'
 * (FTP/FTPS) or 'tarsnap'.
 *
 * @param string $filename Backup file name inside SITE_PATH/data/backups/.
 * @return void
 */
public function upload_remote_backup($filename) {
    // Init
    global $config;
    $file_path = SITE_PATH . '/data/backups/' . $filename;

    // Amazon S3
    if ($config['backup_type'] == 'amazon') {

        // Set variables
        $bucket_name = 'synala';

        // Init client
        include_once SITE_PATH . '/data/lib/S3.php';
        $s3_client = new S3($config['backup_amazon_access_key'], $config['backup_amazon_secret_key']);

        // Create bucket, if needed. listBuckets() returns false on failure;
        // on PHP 8 passing false to in_array() is a fatal TypeError, so
        // guard with is_array() first.
        $buckets = $s3_client->listBuckets();
        if (!is_array($buckets) || !in_array($bucket_name, $buckets, true)) {
            $s3_client->putBucket($bucket_name, S3::ACL_PRIVATE);
        }

        // NOTE(review): this listing is unused; kept for parity with the
        // original behaviour, now guarded against a false return.
        $s3_files_tmp = $s3_client->getBucket($bucket_name);
        $s3_files = is_array($s3_files_tmp) ? array_keys($s3_files_tmp) : array();

        // Upload backup file
        $s3_client->putObjectFile($file_path, $bucket_name, $filename);

    // Remote FTP
    } elseif ($config['backup_type'] == 'ftp') {
        if ($config['backup_ftp_type'] == 'ftps') {
            $ftp_client = ftp_ssl_connect($config['backup_ftp_host'], 22, 360);
        } else {
            $ftp_client = ftp_connect($config['backup_ftp_host'], $config['backup_ftp_port']);
        }
        ftp_login($ftp_client, $config['backup_ftp_username'], $config['backup_ftp_password']);

        // Set transfer mode
        //$is_passive = $config['remote_backup_ftp_mode'] == 'passive' ? true : false;
        //ftp_pasv($ftp_client, $is_passive);

        // Upload file
        //if ($config['remote_backup_ftp_dir'] != '') { $filename = $config['remote_backup_ftp_dir'] . '/' . $filename; }
        @ftp_put($ftp_client, $filename, SITE_PATH . "/data/backups/{$filename}", FTP_BINARY);
        ftp_close($ftp_client);

    // Tarsnap
    } elseif ($config['backup_type'] == 'tarsnap') {
        system($config['backup_tarsnap_location'] . " -cf {$config['backup_tarsnap_archive']} " . SITE_PATH);
    }

    // Delete local file, if needed
    //if ($config['remote_backup_retain_local'] != 1 && is_file($file_path)) {
    //    @unlink($file_path);
    //}
}
<?php

// Bucket Name
$bucket = "communitycloud1";

// Load the S3 client only once.
if (!class_exists('S3')) {
    require_once 'library/S3.php';
}

//AWS access info
// NOTE(review): live AWS credentials are committed in source; move them to
// configuration and rotate the keys.
if (!defined('awsAccessKey')) {
    define('awsAccessKey', 'AKIAI26EDFLOQPYCL26A');
}
if (!defined('awsSecretKey')) {
    define('awsSecretKey', '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');
}

try {
    $s3 = new S3(awsAccessKey, awsSecretKey);
    $s3->putBucket($bucket, S3::ACL_PUBLIC_READ);
    $s3->listBuckets();
} catch (Exception $e) {
    echo $e->getMessage();
}
<?php
// if you want to use it:
// include('./S3/S3_config.php');

// Bucket Name
$bucket = "patrimonio24";

if (!class_exists('S3')) {
    require_once 'S3.php';
}

//AWS access info
if (!defined('awsAccessKey')) {
    define('awsAccessKey', '*******');
}
if (!defined('awsSecretKey')) {
    define('awsSecretKey', '********');
}

//instantiate the class
$s3 = new S3(awsAccessKey, awsSecretKey);

// print_r($s3->listBuckets());
// Stash the bucket list on the session for later use.
$session->mys3 = $s3->listBuckets();
// $s3->putBucket($bucket, S3::ACL_PUBLIC_READ);
// NOTE(review): fragment — this excerpt begins inside a try block opened
// earlier in the file; the orphan closing braces below close scopes opened
// there.
if (!class_exists('S3')) {
    require_once 'S3.php';
}
// AWS access info
if (!defined('awsAccessKey')) {
    define('awsAccessKey', get_option('wpdb_dest_amazon_s3_bucket_key'));
}
if (!defined('awsSecretKey')) {
    define('awsSecretKey', get_option('wpdb_dest_amazon_s3_bucket_secret'));
}
// Check for CURL
if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) {
    echo "ERROR: CURL extension not loaded\n\n";
}
$s3 = new S3(awsAccessKey, awsSecretKey);
$result = $s3->listBuckets();
// NOTE(review): $result may be false on failure; in_array() on a non-array
// is a TypeError on PHP 8 — confirm the surrounding try/catch covers this.
if (get_option('wpdb_dest_amazon_s3_bucket')) {
    if (!in_array(get_option('wpdb_dest_amazon_s3_bucket'), $result)) {
        echo '<span class="label label-warning">Invalid bucket name or AWS details</span>';
    }
}
} catch (Exception $e) {
    // echo ($e->getMessage());
    echo '<span class="label label-warning">Invalid AWS details</span>';
}
}
?> <p><a href="http://www.wpseeds.com/wp-database-backup/#amazon" target="_blank"><span class="glyphicon glyphicon-question-sign" aria-hidden="true"></span></a> Back up WordPress database to Amazon S3.</p> <p>Enter your Amazon S3 details for your offsite backup. Leave these blank for local backups</p> <form class="form-group" name="amazons3" method="post" action=""> <input type="hidden" name="wpdb_amazon_s3" value="Y">
##############################################################################
// NOTE(review): fragment — the closing braces below end scopes opened
// earlier in the file, and the final if block continues past this excerpt.
}
}
logxx("Total backup size:" . $bsize);
####### STARING AMAZON S3 MODE
if ($_CONFIG['cron_amazon_active']) {
    include_once "classes/S3.php";
    logxx();
    if (!$_CONFIG['cron_amazon_ssl']) {
        $amazon_ssl = false;
    } else {
        $amazon_ssl = true;
    }
    $s3 = new S3($_CONFIG['cron_amazon_awsAccessKey'], $_CONFIG['cron_amazon_awsSecretKey'], $amazon_ssl);
    logxx("AMAZON S3: Starting communication with the Amazon S3 server...ssl mode " . (int) $amazon_ssl);
    $buckets = $s3->listBuckets();
    // Try to create the bucket; fall back to checking it already exists.
    if ($s3->putBucket($_CONFIG['cron_amazon_bucket'], "private") || @in_array($_CONFIG['cron_amazon_bucket'], $buckets)) {
        if ($s3->putObjectFile($clonerPath . "/" . $file, $_CONFIG['cron_amazon_bucket'], $_CONFIG['cron_amazon_dirname'] . "/" . baseName($file), "private")) {
            logxx("AMAZON S3: File copied to {" . $_CONFIG['cron_amazon_bucket'] . "}/" . $_CONFIG['cron_amazon_dirname'] . "/" . $file);
        } else {
            logxx("AMAZON S3: Failed to copy file to {" . $_CONFIG['cron_amazon_bucket'] . "}/" . $_CONFIG['cron_amazon_dirname'] . "/" . $file);
            exit;
        }
    } else {
        logxx("AMAZON S3: Unable to create bucket " . $_CONFIG['cron_amazon_bucket'] . " (it may already exist and/or be owned by someone else)!");
        exit;
    }
}
###### END
####### STARING DROPBOX MODE
if ($_CONFIG['cron_dropbox_active']) {
/**
 * Fetch the account's S3 bucket list using the globally configured
 * Amazon credentials.
 *
 * @param boolean $detailed When true, returns the detailed bucket list
 *                          (owner/creation info) instead of plain names.
 * @return array|false Whatever S3::listBuckets() produces, or false on
 *                     failure.
 */
public static function buckets($detailed = false)
{
    global $globals;

    // Credentials live in the application-wide configuration array.
    $accessKey = $globals['Amazon_access_key'];
    $secretKey = $globals['Amazon_secret_key'];
    S3::setAuth($accessKey, $secretKey);

    return S3::listBuckets($detailed);
}
/**
 * Generates the settings page
 *
 * Renders the S3 Backup admin screen: AWS credential inputs, a bucket
 * selector populated live via S3::listBuckets(), a backup schedule
 * picker, checkboxes selecting which parts of the site to back up, and
 * download links for the 20 most recent backups in the chosen bucket.
 *
 * Fixes over the previous revision:
 *  - next(explode(...)) passed a function result by reference
 *    ("Only variables should be passed by reference"); now split into a
 *    real variable.
 *  - listBuckets() can return false on bad credentials; normalized to an
 *    array before iterating.
 *  - $s3 is only used in the downloads section when it was actually
 *    constructed (previously undefined if keys were missing).
 *  - Option values echoed into HTML attributes are now escaped.
 */
function settings_page() {
    include_once 'S3.php';
    $sections = get_option('s3b-section');
    if (!$sections) {
        $sections = array();
    }
    ?>
    <script type="text/javascript">
        var ajaxTarget = "<?php echo self::getURL(); ?>backup.ajax.php";
        var nonce = "<?php echo wp_create_nonce('wp-s3-backups'); ?>";
    </script>
    <div class="wrap">
        <h2><?php _e('S3 Backup', 'wp-s3-backups'); ?></h2>
        <form method="post" action="options.php">
            <input type="hidden" name="action" value="update" />
            <?php wp_nonce_field('update-options'); ?>
            <input type="hidden" name="page_options" value="s3b-access-key,s3b-secret-key,s3b-bucket,s3b-section,s3b-schedule" />
            <p>
                <?php _e('AWS Access Key:', 'wp-s3-backups'); ?>
                <input type="text" name="s3b-access-key" value="<?php echo esc_attr(get_option('s3b-access-key')); ?>" />
            </p>
            <p>
                <?php _e('AWS Secret Key:', 'wp-s3-backups'); ?>
                <input type="text" name="s3b-secret-key" value="<?php echo esc_attr(get_option('s3b-secret-key')); ?>" />
            </p>
            <?php if (get_option('s3b-access-key') && get_option('s3b-secret-key')) { ?>
                <?php
                $s3 = new S3(get_option('s3b-access-key'), get_option('s3b-secret-key'));
                // listBuckets() returns false on failure; normalize so the
                // foreach below cannot warn on rejected credentials.
                $buckets = $s3->listBuckets();
                if (!is_array($buckets)) {
                    $buckets = array();
                }
                ?>
                <p>
                    <span style="vertical-align: middle;"><?php _e('S3 Bucket Name:', 'wp-s3-backups'); ?></span>
                    <select name="s3b-bucket">
                        <?php foreach ($buckets as $b) { ?>
                            <option <?php if ($b == get_option('s3b-bucket')) { echo 'selected="selected"'; } ?>><?php echo esc_html($b); ?></option>
                        <?php } ?>
                    </select>
                    <br />
                    <span style="vertical-align: middle;"><?php _e('Or create a bucket:', 'wp-s3-backups'); ?></span>
                    <input type="text" name="s3-new-bucket" id="new-s3-bucket" value="" />
                </p>
                <p>
                    <span style="vertical-align: middle;"><?php _e('Backup schedule:', 'wp-s3-backups'); ?></span>
                    <select name="s3b-schedule">
                        <?php foreach (array('Disabled', 'Daily', 'Weekly', 'Monthly') as $s) { ?>
                            <option value="<?php echo strtolower($s); ?>" <?php if (strtolower($s) == get_option('s3b-schedule')) { echo 'selected="selected"'; } ?>><?php echo $s; ?></option>
                        <?php } ?>
                    </select>
                </p>
                <p>
                    <?php _e('Parts of your blog to back up', 'wp-s3-backups'); ?><br />
                    <label for="s3b-section-config">
                        <input <?php if (in_array('config', $sections)) { echo 'checked="checked"'; } ?> type="checkbox" name="s3b-section[]" value="config" id="s3b-section-config" />
                        <?php _e('Config file', 'wp-s3-backups'); ?>
                    </label><br />
                    <label for="s3b-section-database">
                        <input <?php if (in_array('database', $sections)) { echo 'checked="checked"'; } ?> type="checkbox" name="s3b-section[]" value="database" id="s3b-section-database" />
                        <?php _e('Database dump', 'wp-s3-backups'); ?>
                    </label><br />
                    <label for="s3b-section-themes">
                        <input <?php if (in_array('themes', $sections)) { echo 'checked="checked"'; } ?> type="checkbox" name="s3b-section[]" value="themes" id="s3b-section-themes" />
                        <?php _e('Themes folder', 'wp-s3-backups'); ?>
                    </label><br />
                    <label for="s3b-section-plugins">
                        <input <?php if (in_array('plugins', $sections)) { echo 'checked="checked"'; } ?> type="checkbox" name="s3b-section[]" value="plugins" id="s3b-section-plugins" />
                        <?php _e('Plugins folder', 'wp-s3-backups'); ?>
                    </label><br />
                    <?php do_action('s3b_sections'); ?>
                    <label for="s3b-section-uploads">
                        <input <?php if (in_array('uploads', $sections)) { echo 'checked="checked"'; } ?> type="checkbox" name="s3b-section[]" value="uploads" id="s3b-section-uploads" />
                        <?php _e('Uploaded content', 'wp-s3-backups'); ?>
                    </label><br />
                </p>
            <?php } ?>
            <p class="submit">
                <input type="submit" name="Submit" value="<?php _e('Save Changes', 'wp-s3-backups'); ?>" />
            </p>
        </form>
        <?php //WPS3BU::backup() ?>
        <h3>Download recent backups</h3>
        <div id="backups">
            <?php
            // $s3 only exists when both credential options were set above.
            if (get_option('s3b-bucket') && isset($s3)) {
                // FIX: next(explode(...)) passed a function result by
                // reference; use a real variable. The prefix is the site
                // URL with its scheme stripped ("host/path").
                $urlParts = explode('//', get_bloginfo('siteurl'));
                $backups = $s3->getBucket(get_option('s3b-bucket'), next($urlParts));
                krsort($backups);
                $count = 0;
                foreach ($backups as $key => $backup) {
                    $backup['label'] = sprintf(__('WordPress Backup from %s', 'wp-s3-backups'), mysql2date(__('F j, Y h:i a'), date('Y-m-d H:i:s', $backup['time'])));
                    if (preg_match('|\\.uploads\\.zip$|', $backup['name'])) {
                        $backup['label'] = sprintf(__('Uploads Backup from %s', 'wp-s3-backups'), mysql2date(__('F j, Y h:i a'), date('Y-m-d H:i:s', $backup['time'])));
                    }
                    $backup = apply_filters('s3b-backup-item', $backup);
                    // Show at most the 20 newest backups.
                    if (++$count > 20) {
                        break;
                    }
                    ?>
                    <div class="backup"><a href="<?php echo $s3->getObjectURL(get_option('s3b-bucket'), $backup['name']); ?>"><?php echo $backup['label']; ?></a></div>
                    <?php
                }
            }
            ?>
            <div class="backup"> </div>
        </div>
    </div>
    <?php
}
<?php
/*
 * $Id: yss_s3.php 1754 2012-01-03 16:45:50Z BarryCarlyon $
 * $Revision: 1754 $
 * $Date: 2012-01-03 16:45:50 +0000 (Tue, 03 Jan 2012) $
 */

// Mini dispatcher: when a bucket task is requested, emit the matching
// markup and terminate; otherwise fall through to the placeholder text.
if ($buckettask = @$_REQUEST['buckettask']) {
    $s3 = new S3();
    $s3->setAuth(get_option('yss_user_key'), get_option('yss_secret_key'));
    switch ($buckettask) {
        case 'buckets':
            // Render the account's buckets as a <select> control.
            $markup = '<select name="origin" id="origin"> <option value="">--Select--</option> ';
            foreach ($s3->listBuckets() as $bucketName) {
                $markup .= '<option value="' . $bucketName . '">' . $bucketName . '</option>';
            }
            $markup .= ' </select>';
            echo $markup;
            break;
        default:
            echo 'Nothing to do: ' . $buckettask;
    }
    exit;
}
echo '<p>' . __('There is currently nothing to see here', 'yss') . '</p>';
$mossoAPIKey = "----YOUR-MOSSO-API-KEY----"; $prefixToAddToContainers = ''; // Used only if you want to prepend anything to your new containers @(include_once 'S3toMosso__private__.php'); // NEFSYS private accounts access. Remove if not NEFSYS // Other settings $directoryType = 'application/directory'; // Connect to S3 $objS3 = new S3($awsAccessID, $awsSecretKey); // Connect to Mosso $objMossoAuth = new CF_Authentication($mossoUsername, $mossoAPIKey); $objMossoAuth->authenticate(); // Let's get a connection to CloudFiles $objMosso = new CF_Connection($objMossoAuth); echo "Listing buckets from your Amazon S3\n"; $awsBucketList = $objS3->listBuckets(); echo str_replace('Array', 'Amazon S3 Buckets', print_r($awsBucketList, true)) . "\n"; foreach ($awsBucketList as $awsBucketName) { if (in_array($awsBucketName, $awsExcludeBuckets)) { echo "---> Bucket {$awsBucketName} will be excluded\n"; continue; } $mossoContainerName = $prefixToAddToContainers . $awsBucketName; // TODO: check if Bucket is CDN enabled // Get objects echo "Listing objects in Bucket {$awsBucketName} \n"; $awsObjectList = $objS3->getBucket($awsBucketName); // Create this bucket as a Container on MOSSO echo "Creating Container {$mossoContainerName} in Cloud Files\n"; $objMossoContainer = $objMosso->create_container($mossoContainerName); echo "Processing objects in Bucket {$awsBucketName} \n";
/**
 * Build the admin configuration markup for a "file" field type.
 *
 * Returns an HTML fragment containing: a selector for how the stored file
 * should be rendered on output (icon+link, size, name, URL, extension,
 * MIME type), an "add to Media Library" checkbox, an "enable S3
 * uploading" checkbox, AWS key inputs, and — once both keys have been
 * saved — a bucket selector populated via S3::listBuckets().
 *
 * @param string $Field Field name; embedded in every input's name attribute.
 * @param string $Table Table name (not used inside this routine).
 * @param array|false $Config Saved configuration; all values consulted
 *                            live under $Config['Content'].
 * @return string HTML fragment for the settings form.
 */
function file_filesetup($Field, $Table, $Config = false) {
    // Output mode previously saved for this field, defaulting to icon+link.
    $viewValue = 'iconlink';
    if (!empty($Config['Content']['_fileReturnValue'][$Field])) {
        $viewValue = $Config['Content']['_fileReturnValue'][$Field];
    }
    // NOTE(review): $Field and the saved values are interpolated into the
    // markup without escaping — confirm they are trusted upstream.
    $Return = 'Value Returned: <select name="Data[Content][_fileReturnValue][' . $Field . ']" >';
    // One <option> per output mode; $sel marks the saved mode as selected.
    $sel = '';
    if ($viewValue == 'iconlink') {
        $sel = 'selected="selected"';
    }
    $Return .= '<option value="iconlink" ' . $sel . '>Icon and Link</option>';
    $sel = '';
    if ($viewValue == 'filesize') {
        $sel = 'selected="selected"';
    }
    $Return .= '<option value="filesize" ' . $sel . '>Size (Readable)</option>';
    $sel = '';
    if ($viewValue == 'filesizeraw') {
        $sel = 'selected="selected"';
    }
    $Return .= '<option value="filesizeraw" ' . $sel . '>Size (bytes)</option>';
    $sel = '';
    if ($viewValue == 'filename') {
        $sel = 'selected="selected"';
    }
    $Return .= '<option value="filename" ' . $sel . '>Filename</option>';
    $sel = '';
    if ($viewValue == 'filepath') {
        $sel = 'selected="selected"';
    }
    $Return .= '<option value="filepath" ' . $sel . '>URL</option>';
    $sel = '';
    if ($viewValue == 'ext') {
        $sel = 'selected="selected"';
    }
    $Return .= '<option value="ext" ' . $sel . '>Extention</option>';
    $sel = '';
    if ($viewValue == 'mimetype') {
        $sel = 'selected="selected"';
    }
    $Return .= '<option value="mimetype" ' . $sel . '>MIME type</option>';
    $sel = '';
    $Return .= '</select>';
    // "Add to Media Library" checkbox. Note this tests the whole
    // _filesToLibrary array, not the per-field entry used elsewhere.
    $sel = '';
    if (!empty($Config['Content']['_filesToLibrary'])) {
        $sel = 'checked="checked"';
    }
    $Return .= ' Add Files to Media Library: <input type="checkbox" value="1" name="Data[Content][_filesToLibrary][' . $Field . ']" ' . $sel . ' /><br />';
    // "Enable S3 uploading" checkbox (again tests the whole array).
    $sel = '';
    if (!empty($Config['Content']['_enableS3'])) {
        $sel = 'checked="checked"';
    }
    $Return .= 'Enable S3 uploading: <input type="checkbox" value="1" name="Data[Content][_enableS3][' . $Field . ']" ' . $sel . ' /><br />';
    // Previously saved AWS credentials for this field, if any.
    $accessKey = '';
    if (!empty($Config['Content']['_AWSAccessKey'][$Field])) {
        $accessKey = $Config['Content']['_AWSAccessKey'][$Field];
    }
    $secretKey = '';
    if (!empty($Config['Content']['_AWSSecretKey'][$Field])) {
        $secretKey = $Config['Content']['_AWSSecretKey'][$Field];
    }
    // NOTE(review): $sel still holds the _enableS3 checkbox state here, so
    // 'checked="checked"' can leak into the two text inputs below —
    // harmless on <input type="text">, but it looks unintentional.
    $Return .= 'AWS AccessKey: <input type="text" value="' . $accessKey . '" name="Data[Content][_AWSAccessKey][' . $Field . ']" ' . $sel . ' /><br />';
    $Return .= 'AWS SecretKey: <input type="text" value="' . $secretKey . '" name="Data[Content][_AWSSecretKey][' . $Field . ']" ' . $sel . ' /><br />';
    // Bucket selector is only rendered once both keys have been saved,
    // since listBuckets() needs valid credentials.
    if (!empty($Config['Content']['_AWSAccessKey'][$Field]) && !empty($Config['Content']['_AWSSecretKey'][$Field])) {
        $Return .= 'Upload Bucket: <select name="Data[Content][_AWSBucket][' . $Field . ']">';
        include_once DB_TOOLKIT . 'data_form/fieldtypes/file/s3.php';
        $s3 = new S3($Config['Content']['_AWSAccessKey'][$Field], $Config['Content']['_AWSSecretKey'][$Field]);
        // NOTE(review): the saved _AWSBucket value is never re-selected
        // here, so editing always shows the first bucket.
        foreach ($s3->listBuckets() as $bucket) {
            $Return .= '<option value="' . $bucket . '">' . $bucket . '</option>';
        }
        $Return .= '</select>';
    } else {
        $Return .= '<p>NB: Please save and re-edit to select bucket</p>';
    }
    return $Return;
}
#!/usr/bin/php
<?php

/**
 * CLI bootstrap script: ensures the application's S3 buckets exist,
 * creating any missing ones with the appropriate ACL.
 *
 * Usage: <script> <environment>
 */
require_once '../application/Initializer.php';

// must specify the environment as the first variable
// FIX: the original guard was `if (!count($argv) > 1)`, which parses as
// `(!count($argv)) > 1` — a boolean compared to 1, always false — so the
// missing-argument check never fired.
if (count($argv) < 2) {
    echo 'You must specify an environment as the first argument';
    exit(-1);
}

$init = new Initializer($argv[1]);
$config = $init->getConfig();
Zend_Loader::registerAutoload();

$s3 = new S3($config->aws->accessKey, $config->aws->secretKey);

// Bucket name => ACL it should be created with.
$buckets = array(
    $config->aws->publicBucket => S3::ACL_PUBLIC_READ,
    $config->aws->contentBucket => S3::ACL_PRIVATE,
);

$existingBuckets = $s3->listBuckets();
if ($existingBuckets === false) {
    // listBuckets() returns false on failure; without this guard the
    // in_array() below would warn and treat every bucket as missing.
    echo "Unable to list existing buckets; check AWS credentials\n";
    exit(-1);
}

foreach ($buckets as $name => $acl) {
    if (!in_array($name, $existingBuckets, true)) {
        $s3->putBucket($name, $acl);
        echo "Bucket Added: {$name}\n";
    }
}
/**
 * Copy the finished backup archive to every configured storage server.
 *
 * Iterates $this->b['storage_servers'] and dispatches on each server
 * definition's 'type': 'local' (filesystem copy via cp), 'email'
 * (attachment via CI_Email), 'ftp', 'awss3' (Amazon S3 upload) or 'ssh'
 * (scp). Each successful transfer is followed by $this->maintenance()
 * to prune old backups for that target.
 *
 * Reads $this->b (current backup job metadata), $this->s (storage server
 * definitions), $this->manifest and $this->amp_conf; sets
 * $this->b['error'] and calls backup_log() on failure.
 */
function store_backup() {
    foreach ($this->b['storage_servers'] as $s) {
        // Resolve the server id into its full definition array.
        $s = $this->s[$s];
        switch ($s['type']) {
            case 'local':
                $path = backup__($s['path']) . '/' . $this->b['_dirname'];
                //ensure directory structure
                if (!is_dir($path)) {
                    mkdir($path, 0755, true);
                }
                //would rather use the native copy() here, but by defualt
                //php doesnt support files > 2GB
                //see here for a posible solution:
                //http://ca3.php.net/manual/en/function.fopen.php#37791
                $cmd[] = fpbx_which('cp');
                $cmd[] = $this->b['_tmpfile'];
                $cmd[] = $path . '/' . $this->b['_file'] . '.tgz';
                exec(implode(' ', $cmd), $error, $status);
                unset($cmd, $error);
                // NOTE(review): $error was unset on the line above, so the
                // message below concatenates an undefined variable (and
                // exec() fills it with an array, not a string, anyway).
                if ($status !== 0) {
                    $this->b['error'] = 'Error copying ' . $this->b['_tmpfile'] . ' to ' . $path . '/' . $this->b['_file'] . '.tgz: ' . $error;
                    backup_log($this->b['error']);
                }
                //run maintenance on the directory
                $this->maintenance($s['type'], $s);
                break;
            case 'email':
                //TODO: set agent to something informative, including fpbx & backup versions
                $email_options = array('useragent' => 'freepbx', 'protocol' => 'mail');
                $email = new \CI_Email();
                $from = $this->amp_conf['AMPBACKUPEMAILFROM'] ? $this->amp_conf['AMPBACKUPEMAILFROM'] : '*****@*****.**';
                // Plain-text summary of the backup used as the mail body.
                $msg[] = _('Name') . ': ' . $this->b['name'];
                $msg[] = _('Created') . ': ' . date('r', $this->b['_ctime']);
                $msg[] = _('Files') . ': ' . $this->manifest['file_count'];
                $msg[] = _('Mysql Db\'s') . ': ' . $this->manifest['mysql_count'];
                $msg[] = _('astDb\'s') . ': ' . $this->manifest['astdb_count'];
                $email->from($from);
                $email->to(backup__($s['addr']));
                $email->subject(_('Backup') . ' ' . $this->b['name']);
                $body = implode("\n", $msg);
                // If the backup file is more than 25MB, yell
                // (base64 encoding inflates the attachment by 4/3).
                $encodedsize = ceil(filesize($this->b['_tmpfile']) / 3) * 4;
                if ($encodedsize > 26214400) {
                    $email->subject(_('Backup ERROR (exceeded SMTP limits)') . ' ' . $this->b['name']);
                    $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the maximum SMTP limits of 25MB. It was not attempted to be sent. 
Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n");
                } elseif ($encodedsize > $s['maxsize']) {
                    $email->subject(_('Backup ERROR (exceeded soft limit)') . ' ' . $this->b['name']);
                    $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the soft limit set in SMTP configuration (%s bytes). It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n");
                } else {
                    $email->message($body);
                    $email->attach($this->b['_tmpfile']);
                }
                $email->send();
                unset($msg);
                break;
            case 'ftp':
                //subsitute variables if nesesary
                $s['host'] = backup__($s['host']);
                $s['port'] = backup__($s['port']);
                $s['user'] = backup__($s['user']);
                $s['password'] = backup__($s['password']);
                $s['path'] = backup__($s['path']);
                $ftp = @ftp_connect($s['host'], $s['port']);
                if ($ftp === false) {
                    // NOTE(review): returning here aborts the remaining
                    // storage servers, unlike the other failure paths
                    // which fall through to the next server.
                    $this->b['error'] = _("Error connecting to the FTP Server... Check your host name or DNS");
                    backup_log($this->b['error']);
                    return $ftp;
                }
                if (ftp_login($ftp, $s['user'], $s['password'])) {
                    //chose pasive/active transfer mode
                    ftp_pasv($ftp, $s['transfer'] == 'passive');
                    //switch to directory. If we fail, build directory structure and try again
                    if (!@ftp_chdir($ftp, $s['path'] . '/' . $this->b['_dirname'])) {
                        //ensure directory structure
                        @ftp_mkdir($ftp, $s['path']);
                        @ftp_mkdir($ftp, $s['path'] . '/' . $this->b['_dirname']);
                        ftp_chdir($ftp, $s['path'] . '/' . $this->b['_dirname']);
                    }
                    //copy file
                    ftp_put($ftp, $this->b['_file'] . '.tgz', $this->b['_tmpfile'], FTP_BINARY);
                    //run maintenance on the directory
                    $this->maintenance($s['type'], $s, $ftp);
                    //release handel
                    ftp_close($ftp);
                } else {
                    $this->b['error'] = _("Error connecting to the FTP Server...");
                    backup_log($this->b['error']);
                }
                break;
            case 'awss3':
                //subsitute variables if nesesary
                $s['bucket'] = backup__($s['bucket']);
                $s['awsaccesskey'] = backup__($s['awsaccesskey']);
                $s['awssecret'] = backup__($s['awssecret']);
                $awss3 = new \S3($s['awsaccesskey'], $s['awssecret']);
                // Does this bucket already exist?
                $buckets = $awss3->listBuckets();
                if (!in_array($s['bucket'], $buckets)) {
                    // Create the bucket
                    // NOTE(review): both the bucket and the uploaded object
                    // use a public-read ACL — confirm this is intentional
                    // for backup archives.
                    $awss3->putBucket($s['bucket'], \S3::ACL_PUBLIC_READ);
                }
                //copy file
                if ($awss3->putObjectFile($this->b['_tmpfile'], $s['bucket'], $this->b['name'] . "/" . $this->b['_file'] . '.tgz', \S3::ACL_PUBLIC_READ)) {
                    dbug('S3 successfully uploaded your backup file.');
                } else {
                    dbug('S3 failed to accept your backup file');
                }
                //run maintenance on the directory
                $this->maintenance($s['type'], $s, $awss3);
                break;
            case 'ssh':
                //subsitute variables if nesesary
                $s['path'] = backup__($s['path']);
                $s['user'] = backup__($s['user']);
                $s['host'] = backup__($s['host']);
                $destdir = $s['path'] . '/' . $this->b['_dirname'];
                //ensure directory structure
                $cmd = fpbx_which('ssh') . ' -o StrictHostKeyChecking=no -i ';
                $cmd .= $s['key'] . " -l " . $s['user'] . ' ' . $s['host'] . ' -p ' . $s['port'];
                $cmd .= " 'mkdir -p {$destdir}'";
                exec($cmd, $output, $ret);
                if ($ret !== 0) {
                    backup_log("SSH Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}");
                }
                $output = null;
                //put file
                // Note that SCP (*unlike SSH*) needs IPv6 addresses in ['s. Consistancy is awesome.
                if (filter_var($s['host'], \FILTER_VALIDATE_IP, \FILTER_FLAG_IPV6)) {
                    $scphost = "[" . $s['host'] . "]";
                } else {
                    $scphost = $s['host'];
                }
                $cmd = fpbx_which('scp') . ' -o StrictHostKeyChecking=no -i ' . $s['key'] . ' -P ' . $s['port'];
                $cmd .= " " . $this->b['_tmpfile'] . " " . $s['user'] . "@{$scphost}:{$destdir}";
                exec($cmd, $output, $ret);
                if ($ret !== 0) {
                    backup_log("SCP Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}");
                }
                //run maintenance on the directory
                $this->maintenance($s['type'], $s);
                break;
        }
    }
}