function upload() { $error = ""; if (isset($_POST['submitbtn'])) { if (array_key_exists('userFile', $_FILES)) { if ($_FILES['userFile']['error'] === UPLOAD_ERR_OK) { $filename = $_FILES["userFile"]["name"]; $ext = pathinfo($filename, PATHINFO_EXTENSION); $allowed = array("doc", "docx", "rtf", "pdf", "txt", "odf"); $fname = $_FILES["userFile"]["tmp_name"]; // Make sure the extension is in the allowed list if (in_array($ext, $allowed)) { if ($_FILES['userFile']['size'] < 2097152) { $bucket = 'sublite-resumes'; //Can use existing configs when merging with sublite; never commit real keys to source $s3 = new S3('YOUR-AWS-ACCESS-KEY', 'YOUR-AWS-SECRET-KEY'); $s3->putBucket($bucket, S3::ACL_PUBLIC_READ); $actual_image_name = time() . '.' . $ext; if ($s3->putObjectFile($fname, $bucket, $actual_image_name, S3::ACL_PUBLIC_READ)) { $image = 'http://' . $bucket . '.s3.amazonaws.com/' . $actual_image_name; return $image; } else { return "An unknown error occurred during upload!"; } /* // File validated; upload the file! !!!Need to upload to S3!!! $uploaddir = 'resumes/'; $uploadfile = basename($_FILES['userFile']['name']); if (move_uploaded_file($_FILES['userFile']['tmp_name'], $uploaddir.$uploadfile)) { return "File is valid, and was successfully uploaded.\n"; } else { return "An unknown error occurred during upload!"; } */ } else { $error = "Max file size exceeded!"; } } else { $error = "Bad file extension!"; } } else { if ($_FILES['userFile']['error'] === UPLOAD_ERR_FORM_SIZE) { $error = "Max file size exceeded!"; } else { if ($_FILES['userFile']['error'] === UPLOAD_ERR_NO_FILE) { $error = "You must choose a file!"; } else { $error = "An unknown error occurred during upload!"; } } } return $error; } } }
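// Editor's note: a minimal sketch, not part of the original source. The extension
// whitelist above is easy to spoof with a renamed file, so a server-side MIME check
// via PHP's finfo extension can back it up. The $allowed MIME map here is an
// assumption derived from the extensions used above.
function is_allowed_upload($tmp_path)
{
    $allowed = array(
        'application/pdf',
        'application/msword',
        'application/vnd.openxmlformats-officedocument.wordprocessingml.document',
        'application/rtf',
        'text/plain',
    );
    $finfo = finfo_open(FILEINFO_MIME_TYPE); // inspects file contents, not the filename
    $mime = finfo_file($finfo, $tmp_path);
    finfo_close($finfo);
    return in_array($mime, $allowed, true);
}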
function grav_submit_to_s3($entry, $form) { // no file? no problem. if (empty($entry[GFORM_UPLOAD_FIELD_ID])) { return; } $gfs3 = new S3(awsAccessKey, awsSecretKey); // url of uploaded file $file_url = $entry[GFORM_UPLOAD_FIELD_ID]; // filename of uploaded file $file_name = $_FILES['input_' . GFORM_UPLOAD_FIELD_ID]['name']; // ensure bucket is there $gfs3->putBucket(BUCKET_NAME, S3::ACL_AUTHENTICATED_READ); // clean up filename, split into parts $url_parts = parse_url($file_url); $full_path = $_SERVER['DOCUMENT_ROOT'] . substr($url_parts['path'], 1); // strip any directory component from the client-supplied filename $file_name = basename($file_name); // this is the full path to the file on S3 $filename_to_s3 = UPLOAD_PATH . sanitize_file_name($file_name); if ($gfs3->putObjectFile($full_path, BUCKET_NAME, $filename_to_s3, S3::ACL_PUBLIC_READ)) { return true; // upload success } else { wp_die('It looks like something went wrong while uploading your file. Please try again in a few moments.'); } }
/** * Uses the init action to catch changes in the schedule and pass those on to the scheduler. * */ function init() { if (isset($_POST['s3b-schedule'])) { wp_clear_scheduled_hook('s3-backup'); if ($_POST['s3b-schedule'] != 'disabled') { wp_schedule_event(time(), $_POST['s3b-schedule'], 's3-backup'); } } if (isset($_POST['s3-new-bucket']) && !empty($_POST['s3-new-bucket'])) { include_once 'S3.php'; $_POST['s3-new-bucket'] = strtolower($_POST['s3-new-bucket']); $s3 = new S3(get_option('s3b-access-key'), get_option('s3b-secret-key')); $s3->putBucket($_POST['s3-new-bucket']); $buckets = $s3->listBuckets(); if (is_array($buckets) && in_array($_POST['s3-new-bucket'], $buckets)) { update_option('s3b-bucket', $_POST['s3-new-bucket']); $_POST['s3b-bucket'] = $_POST['s3-new-bucket']; } else { update_option('s3b-bucket', false); } } if (!get_option('s3b-bucket')) { add_action('admin_notices', array('WPS3B', 'newBucketWarning')); } }
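// Editor's note: a hedged sketch, not taken from the plugin above. The handler
// passes $_POST['s3b-schedule'] straight to wp_schedule_event(); WordPress exposes
// its registered recurrences via wp_get_schedules(), so the submitted value can be
// validated against that list before scheduling anything.
$schedule = isset($_POST['s3b-schedule']) ? $_POST['s3b-schedule'] : '';
if ($schedule === 'disabled' || array_key_exists($schedule, wp_get_schedules())) {
    wp_clear_scheduled_hook('s3-backup');
    if ($schedule !== 'disabled') {
        wp_schedule_event(time(), $schedule, 's3-backup'); // only known recurrences get here
    }
}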
/** * Creates bucket * * @param string $container_id * @param string $error * @return boolean */ function create_container(&$container_id, &$error) { if (!$this->_init($error)) { return false; } $this->_set_error_handler(); $buckets = @$this->_s3->listBuckets(); if ($buckets === false) { $error = sprintf('Unable to list buckets (%s).', $this->_get_last_error()); $this->_restore_error_handler(); return false; } if (in_array($this->_config['bucket'], (array) $buckets)) { $error = sprintf('Bucket already exists: %s.', $this->_config['bucket']); $this->_restore_error_handler(); return false; } if (empty($this->_config['bucket_acl'])) { $this->_config['bucket_acl'] = S3::ACL_PRIVATE; } if (!isset($this->_config['bucket_location'])) { $this->_config['bucket_location'] = S3::LOCATION_US; } if (!@$this->_s3->putBucket($this->_config['bucket'], $this->_config['bucket_acl'], $this->_config['bucket_location'])) { $error = sprintf('Unable to create bucket: %s (%s).', $this->_config['bucket'], $this->_get_last_error()); $this->_restore_error_handler(); return false; } $this->_restore_error_handler(); return true; }
public function onImageAddition(ImageAdditionEvent $event) { global $config; $access = $config->get_string("amazon_s3_access"); $secret = $config->get_string("amazon_s3_secret"); $bucket = $config->get_string("amazon_s3_bucket"); if (!empty($bucket)) { log_debug("amazon_s3", "Mirroring Image #" . $event->image->id . " to S3 #{$bucket}"); $s3 = new S3($access, $secret); $s3->putBucket($bucket, S3::ACL_PUBLIC_READ); $s3->putObjectFile(warehouse_path("thumbs", $event->image->hash), $bucket, 'thumbs/' . $event->image->hash, S3::ACL_PUBLIC_READ, array(), array("Content-Type" => "image/jpeg", "Content-Disposition" => "inline; filename=image-" . $event->image->id . ".jpg")); $s3->putObjectFile(warehouse_path("images", $event->image->hash), $bucket, 'images/' . $event->image->hash, S3::ACL_PUBLIC_READ, array(), array("Content-Type" => $event->image->get_mime_type(), "Content-Disposition" => "inline; filename=image-" . $event->image->id . "." . $event->image->ext)); } }
/** * Constructor * * @return void */ protected function __construct() { require_once LC_DIR_MODULES . 'CDev' . LC_DS . 'AmazonS3Images' . LC_DS . 'lib' . LC_DS . 'S3.php'; $config = \XLite\Core\Config::getInstance()->CDev->AmazonS3Images; if ($config->access_key && $config->secret_key && function_exists('curl_init')) { try { $this->client = new \S3($config->access_key, $config->secret_key); \S3::setExceptions(true); if (!$this->client->getBucketLocation($config->bucket)) { $this->client->putBucket($config->bucket); } $this->valid = true; } catch (\S3Exception $e) { \XLite\Logger::getInstance()->registerException($e); } } }
function putFile($filename) { $s3svc = new S3(); // Removing the first slash is important - otherwise the URL is different. (eregi_replace() was removed in PHP 7; preg_replace() does the same job.) $aws_filename = preg_replace('#^/#', '', $filename); $filename = $_SERVER['DOCUMENT_ROOT'] . $filename; $mime_type = NFilesystem::getMimeType($filename); // Read the file into memory. $fh = fopen($filename, 'rb'); $contents = fread($fh, filesize($filename)); fclose($fh); $s3svc->putBucket(MIRROR_S3_BUCKET); $out = $s3svc->putObject($aws_filename, $contents, MIRROR_S3_BUCKET, 'public-read', $mime_type); // Now the file is accessible at: // http://MIRROR_S3_BUCKET.s3.amazonaws.com/put/the/filename/here.jpg OR // http://s3.amazonaws.com/MIRROR_S3_BUCKET/put/the/filename/here.jpg unset($s3svc); }
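// Editor's note: sketch only, under the assumption that the same S3 library used
// elsewhere in this collection is available. Reading the whole file with fread()
// holds it in memory; putObjectFile() streams from disk instead, so large files
// are not buffered into PHP's memory limit. Constant names mirror the snippet above.
function putFileStreaming($filename)
{
    $s3svc = new S3();
    $aws_filename = preg_replace('#^/#', '', $filename); // keep the object key relative
    $local_path = $_SERVER['DOCUMENT_ROOT'] . $filename;
    $s3svc->putBucket(MIRROR_S3_BUCKET);
    // putObjectFile() opens its own read stream on the local path
    return $s3svc->putObjectFile($local_path, MIRROR_S3_BUCKET, $aws_filename, S3::ACL_PUBLIC_READ);
}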
function MoveToS3($strPath, $strFileName, $strType, $strS3Path) { $strPath = rtrim($strPath, '/'); $strS3Path = rtrim($strS3Path, '/'); if (file_exists($strPath . '/' . $strFileName)) { require_once __DOCROOT__ . __PHP_ASSETS__ . '/s3.class.php'; $objS3 = new S3(); $objS3->putBucket(AWS_BUCKET); $fh = fopen($strPath . '/' . $strFileName, 'rb'); $contents = fread($fh, filesize($strPath . '/' . $strFileName)); fclose($fh); $objS3->putObject($strFileName, $contents, AWS_BUCKET . $strS3Path, 'public-read', $strType); unlink($strPath . '/' . $strFileName); unset($objS3); return true; } else { return false; } }
/** * Creates bucket * * @param string $error * @return boolean */ function create_bucket(&$error) { if (!$this->_init($error)) { return false; } $buckets = @$this->_s3->listBuckets(); if (!$buckets) { $error = 'Unable to list buckets (check your credentials).'; return false; } if (in_array($this->_config['bucket'], (array) $buckets)) { $error = sprintf('Bucket already exists: %s.', $this->_config['bucket']); return false; } if (!@$this->_s3->putBucket($this->_config['bucket'], S3::ACL_PUBLIC_READ)) { $error = sprintf('Unable to create bucket: %s.', $this->_config['bucket']); return false; } return true; }
function upload_to_amazon($source_filename, $dest_filename, $image_type = IMAGETYPE_JPEG, $compression = 75, $permissions = null) { // Create local instance of image $this->save($source_filename, $image_type, $compression, $permissions); // Begin s3 sequence $s3 = new S3('AMAZON_ACCESS_TOKEN', 'AMAZON_SECRET_TOKEN'); // Name each bucket off the domain $bucket = 'screenbin'; // Make sure the bucket is there if (!in_array($bucket, $s3->listBuckets())) { $s3->putBucket($bucket, S3::ACL_PUBLIC_READ); } // Upload to s3 if ($s3->putObjectFile($source_filename, $bucket, $dest_filename, S3::ACL_PUBLIC_READ)) { // TODO: delete the local version of the file once the upload is confirmed return true; } else { return false; } }
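// Editor's note: a hedged sketch of the deletion step the TODO above leaves open;
// this is not the original author's code. unlink() runs only after S3 confirms the
// upload, so a failed transfer keeps the local copy intact.
function upload_then_cleanup(S3 $s3, $bucket, $source_filename, $dest_filename)
{
    if (!$s3->putObjectFile($source_filename, $bucket, $dest_filename, S3::ACL_PUBLIC_READ)) {
        return false; // upload failed; keep the local file
    }
    if (is_file($source_filename) && !unlink($source_filename)) {
        error_log("Uploaded, but could not remove local copy: {$source_filename}");
    }
    return true;
}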
function store_backup() { foreach ($this->b['storage_servers'] as $s) { $s = $this->s[$s]; switch ($s['type']) { case 'local': $path = backup__($s['path']) . '/' . $this->b['_dirname']; //ensure directory structure if (!is_dir($path)) { mkdir($path, 0755, true); } //would rather use the native copy() here, but by default //php doesn't support files > 2GB //see here for a possible solution: //http://ca3.php.net/manual/en/function.fopen.php#37791 $cmd[] = fpbx_which('cp'); $cmd[] = $this->b['_tmpfile']; $cmd[] = $path . '/' . $this->b['_file'] . '.tgz'; exec(implode(' ', $cmd), $output, $status); unset($cmd); if ($status !== 0) { $this->b['error'] = 'Error copying ' . $this->b['_tmpfile'] . ' to ' . $path . '/' . $this->b['_file'] . '.tgz: ' . implode("\n", $output); backup_log($this->b['error']); } unset($output); //run maintenance on the directory $this->maintenance($s['type'], $s); break; case 'email': //TODO: set agent to something informative, including fpbx & backup versions $email_options = array('useragent' => 'freepbx', 'protocol' => 'mail'); $email = new \CI_Email(); //Generic email $from = '*****@*****.**'; //If we have sysadmin and "from" is set if (function_exists('sysadmin_get_storage_email')) { $emails = sysadmin_get_storage_email(); //Check that what we got back above is an email address if (!empty($emails['fromemail']) && filter_var($emails['fromemail'], FILTER_VALIDATE_EMAIL)) { $from = $emails['fromemail']; } } //If the user set an email in advanced settings it wins, otherwise take whatever won above. $from = filter_var($this->amp_conf['AMPBACKUPEMAILFROM'], FILTER_VALIDATE_EMAIL) ? $this->amp_conf['AMPBACKUPEMAILFROM'] : $from; $msg[] = _('Name') . ': ' . $this->b['name']; $msg[] = _('Created') . ': ' . date('r', $this->b['_ctime']); $msg[] = _('Files') . ': ' . $this->manifest['file_count']; $msg[] = _('Mysql Db\'s') . ': ' . $this->manifest['mysql_count']; $msg[] = _('astDb\'s') . ': ' . $this->manifest['astdb_count']; $email->from($from); $email->to(backup__($s['addr'])); $email->subject($this->amp_conf['FREEPBX_SYSTEM_IDENT'] . ' ' . _('Backup') . ' ' . $this->b['name']); $body = implode("\n", $msg); // If the backup file is more than 25MB, yell $encodedsize = ceil(filesize($this->b['_tmpfile']) / 3) * 4; if ($encodedsize > 26214400) { $email->subject($this->amp_conf['FREEPBX_SYSTEM_IDENT'] . ' ' . _('Backup ERROR (exceeded SMTP limits)') . ' ' . $this->b['name']); $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the maximum SMTP limits of 25MB. It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n"); } elseif ($encodedsize > $s['maxsize']) { $email->subject($this->amp_conf['FREEPBX_SYSTEM_IDENT'] . ' ' . _('Backup ERROR (exceeded soft limit)') . ' ' . $this->b['name']); $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the soft limit set in SMTP configuration (%s bytes). It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n"); } else { $email->message($body); $email->attach($this->b['_tmpfile']); } $email->send(); unset($msg); break; case 'ftp': //substitute variables if necessary $s['host'] = backup__($s['host']); $s['port'] = backup__($s['port']); $s['user'] = backup__($s['user']); $s['password'] = backup__($s['password']); $s['path'] = trim(backup__($s['path']), '/'); $fstype = isset($s['fstype']) ? $s['fstype'] : 'auto'; $path = $s['path'] . '/' . $this->b['_dirname']; $connection = new Connection($s['host'], $s['user'], $s['password'], $s['port'], 90, $s['transfer'] == 'passive'); try { $connection->open(); } catch (\Exception $e) { $this->b['error'] = $e->getMessage(); backup_log($this->b['error']); return; } $wrapper = new FTPWrapper($connection); $permFactory = new PermissionsFactory(); switch ($fstype) { case 'auto': $ftptype = $wrapper->systype(); if (strtolower($ftptype) == "unix") { $fsFactory = new FilesystemFactory($permFactory); } else { $fsFactory = new WindowsFilesystemFactory(); } break; case 'unix': $fsFactory = new FilesystemFactory($permFactory); break; case 'windows': $fsFactory = new WindowsFilesystemFactory(); break; } $manager = new FTPFilesystemManager($wrapper, $fsFactory); $dlVoter = new DownloaderVoter(); $ulVoter = new UploaderVoter(); $ulVoter->addDefaultFTPUploaders($wrapper); $crVoter = new CreatorVoter(); $crVoter->addDefaultFTPCreators($wrapper, $manager); $deVoter = new DeleterVoter(); $deVoter->addDefaultFTPDeleters($wrapper, $manager); $ftp = new FTP($manager, $dlVoter, $ulVoter, $crVoter, $deVoter); if (!$ftp) { $this->b['error'] = _("Error creating the FTP object"); backup_log($this->b['error']); return; } if (!$ftp->directoryExists(new Directory($path))) { backup_log(sprintf(_("Creating directory '%s'"), $path)); try { $ftp->create(new Directory($path), array(FTP::RECURSIVE => true)); } catch (\Exception $e) { $this->b['error'] = sprintf(_("Directory '%s' did not exist and we could not create it"), $path); backup_log($this->b['error']); backup_log($e->getMessage()); return; } } try { backup_log(_("Saving file to remote ftp")); $ftp->upload(new File($path . '/' . $this->b['_file'] . '.tgz'), $this->b['_tmpfile']); } catch (\Exception $e) { $this->b['error'] = _("Unable to upload file to the remote server"); backup_log($this->b['error']); backup_log($e->getMessage()); return; } //run maintenance on the directory $this->maintenance($s['type'], $path, $ftp); break; case 'awss3': //substitute variables if necessary $s['bucket'] = backup__($s['bucket']); $s['awsaccesskey'] = backup__($s['awsaccesskey']); $s['awssecret'] = backup__($s['awssecret']); $awss3 = new \S3($s['awsaccesskey'], $s['awssecret']); // Does this bucket already exist? $buckets = $awss3->listBuckets(); if (!in_array($s['bucket'], $buckets)) { // Create the bucket $awss3->putBucket($s['bucket'], \S3::ACL_PUBLIC_READ); } //copy file if ($awss3->putObjectFile($this->b['_tmpfile'], $s['bucket'], $this->b['name'] . "/" . $this->b['_file'] . '.tgz', \S3::ACL_PUBLIC_READ)) { dbug('S3 successfully uploaded your backup file.'); } else { dbug('S3 failed to accept your backup file'); } //run maintenance on the directory $this->maintenance($s['type'], $s, $awss3); break; case 'ssh': //substitute variables if necessary $s['path'] = backup__($s['path']); $s['user'] = backup__($s['user']); $s['host'] = backup__($s['host']); $destdir = $s['path'] . '/' . $this->b['_dirname']; //ensure directory structure $cmd = fpbx_which('ssh') . ' -o StrictHostKeyChecking=no -i '; $cmd .= $s['key'] . " -l " . $s['user'] . ' ' . $s['host'] . ' -p ' . $s['port']; $cmd .= " 'mkdir -p {$destdir}'"; exec($cmd, $output, $ret); if ($ret !== 0) { backup_log("SSH Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}"); } $output = null; //put file // Note that SCP (*unlike SSH*) needs IPv6 addresses wrapped in [ ]'s. Consistency is awesome. if (filter_var($s['host'], \FILTER_VALIDATE_IP, \FILTER_FLAG_IPV6)) { $scphost = "[" . $s['host'] . "]"; } else { $scphost = $s['host']; } $cmd = fpbx_which('scp') . ' -o StrictHostKeyChecking=no -i ' . $s['key'] . ' -P ' . $s['port']; $cmd .= " " . $this->b['_tmpfile'] . " " . $s['user'] . "@{$scphost}:{$destdir}"; exec($cmd, $output, $ret); if ($ret !== 0) { backup_log("SCP Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}"); } //run maintenance on the directory $this->maintenance($s['type'], $s); break; } } }
<?php // Bucket Name $bucket = "communitycloud1"; if (!class_exists('S3')) { require_once 'library/S3.php'; } //AWS access info (never ship live keys in source) if (!defined('awsAccessKey')) { define('awsAccessKey', 'YOUR-AWS-ACCESS-KEY'); } if (!defined('awsSecretKey')) { define('awsSecretKey', 'YOUR-AWS-SECRET-KEY'); } try { $s3 = new S3(awsAccessKey, awsSecretKey); $s3->putBucket($bucket, S3::ACL_PUBLIC_READ); $s3->listBuckets(); } catch (Exception $e) { echo $e->getMessage(); }
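// Editor's note: a minimal sketch, not from the original file. Hard-coding keys in
// source (as the snippet above originally did) leaks them through version control;
// getenv() keeps them outside the codebase. The environment variable names follow
// the common AWS convention but are an assumption here.
if (!defined('awsAccessKey')) {
    define('awsAccessKey', getenv('AWS_ACCESS_KEY_ID') ?: '');
}
if (!defined('awsSecretKey')) {
    define('awsSecretKey', getenv('AWS_SECRET_ACCESS_KEY') ?: '');
}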
/** * @method public taskDownload() Creates a downloadable folder which will contain a .csv file and a folder containing all the attachments * @return string Returns the downloadable URL * @author GDR<*****@*****.**> */ function taskDownload() { if (!is_dir(DOWNLOAD_TASK_PATH)) { mkdir(DOWNLOAD_TASK_PATH, 0777, true); } if (!is_dir(DOWNLOAD_TASK_PATH . "zipTask")) { mkdir(DOWNLOAD_TASK_PATH . "zipTask", 0777, true); } $caseUniqId = $this->data['caseUid']; //$caseUniqId = '8d082f712782302aafe8a62129f7cc24'; $this->layout = 'ajax'; $sorting = ''; $ProjId = NULL; $ProjName = NULL; $curCaseNo = NULL; $curCaseId = NULL; ######## get case number from case uniq ID ################ $getCaseNoPjId = $this->Easycase->getEasycase($caseUniqId); if ($getCaseNoPjId) { $curCaseNo = $getCaseNoPjId['Easycase']['case_no']; $curCaseId = $getCaseNoPjId['Easycase']['id']; $prjid = $getCaseNoPjId['Easycase']['project_id']; $is_active = intval($getCaseNoPjId['Easycase']['isactive']) ? 1 : 0; } else { //No task found with uniq_id $caseUniqId die; } ######## Checking user_project ################ $this->loadModel('ProjectUser'); $cond1 = array('conditions' => array('ProjectUser.user_id' => SES_ID, 'ProjectUser.company_id' => SES_COMP, 'Project.isactive' => 1, 'Project.id' => $prjid), 'fields' => array('DISTINCT Project.id', 'Project.uniq_id', 'Project.name', 'Project.short_name')); $this->ProjectUser->unbindModel(array('belongsTo' => array('User'))); $getProjId = $this->ProjectUser->find('first', $cond1); if ($getProjId) { $ProjId = $getProjId['Project']['id']; $projUniqId = $getProjId['Project']['uniq_id']; $ProjName = $getProjId['Project']['name']; $projShorName = $getProjId['Project']['short_name']; } else { //Session user is not assigned to project $prjid die; } $sqlcasedata = array(); $getPostCase = array(); if ($ProjId && $curCaseNo) { //$getPostCase = $this->Easycase->query("SELECT Easycase.*, User1.name AS created_by , User2.name as updated_by , User3.name AS Assigned_to FROM easycases as Easycase LEFT JOIN users User1 ON Easycase.user_id=User1.id LEFT JOIN users User2 ON Easycase.updated_by= User2.id LEFT JOIN users User3 ON Easycase.assign_to= User3.id WHERE Easycase.project_id='".$ProjId."' AND Easycase.case_no=".$curCaseNo." AND (Easycase.legend !=6) ORDER BY Easycase.actual_dt_created ASC"); $getPostCase = $this->Easycase->query("SELECT Easycase.*, User1.name AS created_by , User2.name as updated_by , User3.name AS Assigned_to FROM easycases as Easycase LEFT JOIN users User1 ON Easycase.user_id=User1.id LEFT JOIN users User2 ON Easycase.updated_by= User2.id LEFT JOIN users User3 ON Easycase.assign_to= User3.id WHERE Easycase.project_id='" . $ProjId . "' AND Easycase.case_no=" . $curCaseNo . " AND (Easycase.istype='1' OR Easycase.legend !=6) ORDER BY Easycase.actual_dt_created ASC"); $estimated_hours = isset($getPostCase['0']['Easycase']) && !empty($getPostCase['0']['Easycase']) ? $getPostCase['0']['Easycase']['estimated_hours'] : '0.0'; $getHours = $this->Easycase->query("SELECT SUM(hours) as hours FROM easycases as Easycase WHERE project_id='" . $ProjId . "' AND case_no=" . $curCaseNo . " AND reply_type=0"); $hours = $getHours[0][0]['hours']; // $getcompletedtask = $this->Easycase->query("SELECT completed_task FROM easycases as Easycase WHERE project_id='".$ProjId."' AND case_no=".$curCaseNo." and completed_task != 0 ORDER BY id DESC LIMIT 1"); // $completedtask = $getcompletedtask[0]['Easycase']['completed_task']; } else { //$ProjId and $curCaseNo not found. This should not happen, because it was handled previously. die; } $view = new View(); $cq = $view->loadHelper('Casequery'); $frmt = $view->loadHelper('Format'); $curdt = date('F_dS_Y', time()); $filename = strtoupper($projShorName) . '_TASK_' . $curCaseNo . "_" . $curdt . '.csv'; //$filename = $ProjName . "_#".$curCaseNo."_" . date("mdY", time()).'.csv'; $folder_name = strtoupper($projShorName) . '_TASK_' . $curCaseNo . "_" . $curdt; if (file_exists(DOWNLOAD_TASK_PATH . $folder_name)) { @chmod(DOWNLOAD_TASK_PATH . $folder_name . "/attachments", 0777); @array_map('unlink', glob(DOWNLOAD_TASK_PATH . $folder_name . "/attachments/*")); @rmdir(DOWNLOAD_TASK_PATH . $folder_name . '/attachments'); @array_map('unlink', glob(DOWNLOAD_TASK_PATH . $folder_name . "/*")); $isdel = rmdir(DOWNLOAD_TASK_PATH . $folder_name); } mkdir(DOWNLOAD_TASK_PATH . $folder_name, 0777, true); $file = fopen(DOWNLOAD_TASK_PATH . $folder_name . '/' . $filename, "w"); $csv_output = "Title, Description, Status, Priority, Task Type, Assigned To, Created By, Last Updated By, Created On, Estimated Hours, Hours Spent"; fputcsv($file, explode(',', $csv_output)); foreach ($getPostCase as $key => $case_list) { $status = ''; $priority = ''; $tasktype = ''; $taskTitle = ''; //if(!$key) { if (isset($case_list['Easycase']['title']) && $case_list['Easycase']['title']) { $taskTitle = $case_list['Easycase']['title']; } $status = $this->Format->displayStatus($case_list['Easycase']['legend']); if ($case_list['Easycase']['priority'] == 2) { $priority = 'Low'; } elseif ($case_list['Easycase']['priority'] == 1) { $priority = 'Medium'; } elseif ($case_list['Easycase']['priority'] == 0) { $priority = 'High'; } $types = $cq->getTypeArr($case_list['Easycase']['type_id'], $GLOBALS['TYPE']); if (count($types)) { $tasktype = $types['Type']['name']; } //} $arr = array(); $arr[] = $title = str_replace('"', '""', $case_list['Easycase']['title']); $arr[] = $description = strip_tags(str_replace('"', '""', $case_list['Easycase']['message'])); $arr[] = $status; $arr[] = $priority; $arr[] = $tasktype; if ($case_list['User3']['Assigned_to']) { $Assigned = $case_list['User3']['Assigned_to']; } else { $Assigned = $case_list['User1']['created_by']; } $arr[] = $Assigned; $arr[] = $crby = $case_list['User1']['created_by']; $arr[] = $updateby = $case_list['User2']['updated_by']; $tz = $view->loadHelper('Tmzone'); $temp_dat = $tz->GetDateTime(SES_TIMEZONE, TZ_GMT, TZ_DST, TZ_CODE, $case_list['Easycase']['actual_dt_created'], "datetime"); $arr[] = $crted = date('m/d/Y H:i:s', strtotime($temp_dat)); //$arr[] = $crted =date('m/d/Y H:i:s', strtotime($case_list['Easycase']['actual_dt_created'])); $estmthrs = ''; $hrspent = ''; if ($case_list['Easycase']['istype'] == 1) { $estmthrs = $estimated_hours; $hrspent = $hours; } else { $estimated_hours = ''; $hrspent = $case_list['Easycase']['hours']; } $arr[] = $estimated_hours; $arr[] = $hrspent; $easycaseids[] = $case_list['Easycase']['id']; $retval = fputcsv($file, $arr); //$csv_output .= $title.",".$status.",".$priority.",".$tasktype.",".$description.",".$Assigned.",".$crby.",".$updateby.",".$estmthrs.",".$hrspent.",".$crted.",".$modified; } fclose($file); if ($retval) { $filesarr = ClassRegistry::init('CaseFile')->find('all', array('conditions' => array('CaseFile.easycase_id' => $easycaseids, 'CaseFile.project_id' => $ProjId, 'CaseFile.company_id' => SES_COMP))); if ($filesarr) { foreach ($filesarr as $k => $value) { if ($value['CaseFile']['downloadurl']) { if (!isset($fp)) { $fp = fopen(DOWNLOAD_TASK_PATH . $folder_name . '/cloud.txt', 'a+'); } fwrite($fp, "\n\t" . $value['CaseFile']['downloadurl'] . "\n"); $temp_url = $value['CaseFile']['downloadurl']; } else { if (!file_exists(DOWNLOAD_TASK_PATH . $folder_name . '/attachments')) { mkdir(DOWNLOAD_TASK_PATH . $folder_name . "/attachments", 0777, true); } $temp_url = $frmt->generateTemporaryURL(DIR_CASE_FILES_S3 . $value['CaseFile']['file']); $img = DOWNLOAD_TASK_PATH . $folder_name . "/attachments/" . $value['CaseFile']['file']; $resp = file_put_contents($img, file_get_contents($temp_url)); } } if (isset($fp)) { fclose($fp); } } $zipfile_name = strtoupper($projShorName) . '_TASK_' . $curCaseNo . "_" . $curdt . '.zip'; $zipfile = DOWNLOAD_TASK_PATH . 'zipTask/' . $zipfile_name; $return = $this->Format->zipFile(DOWNLOAD_TASK_PATH . $folder_name, $zipfile, 1); if ($return) { if (file_exists(DOWNLOAD_TASK_PATH . $folder_name)) { @array_map('unlink', glob(DOWNLOAD_TASK_PATH . $folder_name . "/attachments/*")); @rmdir(DOWNLOAD_TASK_PATH . $folder_name . '/attachments'); @array_map('unlink', glob(DOWNLOAD_TASK_PATH . $folder_name . "/*")); $isdel = rmdir(DOWNLOAD_TASK_PATH . $folder_name); } if (USE_S3 == 0) { $download_url = HTTP_ROOT . DOWNLOAD_S3_TASK_PATH . $zipfile_name; $this->set('downloadurl', $download_url); } else { $s3 = new S3(awsAccessKey, awsSecretKey); $s3->putBucket(DOWNLOAD_BUCKET_NAME, S3::ACL_PRIVATE); $download_url = DOWNLOAD_S3_TASK_PATH . $zipfile_name; $s3_download_url = "https://s3.amazonaws.com/" . DOWNLOAD_BUCKET_NAME . '/' . DOWNLOAD_S3_TASK_PATH . $zipfile_name; $returnvalue = $s3->putObjectFile(DOWNLOAD_S3_TASK_PATH . $zipfile_name, DOWNLOAD_BUCKET_NAME, $download_url, S3::ACL_PUBLIC_READ); if ($returnvalue) { unlink(DOWNLOAD_S3_TASK_PATH . $zipfile_name); } $this->set('downloadurl', $s3_download_url); } $this->set('projName', $ProjName); $this->set('projId', $ProjId); $this->set('caseUid', $caseUniqId); $this->set('caseNum', $curCaseNo); $this->set('taskTitle', $taskTitle); $this->set('zipfilename', $zipfile_name); } else { $this->set('derror', 'Oops! An error occurred while creating the zip file.'); } } else { $this->set('derror', 'Oops! An error occurred while creating the task CSV file.'); } }
} } logxx("Total backup size:" . $bsize); ####### STARTING AMAZON S3 MODE if ($_CONFIG['cron_amazon_active']) { include_once "classes/S3.php"; logxx(); if (!$_CONFIG['cron_amazon_ssl']) { $amazon_ssl = false; } else { $amazon_ssl = true; } $s3 = new S3($_CONFIG['cron_amazon_awsAccessKey'], $_CONFIG['cron_amazon_awsSecretKey'], $amazon_ssl); logxx("AMAZON S3: Starting communication with the Amazon S3 server...ssl mode " . (int) $amazon_ssl); $buckets = $s3->listBuckets(); if ($s3->putBucket($_CONFIG['cron_amazon_bucket'], "private") || @in_array($_CONFIG['cron_amazon_bucket'], $buckets)) { if ($s3->putObjectFile($clonerPath . "/" . $file, $_CONFIG['cron_amazon_bucket'], $_CONFIG['cron_amazon_dirname'] . "/" . baseName($file), "private")) { logxx("AMAZON S3: File copied to {" . $_CONFIG['cron_amazon_bucket'] . "}/" . $_CONFIG['cron_amazon_dirname'] . "/" . $file); } else { logxx("AMAZON S3: Failed to copy file to {" . $_CONFIG['cron_amazon_bucket'] . "}/" . $_CONFIG['cron_amazon_dirname'] . "/" . $file); exit; } } else { logxx("AMAZON S3: Unable to create bucket " . $_CONFIG['cron_amazon_bucket'] . " (it may already exist and/or be owned by someone else)!"); exit; } } ###### END ####### STARTING DROPBOX MODE if ($_CONFIG['cron_dropbox_active']) { include_once "classes/DropboxClient.php";
//AWS access info if (!defined('awsAccessKey')) { define('awsAccessKey', 'CHANGETHIS'); } if (!defined('awsSecretKey')) { define('awsSecretKey', 'CHANGETHISTOO'); } $s3 = new S3(awsAccessKey, awsSecretKey); //check whether a form was submitted if (isset($_POST['Submit'])) { //retrieve post variables $fileName = $_FILES['theFile']['name']; $fileTempName = $_FILES['theFile']['tmp_name']; $fileSize = $_FILES['theFile']['size']; $fileUrl = "https://s3-eu-west-1.amazonaws.com/photo-upload/" . $fileName; $s3->putBucket("photo-upload", S3::ACL_PUBLIC_READ); //move the file if ($s3->putObjectFile($fileTempName, "photo-upload", $fileName, S3::ACL_PUBLIC_READ)) { echo "We successfully uploaded your file."; $sql = "UPDATE Owner \nSET Owner_img_path='" . $fileUrl . "'\nWHERE Owner_uname='" . $uname . "'"; if (mysqli_query($conn, $sql)) { echo "Owner record updated successfully."; } else { echo "Error: " . $sql . "<br>" . mysqli_error($conn); } } else { echo "Something went wrong while uploading your file... sorry."; } mysqli_close($conn); } ?>
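// Editor's note: a hedged rewrite of the UPDATE above as a mysqli prepared
// statement, since concatenating $fileUrl and $uname into the SQL string is
// injectable. Assumes the same $conn, $fileUrl, and $uname variables as the
// snippet above.
$stmt = mysqli_prepare($conn, "UPDATE Owner SET Owner_img_path = ? WHERE Owner_uname = ?");
if ($stmt) {
    mysqli_stmt_bind_param($stmt, "ss", $fileUrl, $uname); // values never touch the SQL text
    if (mysqli_stmt_execute($stmt)) {
        echo "Owner record updated successfully.";
    } else {
        echo "Error: " . mysqli_stmt_error($stmt);
    }
    mysqli_stmt_close($stmt);
}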
public function process($pa_parameters) { $va_files = $pa_parameters["FILES"]; $vn_files_sent = 0; $table = $pa_parameters["TABLE"]; $field = $pa_parameters["FIELD"]; $pk = $pa_parameters["PK"]; $id = $pa_parameters["PK_VAL"]; $o_eventlog = new Eventlog(); $va_report = array('errors' => array(), 'notes' => array()); // AWS access info $access_key_id = $pa_parameters["MIRROR_INFO"]["access_key_id"]; $secret_access_key = $pa_parameters["MIRROR_INFO"]["secret_access_key"]; if (!defined('awsAccessKey')) { define('awsAccessKey', $access_key_id); } if (!defined('awsSecretKey')) { define('awsSecretKey', $secret_access_key); } # # Connect to AS3 # $s3 = new S3(awsAccessKey, awsSecretKey); foreach ($va_files as $va_file_info) { $vs_file_path = $va_file_info["FILE_PATH"]; if ($pa_parameters["DELETE"]) { # # Delete file from remote server # $bucketName = $pa_parameters["MIRROR_INFO"]["bucket"]; $deleteFile = $va_file_info["FILENAME"]; if ($this->debug) { print "DEBUG: DELETING {$deleteFile} FROM remote server\n"; } $s3->deleteObject($bucketName, baseName($deleteFile)); $va_report['notes'][] = _t("Deleted %1 from remote server", $deleteFile); $vn_files_sent++; } else { # # Upload file to remote server # if (file_exists($vs_file_path)) { # # Create BUCKET # $bucketName = $pa_parameters["MIRROR_INFO"]["bucket"]; if ($this->debug) { print "DEBUG: CREATING BUCKET {$bucketName}\n"; } $s3->putBucket($bucketName, S3::ACL_PUBLIC_READ); $putFile = $va_file_info["HASH"] . "/" . $va_file_info["FILENAME"]; # fake directories for AS3 if ($this->debug) { print "DEBUG: SENDING {$putFile} TO remote " . $bucketName . "\n"; } $s3->putObjectFile($vs_file_path, $bucketName, $putFile, S3::ACL_PUBLIC_READ); $va_report['notes'][] = _t("Sent %1 to remote %2", $putFile, $bucketName); $vn_files_sent++; } else { # file to mirror is missing $this->error->setError(570, "File to mirror '{$vs_file_path}' does not exist", "as3mirror->process()"); } } } if ($vn_files_sent == 0) { // failed mirror (check this first: zero sent would otherwise be misreported as partial) $vn_mirror_code = "FAIL"; } else { if ($vn_files_sent < sizeof($va_files)) { // partial mirror $vn_mirror_code = "PARTIAL"; } else { // successful mirror $vn_mirror_code = "SUCCESS"; } } # # Update record # $o_dm =& Datamodel::load(); if ($table_obj = $o_dm->getTableInstance($table)) { if ($table_obj->hasField($field)) { if ($table_obj->load($id)) { $md = $table_obj->get($field); if (!is_array($md["MIRROR_STATUS"])) { $md["MIRROR_STATUS"] = array(); } $md["MIRROR_STATUS"][$pa_parameters["MIRROR"]] = $vn_mirror_code; $table_obj->setMediaInfo($field, $md); $table_obj->setMode(ACCESS_WRITE); $table_obj->update(); if ($table_obj->numErrors()) { $o_eventlog->log(array("CODE" => "ERR", "SOURCE" => "as3mirror->process", "MESSAGE" => "Could not update mirror status for mirror '" . $pa_parameters["MIRROR"] . "' on '{$table}'; row_id={$id}\n")); $va_report['errors'][] = _t("Could not update mirror status for mirror '%1' on '%2'; row_id=%3", $pa_parameters["MIRROR"], $table, $id); } } } } return $va_report; }
function ia_upload($data, $identifier, $fallbackid, $filename, $accesskey, $secretkey) { global $l; $status = 0; $bucketName = $identifier; if (!defined('awsAccessKey')) { define('awsAccessKey', $accesskey); } if (!defined('awsSecretKey')) { define('awsSecretKey', $secretkey); } if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) { exit("\nERROR: CURL extension not loaded\n\n"); } $s3 = new S3(awsAccessKey, awsSecretKey, true, 's3.us.archive.org'); if ($s3->putBucket($bucketName, S3::ACL_PUBLIC_READ)) { $l->a("Created bucket {$bucketName}" . PHP_EOL . '<br>'); } else { $l->a("information code 55: S3::putBucket(): Unable to create bucket (it may already exist)\n<br>"); } if ($s3->putObject($data, $bucketName, $filename, S3::ACL_PUBLIC_READ)) { $l->a("S3::putObject(): File copied to {$bucketName}/" . $filename . PHP_EOL . '<br>'); } else { $l->a("error code 34: S3::putObject(): Failed to copy file\n<br>"); $status = 34; } return $status; }
function test_s3($accesskey, $secretkey, $bucket, $directory = '', $ssl) { if (empty($accesskey) || empty($secretkey) || empty($bucket)) { return __('Missing one or more required fields.', 'it-l10n-backupbuddy'); } $bucket_requirements = __("Your bucket name must meet certain criteria. It must fulfill the following: \n\n Characters may be lowercase letters, numbers, periods (.), and dashes (-). \n Must start with a number or letter. \n Must be between 3 and 63 characters long. \n Must not be formatted as an IP address (e.g., 192.168.5.4). \n Should not contain underscores (_). \n Should not end with a dash. \n Cannot contain two, adjacent periods. \n Cannot contain dashes next to periods.", 'it-l10n-backupbuddy'); if (preg_match("/^[a-z0-9][a-z0-9\\-\\.]*(?<!-)\$/i", $bucket) == 0) { // Starts with a-z or 0-9; middle is a-z, 0-9, -, or .; cannot end in a dash. return __('Your bucket name contains invalid characters.', 'it-l10n-backupbuddy') . ' ' . $bucket_requirements; } if (strlen($bucket) < 3 || strlen($bucket) > 63) { // Must be between 3 and 63 characters long return __('Your bucket must be between 3 and 63 characters long.', 'it-l10n-backupbuddy') . ' ' . $bucket_requirements; } if (strstr($bucket, '.-') !== false || strstr($bucket, '-.') !== false || strstr($bucket, '..') !== false) { // Bucket names cannot contain dashes next to periods (e.g., "my-.bucket.com" and "my.-bucket" are invalid) return __('Your bucket contains a period next to a dash.', 'it-l10n-backupbuddy') . ' ' . $bucket_requirements; } require_once dirname(__FILE__) . '/lib/s3/s3.php'; $s3 = new S3($accesskey, $secretkey); if ($ssl != '1') { S3::$useSSL = false; } if ($s3->getBucketLocation($bucket) === false) { // Easy way to see if bucket already exists. $s3->putBucket($bucket, S3::ACL_PRIVATE); } if (!empty($directory)) { $directory = $directory . '/'; } if ($s3->putObject(__('Upload test for BackupBuddy for Amazon S3', 'it-l10n-backupbuddy'), $bucket, $directory . 'backupbuddy.txt', S3::ACL_PRIVATE)) { // Success... just delete temp test file later... } else { return __('Unable to upload. Verify your keys, bucket name, and account permissions.', 'it-l10n-backupbuddy'); } if (!S3::deleteObject($bucket, $directory . 'backupbuddy.txt')) { return __('Partial success. Could not delete temp file.', 'it-l10n-backupbuddy'); } return true; // Success! }
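// Editor's note: a sketch that folds the three checks above into one helper so each
// failure reports the rule it broke. The rules are the ones quoted in
// $bucket_requirements; this helper is illustrative, not part of the plugin.
function validate_bucket_name($bucket)
{
    if (strlen($bucket) < 3 || strlen($bucket) > 63) {
        return 'Bucket name must be between 3 and 63 characters long.';
    }
    if (!preg_match('/^[a-z0-9][a-z0-9.-]*$/', $bucket)) {
        return 'Bucket name may only contain lowercase letters, numbers, periods, and dashes, and must start with a letter or number.';
    }
    if (substr($bucket, -1) === '-') {
        return 'Bucket name must not end with a dash.';
    }
    if (preg_match('/\.\.|\.-|-\./', $bucket)) {
        return 'Bucket name cannot contain adjacent periods or dashes next to periods.';
    }
    return true; // passes all name rules
}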
<?php //include the S3 stuff require_once 'myS3.php'; //instantiate the class $s3 = new S3(); //choose a bucket name $myBucketName = "robs-1st-bucket"; //retrieve post variables $file = $_FILES['Filedata']; if ($file == '') { $file = $_FILES['file']; } $fileName = $file['name']; $fileTempName = $file['tmp_name']; //create a new bucket (this will be ignored if bucket is already there) $s3->putBucket($myBucketName, S3::ACL_PUBLIC_READ); //move the file if ($s3->putObjectFile($fileTempName, $myBucketName, $fileName, S3::ACL_PUBLIC_READ)) { echo "Successfully uploaded the file."; } else { echo "Something went wrong while uploading the file..."; }
exit("\nERROR: No such file: {$uploadFile}\n\n"); } // Check for CURL if (!extension_loaded('curl') && !@dl(PHP_SHLIB_SUFFIX == 'so' ? 'curl.so' : 'php_curl.dll')) { exit("\nERROR: CURL extension not loaded\n\n"); } // Pointless without your keys! if (awsAccessKey == 'change-this' || awsSecretKey == 'change-this') { exit("\nERROR: AWS access information required\n\nPlease edit the following lines in this file:\n\n" . "define('awsAccessKey', 'change-me');\ndefine('awsSecretKey', 'change-me');\n\n"); } // Instantiate the class $s3 = new S3(awsAccessKey, awsSecretKey); // List your buckets: echo "S3::listBuckets(): " . print_r($s3->listBuckets(), 1) . "\n"; // Create a bucket with public read access if ($s3->putBucket($bucketName, S3::ACL_PUBLIC_READ)) { echo "Created bucket {$bucketName}" . PHP_EOL; // Put our file (also with public read access) if ($s3->putObjectFile($uploadFile, $bucketName, baseName($uploadFile), S3::ACL_PUBLIC_READ)) { echo "S3::putObjectFile(): File copied to {$bucketName}/" . baseName($uploadFile) . PHP_EOL; // Get the contents of our bucket $contents = $s3->getBucket($bucketName); echo "S3::getBucket(): Files in bucket {$bucketName}: " . print_r($contents, 1); // Get object info $info = $s3->getObjectInfo($bucketName, baseName($uploadFile)); echo "S3::getObjectInfo(): Info for {$bucketName}/" . baseName($uploadFile) . ': ' . print_r($info, 1); // You can also fetch the object into memory // var_dump("S3::getObject() to memory", $s3->getObject($bucketName, baseName($uploadFile))); // Or save it into a file (write stream) // var_dump("S3::getObject() to savefile.txt", $s3->getObject($bucketName, baseName($uploadFile), 'savefile.txt')); // Or write it to a resource (write stream)
} else { $data = $mege; } } } } } } //$data=getdecodevalue($mege,$part->type); fputs($fp, $data); fclose($fp); $fpos += 1; $size = floor(filesize($savedirpath . $filename) / 1024); // s3 bucket start $s3 = new S3(awsAccessKey, awsSecretKey); $s3->putBucket(BUCKET_NAME, S3::ACL_PRIVATE); $folder_orig_Name = 'files/case_files/' . trim($filename); $s3->putObjectFile($savedirpath . $filename, BUCKET_NAME, $folder_orig_Name, S3::ACL_PRIVATE); //s3 bucket end unlink($savedirpath . $filename); $query_file = "INSERT INTO case_files (easycase_id,comment_id,file,thumb,file_size,count,isactive) VALUES\r\n('" . $last_id . "','0','" . $filename . "','','" . $size . "','0','1')"; $result_file = mysql_query($query_file) or die('Query failed: ' . mysql_error()); $query_up = "UPDATE easycases SET format='1' WHERE id= '" . $row['id'] . "' "; $result_up = mysql_query($query_up) or die('Query failed: ' . mysql_error()); $query_new = "UPDATE easycases SET format='1' WHERE id= '" . $last_id . "' "; $result_new = mysql_query($query_new) or die('Query failed: ' . mysql_error()); } } } } }
//AWS access info (never ship live keys in source) if (!defined('awsAccessKey')) { define('awsAccessKey', 'YOUR-AWS-ACCESS-KEY'); } if (!defined('awsSecretKey')) { define('awsSecretKey', 'YOUR-AWS-SECRET-KEY'); } //instantiate the class $s3 = new S3(awsAccessKey, awsSecretKey); //check whether a form was submitted if (isset($_POST['Submit'])) { //retrieve post variables $fileName = $_FILES['theFile']['name']; $fileTempName = $_FILES['theFile']['tmp_name']; //create a new bucket $result = $s3->putBucket("dbsystems", S3::ACL_PUBLIC_READ); //move the file if ($s3->putObjectFile($fileTempName, "dbsystems", $fileName, S3::ACL_PUBLIC_READ)) { echo "We successfully uploaded your file."; $image_url = "http://s3-us-west-2.amazonaws.com/dbsystems/" . $fileName; $query = "\n UPDATE user\n SET\n picture_url = :url\n WHERE\n _id = :id\n "; $query_params = array(':url' => $image_url, ':id' => $_SESSION['user']['_id']); try { $stmt = $db->prepare($query); $result = $stmt->execute($query_params); } catch (PDOException $ex) { die("Failed to run query: " . $ex->getMessage()); } } else { echo "Something went wrong while uploading your file... sorry."; }
//AWS access info (never ship live keys in source) if (!defined('awsAccessKey')) { define('awsAccessKey', 'YOUR-AWS-ACCESS-KEY'); } if (!defined('awsSecretKey')) { define('awsSecretKey', 'YOUR-AWS-SECRET-KEY'); } //instantiate the class $s3 = new S3(awsAccessKey, awsSecretKey); //check whether a form was submitted if (isset($_POST['Submit'])) { //retrieve post variables $fileName = $_FILES['theFile']['name']; $fileTempName = $_FILES['theFile']['tmp_name']; //create a new bucket $result = $s3->putBucket("walphotobucket", S3::ACL_PUBLIC_READ); //move the file if ($s3->putObjectFile($fileTempName, "walphotobucket", $fileName, S3::ACL_PUBLIC_READ)) { echo "We successfully uploaded your file."; $image_url = "http://walphotobucket.s3.amazonaws.com/" . $fileName; $query = "\n UPDATE users\n SET\n picture_url = :url\n WHERE\n id = :id\n "; $query_params = array(':url' => $image_url, ':id' => $_SESSION['user']['id']); try { $stmt = $db->prepare($query); $result = $stmt->execute($query_params); } catch (PDOException $ex) { die("Failed to run query: " . $ex->getMessage()); } } else { echo "Something went wrong while uploading your file... sorry."; }
function uploadProfilePhoto($name, $path) { if ($name) { $oldname = strtolower($name); $ext = substr(strrchr($oldname, "."), 1); if ($ext != 'gif' && $ext != 'jpg' && $ext != 'jpeg' && $ext != 'png' && $ext != 'bmp') { return "ext"; } else { $targetpath = $path . $name; $newname = $name; //md5(time().$count).".".$ext; if (defined('USE_S3') && USE_S3) { // s3 bucket start $s3 = new S3(awsAccessKey, awsSecretKey); $s3->putBucket(BUCKET_NAME, S3::ACL_PRIVATE); $folder_orig_Name = 'files/photos/' . trim($newname); //$s3->putObjectFile($targetpath,BUCKET_NAME ,$folder_orig_Name ,S3::ACL_PRIVATE); $s3->copyObject(BUCKET_NAME, DIR_USER_PHOTOS_THUMB . trim($newname), BUCKET_NAME, $folder_orig_Name, S3::ACL_PRIVATE); //s3 bucket end //unlink($targetpath); } return $newname; } } else { return false; } }
function test_s3($accesskey, $secretkey, $bucket, $directory = '', $ssl) { if (empty($accesskey) || empty($secretkey) || empty($bucket)) { return 'Missing one or more required fields.'; } require_once dirname(__FILE__) . '/lib/s3/s3.php'; $s3 = new S3($accesskey, $secretkey); if ($ssl != '1') { S3::$useSSL = false; } if ($s3->getBucketLocation($bucket) === false) { // Easy way to see if bucket already exists. $s3->putBucket($bucket, S3::ACL_PUBLIC_READ); } if (!empty($directory)) { $directory = $directory . '/'; } if ($s3->putObject('Upload test for BackupBuddy for Amazon S3', $bucket, $directory . 'backupbuddy.txt', S3::ACL_PRIVATE)) { // Success... just delete temp test file later... } else { return 'Unable to upload. Verify your keys, bucket name, and account permissions.'; } if (!S3::deleteObject($bucket, $directory . 'backupbuddy.txt')) { return 'Partial success. Could not delete temp file.'; } return true; // Success! }
#!/usr/bin/php <?php require_once '../application/Initializer.php'; // the environment must be given as the first argument if (count($argv) < 2) { echo 'You must specify an environment as the first argument'; exit(-1); } $init = new Initializer($argv[1]); $config = $init->getConfig(); Zend_Loader::registerAutoload(); $s3 = new S3($config->aws->accessKey, $config->aws->secretKey); $buckets = array($config->aws->publicBucket => S3::ACL_PUBLIC_READ, $config->aws->contentBucket => S3::ACL_PRIVATE); $existingBuckets = $s3->listBuckets(); foreach ($buckets as $name => $acl) { if (!in_array($name, $existingBuckets)) { $s3->putBucket($name, $acl); echo "Bucket Added: {$name}\n"; } }
function cron_aws($aws_accesskey, $aws_secretkey, $aws_bucket, $aws_directory, $file, $delete_after_int = 0) { $details = ''; $details .= "AWS Access Key: " . $aws_accesskey . "\n"; if ($this->_debug) { $details .= "AWS Secret Key: " . $aws_secretkey . "\n"; } else { $details .= "AWS Secret Key: *hidden*\n"; } $details .= "AWS Bucket: " . $aws_bucket . "\n"; $details .= "AWS Directory: " . $aws_directory . "\n"; $details .= "Local File & Path: " . $this->_options['backup_directory'] . '/' . basename($file) . "\n"; $details .= "Filename: " . basename($file) . "\n"; $this->log('Starting Amazon S3 cron. Details: ' . $details); require_once dirname(__FILE__) . '/lib/s3/s3.php'; $s3 = new S3($aws_accesskey, $aws_secretkey); if ($this->_options['aws_ssl'] != '1') { S3::$useSSL = false; } $this->log('About to put bucket to Amazon S3 cron.'); $s3->putBucket($aws_bucket, S3::ACL_PUBLIC_READ); $this->log('About to put object (the file) to Amazon S3 cron.'); if ($s3->putObject(S3::inputFile($file), $aws_bucket, $aws_directory . '/' . basename($file), S3::ACL_PRIVATE)) { // success $this->log('SUCCESS sending to Amazon S3!'); } else { $this->mail_notice('ERROR #9002! Failed sending file to Amazon S3. Details:' . "\n\n" . $details); $this->log('FAILURE sending to Amazon S3! Details: ' . $details, 'error'); } if ($delete_after_int == 1) { $this->log('Deleting backup file after Amazon S3 cron.'); unlink($file); $this->log('Done deleting backup file after Amazon S3 cron.'); } }
public static function connectAndAuthorize($key, $secret, $bucket, $email, $auth_type = 'FULL_CONTROL') { require_once CASH_PLATFORM_ROOT . '/lib/S3.php'; $s3_instance = new S3($key, $secret); $bucket_exists = $s3_instance->getBucket($bucket); if (!$bucket_exists) { $bucket_exists = $s3_instance->putBucket($bucket); } if ($bucket_exists) { $acp = $s3_instance->getAccessControlPolicy($bucket); if (is_array($acp)) { $acp['acl'][] = array('email' => $email, 'permission' => $auth_type); return $s3_instance->setAccessControlPolicy($bucket, '', $acp); } else { return false; } } }
public function upload_remote_backup($filename) { // Init global $config; $file_path = SITE_PATH . '/data/backups/' . $filename; // Amazon S3 if ($config['backup_type'] == 'amazon') { // Set variables $bucket_name = 'synala'; // Init client include_once SITE_PATH . '/data/lib/S3.php'; $s3_client = new S3($config['backup_amazon_access_key'], $config['backup_amazon_secret_key']); // Create bucket, if needed $buckets = $s3_client->listBuckets(); if (!in_array($bucket_name, $buckets)) { $s3_client->putBucket($bucket_name, S3::ACL_PRIVATE); } $s3_files_tmp = $s3_client->getBucket($bucket_name); $s3_files = array_keys($s3_files_tmp); // Upload backup file $s3_client->putObjectFile($file_path, $bucket_name, $filename); // Remote FTP } elseif ($config['backup_type'] == 'ftp') { if ($config['backup_ftp_type'] == 'ftps') { $ftp_client = ftp_ssl_connect($config['backup_ftp_host'], 22, 360); } else { $ftp_client = ftp_connect($config['backup_ftp_host'], $config['backup_ftp_port']); } ftp_login($ftp_client, $config['backup_ftp_username'], $config['backup_ftp_password']); // Set transfer mode //$is_passive = $config['remote_backup_ftp_mode'] == 'passive' ? true : false; //ftp_pasv($ftp_client, $is_passive); // Upload file //if ($config['remote_backup_ftp_dir'] != '') { $filename = $config['remote_backup_ftp_dir'] . '/' . $filename; } @ftp_put($ftp_client, $filename, SITE_PATH . "/data/backups/{$filename}", FTP_BINARY); ftp_close($ftp_client); // Tarsnap } elseif ($config['backup_type'] == 'tarsnap') { system($config['backup_tarsnap_location'] . " -cf {$config['backup_tarsnap_archive']} " . SITE_PATH); } // Delete local file, if needed //if ($config['remote_backup_retain_local'] != 1 && is_file($file_path)) { // @unlink($file_path); //} }
function store_backup() { foreach ($this->b['storage_servers'] as $s) { $s = $this->s[$s]; switch ($s['type']) { case 'local': $path = backup__($s['path']) . '/' . $this->b['_dirname']; //ensure directory structure if (!is_dir($path)) { mkdir($path, 0755, true); } //would rather use the native copy() here, but by default //php doesn't support files > 2GB //see here for a possible solution: //http://ca3.php.net/manual/en/function.fopen.php#37791 $cmd[] = fpbx_which('cp'); $cmd[] = $this->b['_tmpfile']; $cmd[] = $path . '/' . $this->b['_file'] . '.tgz'; exec(implode(' ', $cmd), $output, $status); unset($cmd); if ($status !== 0) { $this->b['error'] = 'Error copying ' . $this->b['_tmpfile'] . ' to ' . $path . '/' . $this->b['_file'] . '.tgz: ' . implode("\n", $output); backup_log($this->b['error']); } unset($output); //run maintenance on the directory $this->maintenance($s['type'], $s); break; case 'email': //TODO: set agent to something informative, including fpbx & backup versions $email_options = array('useragent' => 'freepbx', 'protocol' => 'mail'); $email = new \CI_Email(); $from = $this->amp_conf['AMPBACKUPEMAILFROM'] ? $this->amp_conf['AMPBACKUPEMAILFROM'] : '*****@*****.**'; $msg[] = _('Name') . ': ' . $this->b['name']; $msg[] = _('Created') . ': ' . date('r', $this->b['_ctime']); $msg[] = _('Files') . ': ' . $this->manifest['file_count']; $msg[] = _('Mysql Db\'s') . ': ' . $this->manifest['mysql_count']; $msg[] = _('astDb\'s') . ': ' . $this->manifest['astdb_count']; $email->from($from); $email->to(backup__($s['addr'])); $email->subject(_('Backup') . ' ' . $this->b['name']); $body = implode("\n", $msg); // If the backup file is more than 25MB, yell $encodedsize = ceil(filesize($this->b['_tmpfile']) / 3) * 4; if ($encodedsize > 26214400) { $email->subject(_('Backup ERROR (exceeded SMTP limits)') . ' ' . $this->b['name']); $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the maximum SMTP limits of 25MB. It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n"); } elseif ($encodedsize > $s['maxsize']) { $email->subject(_('Backup ERROR (exceeded soft limit)') . ' ' . $this->b['name']); $email->message(_('BACKUP NOT ATTACHED') . "\n" . _('The backup file exceeded the soft limit set in SMTP configuration (%s bytes). It was not attempted to be sent. Please shrink your backup, or use a different method of transferring your backup.') . "\n{$body}\n"); } else { $email->message($body); $email->attach($this->b['_tmpfile']); } $email->send(); unset($msg); break; case 'ftp': //substitute variables if necessary $s['host'] = backup__($s['host']); $s['port'] = backup__($s['port']); $s['user'] = backup__($s['user']); $s['password'] = backup__($s['password']); $s['path'] = backup__($s['path']); $ftp = @ftp_connect($s['host'], $s['port']); if ($ftp === false) { $this->b['error'] = _("Error connecting to the FTP Server... Check your host name or DNS"); backup_log($this->b['error']); return $ftp; } if (ftp_login($ftp, $s['user'], $s['password'])) { //choose passive/active transfer mode ftp_pasv($ftp, $s['transfer'] == 'passive'); //switch to directory. If we fail, build directory structure and try again if (!@ftp_chdir($ftp, $s['path'] . '/' . $this->b['_dirname'])) { //ensure directory structure @ftp_mkdir($ftp, $s['path']); @ftp_mkdir($ftp, $s['path'] . '/' . $this->b['_dirname']); ftp_chdir($ftp, $s['path'] . '/' . $this->b['_dirname']); } //copy file ftp_put($ftp, $this->b['_file'] . '.tgz', $this->b['_tmpfile'], FTP_BINARY); //run maintenance on the directory $this->maintenance($s['type'], $s, $ftp); //release handle ftp_close($ftp); } else { $this->b['error'] = _("Error connecting to the FTP Server..."); backup_log($this->b['error']); } break; case 'awss3': //substitute variables if necessary $s['bucket'] = backup__($s['bucket']); $s['awsaccesskey'] = backup__($s['awsaccesskey']); $s['awssecret'] = backup__($s['awssecret']); $awss3 = new \S3($s['awsaccesskey'], $s['awssecret']); // Does this bucket already exist? $buckets = $awss3->listBuckets(); if (!in_array($s['bucket'], $buckets)) { // Create the bucket $awss3->putBucket($s['bucket'], \S3::ACL_PUBLIC_READ); } //copy file if ($awss3->putObjectFile($this->b['_tmpfile'], $s['bucket'], $this->b['name'] . "/" . $this->b['_file'] . '.tgz', \S3::ACL_PUBLIC_READ)) { dbug('S3 successfully uploaded your backup file.'); } else { dbug('S3 failed to accept your backup file'); } //run maintenance on the directory $this->maintenance($s['type'], $s, $awss3); break; case 'ssh': //substitute variables if necessary $s['path'] = backup__($s['path']); $s['user'] = backup__($s['user']); $s['host'] = backup__($s['host']); $destdir = $s['path'] . '/' . $this->b['_dirname']; //ensure directory structure $cmd = fpbx_which('ssh') . ' -o StrictHostKeyChecking=no -i '; $cmd .= $s['key'] . " -l " . $s['user'] . ' ' . $s['host'] . ' -p ' . $s['port']; $cmd .= " 'mkdir -p {$destdir}'"; exec($cmd, $output, $ret); if ($ret !== 0) { backup_log("SSH Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}"); } $output = null; //put file // Note that SCP (*unlike SSH*) needs IPv6 addresses wrapped in [ ]'s. Consistency is awesome. if (filter_var($s['host'], \FILTER_VALIDATE_IP, \FILTER_FLAG_IPV6)) { $scphost = "[" . $s['host'] . "]"; } else { $scphost = $s['host']; } $cmd = fpbx_which('scp') . ' -o StrictHostKeyChecking=no -i ' . $s['key'] . ' -P ' . $s['port']; $cmd .= " " . $this->b['_tmpfile'] . " " . $s['user'] . "@{$scphost}:{$destdir}"; exec($cmd, $output, $ret); if ($ret !== 0) { backup_log("SCP Error ({$ret}) - Received " . json_encode($output) . " from {$cmd}"); } //run maintenance on the directory $this->maintenance($s['type'], $s); break; } } }