/**
 * Stores $content in S3 under $path, attaching a MIME type sniffed from the
 * payload when detection succeeds.
 *
 * @param string $path    Destination key
 * @param string $content Raw bytes to store
 * @throws CM_Exception When the underlying putObject call fails
 */
public function write($path, $content) {
    $putParams = $this->_getOptions($path, array('Body' => $content));

    // Sniff the MIME type from the bytes themselves; omit the header on failure.
    $detector = new finfo(FILEINFO_MIME_TYPE);
    $detected = $detector->buffer($content);
    if ($detected !== false) {
        $putParams['ContentType'] = $detected;
    }

    try {
        $this->_client->putObject($putParams);
    } catch (\Exception $e) {
        throw new CM_Exception('Cannot write ' . strlen($content) . ' bytes to `' . $path . '`: ' . $e->getMessage());
    }
}
/**
 * Stores $content in S3 under $path, attaching a MIME type sniffed from the
 * payload when detection succeeds.
 *
 * @param string $path    Destination key
 * @param string $content Raw bytes to store
 * @throws CM_Exception When the underlying putObject call fails; the original
 *                      failure details are carried in the exception context
 */
public function write($path, $content) {
    $putParams = $this->_getOptions($path, array('Body' => $content));

    // Sniff the MIME type from the bytes themselves; omit the header on failure.
    $detector = new finfo(FILEINFO_MIME_TYPE);
    $detected = $detector->buffer($content);
    if ($detected !== false) {
        $putParams['ContentType'] = $detected;
    }

    try {
        $this->_client->putObject($putParams);
    } catch (\Exception $e) {
        throw new CM_Exception('Cannot write bytes to the path', null, ['bytesCount' => strlen($content), 'path' => $path, 'originalExceptionMessage' => $e->getMessage()]);
    }
}
/**
 * Uploads from a local path to the S3 Bucket.
 *
 * @param string $srcPath Local file to upload
 * @param string $dstPath Destination key in the bucket
 * @return bool False when the source file is missing or unreadable,
 *              true once the object was sent and the record saved
 */
public function upload($srcPath, $dstPath) {
    if (!file_exists($srcPath)) {
        return false;
    }
    // todo Fix type: MIME type is hard-coded; consider finfo-based detection.
    $this->set('type', 'application/octet-stream');
    $this->set('size', filesize($srcPath));
    $this->set('hash', md5_file($srcPath));
    $this->set('path', $dstPath);

    // Open read-only: the upload never writes to the local file, and the
    // original 'r+' mode failed on files the process can only read.
    // Also report failure instead of handing `false` to the SDK.
    $stream = fopen($srcPath, 'r');
    if ($stream === false) {
        return false;
    }
    $this->client->putObject(array(
        'Bucket' => $this->get('bucket'),
        'Key' => $dstPath,
        'Body' => $stream,
        'ACL' => "public-read",
    ));
    $this->save();
    // todo Verify that the upload actually succeeded before reporting success.
    return true;
}
}
echo 'Here is some more debugging info:';
print_r($_FILES);
print "</pre>";

require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-east-1']);

// S3 bucket names must be all-lowercase; the original uppercase prefix
// "Sneha" makes createBucket fail with InvalidBucketName.
$bucket = uniqid("sneha", false);

# AWS PHP SDK version 3 create bucket
$result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]);

// Open read-only: uploading never writes back to the local file, and the
// original 'r+' mode fails on files the process can only read.
$result = $s3->putObject([
    'ACL' => 'public-read',
    'Bucket' => $bucket,
    'Key' => $uploadfile,
    'ContentType' => $_FILES['userfile']['type'],
    'Body' => fopen($uploadfile, 'r'),
]);
$url = $result['ObjectURL'];
echo $url;

// Resolve the RDS endpoint for instance "db1" and connect to MySQL.
$rds = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-east-1']);
$result = $rds->describeDBInstances(array('DBInstanceIdentifier' => 'db1'));
$endpoint = $result['DBInstances'][0]['Endpoint']['Address'];
echo "============\n" . $endpoint . "================";

// NOTE(review): database credentials are hard-coded; move to config/env.
$link = mysqli_connect($endpoint, "testconnection1", "testconnection1", "Project1");
if (mysqli_connect_errno()) {
    printf("Connect failed: %s\n", mysqli_connect_error());
    exit;
} else {
    echo "Success";
}

# create sns client
$sns = new Aws\Sns\SnsClient(['version' => 'latest', 'region' => 'us-east-1']);
// Dump the read-replica database, gzip it, and park the archive in S3.
$rds = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-east-1']);
$resultrdb = $rds->describeDBInstances(array('DBInstanceIdentifier' => 'mp1SKread-replica'));
$endpointrdb = $resultrdb['DBInstances'][0]['Endpoint']['Address'];
echo "============\n" . $endpointrdb . "================";

$linkrdb = mysqli_connect($endpointrdb, "testconnection1", "testconnection1", "Project1");
if (mysqli_connect_errno()) {
    printf("Connect failed: %s\n", mysqli_connect_error());
    exit;
} else {
    echo "Connection to RDB Success";
}

// NOTE(review): the password on the mysqldump command line is visible in `ps`.
$backupFile = '/tmp/FinalProjectDB' . date("Y-m-d-H-i-s") . '.gz';
$command = "mysqldump --opt -h {$endpointrdb} -u testconnection1 -ptestconnection1 Project1 | gzip > {$backupFile}";
exec($command);
echo "success";

$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-east-1']);
$bucket = 'snehatestproject-' . rand() . '-dbdump';
if (!$s3->doesBucketExist($bucket)) {
    $result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]);
    $s3->waitUntil('BucketExists', array('Bucket' => $bucket));
    echo "{$bucket} Created Successfully";
}
$result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Key' => $backupFile, 'SourceFile' => $backupFile]);
// Capture the object URL from the putObject result *before* $result is
// overwritten: putBucketLifecycleConfiguration's result carries no
// 'ObjectURL' key, so the original read yielded nothing.
$url = $result['ObjectURL'];
$result = $s3->putBucketLifecycleConfiguration(['Bucket' => $bucket, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Days' => 2], 'NoncurrentVersionExpiration' => ['NoncurrentDays' => 2], 'Prefix' => '', 'Status' => 'Enabled']]]]);
echo "backup success";
echo $url;

// NOTE(review): output has already been echoed above, so this Location header
// will fail with "headers already sent"; buffer output or drop the echoes.
$urlintro = "index.php";
header('Location: ' . $urlintro, true);
// Close the DB connection before terminating — the original close() call sat
// after `die;` and was unreachable.
$linkrdb->close();
die;
// Bootstrap: Prague timezone and full error reporting for this loader script.
date_default_timezone_set('Europe/Prague');
ini_set('display_errors', true);
error_reporting(E_ALL);

$basedir = dirname(__DIR__);
require_once $basedir . '/vendor/autoload.php';

$client = new \Aws\S3\S3Client([
    'region' => getenv('AWS_REGION'),
    'version' => '2006-03-01',
    'credentials' => [
        'key' => getenv('AWS_ACCESS_KEY'),
        'secret' => getenv('AWS_SECRET_KEY'),
    ],
]);

// Fixture directory to copy from, and the destination bucket URI.
$source = $basedir . '/tests/_data/csv-import';
$bucket = getenv('AWS_S3_BUCKET');
$dest = 's3://' . $bucket;

// Empty the bucket first so stale objects never leak into a run.
$listing = $client->listObjects(['Bucket' => $bucket, 'Delimiter' => '/']);
$contents = $listing->get('Contents');
if ($contents) {
    $keysToDelete = array_map(function ($object) {
        return ['Key' => $object['Key']];
    }, $contents);
    $client->deleteObjects(['Bucket' => $bucket, 'Delete' => ['Objects' => $keysToDelete]]);
}

// Mirror the fixture directory into the bucket, synchronously.
$transfer = new \Aws\S3\Transfer($client, $source, $dest, ['debug' => true]);
$transfer->transfer();

// Create manifests: one plain CSV, one gzipped CSV, one pointing at a
// deliberately missing object.
$manifest = ['entries' => [['url' => sprintf("s3://%s/tw_accounts.csv", $bucket), 'mandatory' => true]]];
$client->putObject(['Bucket' => $bucket, 'Key' => '01_tw_accounts.csv.manifest', 'Body' => json_encode($manifest)]);

$manifest = ['entries' => [['url' => sprintf("s3://%s/04_tw_accounts.csv.gz", $bucket), 'mandatory' => true]]];
$client->putObject(['Bucket' => $bucket, 'Key' => '03_tw_accounts.csv.gzip.manifest', 'Body' => json_encode($manifest)]);

$manifest = ['entries' => [['url' => sprintf("s3://%s/not-exists.csv", $bucket), 'mandatory' => true]]];
$client->putObject(['Bucket' => $bucket, 'Key' => '02_tw_accounts.csv.invalid.manifest', 'Body' => json_encode($manifest)]);

echo "Data loaded OK\n";
http://docs.aws.amazon.com/aws-sdk-php/v3/guide/getting-started/basic-usage.html#creating-a-client */
$s3 = new Aws\S3\S3Client(['version' => '2006-03-01', 'region' => 'eu-central-1']);

// Bucket names live in a global namespace and must be unique. See:
// http://docs.aws.amazon.com/AmazonS3/latest/dev/BucketRestrictions.html
$bucket = 'kuchy';

// Fetch the remote image that will be mirrored into the bucket.
$filename = 'orange-pi-one-pocitac-raspberry-pi-nestandard2.jpg';
$sourceUrl = 'http://ipravda.sk/res/2016/01/30/thumbs/' . $filename;
$imageBytes = file_get_contents($sourceUrl);

// Upload only when the object (keyed by filename) is not already present.
// putObject parameters:
// http://docs.aws.amazon.com/aws-sdk-php/v3/api/api-s3-2006-03-01.html#putobject
$alreadyStored = $s3->doesObjectExist($bucket, $filename);
if (!$alreadyStored) {
    echo "Creating a new object with key {$filename}\n";
    $putResult = $s3->putObject([
        'Bucket' => $bucket,
        'Key' => $filename,
        'Body' => $imageBytes,
        'ContentType' => 'image/jpeg',
        'ACL' => 'public-read',
    ]);
    echo 'url: ' . $putResult['ObjectURL'] . "\r\n";
} else {
    echo 'Image already exist!' . "\r\n";
    echo 'url: https://s3.eu-central-1.amazonaws.com/kuchy/' . $filename . "\r\n";
}
# 'Bucket' => $bucket #));
# AWS PHP SDK version 3 create bucket
$result = $s3->createBucket(['ACL' => 'public-read-write', 'Bucket' => $bucket]);
print_r($result);

# PHP version 3
$result = $s3->putObject([
    'ACL' => 'public-read-write',
    'Bucket' => $bucket,
    'Key' => $uploadfile,
    'SourceFile' => $uploadfile,
]);
$url = $result['ObjectURL'];
echo $url;

// Resolve the RDS endpoint and connect to MySQL.
$rds = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-east-1']);
$result = $rds->describeDBInstances(['DBInstanceIdentifier' => 'mp1-rca']);
$endpoint = $result['DBInstances'][0]['Endpoint']['Address'];

// On failure mysqli_connect() returns false, and passing false to
// mysqli_error() is a fatal TypeError; mysqli_connect_error() is the correct
// way to read the connection failure message.
$link = mysqli_connect($endpoint, "controller", "letmein888", "db444Name") or die("Error " . mysqli_connect_error());
/* check connection */
if (mysqli_connect_errno()) {
    printf("Connect failed: %s\n", mysqli_connect_error());
    exit;
}
/* Prepared statement, stage 1: prepare */
#if ($stmt = $link->prepare("INSERT INTO comments (id, email,phone,filename,s3rawurl,s3finishedurl,status,issubscribed) VALUES (NULL,?,?,?,?,?,?,?)")) {
// Creating a dump of the RDS database instance and storing it in S3.
$db = 'usnehadb';
$username = '******';
$password = '******';
$dbclient = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-west-2']);
$result = $dbclient->describeDBInstances(['DBInstanceIdentifier' => 'usneha']);
$endpoint = $result['DBInstances'][0]['Endpoint']['Address'];
// mysqli_connect_error() instead of mysqli_error($link): $link is false when
// the connect fails, and mysqli_error(false) is a fatal TypeError.
$link = mysqli_connect($endpoint, "username", "password", "usnehadb") or die("Error " . mysqli_connect_error());

// making a folder for storing backup
mkdir("/tmp/dbDump");
$dumpPath = '/tmp/dbDump/';
$fname = uniqid("dbdump", false);
// The extension must be the quoted string 'sql': the original bare `sql` is
// an undefined constant, which is a fatal error on PHP 8.
$finalPath = $dumpPath . $fname . '.sql';
// NOTE(review): the password on the command line is visible via `ps`.
$sql = "mysqldump --user={$username} --password={$password} --host={$endpoint} {$db} > {$finalPath}";
exec($sql);

$bucketname = uniqid("dbdump", false);
$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-west-2']);
# AWS PHP SDK version 3 create bucket
$result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucketname]);
$key = $fname . '.sql';
# PHP version 3
$result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucketname, 'Key' => $key, 'SourceFile' => $finalPath]);
// reference: https://docs.aws.amazon.com/aws-sdk-php/v3/api/api-s3-2006-03-01.html#putbucketlifecycleconfiguration
$result = $s3->putBucketLifecycleConfiguration(['Bucket' => $bucketname, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Days' => 1], 'NoncurrentVersionExpiration' => ['NoncurrentDays' => 1], 'Prefix' => ' ', 'Status' => 'Enabled']]]]);

// mysql_close() was removed in PHP 7; mysqli connections close with mysqli_close().
mysqli_close($link);
echo "Create db dump in s3!";
?>
</html>
echo "Possible file upload attack!\n";
}
print_r($_FILES);
print "</pre>";

// Build a 100px-wide thumbnail alongside the uploaded file.
$testimagefilename = $uploadfile . '_magick';
$testimage = new Imagick($uploadfile);
$testimage->thumbnailImage(100, 0);
$testimage->writeImages($testimagefilename, false);

require 'vendor/autoload.php';
require 'resources/library/db.php';

$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-east-1']);

use Aws\Sns\SnsClient;

// SDK v3 clients are constructed with `new`; the static factory() helper is
// the deprecated v2 idiom — build the SNS client the same way as $s3 above.
$sns = new SnsClient(['version' => 'latest', 'region' => 'us-east-1']);

// Upload the original and the thumbnail to a fresh, uniquely named bucket.
$bucket = uniqid("php-pv-", false);
$result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]);
$result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Expires' => gmdate("D, d M Y H:i:s T", strtotime("+1 day")), 'Key' => $uploadfile, 'SourceFile' => $uploadfile]);
$url = $result['ObjectURL'];
$resultthumb = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Expires' => gmdate("D, d M Y H:i:s T", strtotime("+1 day")), 'Key' => $testimagefilename, 'SourceFile' => $testimagefilename]);
$urlthumb = $resultthumb['ObjectURL'];

// Record the upload in the items table.
$link = getDbConn();
if (!($stmt = $link->prepare("INSERT INTO items (id, email,phone,filename,s3rawurl,s3finishedurl,status,issubscribed) VALUES (NULL,?,?,?,?,?,?,?)"))) {
    echo "Prepare failed: (" . $link->errno . ") " . $link->error;
}
$email = $_SESSION["email"];
$phone = $_SESSION['phone'];
$s3rawurl = $url; // $result['ObjectURL']; from above
$filename = basename($_FILES['userfile']['name']);
$s3finishedurl = $urlthumb;
$status = 0;
$issubscribed = 0;
<?php
session_start();

require 'vendor/autoload.php';

// Resolve the RDS endpoint and connect to MySQL.
$rds = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-west-2']);
$result = $rds->describeDBInstances(['DBInstanceIdentifier' => 'Project1db']);
$endpoint = $result['DBInstances'][0]['Endpoint']['Address'];
echo "============\n" . $endpoint . "================";
// mysqli_connect_error() instead of mysqli_error($link): $link is false when
// the connect fails, and mysqli_error(false) is a fatal TypeError.
$link = mysqli_connect($endpoint, "nandini", "nandinipwd", "Project1db") or die("Error " . mysqli_connect_error());
/* check connection */
if (mysqli_connect_errno()) {
    printf("Connect failed: %s\n", mysqli_connect_error());
    exit;
}

// Dump the database to a uniquely named file under /tmp/Backup.
mkdir("/tmp/Backup");
$path = '/tmp/Backup/';
$bname = uniqid("Bckupname", false);
// The extension must be the quoted string 'sql': the original bare `sql` is
// an undefined constant, which is a fatal error on PHP 8.
$append = $bname . '.sql';
$BackPath = $path . $append;
echo $BackPath;
// NOTE(review): password on the command line is visible via `ps`.
$cmd = "mysqldump --user=nandini --password=nandinipwd --host={$endpoint} Project1db > {$BackPath}";
exec($cmd);

// Upload the dump to a fresh bucket with a lifecycle expiration rule.
$bucketname = uniqid("dbbackupbucket", false);
$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-west-2']);
# AWS PHP SDK version 3 create bucket
$result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucketname]);
# PHP version 3
$result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucketname, 'Key' => $append, 'SourceFile' => $BackPath]);
$objectruledb = $s3->putBucketLifecycleConfiguration(['Bucket' => $bucketname, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Date' => '2015-12-24'], 'Prefix' => ' ', 'Status' => 'Enabled']]]]);

session_destroy();
// NOTE(review): output was already echoed above, so this redirect will fail
// with "headers already sent"; buffer output or remove the debug echoes.
header("location: gallery.php");
echo "File is valid, and was successfully uploaded.\n";
} else {
    header("location: gallery.php");
}
echo 'Here is some more debugging info:';
print_r($_FILES);
print "</pre>";

require 'vendor/autoload.php';

$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-west-2']);

// Throwaway, uniquely named bucket for this upload.
$bucket = uniqid("nandinibuckettest", false);
echo $bucket;
# AWS PHP SDK version 3 create bucket
$result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]);
# PHP version 3
$result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Key' => $fname, 'SourceFile' => $uploadfile]);
// Expire uploaded objects via a bucket lifecycle rule.
$objectrule = $s3->putBucketLifecycleConfiguration(['Bucket' => $bucket, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Date' => '2015-12-24'], 'Prefix' => ' ', 'Status' => 'Enabled']]]]);
$url = $result['ObjectURL'];
echo $url;

// Resolve the RDS endpoint and connect to MySQL.
$rds = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-west-2']);
$result = $rds->describeDBInstances(['DBInstanceIdentifier' => 'Project1db']);
$endpoint = $result['DBInstances'][0]['Endpoint']['Address'];
echo "============\n" . $endpoint . "================";
// mysqli_connect_error() instead of mysqli_error($link): $link is false when
// the connect fails, and mysqli_error(false) is a fatal TypeError.
$link = mysqli_connect($endpoint, "nandini", "nandinipwd", "Project1db") or die("Error " . mysqli_connect_error());
/* check connection */
if (mysqli_connect_errno()) {
    printf("Connect failed: %s\n", mysqli_connect_error());
    exit;
}
/* Prepared statement, stage 1: prepare */
if (!($stmt = $link->prepare("INSERT INTO Projectrec (uname, email, phone, raws3url, finisheds3url, jpegfilename, state, DateTime) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"))) {
print "File is valid, and was successfully uploaded.\n";
} else {
    print "Couldnt upload file\n";
}
print 'Here is some more debugging info:';
print_r($_FILES);

$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-west-2']);
$bucket = uniqid("usnehas3", false);
print "Creating bucket named {$bucket}\n";
$result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]);
print 'outside the create bucket command';
# waiting for the s3 bucket to be available
$result = $s3->waitUntil('BucketExists', array('Bucket' => $bucket));
echo "bucket creation done";

// NOTE(review): "uploads" . $uploadfile has no separating slash, so the key
// becomes e.g. "uploads/tmp/foo.jpg" — confirm the intended key layout.
// Open read-only: uploading never writes back to the local file, and the
// original 'r+' mode fails on files the process can only read.
$result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Key' => "uploads" . $uploadfile, 'ContentType' => $_FILES['userfile']['type'], 'Body' => fopen($uploadfile, 'r')]);
$url = $result['ObjectURL'];
echo $url;
echo "s3 file uploaded";

// reference: https://docs.aws.amazon.com/aws-sdk-php/v3/api/api-s3-2006-03-01.html#putbucketlifecycleconfiguration
$result = $s3->putBucketLifecycleConfiguration(['Bucket' => $bucket, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Days' => 1], 'NoncurrentVersionExpiration' => ['NoncurrentDays' => 1], 'Prefix' => ' ', 'Status' => 'Enabled']]]]);

// Framed image (thumbnail still to be implemented).
// reference: http://php.net/manual/en/imagick.writeimage.php
$imgpath = new Imagick($uploadfile);
$imgpath->frameImage('#a00000', 20, 20, 5, 5);
mkdir("/tmp/Image");
// end() takes its argument by reference, so passing explode()'s return value
// directly raises "Only variables should be passed by reference".
$nameParts = explode('.', $fname);
$ext = end($nameParts);
echo "file type is {$ext}";
$path = '/tmp/Image/';
ini_set('display_errors', 1);

// Move the uploaded file out of PHP's temp area.
$uploaddir = '/tmp/';
$uploadfile = $uploaddir . basename($_FILES['userFileForm']['name']);
if (move_uploaded_file($_FILES['userFileForm']['tmp_name'], $uploadfile)) {
    echo "File is valid, and was successfully uploaded.\n";
} else {
    echo "Possible file upload attack!\n";
}
echo 'Here is some more debugging info:';
print_r($_FILES);

require '/var/www/html/vendor/autoload.php';

$userS3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-east-1', 'credentials' => ['key' => '', 'secret' => '']]);

// S3 bucket names must be all-lowercase; the original mixed-case prefix
// "jss-userImages" makes createBucket fail with InvalidBucketName.
$bucket = uniqid("jss-userimages", false);
# AWS PHP SDK version 3 create bucket
$result = $userS3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]);
// NOTE(review): the object key is set to the bucket name itself — presumably
// it should be the uploaded file name; confirm against callers.
$result = $userS3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Key' => $bucket, 'SourceFile' => $uploadfile]);
$rawurl = $bucket;
$url = $result['ObjectURL'];
echo $url;

// Resolve the RDS endpoint and connect to MySQL.
$rds = new Aws\Rds\RdsClient(['version' => 'latest', 'region' => 'us-east-1', 'credentials' => ['key' => '', 'secret' => '']]);
$result = $rds->describeDBInstances(array('DBInstanceIdentifier' => 'jss-itmo444-db'));
$endpoint = $result['DBInstances'][0]['Endpoint']['Address'];
echo "============\n" . $endpoint . "================";
// mysql_error() was removed in PHP 7, and $link is false when the connect
// fails; mysqli_connect_error() reports the actual connection failure.
$link = mysqli_connect($endpoint, "controller", "letmein1234", "jssitmo444db") or die("Error" . mysqli_connect_error());
/*check connection*/
if (mysqli_connect_errno()) {
    printf("Connect failed: %s\n", mysqli_connect_error());
    exit;
}
/*Prepared statement, stage 1: prepare*/
// Placeholders must be bare `?`; quoting them as '?' makes them literal
// strings, leaving the statement with zero bindable parameters.
if (!($stmt = $link->prepare("INSERT INTO jssUserImages (userNameTable,userEmailTable,userTelephoneTable,rawS3URLTable,finishedS3URLTable,fileNameTable,stateTable,dateTable) \n\tVALUES (?,?,?,?,?,?,?,?)"))) {
echo '<pre>';
if (move_uploaded_file($_FILES['userfile']['tmp_name'], $uploadfile)) {
    echo "File is valid, and was successfully uploaded.\n";
} else {
    echo "Possible file upload attack!\n";
}
echo 'Here is some more debugging info:';
print_r($_FILES);
print "</pre>";

require 'vendor/autoload.php';

# Creating s3 object
$s3 = new Aws\S3\S3Client(['version' => 'latest', 'region' => 'us-east-1']);
$sns = new Aws\Sns\SnsClient(['version' => 'latest', 'region' => 'us-east-1']);

$bucket = uniqid("php-jay-", false);
$result = $s3->createBucket(['ACL' => 'public-read', 'Bucket' => $bucket]);
$result = $s3->putObject(['ACL' => 'public-read', 'Bucket' => $bucket, 'Key' => $uploadfile, 'SourceFile' => $uploadfile]);
$url = $result['ObjectURL'];
echo $url;

# Bucket expiration
$objectrule = $s3->putBucketLifecycleConfiguration(['Bucket' => $bucket, 'LifecycleConfiguration' => ['Rules' => [['Expiration' => ['Days' => 1], 'NoncurrentVersionExpiration' => ['NoncurrentDays' => 1], 'Prefix' => ' ', 'Status' => 'Enabled']]]]);

# php5 imagick code from php tutorial
$filepath = new Imagick($uploadfile);
$filepath->flipImage();
mkdir("/tmp/imgk");
// end() takes its argument by reference, so passing explode()'s return value
// directly raises "Only variables should be passed by reference".
// NOTE(review): $filename is not assigned in this chunk — presumably it should
// be basename($uploadfile); confirm against the full file.
$fileNameParts = explode('.', $filename);
$extension = end($fileNameParts);
$path = '/tmp/imgk/';
$imgid = uniqid("Image");
$imgloc = $imgid . '.' . $extension;
$DestPath = $path . $imgloc;
echo $DestPath;
$filepath->writeImage($DestPath);