/**
 * Session garbage collector: removes expired session records from the
 * configured Azure storage backend (table or blob).
 *
 * @param int $lifeTime Session maximal lifetime in seconds
 * @see session.gc_divisor 100
 * @see session.gc_maxlifetime 1440
 * @see session.gc_probability 1
 * @usage Execution rate 1/100 (session.gc_probability/session.gc_divisor)
 * @return boolean true when cleanup completed, false on a storage error
 */
public function gc($lifeTime)
{
    if ($this->_storageType == self::STORAGE_TYPE_TABLE) {
        // Table storage: query all entities in our partition whose
        // sessionExpires timestamp lies past the lifetime, delete each.
        $filter = 'PartitionKey eq \'' . $this->_sessionContainerPartition . '\' and sessionExpires lt ' . (time() - $lifeTime);
        try {
            foreach ($this->_storage->retrieveEntities($this->_sessionContainer, $filter) as $expiredEntity) {
                $this->_storage->deleteEntity($this->_sessionContainer, $expiredEntity);
            }
            return true;
        } catch (Microsoft_WindowsAzure_exception $ex) {
            return false;
        }
    } elseif ($this->_storageType == self::STORAGE_TYPE_BLOB) {
        // Blob storage: list blobs (with metadata included) under the
        // session prefix and delete those whose metadata marks them stale.
        try {
            $blobList = $this->_storage->listBlobs($this->_sessionContainer, $this->_sessionContainerPartition, '', null, null, 'metadata');
            foreach ($blobList as $sessionBlob) {
                if ($sessionBlob->Metadata['sessionexpires'] < time() - $lifeTime) {
                    $this->_storage->deleteBlob($this->_sessionContainer, $sessionBlob->Name);
                }
            }
            return true;
        } catch (Microsoft_WindowsAzure_exception $ex) {
            return false;
        }
    }
}
/**
 * @see FileBackend::getFileList()
 *
 * Lists the blobs of $container below the directory prefix $dir and
 * returns each name with the prefix stripped off.
 *
 * Fixes over the previous version: the prefix is now verified at
 * position 0 of the blob name (the old code stripped from the *first
 * occurrence* of $dir anywhere in the name, found via a redundant
 * strstr()+strpos() double scan), and the trailing-slash check uses a
 * plain last-character test instead of a fragile strrpos() comparison.
 *
 * @param string $container Blob container name
 * @param string $dir       Directory prefix; blank/whitespace means container root
 * @param array  $params    Unused here; kept for interface compatibility
 * @return array|null File names relative to $dir, or null when listing fails
 */
function getFileListInternal($container, $dir, array $params) {
    $files = array();
    try {
        if (trim($dir) == '') {
            // No directory given: list the whole container.
            $blobs = $this->storageClient->listBlobs($container);
        } else {
            // Normalize the prefix so it always ends with '/'.
            if (substr($dir, -1) != '/') {
                $dir = $dir . '/';
            }
            $blobs = $this->storageClient->listBlobs($container, $dir);
        }
        foreach ($blobs as $blob) {
            // Only return the actual file name without the directory prefix.
            $name = $blob->name;
            if (trim($dir) == '') {
                $files[] = $name;
            } elseif (strpos($name, $dir) === 0) {
                $files[] = substr($name, strlen($dir));
            }
        }
    } catch (Exception $e) {
        // Listing failed; callers interpret null as an error.
        return null;
    }
    // If there are no files matching the prefix, this is an empty array.
    return $files;
}
/**
 * Destination handler: uploads the finished backup archive to a Microsoft
 * Azure blob container and then prunes old backups beyond the configured
 * msazuremaxbackups limit.
 *
 * Reads the job configuration from the $STATIC global and reports progress
 * through the $WORKING global plus trigger_error() notices (BackWPup's
 * job-log mechanism). No return value; success is recorded by appending
 * 'DEST_MSAZURE' to $WORKING['STEPSDONE'].
 */
function dest_msazure() {
    global $WORKING, $STATIC;
    // Step budget: 2 bookkeeping steps plus one per byte of the archive.
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    trigger_error(sprintf(__('%d. try sending backup to a Microsoft Azure (Blob)...', 'backwpup'), $WORKING['DEST_MSAZURE']['STEP_TRY']), E_USER_NOTICE);
    require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
    // Reserve roughly 6 MB of memory before the upload starts.
    need_free_memory(4194304 * 1.5);
    try {
        $storageClient = new Microsoft_WindowsAzure_Storage_Blob($STATIC['JOB']['msazureHost'], $STATIC['JOB']['msazureAccName'], $STATIC['JOB']['msazureKey']);
        // Abort early if the configured container does not exist.
        if (!$storageClient->containerExists($STATIC['JOB']['msazureContainer'])) {
            trigger_error(sprintf(__('Microsoft Azure container "%s" not exists!', 'backwpup'), $STATIC['JOB']['msazureContainer']), E_USER_ERROR);
            return;
        } else {
            trigger_error(sprintf(__('Connected to Microsoft Azure container "%s"', 'backwpup'), $STATIC['JOB']['msazureContainer']), E_USER_NOTICE);
        }
        trigger_error(__('Upload to MS Azure now started... ', 'backwpup'), E_USER_NOTICE);
        // putBlob(container, blob name incl. directory prefix, local file path).
        $result = $storageClient->putBlob($STATIC['JOB']['msazureContainer'], $STATIC['JOB']['msazuredir'] . $STATIC['backupfile'], $STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
        // The returned blob Name matching the requested name signals success.
        if ($result->Name == $STATIC['JOB']['msazuredir'] . $STATIC['backupfile']) {
            $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
            trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://' . $STATIC['JOB']['msazureAccName'] . '.' . $STATIC['JOB']['msazureHost'] . '/' . $STATIC['JOB']['msazuredir'] . $STATIC['backupfile']), E_USER_NOTICE);
            // Remember an admin-panel download URL for this backup file.
            $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloadmsazure&file=' . $STATIC['JOB']['msazuredir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
            $WORKING['STEPSDONE'][] = 'DEST_MSAZURE'; //set done
        } else {
            trigger_error(__('Can not transfer backup to Microsoft Azure!', 'backwpup'), E_USER_ERROR);
        }
        if ($STATIC['JOB']['msazuremaxbackups'] > 0) { //Delete old backups
            $backupfilelist = array();
            $blobs = $storageClient->listBlobs($STATIC['JOB']['msazureContainer'], $STATIC['JOB']['msazuredir']);
            if (is_array($blobs)) {
                foreach ($blobs as $blob) {
                    $file = basename($blob->Name);
                    // Only consider blobs matching this job's file prefix and format suffix.
                    if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                        $backupfilelist[] = $file;
                    }
                }
            }
            if (sizeof($backupfilelist) > 0) {
                // Sort descending and delete everything past the retention limit.
                // NOTE(review): assumes filenames sort chronologically (date in
                // the name) so index 0 is the newest backup — TODO confirm.
                rsort($backupfilelist);
                $numdeltefiles = 0;
                for ($i = $STATIC['JOB']['msazuremaxbackups']; $i < sizeof($backupfilelist); $i++) {
                    $storageClient->deleteBlob($STATIC['JOB']['msazureContainer'], $STATIC['JOB']['msazuredir'] . $backupfilelist[$i]); //delte files on Cloud
                    $numdeltefiles++;
                }
                if ($numdeltefiles > 0) {
                    trigger_error(sprintf(_n('One file deleted on Microsoft Azure container', '%d files deleted on Microsoft Azure container', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                }
            }
        }
    } catch (Exception $e) {
        // Any Azure SDK failure is logged as a job error; the job continues.
        trigger_error(sprintf(__('Microsoft Azure API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
    }
    $WORKING['STEPDONE']++;
}
/**
 * @see FileBackend::getFileList()
 *
 * Returns the full names of all blobs in $container, optionally limited
 * to those matching the prefix $dir (a null $dir lists everything).
 *
 * TODO: merely renamed from getFileList — confirm the implications
 *       (presumably the resolveStoragePath()/list() handling is no
 *       longer needed here).
 * TODO: verify that $dir is really treated as a starting sequence of
 *       the blob name by listBlobs().
 *
 * @return array|null blob names, or null when the listing failed
 */
function getFileListInternal( $container, $dir, array $params ) {
    $names = array();
    try {
        $listing = ( $dir === null )
            ? $this->storageClient->listBlobs( $container )
            : $this->storageClient->listBlobs( $container, $dir );
        foreach ( $listing as $blob ) {
            $names[] = $blob->name;
        }
    } catch ( Exception $e ) {
        // Callers treat null as "listing error".
        return null;
    }
    // No blobs matching the prefix yields a valid empty array.
    return $names;
}
/**
 * Collects information about the existing backup files of one job from one
 * destination: the local backup folder or one of the remote services
 * (Dropbox, SugarSync, Amazon S3, Google Storage, MS Azure, Rackspace
 * Cloud Files, FTP).
 *
 * Each entry in the returned list is an associative array with the keys
 * JOBID, DEST, folder, file, filename, downloadurl, filesize and time.
 * Remote-service errors are appended to the global $backwpup_message
 * instead of aborting.
 *
 * @param int|string $jobid key of the job in the 'backwpup_jobs' option
 * @param string     $dest  destination identifier ('FOLDER', 'DROPBOX', ...)
 * @return array|false list of file-info arrays, or false when $jobid is
 *                     empty / $dest is not a known destination
 */
function backwpup_get_backup_files($jobid, $dest) {
    global $backwpup_message;
    // Reject when no job id is given, or when $dest is neither in
    // BACKWPUP_DESTS nor the special 'FOLDER' destination ('and' binds
    // tighter than 'or' here, so FOLDER only bypasses the in_array test).
    if (empty($jobid) or !in_array(strtoupper($dest), explode(',', strtoupper(BACKWPUP_DESTS))) and $dest != 'FOLDER') {
        return false;
    }
    $jobs = get_option('backwpup_jobs'); //Load jobs
    $jobvalue = $jobs[$jobid];
    $filecounter = 0;
    $files = array();
    //Get files/filinfo in backup folder
    if ($dest == 'FOLDER' and !empty($jobvalue['backupdir']) and is_dir($jobvalue['backupdir'])) {
        if ($dir = opendir($jobvalue['backupdir'])) {
            while (($file = readdir($dir)) !== false) {
                // Skip dot files and the '.'/'..' directory entries.
                if (substr($file, 0, 1) == '.') {
                    continue;
                }
                if (is_file($jobvalue['backupdir'] . $file)) {
                    $files[$filecounter]['JOBID'] = $jobid;
                    $files[$filecounter]['DEST'] = $dest;
                    $files[$filecounter]['folder'] = $jobvalue['backupdir'];
                    $files[$filecounter]['file'] = $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filename'] = $file;
                    $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=download&file=' . $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filesize'] = filesize($jobvalue['backupdir'] . $file);
                    $files[$filecounter]['time'] = filemtime($jobvalue['backupdir'] . $file);
                    $filecounter++;
                }
            }
            closedir($dir);
        }
    }
    //Get files/filinfo from Dropbox
    if ($dest == 'DROPBOX' and !empty($jobvalue['dropetoken']) and !empty($jobvalue['dropesecret'])) {
        require_once realpath(dirname(__FILE__) . '/../libs/dropbox.php');
        try {
            $dropbox = new backwpup_Dropbox('dropbox');
            $dropbox->setOAuthTokens($jobvalue['dropetoken'], $jobvalue['dropesecret']);
            // metadata() returns the folder listing for the job's Dropbox dir.
            $contents = $dropbox->metadata($jobvalue['dropedir']);
            if (is_array($contents)) {
                foreach ($contents['contents'] as $object) {
                    // Only plain files; sub-directories are skipped.
                    if ($object['is_dir'] != true) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://api-content.dropbox.com/1/files/" . $jobvalue['droperoot'] . "/" . dirname($object['path']) . "/";
                        $files[$filecounter]['file'] = $object['path'];
                        $files[$filecounter]['filename'] = basename($object['path']);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloaddropbox&file=' . $object['path'] . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $object['bytes'];
                        $files[$filecounter]['time'] = strtotime($object['modified']);
                        $filecounter++;
                    }
                }
            }
        } catch (Exception $e) {
            $backwpup_message .= 'DROPBOX: ' . $e->getMessage() . '<br />';
        }
    }
    //Get files/filinfo from Sugarsync
    if ($dest == 'SUGARSYNC' and !empty($jobvalue['sugarrefreshtoken'])) {
        if (!class_exists('SugarSync')) {
            require_once dirname(__FILE__) . '/../libs/sugarsync.php';
        }
        if (class_exists('SugarSync')) {
            try {
                $sugarsync = new SugarSync($jobvalue['sugarrefreshtoken']);
                // Change into the job's folder, then list only file entries.
                $dirid = $sugarsync->chdir($jobvalue['sugardir'], $jobvalue['sugarroot']);
                $user = $sugarsync->user();
                $dir = $sugarsync->showdir($dirid);
                $getfiles = $sugarsync->getcontents('file');
                if (is_object($getfiles)) {
                    foreach ($getfiles->file as $getfile) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = 'https://' . $user->nickname . '.sugarsync.com/' . $dir;
                        $files[$filecounter]['file'] = (string) $getfile->ref;
                        $files[$filecounter]['filename'] = utf8_decode((string) $getfile->displayName);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $getfile->ref . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = (int) $getfile->size;
                        $files[$filecounter]['time'] = strtotime((string) $getfile->lastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'SUGARSYNC: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/filinfo from S3
    if ($dest == 'S3' and !empty($jobvalue['awsAccessKey']) and !empty($jobvalue['awsSecretKey']) and !empty($jobvalue['awsBucket'])) {
        if (!class_exists('AmazonS3')) {
            require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
        }
        if (class_exists('AmazonS3')) {
            try {
                $s3 = new AmazonS3(array('key' => $jobvalue['awsAccessKey'], 'secret' => $jobvalue['awsSecretKey'], 'certificate_authority' => true));
                // List objects restricted to the job's key prefix.
                if (($contents = $s3->list_objects($jobvalue['awsBucket'], array('prefix' => $jobvalue['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://" . $jobvalue['awsBucket'] . ".s3.amazonaws.com/" . dirname((string) $object->Key) . '/';
                        $files[$filecounter]['file'] = (string) $object->Key;
                        $files[$filecounter]['filename'] = basename($object->Key);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $object->Key . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = (string) $object->Size;
                        $files[$filecounter]['time'] = strtotime($object->LastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'Amazon S3: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/filinfo from Google Storage
    if ($dest == 'GSTORAGE' and !empty($jobvalue['GStorageAccessKey']) and !empty($jobvalue['GStorageSecret']) and !empty($jobvalue['GStorageBucket'])) {
        if (!class_exists('AmazonS3')) {
            require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
        }
        if (class_exists('AmazonS3')) {
            try {
                // Google Storage is accessed through the S3-compatible client
                // pointed at the storage.googleapis.com endpoint.
                $gstorage = new AmazonS3(array('key' => $jobvalue['GStorageAccessKey'], 'secret' => $jobvalue['GStorageSecret'], 'certificate_authority' => true));
                $gstorage->set_hostname('storage.googleapis.com');
                $gstorage->allow_hostname_override(false);
                if (($contents = $gstorage->list_objects($jobvalue['GStorageBucket'], array('prefix' => $jobvalue['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://storage.cloud.google.com/" . $jobvalue['GStorageBucket'] . "/" . dirname((string) $object->Key) . '/';
                        $files[$filecounter]['file'] = (string) $object->Key;
                        $files[$filecounter]['filename'] = basename($object->Key);
                        $files[$filecounter]['downloadurl'] = "https://storage.cloud.google.com/" . $jobvalue['GStorageBucket'] . "/" . (string) $object->Key;
                        $files[$filecounter]['filesize'] = (string) $object->Size;
                        $files[$filecounter]['time'] = strtotime($object->LastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()) . '<br />';
            }
        }
    }
    //Get files/filinfo from Microsoft Azure
    if ($dest == 'MSAZURE' and !empty($jobvalue['msazureHost']) and !empty($jobvalue['msazureAccName']) and !empty($jobvalue['msazureKey']) and !empty($jobvalue['msazureContainer'])) {
        if (!class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
            require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
        }
        if (class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
            try {
                $storageClient = new Microsoft_WindowsAzure_Storage_Blob($jobvalue['msazureHost'], $jobvalue['msazureAccName'], $jobvalue['msazureKey']);
                $blobs = $storageClient->listBlobs($jobvalue['msazureContainer'], $jobvalue['msazuredir']);
                if (is_array($blobs)) {
                    foreach ($blobs as $blob) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://" . $jobvalue['msazureAccName'] . '.' . $jobvalue['msazureHost'] . "/" . $jobvalue['msazureContainer'] . "/" . dirname($blob->Name) . "/";
                        $files[$filecounter]['file'] = $blob->Name;
                        $files[$filecounter]['filename'] = basename($blob->Name);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadmsazure&file=' . $blob->Name . '&jobid=' . $jobid;
                        // NOTE(review): 'size'/'lastmodified' casing differs from
                        // the 'Name' property used above — verify against the SDK's
                        // blob instance fields.
                        $files[$filecounter]['filesize'] = $blob->size;
                        $files[$filecounter]['time'] = strtotime($blob->lastmodified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'MSAZURE: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/filinfo from RSC
    if ($dest == 'RSC' and !empty($jobvalue['rscUsername']) and !empty($jobvalue['rscAPIKey']) and !empty($jobvalue['rscContainer'])) {
        if (!class_exists('CF_Authentication')) {
            require_once dirname(__FILE__) . '/../libs/rackspace/cloudfiles.php';
        }
        if (class_exists('CF_Authentication')) {
            try {
                $auth = new CF_Authentication($jobvalue['rscUsername'], $jobvalue['rscAPIKey']);
                $auth->ssl_use_cabundle();
                if ($auth->authenticate()) {
                    $conn = new CF_Connection($auth);
                    $conn->ssl_use_cabundle();
                    $backwpupcontainer = $conn->get_container($jobvalue['rscContainer']);
                    // get_objects(limit=0, marker, prefix, path) — objects under the job dir.
                    $contents = $backwpupcontainer->get_objects(0, NULL, NULL, $jobvalue['rscdir']);
                    foreach ($contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "RSC://" . $jobvalue['rscContainer'] . "/" . dirname($object->name) . "/";
                        $files[$filecounter]['file'] = $object->name;
                        $files[$filecounter]['filename'] = basename($object->name);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadrsc&file=' . $object->name . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $object->content_length;
                        $files[$filecounter]['time'] = strtotime($object->last_modified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'RSC: ' . $e->getMessage() . '<br />';
            }
        }
    }
    //Get files/filinfo from FTP
    if ($dest == 'FTP' and !empty($jobvalue['ftphost']) and function_exists('ftp_connect') and !empty($jobvalue['ftpuser']) and !empty($jobvalue['ftppass'])) {
        if (function_exists('ftp_ssl_connect') and $jobvalue['ftpssl']) { //make SSL FTP connection
            $ftp_conn_id = ftp_ssl_connect($jobvalue['ftphost'], $jobvalue['ftphostport'], 10);
        } elseif (!$jobvalue['ftpssl']) { //make normal FTP conection if SSL not work
            $ftp_conn_id = ftp_connect($jobvalue['ftphost'], $jobvalue['ftphostport'], 10);
        }
        $loginok = false;
        // NOTE(review): $ftp_conn_id stays undefined when ftpssl is set but
        // ftp_ssl_connect() is unavailable — this if() then raises a notice.
        if ($ftp_conn_id) { //FTP Login
            if (@ftp_login($ftp_conn_id, $jobvalue['ftpuser'], backwpup_base64($jobvalue['ftppass']))) {
                $loginok = true;
            } else { //if PHP ftp login don't work use raw login
                ftp_raw($ftp_conn_id, 'USER ' . $jobvalue['ftpuser']);
                $return = ftp_raw($ftp_conn_id, 'PASS ' . backwpup_base64($jobvalue['ftppass']));
                // Any FTP reply code below 400 counts as a successful login.
                if (substr(trim($return[0]), 0, 3) <= 400) {
                    $loginok = true;
                }
            }
        }
        if ($loginok) {
            ftp_chdir($ftp_conn_id, $jobvalue['ftpdir']);
            $currentftpdir = rtrim(ftp_pwd($ftp_conn_id), '/') . '/';
            ftp_pasv($ftp_conn_id, $jobvalue['ftppasv']);
            if ($ftpfilelist = ftp_nlist($ftp_conn_id, $currentftpdir)) {
                foreach ($ftpfilelist as $ftpfiles) {
                    // Skip dot files.
                    if (substr(basename($ftpfiles), 0, 1) == '.') {
                        continue;
                    }
                    $files[$filecounter]['JOBID'] = $jobid;
                    $files[$filecounter]['DEST'] = $dest;
                    $files[$filecounter]['folder'] = "ftp://" . $jobvalue['ftphost'] . ':' . $jobvalue['ftphostport'] . dirname($ftpfiles) . "/";
                    $files[$filecounter]['file'] = $ftpfiles;
                    $files[$filecounter]['filename'] = basename($ftpfiles);
                    // Download URL embeds the (encoded) FTP credentials.
                    $files[$filecounter]['downloadurl'] = "ftp://" . rawurlencode($jobvalue['ftpuser']) . ":" . rawurlencode(backwpup_base64($jobvalue['ftppass'])) . "@" . $jobvalue['ftphost'] . ':' . $jobvalue['ftphostport'] . $ftpfiles;
                    $files[$filecounter]['filesize'] = ftp_size($ftp_conn_id, $ftpfiles);
                    $files[$filecounter]['time'] = ftp_mdtm($ftp_conn_id, $ftpfiles);
                    $filecounter++;
                }
            }
        } else {
            $backwpup_message .= 'FTP: ' . __('Login failure!', 'backwpup') . '<br />';
        }
        // NOTE(review): $donefolders is written but never read in this function.
        $donefolders[] = $jobvalue['ftphost'] . '|' . $jobvalue['ftpuser'] . '|' . $jobvalue['ftpdir'];
    }
    return $files;
}
$image = $blob->putBlob(BLOB_GUESTBOOK, $_FILES['Image']['name'], $_FILES['Image']['tmp_name']); $table = new Microsoft_WindowsAzure_Storage_Table(); $writer = new Microsoft_WindowsAzure_Log_Writer_WindowsAzure($table, 'logThis'); $logger = new Microsoft_Log($writer); //$logger->addWriter($writer); $logger->log($_FILES['Image']['name'] . " added", 1); } if (isset($_POST['Update'])) { echo "<b>UPDATE NOT YET IMPLEMENTED</b>"; } // User wishes to delete something if (isset($_GET['Delete'])) { $blob->deleteBlob(BLOB_GUESTBOOK, $_GET['Delete']); } // Get all the guest book entries for display $entries = $blob->listBlobs(BLOB_GUESTBOOK); ?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <title>Windows Azure Guestbook</title> <link href="main.css" rel="stylesheet" type="text/css" /> </head> <body> <div class="general"> <div class="title"> <h1> Windows Azure Picture GuestBook </h1>