$activejobs = false; foreach ($jobs as $jobid => $jobvalue) { if (!empty($jobvalue['activated'])) { $activejobs = true; } } if ($activejobs and false === wp_next_scheduled('backwpup_cron')) { wp_schedule_event(time(), 'backwpup_int', 'backwpup_cron'); } if (!$activejobs and false !== wp_next_scheduled('backwpup_cron')) { wp_clear_scheduled_hook('backwpup_cron'); } //get dropbox auth if (isset($_POST['dropboxauth']) and !empty($_POST['dropboxauth'])) { require_once dirname(__FILE__) . '/../libs/dropbox.php'; $dropbox = new backwpup_Dropbox('dropbox'); // let the user authorize (user will be redirected) $response = $dropbox->oAuthAuthorize(backwpup_admin_url('admin.php') . '?page=backwpupeditjob&jobid=' . $jobvalues['jobid'] . '&dropboxauth=AccessToken&_wpnonce=' . wp_create_nonce('edit-job')); // save oauth_token_secret set_transient('backwpup_dropboxrequest', array('oAuthRequestToken' => $response['oauth_token'], 'oAuthRequestTokenSecret' => $response['oauth_token_secret']), 600); //forward to auth page wp_redirect($response['authurl']); } $_POST['jobid'] = $jobvalues['jobid']; $backwpup_message .= str_replace('%1', $jobvalues['name'], __('Job \'%1\' changes saved.', 'backwpup')) . ' <a href="' . backwpup_admin_url('admin.php') . '?page=backwpup">' . __('Jobs overview.', 'backwpup') . '</a>'; } //load java wp_enqueue_script('common'); wp_enqueue_script('wp-lists'); wp_enqueue_script('postbox'); //add columns
/**
 * Collect the list of backup archives a job has stored on one destination.
 *
 * Loads the job settings from the 'backwpup_jobs' option and queries the
 * requested destination (local FOLDER, DROPBOX, SUGARSYNC, S3, GSTORAGE,
 * MSAZURE, RSC or FTP) for its files. Destination/API errors are appended
 * to the global $backwpup_message rather than thrown.
 *
 * @param int|string $jobid Job id (key in the backwpup_jobs option).
 * @param string     $dest  Destination id; must be 'FOLDER' or one of the
 *                          comma separated values in BACKWPUP_DESTS.
 *
 * @return array|false false for an empty jobid or unknown destination,
 *                     otherwise a numerically indexed list of records with
 *                     keys JOBID, DEST, folder, file, filename,
 *                     downloadurl, filesize and time.
 */
function backwpup_get_backup_files($jobid, $dest) {
    global $backwpup_message;
    // 'or' short-circuits before the BACKWPUP_DESTS lookup, so an empty
    // jobid bails out without touching the constant.
    if (empty($jobid) or !in_array(strtoupper($dest), explode(',', strtoupper(BACKWPUP_DESTS))) and $dest != 'FOLDER') {
        return false;
    }
    $jobs = get_option('backwpup_jobs'); //Load jobs
    // Guard against unknown job ids instead of dereferencing a missing key;
    // with empty settings every destination branch is skipped below.
    $jobvalue = isset($jobs[$jobid]) ? $jobs[$jobid] : array();
    $filecounter = 0;
    $files = array();

    //Get files/fileinfo in the local backup folder
    if ($dest == 'FOLDER' and !empty($jobvalue['backupdir']) and is_dir($jobvalue['backupdir'])) {
        if ($dir = opendir($jobvalue['backupdir'])) {
            while (($file = readdir($dir)) !== false) {
                if (substr($file, 0, 1) == '.') { // skip dotfiles and . / ..
                    continue;
                }
                if (is_file($jobvalue['backupdir'] . $file)) {
                    $files[$filecounter]['JOBID'] = $jobid;
                    $files[$filecounter]['DEST'] = $dest;
                    $files[$filecounter]['folder'] = $jobvalue['backupdir'];
                    $files[$filecounter]['file'] = $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filename'] = $file;
                    $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=download&file=' . $jobvalue['backupdir'] . $file;
                    $files[$filecounter]['filesize'] = filesize($jobvalue['backupdir'] . $file);
                    $files[$filecounter]['time'] = filemtime($jobvalue['backupdir'] . $file);
                    $filecounter++;
                }
            }
            closedir($dir);
        }
    }

    //Get files/fileinfo from Dropbox
    if ($dest == 'DROPBOX' and !empty($jobvalue['dropetoken']) and !empty($jobvalue['dropesecret'])) {
        require_once realpath(dirname(__FILE__) . '/../libs/dropbox.php');
        try {
            $dropbox = new backwpup_Dropbox('dropbox');
            $dropbox->setOAuthTokens($jobvalue['dropetoken'], $jobvalue['dropesecret']);
            $contents = $dropbox->metadata($jobvalue['dropedir']);
            if (is_array($contents)) {
                foreach ($contents['contents'] as $object) {
                    if ($object['is_dir'] != true) { // folders are not backups
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://api-content.dropbox.com/1/files/" . $jobvalue['droperoot'] . "/" . dirname($object['path']) . "/";
                        $files[$filecounter]['file'] = $object['path'];
                        $files[$filecounter]['filename'] = basename($object['path']);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloaddropbox&file=' . $object['path'] . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $object['bytes'];
                        $files[$filecounter]['time'] = strtotime($object['modified']);
                        $filecounter++;
                    }
                }
            }
        } catch (Exception $e) {
            $backwpup_message .= 'DROPBOX: ' . $e->getMessage() . '<br />';
        }
    }

    //Get files/fileinfo from SugarSync
    if ($dest == 'SUGARSYNC' and !empty($jobvalue['sugarrefreshtoken'])) {
        if (!class_exists('SugarSync')) {
            require_once dirname(__FILE__) . '/../libs/sugarsync.php';
        }
        if (class_exists('SugarSync')) {
            try {
                $sugarsync = new SugarSync($jobvalue['sugarrefreshtoken']);
                $dirid = $sugarsync->chdir($jobvalue['sugardir'], $jobvalue['sugarroot']);
                $user = $sugarsync->user();
                $dir = $sugarsync->showdir($dirid);
                $getfiles = $sugarsync->getcontents('file');
                if (is_object($getfiles)) {
                    foreach ($getfiles->file as $getfile) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = 'https://' . $user->nickname . '.sugarsync.com/' . $dir;
                        $files[$filecounter]['file'] = (string) $getfile->ref;
                        $files[$filecounter]['filename'] = utf8_decode((string) $getfile->displayName);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadsugarsync&file=' . (string) $getfile->ref . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = (int) $getfile->size;
                        $files[$filecounter]['time'] = strtotime((string) $getfile->lastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'SUGARSYNC: ' . $e->getMessage() . '<br />';
            }
        }
    }

    //Get files/fileinfo from Amazon S3
    if ($dest == 'S3' and !empty($jobvalue['awsAccessKey']) and !empty($jobvalue['awsSecretKey']) and !empty($jobvalue['awsBucket'])) {
        if (!class_exists('AmazonS3')) {
            require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
        }
        if (class_exists('AmazonS3')) {
            try {
                $s3 = new AmazonS3(array('key' => $jobvalue['awsAccessKey'], 'secret' => $jobvalue['awsSecretKey'], 'certificate_authority' => true));
                if (($contents = $s3->list_objects($jobvalue['awsBucket'], array('prefix' => $jobvalue['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://" . $jobvalue['awsBucket'] . ".s3.amazonaws.com/" . dirname((string) $object->Key) . '/';
                        $files[$filecounter]['file'] = (string) $object->Key;
                        $files[$filecounter]['filename'] = basename($object->Key);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloads3&file=' . $object->Key . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = (string) $object->Size;
                        $files[$filecounter]['time'] = strtotime($object->LastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'Amazon S3: ' . $e->getMessage() . '<br />';
            }
        }
    }

    //Get files/fileinfo from Google Storage (uses the S3-compatible SDK)
    if ($dest == 'GSTORAGE' and !empty($jobvalue['GStorageAccessKey']) and !empty($jobvalue['GStorageSecret']) and !empty($jobvalue['GStorageBucket'])) {
        if (!class_exists('AmazonS3')) {
            require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
        }
        if (class_exists('AmazonS3')) {
            try {
                $gstorage = new AmazonS3(array('key' => $jobvalue['GStorageAccessKey'], 'secret' => $jobvalue['GStorageSecret'], 'certificate_authority' => true));
                $gstorage->set_hostname('storage.googleapis.com');
                $gstorage->allow_hostname_override(false);
                if (($contents = $gstorage->list_objects($jobvalue['GStorageBucket'], array('prefix' => $jobvalue['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://storage.cloud.google.com/" . $jobvalue['GStorageBucket'] . "/" . dirname((string) $object->Key) . '/';
                        $files[$filecounter]['file'] = (string) $object->Key;
                        $files[$filecounter]['filename'] = basename($object->Key);
                        $files[$filecounter]['downloadurl'] = "https://storage.cloud.google.com/" . $jobvalue['GStorageBucket'] . "/" . (string) $object->Key;
                        $files[$filecounter]['filesize'] = (string) $object->Size;
                        $files[$filecounter]['time'] = strtotime($object->LastModified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()) . '<br />';
            }
        }
    }

    //Get files/fileinfo from Microsoft Azure
    if ($dest == 'MSAZURE' and !empty($jobvalue['msazureHost']) and !empty($jobvalue['msazureAccName']) and !empty($jobvalue['msazureKey']) and !empty($jobvalue['msazureContainer'])) {
        if (!class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
            require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
        }
        if (class_exists('Microsoft_WindowsAzure_Storage_Blob')) {
            try {
                $storageClient = new Microsoft_WindowsAzure_Storage_Blob($jobvalue['msazureHost'], $jobvalue['msazureAccName'], $jobvalue['msazureKey']);
                $blobs = $storageClient->listBlobs($jobvalue['msazureContainer'], $jobvalue['msazuredir']);
                if (is_array($blobs)) {
                    foreach ($blobs as $blob) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "https://" . $jobvalue['msazureAccName'] . '.' . $jobvalue['msazureHost'] . "/" . $jobvalue['msazureContainer'] . "/" . dirname($blob->Name) . "/";
                        $files[$filecounter]['file'] = $blob->Name;
                        $files[$filecounter]['filename'] = basename($blob->Name);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadmsazure&file=' . $blob->Name . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $blob->size;
                        $files[$filecounter]['time'] = strtotime($blob->lastmodified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'MSAZURE: ' . $e->getMessage() . '<br />';
            }
        }
    }

    //Get files/fileinfo from Rackspace Cloud Files
    if ($dest == 'RSC' and !empty($jobvalue['rscUsername']) and !empty($jobvalue['rscAPIKey']) and !empty($jobvalue['rscContainer'])) {
        if (!class_exists('CF_Authentication')) {
            require_once dirname(__FILE__) . '/../libs/rackspace/cloudfiles.php';
        }
        if (class_exists('CF_Authentication')) {
            try {
                $auth = new CF_Authentication($jobvalue['rscUsername'], $jobvalue['rscAPIKey']);
                $auth->ssl_use_cabundle();
                if ($auth->authenticate()) {
                    $conn = new CF_Connection($auth);
                    $conn->ssl_use_cabundle();
                    $backwpupcontainer = $conn->get_container($jobvalue['rscContainer']);
                    $contents = $backwpupcontainer->get_objects(0, NULL, NULL, $jobvalue['rscdir']);
                    foreach ($contents as $object) {
                        $files[$filecounter]['JOBID'] = $jobid;
                        $files[$filecounter]['DEST'] = $dest;
                        $files[$filecounter]['folder'] = "RSC://" . $jobvalue['rscContainer'] . "/" . dirname($object->name) . "/";
                        $files[$filecounter]['file'] = $object->name;
                        $files[$filecounter]['filename'] = basename($object->name);
                        $files[$filecounter]['downloadurl'] = backwpup_admin_url('admin.php') . '?page=backwpupbackups&action=downloadrsc&file=' . $object->name . '&jobid=' . $jobid;
                        $files[$filecounter]['filesize'] = $object->content_length;
                        $files[$filecounter]['time'] = strtotime($object->last_modified);
                        $filecounter++;
                    }
                }
            } catch (Exception $e) {
                $backwpup_message .= 'RSC: ' . $e->getMessage() . '<br />';
            }
        }
    }

    //Get files/fileinfo from FTP
    if ($dest == 'FTP' and !empty($jobvalue['ftphost']) and function_exists('ftp_connect') and !empty($jobvalue['ftpuser']) and !empty($jobvalue['ftppass'])) {
        // Initialize so the checks below are safe when SSL is requested but
        // ftp_ssl_connect() is unavailable (previously an undefined variable).
        $ftp_conn_id = false;
        if (function_exists('ftp_ssl_connect') and $jobvalue['ftpssl']) { //make SSL FTP connection
            $ftp_conn_id = ftp_ssl_connect($jobvalue['ftphost'], $jobvalue['ftphostport'], 10);
        } elseif (!$jobvalue['ftpssl']) { //make normal FTP connection if SSL not requested
            $ftp_conn_id = ftp_connect($jobvalue['ftphost'], $jobvalue['ftphostport'], 10);
        }
        $loginok = false;
        if ($ftp_conn_id) {
            //FTP Login
            if (@ftp_login($ftp_conn_id, $jobvalue['ftpuser'], backwpup_base64($jobvalue['ftppass']))) {
                $loginok = true;
            } else {
                //if PHP ftp login doesn't work use raw USER/PASS commands
                ftp_raw($ftp_conn_id, 'USER ' . $jobvalue['ftpuser']);
                $return = ftp_raw($ftp_conn_id, 'PASS ' . backwpup_base64($jobvalue['ftppass']));
                if (substr(trim($return[0]), 0, 3) <= 400) { // reply codes < 400 indicate success
                    $loginok = true;
                }
            }
        }
        if ($loginok) {
            ftp_chdir($ftp_conn_id, $jobvalue['ftpdir']);
            $currentftpdir = rtrim(ftp_pwd($ftp_conn_id), '/') . '/';
            ftp_pasv($ftp_conn_id, $jobvalue['ftppasv']);
            if ($ftpfilelist = ftp_nlist($ftp_conn_id, $currentftpdir)) {
                foreach ($ftpfilelist as $ftpfiles) {
                    if (substr(basename($ftpfiles), 0, 1) == '.') { // skip dotfiles
                        continue;
                    }
                    $files[$filecounter]['JOBID'] = $jobid;
                    $files[$filecounter]['DEST'] = $dest;
                    $files[$filecounter]['folder'] = "ftp://" . $jobvalue['ftphost'] . ':' . $jobvalue['ftphostport'] . dirname($ftpfiles) . "/";
                    $files[$filecounter]['file'] = $ftpfiles;
                    $files[$filecounter]['filename'] = basename($ftpfiles);
                    $files[$filecounter]['downloadurl'] = "ftp://" . rawurlencode($jobvalue['ftpuser']) . ":" . rawurlencode(backwpup_base64($jobvalue['ftppass'])) . "@" . $jobvalue['ftphost'] . ':' . $jobvalue['ftphostport'] . $ftpfiles;
                    $files[$filecounter]['filesize'] = ftp_size($ftp_conn_id, $ftpfiles);
                    $files[$filecounter]['time'] = ftp_mdtm($ftp_conn_id, $ftpfiles);
                    $filecounter++;
                }
            }
        } else {
            $backwpup_message .= 'FTP: ' . __('Login failure!', 'backwpup') . '<br />';
        }
        // Close the control connection (previously leaked).
        if ($ftp_conn_id) {
            ftp_close($ftp_conn_id);
        }
    }
    return $files;
}
die; } else { header('HTTP/1.0 ' . $s3file->status . ' Not Found'); die; } break; case 'downloaddropbox': //Download Dropbox Backup check_admin_referer('download-backup'); if (!class_exists('Dropbox_API')) { require_once realpath(dirname(__FILE__) . '/../libs/dropbox.php'); } $jobs = get_option('backwpup_jobs'); $jobid = $_GET['jobid']; try { $dropbox = new backwpup_Dropbox('dropbox'); $dropbox->setOAuthTokens($jobs[$jobid]['dropetoken'], $jobs[$jobid]['dropesecret']); $filemeta = $dropbox->metadata($_GET['file'], false, 1); header("Pragma: public"); header("Expires: 0"); header("Cache-Control: must-revalidate, post-check=0, pre-check=0"); header("Content-Type: " . $filemeta['mime_type']); header("Content-Type: application/force-download"); header("Content-Type: application/octet-stream"); header("Content-Type: application/download"); header("Content-Disposition: attachment; filename=" . basename($_GET['file']) . ";"); header("Content-Transfer-Encoding: binary"); header("Content-Length: " . $filemeta['bytes']); $dropbox->download($_GET['file'], true); die; } catch (Exception $e) {
/**
 * Job step: transfer the finished backup archive to Dropbox.
 *
 * Reads the job configuration and state from the globals $STATIC and
 * $WORKING, uploads $STATIC['backupfile'] via the backwpup_Dropbox client,
 * then prunes old backups on Dropbox down to 'dropemaxbackups'. Progress
 * and errors are reported through trigger_error(), which presumably feeds
 * the job log (handler not visible here — confirm against the job runner).
 * No return value; success is recorded by appending 'DEST_DROPBOX' to
 * $WORKING['STEPSDONE'].
 */
function dest_dropbox() {
    global $WORKING, $STATIC;
    // Step budget: 1 for the upload, 1 for the cleanup, plus the file size
    // (byte progress is counted into STEPDONE by the progress callback).
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    trigger_error(sprintf(__('%d. Try to sending backup file to DropBox...', 'backwpup'), $WORKING['DEST_DROPBOX']['STEP_TRY']), E_USER_NOTICE);
    require_once realpath(dirname(__FILE__) . '/../libs/dropbox.php');
    try {
        need_free_memory(10000000); // ~10 MB headroom for the upload buffers
        //set boxtype
        $dropbox = new backwpup_Dropbox('dropbox');
        // set the tokens
        $dropbox->setOAuthTokens($STATIC['JOB']['dropetoken'], $STATIC['JOB']['dropesecret']);
        $info = $dropbox->accountInfo();
        if (!empty($info['uid'])) {
            trigger_error(sprintf(__('Authed with DropBox from %s', 'backwpup'), $info['display_name']), E_USER_NOTICE);
        }
        //Check Quota: free = total quota minus shared and normal usage
        $dropboxfreespase = (double) $info['quota_info']['quota'] - (double) $info['quota_info']['shared'] - (double) $info['quota_info']['normal'];
        if (filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']) > $dropboxfreespase) {
            trigger_error(__('No free space left on DropBox!!!', 'backwpup'), E_USER_ERROR);
            // Marked done even on failure so the job does not retry this step.
            $WORKING['STEPSDONE'][] = 'DEST_DROPBOX'; //set done
            return;
        } else {
            trigger_error(sprintf(__('%s free on DropBox', 'backwpup'), formatBytes($dropboxfreespase)), E_USER_NOTICE);
        }
        //set callback function for upload progress reporting
        $dropbox->setProgressFunction('curl_progresscallback');
        // put the file
        trigger_error(__('Upload to DropBox now started... ', 'backwpup'), E_USER_NOTICE);
        $response = $dropbox->upload($STATIC['JOB']['backupdir'] . $STATIC['backupfile'], $STATIC['JOB']['dropedir'] . $STATIC['backupfile']);
        // Only record success when Dropbox reports the full file size back.
        if ($response['bytes'] == filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile'])) {
            $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloaddropbox&file=' . $STATIC['JOB']['dropedir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
            $WORKING['STEPDONE']++;
            $WORKING['STEPSDONE'][] = 'DEST_DROPBOX'; //set done
            trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://api-content.dropbox.com/1/files/' . $STATIC['JOB']['droperoot'] . '/' . $STATIC['JOB']['dropedir'] . $STATIC['backupfile']), E_USER_NOTICE);
        }
        //unset callback function
        $dropbox->setProgressFunction();
    } catch (Exception $e) {
        trigger_error(sprintf(__('DropBox API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
    }
    // Second phase: delete oldest backups beyond the configured maximum.
    // NOTE(review): if the try above threw before $dropbox was created,
    // is_object($dropbox) reads an undefined variable — confirm intended.
    try {
        if ($STATIC['JOB']['dropemaxbackups'] > 0 and is_object($dropbox)) { //Delete old backups
            $backupfilelist = array();
            $metadata = $dropbox->metadata($STATIC['JOB']['dropedir']);
            if (is_array($metadata)) {
                foreach ($metadata['contents'] as $data) {
                    $file = basename($data['path']);
                    // Keep only files matching this job's prefix and archive
                    // suffix ('fileformart' key name is project-wide).
                    if ($data['is_dir'] != true and $STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                        $backupfilelist[] = $file;
                    }
                }
            }
            if (sizeof($backupfilelist) > 0) {
                // Newest first (names presumably sort by date); delete the
                // tail past dropemaxbackups.
                rsort($backupfilelist);
                $numdeltefiles = 0;
                for ($i = $STATIC['JOB']['dropemaxbackups']; $i < count($backupfilelist); $i++) {
                    $dropbox->fileopsDelete($STATIC['JOB']['dropedir'] . $backupfilelist[$i]); //delete files on Cloud
                    $numdeltefiles++;
                }
                if ($numdeltefiles > 0) {
                    trigger_error(sprintf(_n('One file deleted on DropBox', '%d files deleted on DropBox', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('DropBox API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
    }
    $WORKING['STEPDONE']++; // count the cleanup phase
}