/**
 * Job destination: send the finished backup archive as an e-mail attachment.
 *
 * Reads job/config settings from the global $STATIC array and reports progress
 * through the global $WORKING array (STEPTODO/STEPDONE/STEPSDONE). Errors are
 * reported via trigger_error(); E_USER_ERROR is handled by the job's error
 * handler elsewhere — NOTE(review): execution continues after E_USER_ERROR here,
 * presumably the handler logs and lets the step be retried; confirm against the
 * job runner.
 */
function dest_mail() {
    global $WORKING, $STATIC;
    // Progress target is the archive size in bytes; presumably updated by a
    // send-progress callback elsewhere — TODO confirm.
    $WORKING['STEPTODO'] = filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    trigger_error(sprintf(__('%d. try to sending backup with mail...', 'backwpup'), $WORKING['DEST_MAIL']['STEP_TRY']), E_USER_NOTICE);
    //Create PHP Mailer (bundled with WordPress core under wp-includes)
    require_once realpath($STATIC['WP']['ABSPATH'] . $STATIC['WP']['WPINC']) . '/class-phpmailer.php';
    $phpmailer = new PHPMailer();
    // Select the transport configured in plugin settings: SMTP, Sendmail, or PHP mail().
    if ($STATIC['CFG']['mailmethod'] == "SMTP") {
        require_once realpath($STATIC['WP']['ABSPATH'] . $STATIC['WP']['WPINC']) . '/class-smtp.php';
        $phpmailer->Host = $STATIC['CFG']['mailhost'];
        $phpmailer->Port = $STATIC['CFG']['mailhostport'];
        $phpmailer->SMTPSecure = $STATIC['CFG']['mailsecure'];
        $phpmailer->Username = $STATIC['CFG']['mailuser'];
        // Stored password is base64-obfuscated in the plugin config, not encrypted.
        $phpmailer->Password = base64_decode($STATIC['CFG']['mailpass']);
        // Only authenticate when both credentials are configured.
        if (!empty($STATIC['CFG']['mailuser']) and !empty($STATIC['CFG']['mailpass'])) {
            $phpmailer->SMTPAuth = true;
        }
        $phpmailer->IsSMTP();
        trigger_error(__('Send mail with SMTP', 'backwpup'), E_USER_NOTICE);
    } elseif ($STATIC['CFG']['mailmethod'] == "Sendmail") {
        $phpmailer->Sendmail = $STATIC['CFG']['mailsendmail'];
        $phpmailer->IsSendmail();
        trigger_error(__('Send mail with Sendmail', 'backwpup'), E_USER_NOTICE);
    } else {
        $phpmailer->IsMail();
        trigger_error(__('Send mail with PHP mail', 'backwpup'), E_USER_NOTICE);
    }
    trigger_error(__('Creating mail', 'backwpup'), E_USER_NOTICE);
    $phpmailer->From = $STATIC['CFG']['mailsndemail'];
    $phpmailer->FromName = $STATIC['CFG']['mailsndname'];
    $phpmailer->AddAddress($STATIC['JOB']['mailaddress']);
    // Subject carries the job start time shifted by the blog's UTC offset.
    $phpmailer->Subject = sprintf(__('BackWPup archive from %1$s: %2$s', 'backwpup'), date('Y/m/d @ H:i', $STATIC['JOB']['starttime'] + $STATIC['WP']['TIMEDIFF']), $STATIC['JOB']['name']);
    $phpmailer->IsHTML(false);
    $phpmailer->Body = sprintf(__('Backup archive: %s', 'backwpup'), $STATIC['backupfile']);
    // Optional size cap (in MB) configured on the job: skip sending oversized archives,
    // but still mark the step done so the job can continue.
    if (!empty($STATIC['JOB']['mailefilesize'])) {
        if (filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']) > abs($STATIC['JOB']['mailefilesize'] * 1024 * 1024)) {
            trigger_error(__('Backup archive too big for sending by mail!', 'backwpup'), E_USER_ERROR);
            $WORKING['STEPDONE'] = 1;
            $WORKING['STEPSDONE'][] = 'DEST_MAIL'; //set done
            return;
        }
    }
    trigger_error(__('Adding backup archive to mail', 'backwpup'), E_USER_NOTICE);
    // PHPMailer encodes the attachment in memory; reserve ~5x the file size.
    need_free_memory(filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']) * 5);
    $phpmailer->AddAttachment($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    trigger_error(__('Send mail....', 'backwpup'), E_USER_NOTICE);
    if (false == $phpmailer->Send()) {
        trigger_error(sprintf(__('Error "%s" on sending mail!', 'backwpup'), $phpmailer->ErrorInfo), E_USER_ERROR);
    } else {
        $WORKING['STEPTODO'] = filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
        trigger_error(__('Mail send!!!', 'backwpup'), E_USER_NOTICE);
    }
    // Marked done even when Send() failed — NOTE(review): looks intentional
    // (a failed mail is not retried forever); confirm against the retry logic.
    $WORKING['STEPSDONE'][] = 'DEST_MAIL'; //set done
}
/**
 * Job destination: upload the backup archive to a Microsoft Azure Blob container
 * and prune old backups beyond the configured retention count.
 *
 * Uses the bundled Microsoft_WindowsAzure SDK. Progress/result state lives in
 * the globals $WORKING and $STATIC; on success the step is appended to
 * $WORKING['STEPSDONE'] and a download URL is stored in the job settings.
 */
function dest_msazure() {
    global $WORKING, $STATIC;
    // 2 extra progress ticks: one for the upload bookkeeping, one for cleanup.
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    trigger_error(sprintf(__('%d. try sending backup to a Microsoft Azure (Blob)...', 'backwpup'), $WORKING['DEST_MSAZURE']['STEP_TRY']), E_USER_NOTICE);
    require_once dirname(__FILE__) . '/../libs/Microsoft/WindowsAzure/Storage/Blob.php';
    // ~6MB headroom for the SDK (4MB block size * 1.5).
    need_free_memory(4194304 * 1.5);
    try {
        $storageClient = new Microsoft_WindowsAzure_Storage_Blob($STATIC['JOB']['msazureHost'], $STATIC['JOB']['msazureAccName'], $STATIC['JOB']['msazureKey']);
        // The container must already exist; it is not created here.
        if (!$storageClient->containerExists($STATIC['JOB']['msazureContainer'])) {
            trigger_error(sprintf(__('Microsoft Azure container "%s" not exists!', 'backwpup'), $STATIC['JOB']['msazureContainer']), E_USER_ERROR);
            return;
        } else {
            trigger_error(sprintf(__('Connected to Microsoft Azure container "%s"', 'backwpup'), $STATIC['JOB']['msazureContainer']), E_USER_NOTICE);
        }
        trigger_error(__('Upload to MS Azure now started... ', 'backwpup'), E_USER_NOTICE);
        $result = $storageClient->putBlob($STATIC['JOB']['msazureContainer'], $STATIC['JOB']['msazuredir'] . $STATIC['backupfile'], $STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
        // Success check: the SDK echoes back the blob name it stored.
        if ($result->Name == $STATIC['JOB']['msazuredir'] . $STATIC['backupfile']) {
            $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
            trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://' . $STATIC['JOB']['msazureAccName'] . '.' . $STATIC['JOB']['msazureHost'] . '/' . $STATIC['JOB']['msazuredir'] . $STATIC['backupfile']), E_USER_NOTICE);
            // Download goes through the plugin's admin page, not a public blob URL.
            $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloadmsazure&file=' . $STATIC['JOB']['msazuredir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
            $WORKING['STEPSDONE'][] = 'DEST_MSAZURE'; //set done
        } else {
            trigger_error(__('Can not transfer backup to Microsoft Azure!', 'backwpup'), E_USER_ERROR);
        }
        // Retention: keep only the newest msazuremaxbackups archives (0 = keep all).
        if ($STATIC['JOB']['msazuremaxbackups'] > 0) { //Delete old backups
            $backupfilelist = array();
            $blobs = $storageClient->listBlobs($STATIC['JOB']['msazureContainer'], $STATIC['JOB']['msazuredir']);
            if (is_array($blobs)) {
                foreach ($blobs as $blob) {
                    $file = basename($blob->Name);
                    // Only consider files matching this job's prefix and archive extension.
                    if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                        $backupfilelist[] = $file;
                    }
                }
            }
            if (sizeof($backupfilelist) > 0) {
                // Filenames embed the date, so reverse-sorting puts newest first.
                rsort($backupfilelist);
                $numdeltefiles = 0;
                for ($i = $STATIC['JOB']['msazuremaxbackups']; $i < sizeof($backupfilelist); $i++) {
                    $storageClient->deleteBlob($STATIC['JOB']['msazureContainer'], $STATIC['JOB']['msazuredir'] . $backupfilelist[$i]); //delte files on Cloud
                    $numdeltefiles++;
                }
                if ($numdeltefiles > 0) {
                    trigger_error(sprintf(_n('One file deleted on Microsoft Azure container', '%d files deleted on Microsoft Azure container', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Microsoft Azure API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
    }
    $WORKING['STEPDONE']++;
}
/**
 * Job destination: upload the backup archive to an Amazon S3 bucket and prune
 * old backups beyond the configured retention count.
 *
 * Uses the bundled AWS SDK (AmazonS3). Upload and cleanup run in separate
 * try/catch blocks so a cleanup failure cannot mask a successful upload.
 * Progress/result state lives in the globals $WORKING and $STATIC.
 */
function dest_s3() {
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup file to Amazon S3...', 'backwpup'), $WORKING['DEST_S3']['STEP_TRY']), E_USER_NOTICE);
    // 2 extra progress ticks: upload bookkeeping + cleanup pass.
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    // Reserve ~27.5MB: the SDK buffers multipart chunks (25MB * 1.1 headroom).
    need_free_memory(26214400 * 1.1);
    try {
        $s3 = new AmazonS3(array('key' => $STATIC['JOB']['awsAccessKey'], 'secret' => $STATIC['JOB']['awsSecretKey'], 'certificate_authority' => true));
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            trigger_error(sprintf(__('Connected to S3 Bucket: %s', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_NOTICE);
            //Transfer Backup to S3
            if ($STATIC['JOB']['awsrrs']) { //set reduced redundancy or not
                $storage = AmazonS3::STORAGE_REDUCED;
            } else {
                $storage = AmazonS3::STORAGE_STANDARD;
            }
            // Wire the job's progress callback into cURL when available.
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Upload to Amazon S3 now started... ', 'backwpup'), E_USER_NOTICE);
            //transferee file to S3
            $result = $s3->create_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => AmazonS3::ACL_PRIVATE, 'storage' => $storage, 'curlopts' => $curlops));
            // SDK returns a response object; cast to array to read status/header fields.
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), $result["header"]["_info"]["url"]), E_USER_NOTICE);
                // Download goes through the plugin's admin page, keeping the object private.
                $STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloads3&file=' . $STATIC['JOB']['awsdir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
                $WORKING['STEPSDONE'][] = 'DEST_S3'; //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to S3! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('S3 Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['awsBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    // Second pass: retention cleanup, isolated from the upload's error handling.
    try {
        if ($s3->if_bucket_exists($STATIC['JOB']['awsBucket'])) {
            if ($STATIC['JOB']['awsmaxbackups'] > 0) { //Delete old backups
                $backupfilelist = array();
                if (($contents = $s3->list_objects($STATIC['JOB']['awsBucket'], array('prefix' => $STATIC['JOB']['awsdir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        // Only files matching this job's prefix and archive extension count.
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    // Date-stamped filenames: reverse sort puts newest first.
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['awsmaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($s3->delete_object($STATIC['JOB']['awsBucket'], $STATIC['JOB']['awsdir'] . $backupfilelist[$i])) { //delte files on S3
                            $numdeltefiles++;
                        } else {
                            trigger_error(sprintf(__('Can not delete backup on S3://%s', 'backwpup'), $STATIC['JOB']['awsBucket'] . '/' . $STATIC['JOB']['awsdir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on S3 Bucket', '%d files deleted on S3 Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('Amazon API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
/**
 * Job destination: upload the backup archive to Google Cloud Storage (via the
 * bundled AmazonS3 SDK pointed at the GCS interoperability endpoint) and prune
 * old backups beyond the configured retention count.
 *
 * Upload and cleanup run in separate try/catch blocks so a cleanup failure
 * cannot mask a successful upload. Progress/result state lives in the globals
 * $WORKING and $STATIC.
 *
 * Fix: the delete-failure message below previously interpolated
 * $STATIC['JOB']['awsBucket'] (the Amazon S3 job setting) instead of the
 * Google Storage bucket, so the logged path was wrong for GStorage jobs.
 */
function dest_gstorage() {
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try sending backup to Google Storage...', 'backwpup'), $WORKING['DEST_GSTORAGE']['STEP_TRY']), E_USER_NOTICE);
    // 2 extra progress ticks: upload bookkeeping + cleanup pass.
    $WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
    $WORKING['STEPDONE'] = 0;
    require_once dirname(__FILE__) . '/../libs/aws/sdk.class.php';
    // Reserve ~27.5MB: the SDK buffers upload chunks (25MB * 1.1 headroom).
    need_free_memory(26214400 * 1.1);
    try {
        $gstorage = new AmazonS3(array('key' => $STATIC['JOB']['GStorageAccessKey'], 'secret' => $STATIC['JOB']['GStorageSecret'], 'certificate_authority' => true));
        // Re-point the S3 client at Google's S3-compatible endpoint.
        $gstorage->set_hostname('storage.googleapis.com');
        $gstorage->allow_hostname_override(false);
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            trigger_error(sprintf(__('Connected to GStorage Bucket: %s', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_NOTICE);
            // Wire the job's progress callback into cURL when available.
            $curlops = array();
            if (defined('CURLOPT_PROGRESSFUNCTION')) {
                $curlops = array(CURLOPT_NOPROGRESS => false, CURLOPT_PROGRESSFUNCTION => 'curl_progresscallback', CURLOPT_BUFFERSIZE => 1048576);
            }
            trigger_error(__('Upload to GStorage now started... ', 'backwpup'), E_USER_NOTICE);
            //transferee file to GStorage
            $result = $gstorage->create_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'], array('fileUpload' => $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'acl' => 'private', 'curlopts' => $curlops));
            // SDK returns a response object; cast to array to read status fields.
            $result = (array) $result;
            if ($result["status"] >= 200 and $result["status"] < 300) {
                $WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile']), E_USER_NOTICE);
                $STATIC['JOB']['lastbackupdownloadurl'] = "https://storage.cloud.google.com/" . $STATIC['JOB']['GStorageBucket'] . "/" . $STATIC['JOB']['GStoragedir'] . $STATIC['backupfile'];
                $WORKING['STEPSDONE'][] = 'DEST_GSTORAGE'; //set done
            } else {
                trigger_error(sprintf(__('Can not transfer backup to GStorage! (%1$d) %2$s', 'backwpup'), $result["status"], $result["Message"]), E_USER_ERROR);
            }
        } else {
            trigger_error(sprintf(__('GStorage Bucket "%s" not exists!', 'backwpup'), $STATIC['JOB']['GStorageBucket']), E_USER_ERROR);
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    // Second pass: retention cleanup, isolated from the upload's error handling.
    try {
        if ($gstorage->if_bucket_exists($STATIC['JOB']['GStorageBucket'])) {
            if ($STATIC['JOB']['GStoragemaxbackups'] > 0) { //Delete old backups
                $backupfilelist = array();
                if (($contents = $gstorage->list_objects($STATIC['JOB']['GStorageBucket'], array('prefix' => $STATIC['JOB']['GStoragedir']))) !== false) {
                    foreach ($contents->body->Contents as $object) {
                        $file = basename($object->Key);
                        // Only files matching this job's prefix and archive extension count.
                        if ($STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
                            $backupfilelist[] = $file;
                        }
                    }
                }
                if (sizeof($backupfilelist) > 0) {
                    // Date-stamped filenames: reverse sort puts newest first.
                    rsort($backupfilelist);
                    $numdeltefiles = 0;
                    for ($i = $STATIC['JOB']['GStoragemaxbackups']; $i < sizeof($backupfilelist); $i++) {
                        if ($gstorage->delete_object($STATIC['JOB']['GStorageBucket'], $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i])) { //delete files on GStorage
                            $numdeltefiles++;
                        } else {
                            // FIX: report the GStorage bucket (was the unrelated awsBucket setting).
                            trigger_error(sprintf(__('Can not delete backup on GStorage://%s', 'backwpup'), $STATIC['JOB']['GStorageBucket'] . '/' . $STATIC['JOB']['GStoragedir'] . $backupfilelist[$i]), E_USER_ERROR);
                        }
                    }
                    if ($numdeltefiles > 0) {
                        trigger_error(sprintf(_n('One file deleted on GStorage Bucket', '%d files deleted on GStorage Bucket', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
                    }
                }
            }
        }
    } catch (Exception $e) {
        trigger_error(sprintf(__('GStorage API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
        return;
    }
    $WORKING['STEPDONE']++;
}
/**
 * Pack the collected file list into the backup archive.
 *
 * Format is chosen by $STATIC['JOB']['fileformart']:
 *  - ".zip": PHP's ZipArchive when $STATIC['CFG']['phpzip'] is set, otherwise
 *    the bundled PclZip library;
 *  - ".tar", ".tar.gz", ".tar.bz2": a hand-written ustar writer streaming
 *    through fopen/gzopen/bzopen.
 * Progress is tracked in $WORKING (STEPDONE is resumed on retry for the
 * ZipArchive and tar paths).
 *
 * Fixes vs. the previous revision (ZipArchive branch only):
 *  1. `$res = $zip->open(...) === TRUE` assigned the boolean comparison to
 *     $res (=== binds tighter than =), so the failure message could never show
 *     the ZipArchive error code — now parenthesized.
 *  2. The failure message used the literal `$s` instead of the `%s`
 *     placeholder, so the code was not interpolated.
 *  3. The $ziperror text mapped from $zip->status was built but never used;
 *     the raw numeric status was printed instead.
 */
function backup_create() {
    global $WORKING, $STATIC;
    // Nothing to archive.
    if ($WORKING['ALLFILESIZE'] == 0) {
        return;
    }
    $filelist = get_filelist(); //get file list
    $WORKING['STEPTODO'] = count($filelist);
    if (empty($WORKING['STEPDONE'])) {
        $WORKING['STEPDONE'] = 0;
    }
    if (strtolower($STATIC['JOB']['fileformart']) == ".zip") { //Zip files
        if ($STATIC['CFG']['phpzip']) { //use php zip lib
            trigger_error(sprintf(__('%d. try to create backup zip archive...', 'backwpup'), $WORKING['BACKUP_CREATE']['STEP_TRY']), E_USER_NOTICE);
            $zip = new ZipArchive();
            // Parentheses keep the open() return code in $res for the error message.
            if (($res = $zip->open($STATIC['JOB']['backupdir'] . $STATIC['backupfile'], ZIPARCHIVE::CREATE)) === TRUE) {
                // Resume at STEPDONE so an interrupted run continues where it stopped.
                for ($i = $WORKING['STEPDONE']; $i < $WORKING['STEPTODO']; $i++) {
                    if (!$zip->addFile($filelist[$i]['FILE'], $filelist[$i]['OUTFILE'])) {
                        trigger_error(sprintf(__('Can not add "%s" to zip archive!', 'backwpup'), $filelist[$i]['OUTFILE']), E_USER_ERROR);
                    }
                    $WORKING['STEPDONE']++;
                    update_working_file();
                }
                if ($zip->status > 0) {
                    // Map libzip status codes to readable names for the log.
                    $ziperror = $zip->status;
                    if ($zip->status == 4) { $ziperror = __('(4) ER_SEEK', 'backwpup'); }
                    if ($zip->status == 5) { $ziperror = __('(5) ER_READ', 'backwpup'); }
                    if ($zip->status == 9) { $ziperror = __('(9) ER_NOENT', 'backwpup'); }
                    if ($zip->status == 10) { $ziperror = __('(10) ER_EXISTS', 'backwpup'); }
                    if ($zip->status == 11) { $ziperror = __('(11) ER_OPEN', 'backwpup'); }
                    if ($zip->status == 14) { $ziperror = __('(14) ER_MEMORY', 'backwpup'); }
                    if ($zip->status == 18) { $ziperror = __('(18) ER_INVAL', 'backwpup'); }
                    if ($zip->status == 19) { $ziperror = __('(19) ER_NOZIP', 'backwpup'); }
                    if ($zip->status == 21) { $ziperror = __('(21) ER_INCONS', 'backwpup'); }
                    // FIX: log the mapped text (was the raw $zip->status, leaving the map unused).
                    trigger_error(sprintf(__('Zip returns status: %s', 'backwpup'), $ziperror), E_USER_ERROR);
                }
                $zip->close(); // FIX: return value was stored in an unused $res2
                trigger_error(__('Backup zip archive create done!', 'backwpup'), E_USER_NOTICE);
                // Also appended again after the format branches; duplicates are harmless here.
                $WORKING['STEPSDONE'][] = 'BACKUP_CREATE'; //set done
            } else {
                // FIX: '%s' placeholder (was the literal '$s', so $res never appeared).
                trigger_error(sprintf(__('Can not create backup zip archive %s!', 'backwpup'), $res), E_USER_ERROR);
            }
        } else { //use PclZip
            define('PCLZIP_TEMPORARY_DIR', $STATIC['TEMPDIR']);
            require_once $STATIC['WP']['ABSPATH'] . 'wp-admin/includes/class-pclzip.php';
            // PclZip is not multibyte-safe; temporarily switch to a single-byte encoding
            // when mbstring function overloading is active.
            if (ini_get('mbstring.func_overload') && function_exists('mb_internal_encoding')) {
                $previous_encoding = mb_internal_encoding();
                mb_internal_encoding('ISO-8859-1');
            }
            //Create Zip File
            if (is_array($filelist[0])) {
                trigger_error(sprintf(__('%d. try to create backup zip (PclZip) archive...', 'backwpup'), $WORKING['BACKUP_CREATE']['STEP_TRY']), E_USER_NOTICE);
                // Build PclZip's file-description list; the numeric keys are PclZip's
                // PCLZIP_ATT_FILE_NAME / _NEW_FULL_NAME / _MTIME attribute constants.
                for ($i = 0; $i < $WORKING['STEPTODO']; $i++) { //must begin at 0 for PCLzip
                    $files[$i][79001] = $filelist[$i]['FILE'];
                    $files[$i][79003] = $filelist[$i]['OUTFILE'];
                    $files[$i][79004] = $filelist[$i]['MTIME'];
                }
                need_free_memory('20M'); //20MB free memory for zip
                $zipbackupfile = new PclZip($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
                if (0 == $zipbackupfile->create($files, PCLZIP_CB_POST_ADD, '_pclzipPostAddCallBack', PCLZIP_OPT_TEMP_FILE_THRESHOLD, 5)) {
                    trigger_error(sprintf(__('Zip archive create error: %s', 'backwpup'), $zipbackupfile->errorInfo(true)), E_USER_ERROR);
                } else {
                    $WORKING['STEPDONE'] = count($filelist);
                    unset($files);
                    trigger_error(__('Backup zip archive create done', 'backwpup'), E_USER_NOTICE);
                }
            }
            if (isset($previous_encoding)) {
                mb_internal_encoding($previous_encoding);
            }
        }
    } elseif (strtolower($STATIC['JOB']['fileformart']) == ".tar.gz" or strtolower($STATIC['JOB']['fileformart']) == ".tar.bz2" or strtolower($STATIC['JOB']['fileformart']) == ".tar") { //tar files
        // Open the output stream with the matching compression wrapper.
        if (strtolower($STATIC['JOB']['fileformart']) == '.tar.gz') {
            $tarbackup = gzopen($STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'w9');
        } elseif (strtolower($STATIC['JOB']['fileformart']) == '.tar.bz2') {
            $tarbackup = bzopen($STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'w');
        } else {
            $tarbackup = fopen($STATIC['JOB']['backupdir'] . $STATIC['backupfile'], 'w');
        }
        if (!$tarbackup) {
            trigger_error(__('Can not create tar arcive file!', 'backwpup'), E_USER_ERROR);
            return;
        } else {
            trigger_error(sprintf(__('%1$d. try to create %2$s archive file...', 'backwpup'), $WORKING['BACKUP_CREATE']['STEP_TRY'], substr($STATIC['JOB']['fileformart'], 1)), E_USER_NOTICE);
        }
        // Resume at STEPDONE so an interrupted run continues where it stopped.
        for ($index = $WORKING['STEPDONE']; $index < $WORKING['STEPTODO']; $index++) {
            need_free_memory(2097152); //2MB free memory for tar
            $files = $filelist[$index];
            //check file readable
            if (!is_readable($files['FILE']) or empty($files['FILE'])) {
                trigger_error(sprintf(__('File "%s" not readable!', 'backwpup'), $files['FILE']), E_USER_WARNING);
                $WORKING['STEPDONE']++;
                continue;
            }
            // ustar stores names longer than 100 chars split into a 155-char path
            // prefix plus a 100-char name, cut at a '/' boundary.
            if (strlen($files['OUTFILE']) <= 100) {
                $filename = $files['OUTFILE'];
                $filenameprefix = "";
            } else {
                $filenameofset = strlen($files['OUTFILE']) - 100;
                $dividor = strpos($files['OUTFILE'], '/', $filenameofset);
                $filename = substr($files['OUTFILE'], $dividor + 1);
                $filenameprefix = substr($files['OUTFILE'], 0, $dividor);
                if (strlen($filename) > 100) {
                    trigger_error(sprintf(__('File name "%1$s" to long to save corectly in %2$s archive!', 'backwpup'), $files['OUTFILE'], substr($STATIC['JOB']['fileformart'], 1)), E_USER_WARNING);
                }
                if (strlen($filenameprefix) > 155) {
                    trigger_error(sprintf(__('File path "%1$s" to long to save corectly in %2$s archive!', 'backwpup'), $files['OUTFILE'], substr($STATIC['JOB']['fileformart'], 1)), E_USER_WARNING);
                }
            }
            //Set file user/group name if linux
            $fileowner = "Unknown";
            $filegroup = "Unknown";
            if (function_exists('posix_getpwuid')) {
                $info = posix_getpwuid($files['UID']);
                $fileowner = $info['name'];
                $info = posix_getgrgid($files['GID']);
                $filegroup = $info['name'];
            }
            // Generate the 512-byte ustar header for this file (pack field widths sum to 512).
            $header = pack("a100a8a8a8a12a12a8a1a100a6a2a32a32a8a8a155a12", $filename, sprintf("%07o", $files['MODE']), sprintf("%07o", $files['UID']), sprintf("%07o", $files['GID']), sprintf("%011o", $files['SIZE']), sprintf("%011o", $files['MTIME']), "        ", 0, "", "ustar", "00", $fileowner, $filegroup, "", "", $filenameprefix, "");
            // Compute the unsigned checksum over the header (checksum field counted as spaces)
            // and splice it into bytes 148-155.
            $checksum = 0;
            for ($i = 0; $i < 512; $i++) {
                $checksum += ord(substr($header, $i, 1));
            }
            $checksum = pack("a8", sprintf("%07o", $checksum));
            $header = substr_replace($header, $checksum, 148, 8);
            if (strtolower($STATIC['JOB']['fileformart']) == '.tar.gz') {
                gzwrite($tarbackup, $header);
            } elseif (strtolower($STATIC['JOB']['fileformart']) == '.tar.bz2') {
                bzwrite($tarbackup, $header);
            } else {
                fwrite($tarbackup, $header);
            }
            // Stream file contents in 512-byte, NUL-padded tar blocks.
            $fd = fopen($files['FILE'], 'rb');
            while (!feof($fd)) {
                $filedata = fread($fd, 512);
                if (strlen($filedata) > 0) {
                    if (strtolower($STATIC['JOB']['fileformart']) == '.tar.gz') {
                        gzwrite($tarbackup, pack("a512", $filedata));
                    } elseif (strtolower($STATIC['JOB']['fileformart']) == '.tar.bz2') {
                        bzwrite($tarbackup, pack("a512", $filedata));
                    } else {
                        fwrite($tarbackup, pack("a512", $filedata));
                    }
                }
            }
            fclose($fd);
            $WORKING['STEPDONE']++;
            update_working_file();
        }
        // Terminate the archive with two zero blocks (1024 NUL bytes) and close.
        if (strtolower($STATIC['JOB']['fileformart']) == '.tar.gz') {
            gzwrite($tarbackup, pack("a1024", "")); // Add 1024 bytes of NULLs to designate EOF
            gzclose($tarbackup);
        } elseif (strtolower($STATIC['JOB']['fileformart']) == '.tar.bz2') {
            bzwrite($tarbackup, pack("a1024", "")); // Add 1024 bytes of NULLs to designate EOF
            bzclose($tarbackup);
        } else {
            fwrite($tarbackup, pack("a1024", "")); // Add 1024 bytes of NULLs to designate EOF
            fclose($tarbackup);
        }
        trigger_error(sprintf(__('%s archive creation done', 'backwpup'), substr($STATIC['JOB']['fileformart'], 1)), E_USER_NOTICE);
    }
    $WORKING['STEPSDONE'][] = 'BACKUP_CREATE'; //set done
    if ($filesize = filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile'])) {
        trigger_error(sprintf(__('Archive size is %s', 'backwpup'), formatBytes($filesize)), E_USER_NOTICE);
    }
}
/**
 * Dump one database table (DROP/CREATE structure plus INSERT data) as SQL
 * appended to an already-open file handle.
 *
 * @param string   $table  Table name (backtick-quoted in the emitted SQL).
 * @param array    $status This table's row from SHOW TABLE STATUS
 *                         (Data_length, Index_length, Engine are read).
 * @param resource $file   Writable handle the SQL text is written to.
 * @return false|void false after a failed query (an E_USER_ERROR was
 *                    triggered); nothing on success.
 *
 * Fixes vs. the previous revision:
 *  1. Column-name escaping replaced the acute accent "´" (U+00B4) instead of
 *     the backtick, so identifiers containing "`" produced broken SQL; MySQL
 *     escapes a backtick inside a quoted identifier by doubling it.
 *  2. The closing "ENABLE KEYS" statement now backtick-quotes the table name,
 *     matching the "DISABLE KEYS" statement above it.
 */
function _db_dump_table($table, $status, $file) {
    global $WORKING, $STATIC;
    need_free_memory(($status['Data_length'] + $status['Index_length']) * 4); //get more memory if needed
    // create dump
    trigger_error(sprintf(__('Dump database table "%s"', 'backwpup'), $table), E_USER_NOTICE);
    fwrite($file, "\n");
    fwrite($file, "--\n");
    fwrite($file, "-- Table structure for table {$table}\n");
    fwrite($file, "--\n\n");
    fwrite($file, "DROP TABLE IF EXISTS `" . $table . "`;\n");
    // Preserve the importing client's charset around the CREATE statement.
    fwrite($file, "/*!40101 SET @saved_cs_client = @@character_set_client */;\n");
    fwrite($file, "/*!40101 SET character_set_client = '" . mysql_client_encoding() . "' */;\n");
    //Dump the table structure
    $result = mysql_query("SHOW CREATE TABLE `" . $table . "`");
    if (!$result) {
        trigger_error(sprintf(__('Database error %1$s for query %2$s', 'backwpup'), mysql_error(), "SHOW CREATE TABLE `" . $table . "`"), E_USER_ERROR);
        return false;
    }
    $tablestruc = mysql_fetch_assoc($result);
    fwrite($file, $tablestruc['Create Table'] . ";\n");
    fwrite($file, "/*!40101 SET character_set_client = @saved_cs_client */;\n");
    //take data of table
    $result = mysql_query("SELECT * FROM `" . $table . "`");
    if (!$result) {
        trigger_error(sprintf(__('Database error %1$s for query %2$s', 'backwpup'), mysql_error(), "SELECT * FROM `" . $table . "`"), E_USER_ERROR);
        return false;
    }
    // Collect per-column metadata so numeric columns can be written unquoted.
    $fieldsarray = array();
    $fieldinfo = array();
    $fields = mysql_num_fields($result);
    for ($i = 0; $i < $fields; $i++) {
        $fieldsarray[$i] = mysql_field_name($result, $i);
        $fieldinfo[$fieldsarray[$i]] = mysql_fetch_field($result, $i);
    }
    fwrite($file, "--\n");
    fwrite($file, "-- Dumping data for table {$table}\n");
    fwrite($file, "--\n\n");
    // Speeds up bulk import on MyISAM: rebuild indexes once after all inserts.
    if ($status['Engine'] == 'MyISAM') {
        fwrite($file, "/*!40000 ALTER TABLE `" . $table . "` DISABLE KEYS */;\n");
    }
    while ($data = mysql_fetch_assoc($result)) {
        $keys = array();
        $values = array();
        foreach ($data as $key => $value) {
            if (!$STATIC['JOB']['dbshortinsert']) {
                // FIX: escape backticks in identifiers by doubling them (was "´").
                $keys[] = "`" . str_replace("`", "``", $key) . "`"; // Add key to key list
            }
            if (is_null($value)) { // Make NULL value to string NULL
                $value = "NULL";
            } elseif ($fieldinfo[$key]->numeric == 1 && $fieldinfo[$key]->type != 'timestamp' && $fieldinfo[$key]->blob != 1) { //is value numeric no esc
                $value = empty($value) ? 0 : $value;
            } else {
                $value = "'" . mysql_real_escape_string($value) . "'";
            }
            $values[] = $value;
        }
        // make data dump; "short insert" omits the column list.
        if ($STATIC['JOB']['dbshortinsert']) {
            fwrite($file, "INSERT INTO `" . $table . "` VALUES ( " . implode(", ", $values) . " );\n");
        } else {
            fwrite($file, "INSERT INTO `" . $table . "` ( " . implode(", ", $keys) . " )\n\tVALUES ( " . implode(", ", $values) . " );\n");
        }
    }
    if ($status['Engine'] == 'MyISAM') {
        // FIX: backtick-quote the table name, consistent with DISABLE KEYS above.
        fwrite($file, "/*!40000 ALTER TABLE `" . $table . "` ENABLE KEYS */;\n");
    }
}
/**
 * Fetch the WordPress XML (WXR) export by calling the plugin's
 * wp_export_generate.php endpoint over HTTP, save it into the job temp dir,
 * and register the resulting file in the backup file list.
 *
 * Uses cURL when available, otherwise a raw fsockopen HTTP/1.1 POST.
 * The target file is TEMPDIR + sanitized blog name + '.wordpress.Y-m-d.xml'.
 */
function wp_export() {
    global $WORKING, $STATIC;
    $WORKING['STEPTODO'] = 1;
    trigger_error(sprintf(__('%d. try for wordpress export to XML file...', 'backwpup'), $WORKING['WP_EXPORT']['STEP_TRY']), E_USER_NOTICE);
    need_free_memory(10485760); //10MB free memory
    if (function_exists('curl_exec')) {
        $ch = curl_init();
        // JOBRUNURL minus its last 11 chars is the plugin's job directory URL;
        // presumably strips a fixed script name — TODO confirm against caller.
        curl_setopt($ch, CURLOPT_URL, substr($STATIC['JOBRUNURL'], 0, -11) . 'wp_export_generate.php');
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
        curl_setopt($ch, CURLOPT_POST, true);
        // The nonce authenticates this job run against the generator endpoint.
        curl_setopt($ch, CURLOPT_POSTFIELDS, array('nonce' => $WORKING['NONCE'], 'type' => 'getxmlexport'));
        curl_setopt($ch, CURLOPT_BINARYTRANSFER, true);
        curl_setopt($ch, CURLOPT_FRESH_CONNECT, true);
        // TLS verification is disabled for the loopback request to the blog itself.
        curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
        curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false);
        curl_setopt($ch, CURLOPT_USERAGENT, 'BackWPup');
        // Wire the job's progress callback into cURL when available.
        if (defined('CURLOPT_PROGRESSFUNCTION')) {
            curl_setopt($ch, CURLOPT_NOPROGRESS, false);
            curl_setopt($ch, CURLOPT_PROGRESSFUNCTION, 'curl_progresscallback');
            curl_setopt($ch, CURLOPT_BUFFERSIZE, 1048576);
        }
        // Pass through HTTP basic auth if the site is protected.
        if (!empty($STATIC['CFG']['httpauthuser']) and !empty($STATIC['CFG']['httpauthpassword'])) {
            curl_setopt($ch, CURLOPT_HTTPAUTH, CURLAUTH_ANY);
            curl_setopt($ch, CURLOPT_USERPWD, $STATIC['CFG']['httpauthuser'] . ':' . backwpup_base64($STATIC['CFG']['httpauthpassword']));
        }
        curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 300);
        $return = curl_exec($ch);
        $status = curl_getinfo($ch);
        if ($status['http_code'] >= 300 or $status['http_code'] < 200 or curl_errno($ch) > 0) {
            if (0 != curl_errno($ch)) {
                trigger_error(__('cURL:', 'backwpup') . ' (' . curl_errno($ch) . ') ' . curl_error($ch), E_USER_ERROR);
            } else {
                trigger_error(__('cURL:', 'backwpup') . ' (' . $status['http_code'] . ') Invalid response.', E_USER_ERROR);
            }
        } else {
            // Write the whole export in one go (it was buffered by RETURNTRANSFER).
            file_put_contents($STATIC['TEMPDIR'] . preg_replace('/[^a-z0-9_\\-]/', '', strtolower($STATIC['WP']['BLOGNAME'])) . '.wordpress.' . date('Y-m-d') . '.xml', $return);
        }
        curl_close($ch);
    } else { //use fopen if no curl
        // Hand-rolled HTTP/1.1 POST over a raw socket.
        $urlParsed = parse_url(substr($STATIC['JOBRUNURL'], 0, -11) . 'wp_export_generate.php');
        if ($urlParsed['scheme'] == 'https') {
            $host = 'ssl://' . $urlParsed['host'];
            $port = !empty($urlParsed['port']) ? $urlParsed['port'] : 443;
        } else {
            $host = $urlParsed['host'];
            $port = !empty($urlParsed['port']) ? $urlParsed['port'] : 80;
        }
        $query = http_build_query(array('nonce' => $WORKING['NONCE'], 'type' => 'getxmlexport'));
        $path = (isset($urlParsed['path']) ? $urlParsed['path'] : '/') . (isset($urlParsed['query']) ? '?' . $urlParsed['query'] : '');
        $header = "POST " . $path . " HTTP/1.1\r\n";
        $header .= "Host: " . $urlParsed['host'] . "\r\n";
        $header .= "User-Agent: BackWPup\r\n";
        $header .= "Content-Type: application/x-www-form-urlencoded\r\n";
        $header .= "Content-Length: " . strlen($query) . "\r\n";
        if (!empty($STATIC['CFG']['httpauthuser']) and !empty($STATIC['CFG']['httpauthpassword'])) {
            $header .= "Authorization: Basic " . base64_encode($STATIC['CFG']['httpauthuser'] . ':' . backwpup_base64($STATIC['CFG']['httpauthpassword'])) . "\r\n";
        }
        $header .= "Connection: Close\r\n\r\n";
        $header .= $query;
        // NOTE(review): fsockopen failure ($fp === false) is not checked before
        // fwrite/fread — a connect error would cascade into warnings here.
        $fp = fsockopen($host, $port, $errno, $errstr, 300);
        fwrite($fp, $header);
        // Consume the response headers byte-by-byte until the blank line.
        $responseHeader = '';
        do {
            $responseHeader .= fread($fp, 1);
        } while (!preg_match('/\\r\\n\\r\\n$/', $responseHeader));
        // Append the body to the target file in small chunks, keeping the
        // working file fresh so the watchdog sees progress.
        while (!feof($fp)) {
            update_working_file();
            file_put_contents($STATIC['TEMPDIR'] . preg_replace('/[^a-z0-9_\\-]/', '', strtolower($STATIC['WP']['BLOGNAME'])) . '.wordpress.' . date('Y-m-d') . '.xml', fgets($fp, 256), FILE_APPEND);
        }
        fclose($fp);
    }
    //add XML file to backupfiles
    if (is_readable($STATIC['TEMPDIR'] . preg_replace('/[^a-z0-9_\\-]/', '', strtolower($STATIC['WP']['BLOGNAME'])) . '.wordpress.' . date('Y-m-d') . '.xml')) {
        $filestat = stat($STATIC['TEMPDIR'] . preg_replace('/[^a-z0-9_\\-]/', '', strtolower($STATIC['WP']['BLOGNAME'])) . '.wordpress.' . date('Y-m-d') . '.xml');
        trigger_error(sprintf(__('Add XML export "%1$s" to backup list with %2$s', 'backwpup'), preg_replace('/[^a-z0-9_\\-]/', '', strtolower($STATIC['WP']['BLOGNAME'])) . '.wordpress.' . date('Y-m-d') . '.xml', formatbytes($filestat['size'])), E_USER_NOTICE);
        $WORKING['ALLFILESIZE'] += $filestat['size'];
        add_file(array(array('FILE' => $STATIC['TEMPDIR'] . preg_replace('/[^a-z0-9_\\-]/', '', strtolower($STATIC['WP']['BLOGNAME'])) . '.wordpress.' . date('Y-m-d') . '.xml', 'OUTFILE' => preg_replace('/[^a-z0-9_\\-]/', '', strtolower($STATIC['WP']['BLOGNAME'])) . '.wordpress.' . date('Y-m-d') . '.xml', 'SIZE' => $filestat['size'], 'ATIME' => $filestat['atime'], 'MTIME' => $filestat['mtime'], 'CTIME' => $filestat['ctime'], 'UID' => $filestat['uid'], 'GID' => $filestat['gid'], 'MODE' => $filestat['mode'])));
    }
    $WORKING['STEPDONE'] = 1;
    $WORKING['STEPSDONE'][] = 'WP_EXPORT'; //set done
}
/**
 * Dump the WordPress database to TEMPDIR/<DB_NAME>.sql and register the file
 * in the backup file list.
 *
 * Table selection honors the job's exclude list; already-dumped tables are
 * tracked in $WORKING['DB_DUMP']['DONETABLE'] so an interrupted run resumes
 * without re-dumping. Maintenance mode is enabled for the duration of the
 * dump. Per-table work is delegated to _db_dump_table().
 */
function db_dump() {
    global $WORKING, $STATIC;
    trigger_error(sprintf(__('%d. try for database dump...', 'backwpup'), $WORKING['DB_DUMP']['STEP_TRY']), E_USER_NOTICE);
    // Resume support: tables finished in a previous attempt are skipped below.
    if (!isset($WORKING['DB_DUMP']['DONETABLE']) or !is_array($WORKING['DB_DUMP']['DONETABLE'])) {
        $WORKING['DB_DUMP']['DONETABLE'] = array();
    }
    mysql_update();
    //to backup
    $tabelstobackup = array();
    $result = mysql_query("SHOW TABLES FROM `" . $STATIC['WP']['DB_NAME'] . "`"); //get table names
    if (!$result) {
        // NOTE(review): this message quotes "SHOW TABLE STATUS" but the query
        // actually run above is "SHOW TABLES" — the logged query text is wrong.
        trigger_error(sprintf(__('Database error %1$s for query %2$s', 'backwpup'), mysql_error(), "SHOW TABLE STATUS FROM `" . $STATIC['WP']['DB_NAME'] . "`;"), E_USER_ERROR);
    }
    while ($data = mysql_fetch_row($result)) {
        // Skip tables on the job's exclude list.
        if (!in_array($data[0], $STATIC['JOB']['dbexclude'])) {
            $tabelstobackup[] = $data[0];
        }
    }
    $WORKING['STEPTODO'] = count($tabelstobackup);
    //Set maintenance
    maintenance_mode(true);
    if (count($tabelstobackup) > 0) {
        // Collect per-table status (sizes, engine) used for memory sizing and
        // MyISAM key handling in _db_dump_table().
        $result = mysql_query("SHOW TABLE STATUS FROM `" . $STATIC['WP']['DB_NAME'] . "`"); //get table status
        if (!$result) {
            trigger_error(sprintf(__('Database error %1$s for query %2$s', 'backwpup'), mysql_error(), "SHOW TABLE STATUS FROM `" . $STATIC['WP']['DB_NAME'] . "`;"), E_USER_ERROR);
        }
        while ($data = mysql_fetch_assoc($result)) {
            $status[$data['Name']] = $data;
        }
        if ($file = fopen($STATIC['TEMPDIR'] . $STATIC['WP']['DB_NAME'] . '.sql', 'wb')) {
            // Human-readable dump header.
            fwrite($file, "-- ---------------------------------------------------------\n");
            fwrite($file, "-- Dump with BackWPup ver.: " . $STATIC['BACKWPUP']['VERSION'] . "\n");
            fwrite($file, "-- Plugin for WordPress " . $STATIC['WP']['VERSION'] . " by Daniel Huesken\n");
            fwrite($file, "-- http://danielhuesken.de/portfolio/backwpup/\n");
            fwrite($file, "-- Blog Name: " . $STATIC['WP']['BLOGNAME'] . "\n");
            fwrite($file, "-- Blog URL: " . $STATIC['WP']['SITEURL'] . "\n");
            fwrite($file, "-- Blog ABSPATH: " . $STATIC['WP']['ABSPATH'] . "\n");
            fwrite($file, "-- Table Prefix: " . $STATIC['WP']['TABLE_PREFIX'] . "\n");
            fwrite($file, "-- Database Name: " . $STATIC['WP']['DB_NAME'] . "\n");
            fwrite($file, "-- Dump on: " . date('Y-m-d H:i.s', time() + $STATIC['WP']['TIMEDIFF']) . "\n");
            fwrite($file, "-- ---------------------------------------------------------\n\n");
            // Session-variable preamble mirroring mysqldump: save client settings,
            // then disable checks so the import can run in any order.
            //for better import with mysql client
            fwrite($file, "/*!40101 SET @OLD_CHARACTER_SET_CLIENT=@@CHARACTER_SET_CLIENT */;\n");
            fwrite($file, "/*!40101 SET @OLD_CHARACTER_SET_RESULTS=@@CHARACTER_SET_RESULTS */;\n");
            fwrite($file, "/*!40101 SET @OLD_COLLATION_CONNECTION=@@COLLATION_CONNECTION */;\n");
            fwrite($file, "/*!40101 SET NAMES '" . mysql_client_encoding() . "' */;\n");
            fwrite($file, "/*!40103 SET @OLD_TIME_ZONE=@@TIME_ZONE */;\n");
            fwrite($file, "/*!40103 SET TIME_ZONE='" . mysql_result(mysql_query("SELECT @@time_zone"), 0) . "' */;\n");
            fwrite($file, "/*!40014 SET @OLD_UNIQUE_CHECKS=@@UNIQUE_CHECKS, UNIQUE_CHECKS=0 */;\n");
            fwrite($file, "/*!40014 SET @OLD_FOREIGN_KEY_CHECKS=@@FOREIGN_KEY_CHECKS, FOREIGN_KEY_CHECKS=0 */;\n");
            fwrite($file, "/*!40101 SET @OLD_SQL_MODE=@@SQL_MODE, SQL_MODE='NO_AUTO_VALUE_ON_ZERO' */;\n");
            fwrite($file, "/*!40111 SET @OLD_SQL_NOTES=@@SQL_NOTES, SQL_NOTES=0 */;\n\n");
            //make table dumps
            foreach ($tabelstobackup as $table) {
                // Skip tables already dumped in a previous, interrupted attempt.
                if (in_array($table, $WORKING['DB_DUMP']['DONETABLE'])) {
                    continue;
                }
                trigger_error(sprintf(__('Dump database table "%s"', 'backwpup'), $table), E_USER_NOTICE);
                need_free_memory(($status[$table]['Data_length'] + $status[$table]['Index_length']) * 3); //get more memory if needed
                _db_dump_table($table, $status[$table], $file);
                $WORKING['DB_DUMP']['DONETABLE'][] = $table;
                $WORKING['STEPDONE'] = count($WORKING['DB_DUMP']['DONETABLE']);
            }
            // Restore the saved session variables at the end of the dump.
            //for better import with mysql client
            fwrite($file, "\n");
            fwrite($file, "/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;\n");
            fwrite($file, "/*!40101 SET SQL_MODE=@OLD_SQL_MODE */;\n");
            fwrite($file, "/*!40014 SET FOREIGN_KEY_CHECKS=@OLD_FOREIGN_KEY_CHECKS */;\n");
            fwrite($file, "/*!40014 SET UNIQUE_CHECKS=@OLD_UNIQUE_CHECKS */;\n");
            fwrite($file, "/*!40101 SET CHARACTER_SET_CLIENT=@OLD_CHARACTER_SET_CLIENT */;\n");
            fwrite($file, "/*!40101 SET CHARACTER_SET_RESULTS=@OLD_CHARACTER_SET_RESULTS */;\n");
            fwrite($file, "/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;\n");
            fwrite($file, "/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;\n");
            fclose($file);
            trigger_error(__('Database dump done!', 'backwpup'), E_USER_NOTICE);
        } else {
            trigger_error(__('Can not create database dump!', 'backwpup'), E_USER_ERROR);
        }
    } else {
        trigger_error(__('No tables to dump', 'backwpup'), E_USER_WARNING);
    }
    //add database file to backupfiles
    if (is_readable($STATIC['TEMPDIR'] . $STATIC['WP']['DB_NAME'] . '.sql')) {
        $filestat = stat($STATIC['TEMPDIR'] . $STATIC['WP']['DB_NAME'] . '.sql');
        trigger_error(sprintf(__('Add database dump "%1$s" with %2$s to backup file list', 'backwpup'), $STATIC['WP']['DB_NAME'] . '.sql', formatbytes($filestat['size'])), E_USER_NOTICE);
        $WORKING['ALLFILESIZE'] += $filestat['size'];
        add_file(array(array('FILE' => $STATIC['TEMPDIR'] . $STATIC['WP']['DB_NAME'] . '.sql', 'OUTFILE' => $STATIC['WP']['DB_NAME'] . '.sql', 'SIZE' => $filestat['size'], 'ATIME' => $filestat['atime'], 'MTIME' => $filestat['mtime'], 'CTIME' => $filestat['ctime'], 'UID' => $filestat['uid'], 'GID' => $filestat['gid'], 'MODE' => $filestat['mode'])));
    }
    //Back from maintenance
    maintenance_mode(false);
    $WORKING['STEPSDONE'][] = 'DB_DUMP'; //set done
}
/**
 * Build the list of files to back up for the current job.
 *
 * Walks every enabled WordPress folder (root, wp-content, plugins, themes,
 * uploads) plus any user-configured extra include directories, collects the
 * results into the global $tempfilelist via _file_list(), stat()s each entry
 * and hands the final records to add_file().
 *
 * Globals:
 *   $WORKING      (array) job progress state: STEPTODO/STEPDONE, ALLFILESIZE,
 *                 FILEEXCLUDES, STEPSDONE are read/written here.
 *   $STATIC       (array) job configuration and WordPress path constants.
 *   $tempfilelist (array) scratch list that _file_list() appends to.
 *
 * @return void  Problems are reported through trigger_error(); an empty
 *               result list raises E_USER_ERROR.
 */
function file_list() {
	global $WORKING, $STATIC, $tempfilelist;
	//Make filelist
	trigger_error(sprintf(__('%d. try for make list of files to backup....', 'backwpup'), $WORKING['FILE_LIST']['STEP_TRY']), E_USER_NOTICE);
	$WORKING['STEPTODO'] = 2;
	//Check free memory for file list
	need_free_memory('10MB'); //10MB free memory for filelist
	//empty filelist
	$tempfilelist = array();
	//excludes of job; .tmp is always excluded
	$WORKING['FILEEXCLUDES'] = explode(',', trim($STATIC['JOB']['fileexclude']));
	$WORKING['FILEEXCLUDES'][] = '.tmp'; //do not backup .tmp files
	$WORKING['FILEEXCLUDES'] = array_unique($WORKING['FILEEXCLUDES']);
	//File list for blog folders
	if ($STATIC['JOB']['backuproot']) {
		_file_list($STATIC['WP']['ABSPATH'], 100, array_merge($STATIC['JOB']['backuprootexcludedirs'], _get_exclude_dirs($STATIC['WP']['ABSPATH'])));
	}
	if ($STATIC['JOB']['backupcontent']) {
		_file_list($STATIC['WP']['WP_CONTENT_DIR'], 100, array_merge($STATIC['JOB']['backupcontentexcludedirs'], _get_exclude_dirs($STATIC['WP']['WP_CONTENT_DIR'])));
	}
	if ($STATIC['JOB']['backupplugins']) {
		_file_list($STATIC['WP']['WP_PLUGIN_DIR'], 100, array_merge($STATIC['JOB']['backuppluginsexcludedirs'], _get_exclude_dirs($STATIC['WP']['WP_PLUGIN_DIR'])));
	}
	if ($STATIC['JOB']['backupthemes']) {
		_file_list($STATIC['WP']['WP_THEMES_DIR'], 100, array_merge($STATIC['JOB']['backupthemesexcludedirs'], _get_exclude_dirs($STATIC['WP']['WP_THEMES_DIR'])));
	}
	if ($STATIC['JOB']['backupuploads']) {
		_file_list($STATIC['WP']['WP_UPLOAD_DIR'], 100, array_merge($STATIC['JOB']['backupuploadsexcludedirs'], _get_exclude_dirs($STATIC['WP']['WP_UPLOAD_DIR'])));
	}
	//extra include dirs configured on the job
	if (!empty($STATIC['JOB']['dirinclude'])) {
		$dirinclude = explode(',', $STATIC['JOB']['dirinclude']);
		$dirinclude = array_unique($dirinclude);
		//Create file list for includes
		foreach ($dirinclude as $dirincludevalue) {
			if (is_dir($dirincludevalue)) {
				_file_list($dirincludevalue, 100);
			}
		}
	}
	$tempfilelist = array_unique($tempfilelist); //all files only one time in list
	sort($tempfilelist);
	$WORKING['STEPDONE'] = 1; //Step done
	update_working_file();
	//Check abs path; if it is the filesystem root there is nothing to strip
	if ($STATIC['WP']['ABSPATH'] == '/' or $STATIC['WP']['ABSPATH'] == '') {
		$removepath = '';
	} else {
		$removepath = $STATIC['WP']['ABSPATH'];
	}
	//make file list
	$filelist = array();
	$removepathlen = strlen($removepath);
	$numtempfiles = count($tempfilelist); //hoisted: count() is loop-invariant
	for ($i = 0; $i < $numtempfiles; $i++) {
		$filestat = stat($tempfilelist[$i]);
		$WORKING['ALLFILESIZE'] += $filestat['size'];
		//Strip the WordPress root only when it is the path PREFIX.
		//(str_replace() would also remove it from the middle of a path.)
		if ($removepathlen > 0 and strpos($tempfilelist[$i], $removepath) === 0) {
			$outfile = substr($tempfilelist[$i], $removepathlen);
		} else {
			$outfile = $tempfilelist[$i];
		}
		if (substr($outfile, 0, 1) == '/') { //remove first /
			$outfile = substr($outfile, 1);
		}
		$filelist[] = array('FILE' => $tempfilelist[$i], 'OUTFILE' => $outfile, 'SIZE' => $filestat['size'], 'ATIME' => $filestat['atime'], 'MTIME' => $filestat['mtime'], 'CTIME' => $filestat['ctime'], 'UID' => $filestat['uid'], 'GID' => $filestat['gid'], 'MODE' => $filestat['mode']);
	}
	add_file($filelist); //add files to list
	$WORKING['STEPDONE'] = 2;
	$WORKING['STEPSDONE'][] = 'FILE_LIST'; //set done
	unset($tempfilelist);
	$filelist = get_filelist(); //get files from list
	//isset() guard avoids an undefined-index notice on an empty list
	if (!isset($filelist[0]) or !is_array($filelist[0])) {
		trigger_error(__('No files to backup', 'backwpup'), E_USER_ERROR);
	} else {
		trigger_error(sprintf(__('%1$d files with %2$s to backup', 'backwpup'), count($filelist), formatBytes($WORKING['ALLFILESIZE'])), E_USER_NOTICE);
	}
}
/**
 * Transfer the finished backup archive to an FTP server.
 *
 * Connects (optionally via SSL-FTP), logs in (falling back to raw
 * USER/PASS commands when ftp_login() fails), sets the transfer mode,
 * creates the remote folder hierarchy if needed, uploads the archive and
 * finally prunes old backups beyond 'ftpmaxbackups'.
 *
 * Globals:
 *   $WORKING (array) job progress state (STEPTODO/STEPDONE/STEPSDONE).
 *   $STATIC  (array) job configuration (ftphost, ftpuser, ftppass, ...).
 *
 * @return bool|void false on connection/login/folder failure; otherwise
 *                   progress is recorded in $WORKING and nothing returned.
 */
function dest_ftp() {
	global $WORKING, $STATIC;
	//nothing to do without complete credentials
	if (empty($STATIC['JOB']['ftphost']) or empty($STATIC['JOB']['ftpuser']) or empty($STATIC['JOB']['ftppass'])) {
		$WORKING['STEPSDONE'][] = 'DEST_FTP'; //set done
		return;
	}
	$WORKING['STEPTODO'] = 2;
	trigger_error(sprintf(__('%d. try to sending backup file to a FTP Server...', 'backwpup'), $WORKING['DEST_FTP']['STEP_TRY']), E_USER_NOTICE);
	need_free_memory(filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']) * 1.5);
	if ($STATIC['JOB']['ftpssl']) { //make SSL FTP connection
		if (function_exists('ftp_ssl_connect')) {
			$ftp_conn_id = ftp_ssl_connect($STATIC['JOB']['ftphost'], $STATIC['JOB']['ftphostport'], 10);
			if ($ftp_conn_id) {
				trigger_error(sprintf(__('Connected by SSL-FTP to Server: %s', 'backwpup'), $STATIC['JOB']['ftphost'] . ':' . $STATIC['JOB']['ftphostport']), E_USER_NOTICE);
			} else {
				trigger_error(sprintf(__('Can not connect by SSL-FTP to Server: %s', 'backwpup'), $STATIC['JOB']['ftphost'] . ':' . $STATIC['JOB']['ftphostport']), E_USER_ERROR);
				return false;
			}
		} else {
			trigger_error(__('PHP function to connect with SSL-FTP to server not exists!', 'backwpup'), E_USER_ERROR);
			return false;
		}
	} else { //make normal FTP connection if SSL is not used
		$ftp_conn_id = ftp_connect($STATIC['JOB']['ftphost'], $STATIC['JOB']['ftphostport'], 10);
		if ($ftp_conn_id) {
			trigger_error(sprintf(__('Connected to FTP server: %s', 'backwpup'), $STATIC['JOB']['ftphost'] . ':' . $STATIC['JOB']['ftphostport']), E_USER_NOTICE);
		} else {
			trigger_error(sprintf(__('Can not connect to FTP server: %s', 'backwpup'), $STATIC['JOB']['ftphost'] . ':' . $STATIC['JOB']['ftphostport']), E_USER_ERROR);
			return false;
		}
	}
	//FTP Login
	$loginok = false;
	trigger_error(sprintf(__('FTP Client command: %s', 'backwpup'), ' USER ' . $STATIC['JOB']['ftpuser']), E_USER_NOTICE);
	if ($loginok = ftp_login($ftp_conn_id, $STATIC['JOB']['ftpuser'], base64_decode($STATIC['JOB']['ftppass']))) {
		trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), ' User ' . $STATIC['JOB']['ftpuser'] . ' logged in.'), E_USER_NOTICE);
	} else { //if PHP ftp login don't work use raw login
		$return = ftp_raw($ftp_conn_id, 'USER ' . $STATIC['JOB']['ftpuser']);
		trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), $return[0]), E_USER_NOTICE);
		if (substr(trim($return[0]), 0, 3) <= 400) { //reply codes below 400 are positive
			trigger_error(sprintf(__('FTP Client command: %s', 'backwpup'), ' PASS *******'), E_USER_NOTICE);
			$return = ftp_raw($ftp_conn_id, 'PASS ' . base64_decode($STATIC['JOB']['ftppass']));
			trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), $return[0]), E_USER_NOTICE);
			if (substr(trim($return[0]), 0, 3) <= 400) {
				$loginok = true;
			}
		}
	}
	if (!$loginok) {
		ftp_close($ftp_conn_id); //fix: do not leak the connection on failed login
		return false;
	}
	//PASV
	trigger_error(sprintf(__('FTP Client command: %s', 'backwpup'), ' PASV'), E_USER_NOTICE);
	if ($STATIC['JOB']['ftppasv']) {
		if (ftp_pasv($ftp_conn_id, true)) {
			trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), __('Entering Passive Mode', 'backwpup')), E_USER_NOTICE);
		} else {
			trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), __('Can not Entering Passive Mode', 'backwpup')), E_USER_WARNING);
		}
	} else {
		if (ftp_pasv($ftp_conn_id, false)) {
			trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), __('Entering Normal Mode', 'backwpup')), E_USER_NOTICE);
		} else {
			trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), __('Can not Entering Normal Mode', 'backwpup')), E_USER_WARNING);
		}
	}
	//SYSTYPE
	trigger_error(sprintf(__('FTP Client command: %s', 'backwpup'), ' SYST'), E_USER_NOTICE);
	$systype = ftp_systype($ftp_conn_id);
	if ($systype) {
		trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), $systype), E_USER_NOTICE);
	} else {
		trigger_error(sprintf(__('FTP Server reply: %s', 'backwpup'), __('Error getting SYSTYPE', 'backwpup')), E_USER_ERROR);
	}
	if ($WORKING['STEPDONE'] == 0) { //test ftp dir and create it if not exists
		$ftpdirs = explode("/", rtrim($STATIC['JOB']['ftpdir'], '/'));
		foreach ($ftpdirs as $ftpdir) {
			if (empty($ftpdir)) {
				continue;
			}
			if (!@ftp_chdir($ftp_conn_id, $ftpdir)) {
				if (@ftp_mkdir($ftp_conn_id, $ftpdir)) {
					trigger_error(sprintf(__('FTP Folder "%s" created!', 'backwpup'), $ftpdir), E_USER_NOTICE);
					ftp_chdir($ftp_conn_id, $ftpdir);
				} else {
					trigger_error(sprintf(__('FTP Folder "%s" can not created!', 'backwpup'), $ftpdir), E_USER_ERROR);
					ftp_close($ftp_conn_id); //fix: close connection before bailing out
					return false;
				}
			}
		}
		trigger_error(__('Upload to FTP now started ... ', 'backwpup'), E_USER_NOTICE);
		if (ftp_put($ftp_conn_id, $STATIC['JOB']['ftpdir'] . $STATIC['backupfile'], $STATIC['JOB']['backupdir'] . $STATIC['backupfile'], FTP_BINARY)) { //transfer file
			$WORKING['STEPTODO'] = 1 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
			trigger_error(sprintf(__('Backup transferred to FTP server: %s', 'backwpup'), $STATIC['JOB']['ftpdir'] . $STATIC['backupfile']), E_USER_NOTICE);
			//NOTE(review): download URL embeds the plain FTP password — kept for
			//backward compatibility, but consider removing credentials here.
			$STATIC['JOB']['lastbackupdownloadurl'] = "ftp://" . $STATIC['JOB']['ftpuser'] . ":" . base64_decode($STATIC['JOB']['ftppass']) . "@" . $STATIC['JOB']['ftphost'] . $STATIC['JOB']['ftpdir'] . $STATIC['backupfile'];
			$WORKING['STEPSDONE'][] = 'DEST_FTP'; //set done
		} else {
			trigger_error(__('Can not transfer backup to FTP server!', 'backwpup'), E_USER_ERROR);
		}
	}
	if ($STATIC['JOB']['ftpmaxbackups'] > 0) { //Delete old backups
		$backupfilelist = array();
		if ($filelist = ftp_nlist($ftp_conn_id, $STATIC['JOB']['ftpdir'])) {
			foreach ($filelist as $files) {
				//keep only files matching this job's prefix and archive suffix
				if ($STATIC['JOB']['fileprefix'] == substr(basename($files), 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr(basename($files), -strlen($STATIC['JOB']['fileformart']))) {
					$backupfilelist[] = basename($files);
				}
			}
			if (sizeof($backupfilelist) > 0) {
				rsort($backupfilelist); //newest first; everything past the limit is deleted
				$numdeltefiles = 0;
				for ($i = $STATIC['JOB']['ftpmaxbackups']; $i < sizeof($backupfilelist); $i++) {
					if (ftp_delete($ftp_conn_id, $STATIC['JOB']['ftpdir'] . $backupfilelist[$i])) { //delete files on ftp
						$numdeltefiles++;
					} else {
						trigger_error(sprintf(__('Can not delete "%s" on FTP server!', 'backwpup'), $STATIC['JOB']['ftpdir'] . $backupfilelist[$i]), E_USER_ERROR);
					}
				}
				if ($numdeltefiles > 0) {
					trigger_error(sprintf(_n('One file deleted on FTP Server', '%d files deleted on FTP Server', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
				}
			}
		}
	}
	ftp_close($ftp_conn_id);
	$WORKING['STEPDONE']++;
}
/**
 * Transfer the finished backup archive to Dropbox.
 *
 * Authenticates with the stored OAuth tokens, checks the remaining quota,
 * uploads the archive with a progress callback and then prunes old backups
 * beyond 'dropemaxbackups'. All Dropbox API failures surface as
 * E_USER_ERROR via the exception handlers.
 *
 * Globals:
 *   $WORKING (array) job progress state (STEPTODO/STEPDONE/STEPSDONE).
 *   $STATIC  (array) job configuration (dropetoken, dropesecret, dropedir, ...).
 *
 * @return void
 */
function dest_dropbox() {
	global $WORKING, $STATIC;
	$WORKING['STEPTODO'] = 2 + filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']);
	$WORKING['STEPDONE'] = 0;
	trigger_error(sprintf(__('%d. Try to sending backup file to DropBox...', 'backwpup'), $WORKING['DEST_DROPBOX']['STEP_TRY']), E_USER_NOTICE);
	require_once realpath(dirname(__FILE__) . '/../libs/dropbox.php');
	try {
		need_free_memory(10000000);
		//set boxtype
		$dropbox = new backwpup_Dropbox('dropbox');
		// set the tokens
		$dropbox->setOAuthTokens($STATIC['JOB']['dropetoken'], $STATIC['JOB']['dropesecret']);
		$info = $dropbox->accountInfo();
		if (!empty($info['uid'])) {
			trigger_error(sprintf(__('Authed with DropBox from %s', 'backwpup'), $info['display_name']), E_USER_NOTICE);
		}
		//Check quota: free = total - shared - normal usage
		$dropboxfreespace = (double) $info['quota_info']['quota'] - (double) $info['quota_info']['shared'] - (double) $info['quota_info']['normal'];
		if (filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile']) > $dropboxfreespace) {
			trigger_error(__('No free space left on DropBox!!!', 'backwpup'), E_USER_ERROR);
			$WORKING['STEPSDONE'][] = 'DEST_DROPBOX'; //set done
			return;
		} else {
			trigger_error(sprintf(__('%s free on DropBox', 'backwpup'), formatBytes($dropboxfreespace)), E_USER_NOTICE);
		}
		//set callback function for upload progress
		$dropbox->setProgressFunction('curl_progresscallback');
		// put the file
		trigger_error(__('Upload to DropBox now started... ', 'backwpup'), E_USER_NOTICE);
		$response = $dropbox->upload($STATIC['JOB']['backupdir'] . $STATIC['backupfile'], $STATIC['JOB']['dropedir'] . $STATIC['backupfile']);
		//only mark done when the reported size matches the local archive
		if ($response['bytes'] == filesize($STATIC['JOB']['backupdir'] . $STATIC['backupfile'])) {
			$STATIC['JOB']['lastbackupdownloadurl'] = $STATIC['WP']['ADMINURL'] . '?page=backwpupbackups&action=downloaddropbox&file=' . $STATIC['JOB']['dropedir'] . $STATIC['backupfile'] . '&jobid=' . $STATIC['JOB']['jobid'];
			$WORKING['STEPDONE']++;
			$WORKING['STEPSDONE'][] = 'DEST_DROPBOX'; //set done
			trigger_error(sprintf(__('Backup transferred to %s', 'backwpup'), 'https://api-content.dropbox.com/1/files/' . $STATIC['JOB']['droperoot'] . '/' . $STATIC['JOB']['dropedir'] . $STATIC['backupfile']), E_USER_NOTICE);
		} else {
			//fix: previously a size mismatch was silently ignored; report it
			//like the FTP destination does so the job retries with an error logged
			trigger_error(__('Can not transfer backup to DropBox!', 'backwpup'), E_USER_ERROR);
		}
		//unset callback function
		$dropbox->setProgressFunction();
	} catch (Exception $e) {
		trigger_error(sprintf(__('DropBox API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
	}
	try {
		//isset() guard: $dropbox is undefined when construction/auth threw above
		if ($STATIC['JOB']['dropemaxbackups'] > 0 and isset($dropbox) and is_object($dropbox)) { //Delete old backups
			$backupfilelist = array();
			$metadata = $dropbox->metadata($STATIC['JOB']['dropedir']);
			if (is_array($metadata)) {
				foreach ($metadata['contents'] as $data) {
					$file = basename($data['path']);
					//keep only files matching this job's prefix and archive suffix
					if ($data['is_dir'] != true and $STATIC['JOB']['fileprefix'] == substr($file, 0, strlen($STATIC['JOB']['fileprefix'])) and $STATIC['JOB']['fileformart'] == substr($file, -strlen($STATIC['JOB']['fileformart']))) {
						$backupfilelist[] = $file;
					}
				}
			}
			if (count($backupfilelist) > 0) {
				rsort($backupfilelist); //newest first; everything past the limit is deleted
				$numdeltefiles = 0;
				for ($i = $STATIC['JOB']['dropemaxbackups']; $i < count($backupfilelist); $i++) {
					$dropbox->fileopsDelete($STATIC['JOB']['dropedir'] . $backupfilelist[$i]); //delete files on Cloud
					$numdeltefiles++;
				}
				if ($numdeltefiles > 0) {
					trigger_error(sprintf(_n('One file deleted on DropBox', '%d files deleted on DropBox', $numdeltefiles, 'backwpup'), $numdeltefiles), E_USER_NOTICE);
				}
			}
		}
	} catch (Exception $e) {
		trigger_error(sprintf(__('DropBox API: %s', 'backwpup'), $e->getMessage()), E_USER_ERROR);
	}
	$WORKING['STEPDONE']++;
}