Example #1
 function resetlog($args = array(), $vars = array())
 {
     DHDO::logger('reset');
     WP_CLI::success('Debug log wiped');
 }
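This handler takes the standard WP-CLI ($args, $assoc_args) argument pair, so it is presumably exposed as a subcommand. A minimal sketch of that wiring, assuming a 'dreamobjects' command name and a DreamObjects_CLI container class (both hypothetical, not confirmed by this snippet):

 if (defined('WP_CLI') && WP_CLI) {
     // Hypothetical registration; the plugin's real command name may differ.
     WP_CLI::add_command('dreamobjects', 'DreamObjects_CLI');
 }

With that in place, the command would run from the shell as: wp dreamobjects resetlog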
Example #2
 function backup()
 {
     DHDO::logger('Beginning Backup.');
     global $wpdb;
     if (!is_dir(WP_CONTENT_DIR . '/upgrade/')) {
         DHDO::logger('Upgrade folder missing. This will cause serious issues with WP in general, so we will create it for you.');
         // content_url() returns a URL, not a path; filesystem checks and mkdir must use WP_CONTENT_DIR.
         mkdir(WP_CONTENT_DIR . '/upgrade/');
     }
     // Pull in data for what to backup
     $sections = get_option('dh-do-backupsection');
     if (!$sections) {
         $sections = array();
     }
     $file = WP_CONTENT_DIR . '/upgrade/dreamobject-backups.zip';
     $fileurl = content_url() . '/upgrade/dreamobject-backups.zip';
     // Pre-Cleanup
     if (file_exists($file)) {
         @unlink($file);
         DHDO::logger('Leftover zip file found, deleting ' . $file . ' ...');
     }
     if (class_exists('ZipArchive')) {
         // ZipArchive's constructor takes no arguments and never throws, so test
         // for the extension directly instead of wrapping it in try/catch.
         $zip = new ZipArchive();
         $zaresult = true;
         DHDO::logger('ZipArchive found and will be used for backups.');
     } else {
         // Load the bundled PclZip fallback before instantiating it.
         require_once ABSPATH . '/wp-admin/includes/class-pclzip.php';
         $zip = new PclZip($file);
         $zaresult = false;
         DHDO::logger('ZipArchive not found. PclZip will be used for backups.');
     }
     $backups = array();
     // All me files!
     if (in_array('files', $sections)) {
         DHDO::logger('Calculating backup size...');
         $trimdisk = WP_CONTENT_DIR;
         $diskcmd = sprintf('du -s %s', escapeshellarg(WP_CONTENT_DIR));
         $diskusage = exec($diskcmd);
         $diskusage = trim(str_replace($trimdisk, '', $diskusage));
         DHDO::logger(size_format($diskusage * 1024) . ' of diskspace will be processed.');
         // du reports kilobytes; only attempt a PHP-side backup when wp-content is under ~2 GB.
         if ($diskusage < 2000 * 1024) {
             $backups = array_merge($backups, DHDO::rscandir(WP_CONTENT_DIR));
             DHDO::logger(count($backups) . ' files added to backup list.');
         } else {
             DHDO::logger('ERROR! PHP is unable to backup your wp-content folder. Please consider cleaning out unused files (like plugins and themes).');
         }
         if (file_exists(ABSPATH . 'wp-config.php')) {
             $backups[] = ABSPATH . 'wp-config.php';
             DHDO::logger('wp-config.php added to backup list.');
         }
     }
     // And me DB!
     if (in_array('database', $sections)) {
         set_time_limit(300);
         $sqlhash = wp_hash(wp_rand());
         $sqlfile = WP_CONTENT_DIR . '/upgrade/' . $sqlhash . '.sql';
         $tables = $wpdb->get_col("SHOW TABLES LIKE '" . $wpdb->prefix . "%'");
         $tables_string = implode(' ', $tables);
         // Pre cleanup
         if (file_exists($sqlfile)) {
             @unlink($sqlfile);
             DHDO::logger('Leftover sql file found, deleting ' . $sqlfile . ' ...');
         }
         $dbcmd = sprintf("mysqldump -h'%s' -u'%s' -p'%s' %s %s --single-transaction 2>&1 >> %s", DB_HOST, DB_USER, DB_PASSWORD, DB_NAME, $tables_string, $sqlfile);
         exec($dbcmd);
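         // For reference, the assembled command looks like this (values illustrative only):
         //   mysqldump -h'localhost' -u'dbuser' -p'dbpass' wpdb wp_posts wp_options ... --single-transaction 2>&1 >> .../upgrade/<hash>.sql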
         $sqlsize = size_format(@filesize($sqlfile));
         DHDO::logger('SQL file created: ' . $sqlfile . ' (' . $sqlsize . ').');
         $backups[] = $sqlfile;
         DHDO::logger('SQL added to backup list.');
     }
     if (!empty($backups)) {
         set_time_limit(300);
         // Increased timeout to 5 minutes. If the zip takes longer than that, I have a problem.
         // $zaresult is a boolean; the old comparison against the string 'true' only worked by accident.
         if (!$zaresult) {
             DHDO::logger('Creating zip file using PclZip.');
             DHDO::logger('NOTICE: If the log stops here, PHP failed to create a zip of your wp-content folder. Please consider increasing the server\'s PHP memory, RAM or CPU.');
             $zip->create($backups);
         } else {
             DHDO::logger('Creating zip file using ZipArchive.');
             DHDO::logger('NOTICE: If the log stops here, PHP failed to create a zip of your wp-content folder. Please consider cleaning out unused files (like plugins and themes), or increasing the server\'s PHP memory, RAM or CPU.');
             try {
                 $zip->open($file, ZipArchive::CREATE);
                 $trimpath = ABSPATH;
                 foreach ($backups as $backupfiles) {
                     if (strpos($backupfiles, DIRECTORY_SEPARATOR . 'cache' . DIRECTORY_SEPARATOR) === false) {
                         $zip->addFile($backupfiles, 'dreamobjects-backup' . str_replace($trimpath, '/', $backupfiles));
                         //DHDO::logger( $backupfiles );
                     }
                 }
                 $zip->close();
             } catch (Exception $e) {
                 $error_string = $e->getMessage();
                 DHDO::logger('ZipArchive failed to complete: ' . $error_string);
             }
         }
         if (@file_exists($file)) {
             DHDO::logger('Calculating zip file size ...');
             $zipsize = size_format(@filesize($file));
             DHDO::logger('Zip file generated: ' . $file . ' (' . $zipsize . ').');
         } else {
             DHDO::logger('Zip file failed to generate. Nothing will be backed up.');
         }
         // Delete SQL
         if (isset($sqlfile) && file_exists($sqlfile)) {
             @unlink($sqlfile);
             DHDO::logger('Deleting SQL file: ' . $sqlfile . ' ...');
         }
         // Upload
         if (@file_exists($file)) {
             $s3 = AwsS3DHDO::factory(array(
                 'key'      => get_option('dh-do-key'),
                 'secret'   => get_option('dh-do-secretkey'),
                 'base_url' => get_option('dh-do-endpoint'),
             ));
             $bucket = get_option('dh-do-bucket');
             $parseUrl = parse_url(trim(home_url()));
             $url = $parseUrl['host'];
             if (isset($parseUrl['path'])) {
                 $url .= $parseUrl['path'];
             }
             // Rename file
             $newname = $url . '/' . date_i18n('Y-m-d-His', current_time('timestamp')) . '.zip';
             DHDO::logger('New filename ' . $newname . '.');
             // Uploading
             set_time_limit(180);
             DHDO::logger('Beginning upload to Object Store servers.');
             // Check the size of the file before we upload, in order to compensate for large files
             if (@filesize($file) >= 100 * 1024 * 1024) {
                 // Files larger than 100megs go through Multipart
                 DHDO::logger('Filesize is over 100megs, using Multipart uploader.');
                 // High Level
                 DHDO::logger('Prepare the upload parameters and upload parts in 25M chunks.');
                 $uploader = UploadBuilder::newInstance()
                     ->setClient($s3)
                     ->setSource($file)
                     ->setBucket($bucket)
                     ->setKey($newname)
                     ->setMinPartSize(25 * 1024 * 1024)
                     ->setOption('Metadata', array(
                         'UploadedBy'   => 'DreamObjectsBackupPlugin',
                         'UploadedDate' => date_i18n('Y-m-d-His', current_time('timestamp')),
                     ))
                     ->setOption('ACL', 'private')
                     ->setConcurrency(3)
                     ->build();
                 // This will be called in the following try
                 $uploader->getEventDispatcher()->addListener('multipart_upload.after_part_upload', function ($event) {
                     DHDO::logger('Part ' . $event['state']->count() . ' uploaded ...');
                 });
                 try {
                     DHDO::logger('Begin upload. This may take a while (5min for every 75 megs or so).');
                     set_time_limit(180);
                     $uploader->upload();
                     DHDO::logger('Upload complete');
                 } catch (MultipartUploadException $e) {
                     $uploader->abort();
                     DHDO::logger('Upload failed: ' . $e->getMessage());
                 }
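                 // For scale: with 25 MB parts and concurrency 3, a 300 MB archive
                 // goes up as 12 parts, roughly three in flight at a time.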
             } else {
                 // If it's under 100megs, do it the old way
                 DHDO::logger('Filesize is under 100megs. This will be less spammy.');
                 set_time_limit(180);
                 // 3 min
                 try {
                     $result = $s3->putObject(array(
                         'Bucket'      => $bucket,
                         'Key'         => $newname,
                         'SourceFile'  => $file,
                         'ContentType' => 'application/zip',
                         'ACL'         => 'private',
                         'Metadata'    => array(
                             'UploadedBy'   => 'DreamObjectsBackupPlugin',
                             'UploadedDate' => date_i18n('Y-m-d-His', current_time('timestamp')),
                         ),
                     ));
                     DHDO::logger('Upload complete');
                 } catch (S3Exception $e) {
                     DHDO::logger('Upload failed: ' . $e->getMessage());
                 }
             }
             /*
             // https://dreamxtream.wordpress.com/2013/10/29/aws-php-sdk-logging-using-guzzle/
             $s3->getEventDispatcher()->removeSubscriber($logPlugin);
             */
         } else {
             DHDO::logger('Nothing to upload.');
         }
         // Cleanup
         if (file_exists($file)) {
             @unlink($file);
             DHDO::logger('Deleting zip file: ' . $file . ' ...');
         }
         if (isset($sqlfile) && file_exists($sqlfile)) {
             @unlink($sqlfile);
             DHDO::logger('Deleting SQL file: ' . $sqlfile . ' ...');
         }
     }
     // Cleanup Old Backups
     DHDO::logger('Checking for backups to be deleted.');
     // $backup_result was never defined; the stray assignment ('Yes' && ...) masked the real check.
     if (get_option('dh-do-retain') && get_option('dh-do-retain') != 'all') {
         $num_backups = get_option('dh-do-retain');
         $s3 = AwsS3DHDO::factory(array(
             'key'      => get_option('dh-do-key'),
             'secret'   => get_option('dh-do-secretkey'),
             'base_url' => get_option('dh-do-endpoint'),
         ));
         $bucket = get_option('dh-do-bucket');
         $parseUrl = parse_url(trim(home_url()));
         $prefixurl = $parseUrl['host'];
         if (isset($parseUrl['path'])) {
             $prefixurl .= $parseUrl['path'];
         }
         $backups = $s3->getIterator('ListObjects', array('Bucket' => $bucket, 'Prefix' => $prefixurl));
         if ($backups !== false) {
             $backups = $backups->toArray();
             krsort($backups);
             $count = 0;
             foreach ($backups as $object) {
                 if (++$count > $num_backups) {
                     $s3->deleteObject(array('Bucket' => $bucket, 'Key' => $object['Key']));
                     DHDO::logger('Removed backup ' . $object['Key'] . ' from DreamObjects, per user retention choice.');
                 }
             }
         }
     } else {
         DHDO::logger('Per user retention choice, not deleting any old backups.');
     }
     DHDO::logger('Backup Complete.');
     DHDO::logger('');
 }
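Because backup() is long-running (note the repeated set_time_limit() calls), it is typically driven from WP-Cron rather than an interactive request. A minimal sketch of such scheduling, using a hypothetical hook name 'dh-do-backup' (the plugin's real hook may differ):

 // Run the backup whenever the cron hook fires (hook name is an assumption).
 add_action('dh-do-backup', array('DHDO', 'backup'));

 // Queue a daily run if one is not already scheduled.
 if (!wp_next_scheduled('dh-do-backup')) {
     wp_schedule_event(time(), 'daily', 'dh-do-backup');
 }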