/**
 * Runs a queued data transfer (snapshot backup or restore) against an environment.
 *
 * Reads 'logfile', 'dataTransferID' and optionally 'backupBeforePush' from the
 * Resque job args. When pushing data, optionally takes a backup of the current
 * data first and links it to the transfer via BackupDataTransferID.
 *
 * @throws RuntimeException If another transfer is already running, or the backend fails.
 */
public function perform() {
	echo "[-] DataTransferJob starting" . PHP_EOL;
	$log = new DeploynautLogFile($this->args['logfile']);
	$dataTransfer = DNDataTransfer::get()->byID($this->args['dataTransferID']);
	$environment = $dataTransfer->Environment();
	$backupDataTransfer = null;

	// The frontend sometimes submits the literal string 'false', which is truthy to
	// !empty() — compare explicitly so a declined backup is not performed anyway.
	if (!empty($this->args['backupBeforePush'])
		&& $this->args['backupBeforePush'] !== 'false'
		&& $dataTransfer->Direction == 'push'
	) {
		$backupDataTransfer = DNDataTransfer::create();
		$backupDataTransfer->EnvironmentID = $environment->ID;
		$backupDataTransfer->Direction = 'get';
		$backupDataTransfer->Mode = $dataTransfer->Mode;
		$backupDataTransfer->DataArchiveID = null;
		$backupDataTransfer->ResqueToken = $dataTransfer->ResqueToken;
		$backupDataTransfer->AuthorID = $dataTransfer->AuthorID;
		$backupDataTransfer->write();

		// Link the backup to the original transfer so it can be found later.
		$dataTransfer->BackupDataTransferID = $backupDataTransfer->ID;
		$dataTransfer->write();
	}

	// This is a bit icky, but there is no easy way of capturing a failed run by using the PHP Resque
	try {
		// Disallow concurrent jobs (don't rely on queuing implementation to restrict this)
		// Only consider data transfers started in the last 30 minutes (older jobs probably got stuck)
		$runningTransfers = DNDataTransfer::get()->filter(array(
			'EnvironmentID' => $environment->ID,
			'Status' => array('Queued', 'Started'),
			'Created:GreaterThan' => strtotime('-30 minutes')
		))->exclude('ID', $dataTransfer->ID);

		if ($runningTransfers->count()) {
			$runningTransfer = $runningTransfers->first();
			// Build the message once and reuse it for both the log and the exception.
			$message = sprintf(
				'Another transfer is in progress (started at %s by %s)',
				$runningTransfer->dbObject('Created')->Nice(),
				$runningTransfer->Author()->Title
			);
			$log->write('[-] Error: ' . lcfirst($message));
			throw new RuntimeException($message);
		}

		// before we push data to an environment, we'll make a backup first
		if ($backupDataTransfer) {
			$log->write('Backing up existing data');
			$environment->Backend()->dataTransfer($backupDataTransfer, $log);
		}

		$environment->Backend()->dataTransfer($dataTransfer, $log);
	} catch (RuntimeException $exc) {
		$log->write($exc->getMessage());
		if ($backupDataTransfer) {
			$backupDataTransfer->Status = 'Failed';
			$backupDataTransfer->write();
		}
		$this->updateStatus('Failed');
		echo "[-] DataTransferJob failed" . PHP_EOL;
		throw $exc;
	}

	if ($backupDataTransfer) {
		$backupDataTransfer->Status = 'Finished';
		$backupDataTransfer->write();
	}

	echo "[-] DataTransferJob finished" . PHP_EOL;
}
/**
 * Executes a queued deployment for an environment, optionally taking a
 * pre-deploy snapshot first.
 *
 * Job args used: 'logfile', 'deploymentID', 'backup_mode', 'predeploy_backup'.
 *
 * @throws Exception Rethrown from the backend; DeploynautJob triggers onFailure.
 */
public function perform() {
	echo "[-] DeployJob starting" . PHP_EOL;

	$log = new DeploynautLogFile($this->args['logfile']);
	$deployment = DNDeployment::get()->byID($this->args['deploymentID']);
	$environment = $deployment->Environment();
	$currentBuild = $environment->CurrentBuild();
	$project = $environment->Project();

	$preDeployBackup = null;
	$backupMode = empty($this->args['backup_mode']) ? 'db' : $this->args['backup_mode'];

	// Perform pre-deploy backup here if required. Note that the backup is done here within
	// the deploy job, so that the order of backup is done before deployment, and so it
	// doesn't tie up another worker. It also puts the backup output into
	// the same log as the deployment so there is visibility on what is going on.
	// Note that the code has to be present for a backup to be performed, so the first
	// deploy onto a clean environment will not be performing any backup regardless of
	// whether the predeploy_backup option was passed or not.
	// Sometimes predeploy_backup comes through as string false from the frontend.
	$wantsBackup = !empty($this->args['predeploy_backup'])
		&& $this->args['predeploy_backup'] !== 'false'
		&& !empty($currentBuild);

	if ($wantsBackup) {
		$preDeployBackup = DNDataTransfer::create();
		$preDeployBackup->EnvironmentID = $environment->ID;
		$preDeployBackup->Direction = 'get';
		$preDeployBackup->Mode = $backupMode;
		$preDeployBackup->ResqueToken = $deployment->ResqueToken;
		$preDeployBackup->AuthorID = $deployment->DeployerID;
		$preDeployBackup->write();

		$deployment->BackupDataTransferID = $preDeployBackup->ID;
		$deployment->write();
	}

	try {
		// Disallow concurrent deployments (don't rely on queuing implementation to restrict this)
		// Only consider deployments started in the last 30 minutes (older jobs probably got stuck)
		$conflicts = $environment->runningDeployments()->exclude('ID', $this->args['deploymentID']);
		if ($conflicts->count()) {
			$conflict = $conflicts->first();
			$message = sprintf(
				'Error: another deployment is in progress (started at %s by %s)',
				$conflict->dbObject('Created')->Nice(),
				$conflict->Deployer()->Title
			);
			$log->write($message);
			throw new \RuntimeException($message);
		}

		$this->performBackup($preDeployBackup, $log);
		$environment->Backend()->deploy($environment, $log, $project, $this->args);
	} catch (Exception $e) {
		// DeploynautJob will automatically trigger onFailure.
		echo "[-] DeployJob failed" . PHP_EOL;
		throw $e;
	}

	$this->updateStatus(DNDeployment::TR_COMPLETE);
	echo "[-] DeployJob finished" . PHP_EOL;
}
/**
 * The generated archive filename should include the project name,
 * the originating environment name and the transfer mode.
 */
public function testGenerateFileName() {
	$project1 = $this->objFromFixture('DNProject', 'project1');
	$uatEnvironment = $this->objFromFixture('DNEnvironment', 'project1-uat');

	// A full ('all') snapshot transfer to derive the name from.
	$transfer = DNDataTransfer::create();
	$transfer->Direction = 'get';
	$transfer->Mode = 'all';
	$transfer->write();

	$archive = DNDataArchive::create();
	$archive->OriginalEnvironmentID = $uatEnvironment->ID;
	$archive->write();

	$filename = $archive->generateFilename($transfer);

	$this->assertNotNull($filename);
	$this->assertContains('project_1', $filename);
	$this->assertContains('uat', $filename);
	$this->assertContains('all', $filename);
}
/**
 * Builds a minimal pipeline fixture with a rollback step configured,
 * backed by freshly written deployment and snapshot records.
 *
 * @param bool $restoreDB Whether the configured RollbackStep restores the database
 * @return Pipeline
 */
public function getDummyPipeline($restoreDB = true) {
	// Default deployments and snapshot for the pipeline to reference.
	$previousDeployment = DNDeployment::create();
	$previousDeployment->write();

	$currentDeployment = DNDeployment::create();
	$currentDeployment->write();

	$previousSnapshot = DNDataTransfer::create();
	$previousSnapshot->write();

	// Wire the fixture pipeline up with a single rollback step.
	$pipeline = $this->objFromFixture('Pipeline', 'testpipesmoketest');
	$pipeline->Config = serialize(array(
		'RollbackStep1' => array(
			'Class' => 'RollbackStep',
			'RestoreDB' => $restoreDB,
			'MaxDuration' => '3600'
		)
	));
	$pipeline->PreviousDeploymentID = $previousDeployment->ID;
	$pipeline->CurrentDeploymentID = $currentDeployment->ID;
	$pipeline->PreviousSnapshotID = $previousSnapshot->ID;
	$pipeline->write();

	return $pipeline;
}
/**
 * Runs a queued data transfer against an environment, optionally preceded by
 * a backup when pushing data.
 *
 * Job args used: 'logfile', 'dataTransferID', 'backupBeforePush'.
 *
 * @throws Exception Rethrown from the backend or the concurrency check.
 */
public function perform() {
	echo "[-] DataTransferJob starting" . PHP_EOL;

	$log = new DeploynautLogFile($this->args['logfile']);
	$transfer = DNDataTransfer::get()->byID($this->args['dataTransferID']);
	$environment = $transfer->Environment();

	// The frontend can submit the literal string 'false', which !empty() would accept.
	$wantsBackup = !empty($this->args['backupBeforePush'])
		&& $this->args['backupBeforePush'] !== 'false'
		&& $transfer->Direction == 'push';

	$backupTransfer = null;
	if ($wantsBackup) {
		$backupTransfer = DNDataTransfer::create();
		$backupTransfer->EnvironmentID = $environment->ID;
		$backupTransfer->Direction = 'get';
		$backupTransfer->Mode = $transfer->Mode;
		$backupTransfer->ResqueToken = $transfer->ResqueToken;
		$backupTransfer->AuthorID = $transfer->AuthorID;
		$backupTransfer->write();

		$transfer->BackupDataTransferID = $backupTransfer->ID;
		$transfer->write();
	}

	try {
		// Disallow concurrent jobs (don't rely on queuing implementation to restrict this)
		// Only consider data transfers started in the last 30 minutes (older jobs probably got stuck)
		$conflicts = DNDataTransfer::get()->filter(array(
			'EnvironmentID' => $environment->ID,
			'Status' => array('Queued', 'Started'),
			'Created:GreaterThan' => strtotime('-30 minutes')
		))->exclude('ID', $transfer->ID);

		if ($conflicts->count()) {
			$conflict = $conflicts->first();
			$message = sprintf(
				'Error: another transfer is in progress (started at %s by %s)',
				$conflict->dbObject('Created')->Nice(),
				$conflict->Author()->Title
			);
			$log->write($message);
			throw new \RuntimeException($message);
		}

		$this->performBackup($backupTransfer, $log);
		$environment->Backend()->dataTransfer($transfer, $log);
	} catch (Exception $e) {
		echo "[-] DataTransferJob failed" . PHP_EOL;
		throw $e;
	}

	$this->updateStatus('Finished');
	echo "[-] DataTransferJob finished" . PHP_EOL;
}
/**
 * Form handler that validates and kicks off a snapshot data transfer
 * (backup or restore) for an environment the member may act on.
 *
 * @param array $data Submitted form data ('Direction', 'EnvironmentID', 'Mode', optional 'DataArchiveID', 'BackupBeforePush')
 * @param Form $form
 *
 * @return SS_HTTPResponse
 * @throws SS_HTTPResponse_Exception When the member may not access the archive
 * @throws LogicException On an invalid direction, environment, mode or archive
 */
public function doDataTransfer($data, Form $form) {
	$this->setCurrentActionType(self::ACTION_SNAPSHOT);

	// Performs canView permission check by limiting visible projects
	$project = $this->getCurrentProject();
	if (!$project) {
		return $this->project404Response();
	}

	// Validate direction, and collect the environments the member may use for it.
	if ($data['Direction'] == 'get') {
		$validEnvs = $this->getCurrentProject()->DNEnvironmentList()
			->filterByCallback(function ($item) {
				return $item->canBackup();
			});
	} elseif ($data['Direction'] == 'push') {
		$validEnvs = $this->getCurrentProject()->DNEnvironmentList()
			->filterByCallback(function ($item) {
				return $item->canRestore();
			});
	} else {
		throw new LogicException('Invalid direction');
	}

	// Validate $data['EnvironmentID'] by checking against $validEnvs.
	$environment = $validEnvs->find('ID', $data['EnvironmentID']);
	if (!$environment) {
		throw new LogicException('Invalid environment');
	}

	$this->validateSnapshotMode($data['Mode']);

	// Only 'push' direction is allowed an association with an existing archive.
	$dataArchive = null;
	if ($data['Direction'] == 'push' && isset($data['DataArchiveID']) && is_numeric($data['DataArchiveID'])) {
		$dataArchive = DNDataArchive::get()->byId($data['DataArchiveID']);
		if (!$dataArchive) {
			throw new LogicException('Invalid data archive');
		}
		if (!$dataArchive->canDownload()) {
			throw new SS_HTTPResponse_Exception('Not allowed to access archive', 403);
		}
	}

	$dataTransfer = DNDataTransfer::create();
	$dataTransfer->EnvironmentID = $environment->ID;
	$dataTransfer->Direction = $data['Direction'];
	$dataTransfer->Mode = $data['Mode'];
	$dataTransfer->DataArchiveID = $dataArchive ? $dataArchive->ID : null;
	if ($data['Direction'] == 'push') {
		$dataTransfer->setBackupBeforePush(!empty($data['BackupBeforePush']));
	}
	$dataTransfer->write();
	$dataTransfer->start();

	return $this->redirect($dataTransfer->Link());
}
/**
 * Create a snapshot of the database and record its ID on the Pipeline
 * (PreviousSnapshotID), so a later rollback can restore it.
 *
 * @return bool True if success
 */
protected function createSnapshot() {
	// Mark self as creating a snapshot
	$this->Status = 'Started';
	$this->Doing = 'Snapshot';
	$this->log("{$this->Title} creating snapshot of database");
	$this->write();

	// Skip deployment for dry run
	if ($this->Pipeline()->DryRun) {
		$this->log("[Skipped] Create DNDataTransfer backup");
		return true;
	}

	// Queue a db-only backup transfer for the pipeline's environment.
	$pipeline = $this->Pipeline();
	$snapshotJob = DNDataTransfer::create();
	$snapshotJob->EnvironmentID = $pipeline->EnvironmentID;
	$snapshotJob->Direction = 'get';
	$snapshotJob->Mode = 'db';
	$snapshotJob->DataArchiveID = null;
	$snapshotJob->AuthorID = $pipeline->AuthorID;
	$snapshotJob->write();
	$snapshotJob->start();

	// Remember which transfer holds the snapshot.
	$pipeline->PreviousSnapshotID = $snapshotJob->ID;
	$pipeline->write();

	return true;
}
/**
 * Begins reverting the database from the pipeline's previous snapshot by
 * queuing a restore DNDataTransfer, storing its ID in RollbackDatabaseID.
 *
 * @return bool True if success, false if there is no snapshot to restore
 */
protected function startRevertDatabase() {
	// Mark self as restoring a snapshot
	$this->Status = 'Started';
	$this->Doing = 'Snapshot';
	$this->log("{$this->Title} reverting database from snapshot");

	// Skip deployment for dry run
	if ($this->Pipeline()->DryRun) {
		$this->write();
		$this->log("[Skipped] Create DNDataTransfer restore");
		return true;
	}

	// Get snapshot; without one there is nothing to revert to.
	$pipeline = $this->Pipeline();
	$backup = $pipeline->PreviousSnapshot();
	if (empty($backup) || !$backup->exists()) {
		$this->log("No database to revert for {$this->Title}");
		$this->markFailed();
		return false;
	}

	// Create restore job
	// (the duplicated EnvironmentID assignment in the earlier version was removed)
	$job = DNDataTransfer::create();
	$job->EnvironmentID = $pipeline->EnvironmentID;
	$job->Direction = 'push';
	$job->Mode = 'db';
	$job->DataArchiveID = $backup->DataArchiveID;
	$job->AuthorID = $pipeline->AuthorID;
	$job->write();
	$job->start();

	// Save rollback
	$this->RollbackDatabaseID = $job->ID;
	$this->write();

	return true;
}
/**
 * Provide a rollback-able pipeline fixture on the verge of failing,
 * configured with a rollback step followed by a smoke test step.
 *
 * @return Pipeline
 */
public function getFailingPipeline() {
	// Deployments and snapshot records the pipeline will point at.
	$priorDeployment = DNDeployment::create();
	$priorDeployment->SHA = '9f0a012e97715b1871n41gk30f34268u12a0029q';
	$priorDeployment->write();

	$activeDeployment = DNDeployment::create();
	$activeDeployment->write();

	$priorSnapshot = DNDataTransfer::create();
	$priorSnapshot->write();

	$pipeline = $this->objFromFixture('Pipeline', 'FailingPipe');
	$pipeline->Config = serialize(array(
		'RollbackStep1' => array(
			'Class' => 'RollbackStep',
			'RestoreDB' => false,
			'MaxDuration' => '3600'
		),
		'RollbackStep2' => array(
			'Class' => 'SmokeTestPipelineStep',
			'MaxDuration' => '3600'
		)
	));
	$pipeline->PreviousDeploymentID = $priorDeployment->ID;
	$pipeline->CurrentDeploymentID = $activeDeployment->ID;
	$pipeline->PreviousSnapshotID = $priorSnapshot->ID;
	$pipeline->write();

	return $pipeline;
}
/**
 * Form handler that validates and starts a data transfer (backup or restore)
 * for an environment the current member may act on.
 *
 * @param array $data Submitted form data ('Direction', 'EnvironmentID', 'Mode', optional 'DataArchiveID')
 * @param Form $form
 *
 * @return SS_HTTPResponse
 * @throws SS_HTTPResponse_Exception When the member may not access the archive
 * @throws LogicException On an invalid direction, environment, mode or archive
 */
public function doDataTransfer($data, $form) {
	// Performs canView permission check by limiting visible projects
	$project = $this->getCurrentProject();
	if (!$project) {
		return new SS_HTTPResponse(
			"Project '" . Convert::raw2xml($this->getRequest()->latestParam('Project')) . "' not found.",
			404
		);
	}

	// Validate direction, and collect the environments usable for it.
	// (flattened from a nested else { if } chain; also removed an unused
	// Member::currentUser() local that was never read)
	if ($data['Direction'] == 'get') {
		$validEnvs = $this->getCurrentProject()->DNEnvironmentList()
			->filterByCallback(function ($item) {
				return $item->canBackup();
			});
	} elseif ($data['Direction'] == 'push') {
		$validEnvs = $this->getCurrentProject()->DNEnvironmentList()
			->filterByCallback(function ($item) {
				return $item->canRestore();
			});
	} else {
		throw new LogicException('Invalid direction');
	}

	// Validate $data['EnvironmentID'] by checking against $validEnvs.
	$environment = $validEnvs->find('ID', $data['EnvironmentID']);
	if (!$environment) {
		throw new LogicException('Invalid environment');
	}

	// Validate mode (strict comparison: modes are always strings).
	if (!in_array($data['Mode'], array('all', 'assets', 'db'), true)) {
		throw new LogicException('Invalid mode');
	}

	// Only 'push' direction is allowed an association with an existing archive.
	$dataArchive = null;
	if ($data['Direction'] == 'push' && isset($data['DataArchiveID']) && is_numeric($data['DataArchiveID'])) {
		$dataArchive = DNDataArchive::get()->byId($data['DataArchiveID']);
		if (!$dataArchive) {
			throw new LogicException('Invalid data archive');
		}
		if (!$dataArchive->canDownload()) {
			throw new SS_HTTPResponse_Exception('Not allowed to access archive', 403);
		}
	}

	$job = DNDataTransfer::create();
	$job->EnvironmentID = $environment->ID;
	$job->Direction = $data['Direction'];
	$job->Mode = $data['Mode'];
	$job->DataArchiveID = $dataArchive ? $dataArchive->ID : null;
	$job->write();
	$job->start();

	return $this->redirect($job->Link());
}