public function perform() {
	set_time_limit(0);

	$log = new DeploynautLogFile($this->args['logfile']);

	$projects = DNProject::get()->filter('Name', Convert::raw2sql($this->args['projectName']));
	$project = $projects->first();
	$path = $project->getLocalCVSPath();
	$env = $this->args['env'];

	$log->write('Starting git fetch for project "' . $project->Name . '"');

	// if an alternate user has been configured for clone, run the command as that user
	// @todo Gitonomy doesn't seem to have any way to prefix the command properly, if you
	// set 'sudo -u composer git' as the "command" parameter, it tries to run the whole
	// thing as a single command and fails
	$user = DNData::inst()->getGitUser();
	if (!empty($user)) {
		$command = sprintf('cd %s && sudo -u %s git fetch -p origin +refs/heads/*:refs/heads/* --tags', $path, $user);
		$process = new \Symfony\Component\Process\Process($command);
		$process->setEnv($env);
		$process->setTimeout(3600);
		$process->run();
		if (!$process->isSuccessful()) {
			throw new RuntimeException($process->getErrorOutput());
		}
	} else {
		$repository = new Gitonomy\Git\Repository($path, array('environment_variables' => $env));
		$repository->run('fetch', array('-p', 'origin', '+refs/heads/*:refs/heads/*', '--tags'));
	}

	$log->write('Git fetch is finished');
}
public function perform() { echo "[-] DeployJob starting" . PHP_EOL; $log = new DeploynautLogFile($this->args['logfile']); $deployment = DNDeployment::get()->byID($this->args['deploymentID']); $environment = $deployment->Environment(); $project = $environment->Project(); // This is a bit icky, but there is no easy way of capturing a failed deploy by using the PHP Resque try { // Disallow concurrent deployments (don't rely on queuing implementation to restrict this) // Only consider deployments started in the last 30 minutes (older jobs probably got stuck) $runningDeployments = DNDeployment::get()->filter(array('EnvironmentID' => $environment->ID, 'Status' => array('Queued', 'Started'), 'Created:GreaterThan' => strtotime('-30 minutes')))->exclude('ID', $this->args['deploymentID']); if ($runningDeployments->count()) { $runningDeployment = $runningDeployments->first(); $log->write(sprintf('[-] Error: another deployment is in progress (started at %s by %s)', $runningDeployment->dbObject('Created')->Nice(), $runningDeployment->Deployer()->Title)); throw new RuntimeException(sprintf('Another deployment is in progress (started at %s by %s)', $runningDeployment->dbObject('Created')->Nice(), $runningDeployment->Deployer()->Title)); } $environment->Backend()->deploy($environment, $log, $project, $this->args); } catch (Exception $e) { $this->updateStatus('Failed'); echo "[-] DeployJob failed" . PHP_EOL; throw $e; } echo "[-] DeployJob finished" . PHP_EOL; }
/**
 * Execute an array of processes, one after the other, throwing an exception on the first failure.
 *
 * @param array $processes An array of Symfony\Component\Process\Process objects
 * @param DeploynautLogFile $log The log to send output to
 */
protected function executeProcesses($processes, DeploynautLogFile $log) {
	foreach ($processes as $process) {
		$process->mustRun(function ($type, $buffer) use ($log) {
			$log->write($buffer);
		});
	}
}
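// Illustrative only: a minimal sketch of calling executeProcesses() with a couple of
// commands. The commands and working directory are hypothetical; the string-based
// Process constructor matches how processes are built elsewhere in this code. Because
// mustRun() throws ProcessFailedException on a non-zero exit, the second process only
// starts if the first one succeeded.
$processes = array(
	new \Symfony\Component\Process\Process('composer install --no-dev', '/sites/myproject'),
	new \Symfony\Component\Process\Process('git rev-parse HEAD', '/sites/myproject'),
);
$this->executeProcesses($processes, $log);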
/**
 * Deploy the given build to the given environment
 */
public function deploy($environment, $sha, DeploynautLogFile $log, DNProject $project) {
	GraphiteDeploymentNotifier::notify_start($environment, $sha, null, $project);

	$file = DEPLOYNAUT_LOG_PATH . '/' . $project->Name . ':' . $environment . ".deploy-history.txt";
	$CLI_file = escapeshellarg($file);
	$CLI_line = escapeshellarg(date('Y-m-d H:i:s') . " => {$sha}");

	$log->write("Demo deployment: echo {$CLI_line} >> {$CLI_file}");
	`echo {$CLI_line} >> {$CLI_file}`;

	$log->write("Arbitrary pause for 10s");
	sleep(10);
	$log->write("Well, that was a waste of time");

	GraphiteDeploymentNotifier::notify_end($environment, $sha, null, $project);
}
public function perform() { echo "[-] DataTransferJob starting" . PHP_EOL; $log = new DeploynautLogFile($this->args['logfile']); $dataTransfer = DNDataTransfer::get()->byID($this->args['dataTransferID']); $environment = $dataTransfer->Environment(); $backupDataTransfer = null; if (!empty($this->args['backupBeforePush']) && $dataTransfer->Direction == 'push') { $backupDataTransfer = DNDataTransfer::create(); $backupDataTransfer->EnvironmentID = $environment->ID; $backupDataTransfer->Direction = 'get'; $backupDataTransfer->Mode = $dataTransfer->Mode; $backupDataTransfer->DataArchiveID = null; $backupDataTransfer->ResqueToken = $dataTransfer->ResqueToken; $backupDataTransfer->AuthorID = $dataTransfer->AuthorID; $backupDataTransfer->write(); $dataTransfer->BackupDataTransferID = $backupDataTransfer->ID; $dataTransfer->write(); } // This is a bit icky, but there is no easy way of capturing a failed run by using the PHP Resque try { // Disallow concurrent jobs (don't rely on queuing implementation to restrict this) // Only consider data transfers started in the last 30 minutes (older jobs probably got stuck) $runningTransfers = DNDataTransfer::get()->filter(array('EnvironmentID' => $environment->ID, 'Status' => array('Queued', 'Started'), 'Created:GreaterThan' => strtotime('-30 minutes')))->exclude('ID', $dataTransfer->ID); if ($runningTransfers->count()) { $runningTransfer = $runningTransfers->first(); $log->write(sprintf('[-] Error: another transfer is in progress (started at %s by %s)', $runningTransfer->dbObject('Created')->Nice(), $runningTransfer->Author()->Title)); throw new RuntimeException(sprintf('Another transfer is in progress (started at %s by %s)', $runningTransfer->dbObject('Created')->Nice(), $runningTransfer->Author()->Title)); } // before we push data to an environment, we'll make a backup first if ($backupDataTransfer) { $log->write('Backing up existing data'); $environment->Backend()->dataTransfer($backupDataTransfer, $log); } $environment->Backend()->dataTransfer($dataTransfer, $log); } catch (RuntimeException $exc) { $log->write($exc->getMessage()); if ($backupDataTransfer) { $backupDataTransfer->Status = 'Failed'; $backupDataTransfer->write(); } $this->updateStatus('Failed'); echo "[-] DataTransferJob failed" . PHP_EOL; throw $exc; } if ($backupDataTransfer) { $backupDataTransfer->Status = 'Finished'; $backupDataTransfer->write(); } echo "[-] DataTransferJob finished" . PHP_EOL; }
protected function performBackup($backupDataTransfer, \DeploynautLogFile $log) {
	if (!$backupDataTransfer) {
		return false;
	}

	$log->write('Backing up existing data');
	try {
		$backupDataTransfer->Environment()->Backend()->dataTransfer($backupDataTransfer, $log);

		// Reconnect to the database before writing the status; a long-running transfer
		// may have caused the connection to drop in the meantime
		global $databaseConfig;
		DB::connect($databaseConfig);

		$backupDataTransfer->Status = 'Finished';
		$backupDataTransfer->write();
	} catch (Exception $e) {
		global $databaseConfig;
		DB::connect($databaseConfig);

		$backupDataTransfer->Status = 'Failed';
		$backupDataTransfer->write();
		throw $e;
	}
}
/**
 * Run a shell command.
 *
 * @param string $command The command to run
 * @param string|null $workingDir The working dir to run command in
 * @throws RuntimeException
 */
protected function runCommand($command, $workingDir = null) {
	if (!empty($this->user)) {
		$command = sprintf('sudo -u %s %s', $this->user, $command);
	}

	if ($this->log) {
		$this->log->write(sprintf('Running command: %s', $command));
	}

	$process = new AbortableProcess($command, $workingDir);
	$process->setEnv($this->project->getProcessEnv());
	$process->setTimeout(1800);
	$process->run();

	if (!$process->isSuccessful()) {
		throw new RuntimeException($process->getErrorOutput());
	}
}
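// Illustrative only: a hypothetical caller of runCommand(). The command and working
// directory are made up for the example. runCommand() prefixes "sudo -u <user>" when a
// user is configured, logs the command, and throws RuntimeException on a non-zero exit.
$this->runCommand('git fetch origin --tags', '/sites/myproject');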
/**
 * Use snowcake to do the deployment
 */
public function deploy(DNEnvironment $environment, $sha, DeploynautLogFile $log, DNProject $project, $leaveMaintenancePage = false) {
	$log->write(sprintf('Deploying "%s" to "%s"', $sha, $environment->getFullName()));

	if (!defined('SNOWCAKE_PATH')) {
		$log->write('SNOWCAKE_PATH is not defined');
		throw new RuntimeException('SNOWCAKE_PATH is not defined');
	}

	// Construct our snowcake command
	$name = $environment->SnowcakeName . '-' . substr($sha, 0, 8) . '-' . mt_rand();

	// Filter invalid characters out of $name, otherwise the stack name fails validation, e.g.:
	// Value 'ssorg_uat-fdceda2e-1400725889-bake' at 'stackName' failed to satisfy constraint:
	// "Member must satisfy regular expression pattern: [a-zA-Z][-a-zA-Z0-9]*"
	$name = str_replace('_', '-', $name);

	$command = sprintf('%s deploy %s %s %s', SNOWCAKE_PATH, $environment->SnowcakeName, $name, $sha);
	$log->write(sprintf('Running command: %s', $command));

	$process = new Process($command, dirname(dirname(SNOWCAKE_PATH)));
	$process->setTimeout(3600);
	$process->run(function ($type, $buffer) use ($log) {
		$log->write($buffer);
	});

	if (!$process->isSuccessful()) {
		throw new RuntimeException($process->getErrorOutput());
	}

	$log->write(sprintf('Deploy of "%s" to "%s" finished', $sha, $environment->getFullName()));
}
public function ping(\DNEnvironment $environment, DeploynautLogFile $log, DNProject $project) {
	$log->write(sprintf('Ping "%s"', $environment->getFullName()));
}
public function testLogDoesntExistMessage() {
	$log = new DeploynautLogFile('SomeSortOf Filename (UAT).log', $this->basePath);
	$this->assertNull($log->getLogFilePath());
	$this->assertEquals('Log has not been created yet.', $log->content());
}
/**
 * Backs up database and/or assets to a designated folder,
 * and packs up the files into a single sspak.
 *
 * @param DNDataTransfer $dataTransfer
 * @param DeploynautLogFile $log
 */
protected function dataTransferBackup(DNDataTransfer $dataTransfer, DeploynautLogFile $log) {
	$environment = $dataTransfer->Environment();
	$name = $environment->getFullName();

	// Associate a new archive with the transfer.
	// Doesn't retrieve a filepath just yet, need to generate the files first.
	$dataArchive = DNDataArchive::create();
	$dataArchive->Mode = $dataTransfer->Mode;
	$dataArchive->AuthorID = $dataTransfer->AuthorID;
	$dataArchive->OriginalEnvironmentID = $environment->ID;
	$dataArchive->EnvironmentID = $environment->ID;
	$dataArchive->IsBackup = $dataTransfer->IsBackupDataTransfer();

	// Generate directory structure with strict permissions (contains very sensitive data)
	$filepathBase = $dataArchive->generateFilepath($dataTransfer);
	mkdir($filepathBase, 0700, true);

	$databasePath = $filepathBase . DIRECTORY_SEPARATOR . 'database.sql';

	// Backup database
	if (in_array($dataTransfer->Mode, array('all', 'db'))) {
		$log->write(sprintf('Backup of database from "%s" started', $name));
		$command = $this->getCommand('data:getdb', 'db', $environment, array('data_path' => $databasePath), $log);
		$command->run(function ($type, $buffer) use ($log) {
			$log->write($buffer);
		});
		if (!$command->isSuccessful()) {
			$this->extend('dataTransferFailure', $environment, $log);
			throw new RuntimeException($command->getErrorOutput());
		}
		$log->write(sprintf('Backup of database from "%s" done', $name));
	}

	// Backup assets
	if (in_array($dataTransfer->Mode, array('all', 'assets'))) {
		$log->write(sprintf('Backup of assets from "%s" started', $name));
		$command = $this->getCommand('data:getassets', 'web', $environment, array('data_path' => $filepathBase), $log);
		$command->run(function ($type, $buffer) use ($log) {
			$log->write($buffer);
		});
		if (!$command->isSuccessful()) {
			$this->extend('dataTransferFailure', $environment, $log);
			throw new RuntimeException($command->getErrorOutput());
		}
		$log->write(sprintf('Backup of assets from "%s" done', $name));
	}

	$sspakFilename = sprintf('%s.sspak', $dataArchive->generateFilename($dataTransfer));
	$sspakFilepath = $filepathBase . DIRECTORY_SEPARATOR . $sspakFilename;

	try {
		$dataArchive->attachFile($sspakFilepath, $dataTransfer);
		$dataArchive->setArchiveFromFiles($filepathBase);
	} catch (Exception $e) {
		$log->write($e->getMessage());
		throw new RuntimeException($e->getMessage());
	}

	// Remove any assets and db files lying around, they're no longer needed as they're now part
	// of the sspak file we just generated. Use --force to avoid errors when files don't exist,
	// e.g. when just an assets backup has been requested and no database.sql exists.
	$process = new Process(sprintf('rm -rf %s/assets && rm -f %s', $filepathBase, $databasePath));
	$process->run();
	if (!$process->isSuccessful()) {
		$log->write('Could not delete temporary files');
		throw new RuntimeException($process->getErrorOutput());
	}

	$log->write(sprintf('Creating sspak file done: %s', $dataArchive->ArchiveFile()->getAbsoluteURL()));
}
/**
 * Backs up database and/or assets to a designated folder,
 * and packs up the files into a single sspak.
 *
 * @param DNDataTransfer $dataTransfer
 * @param DeploynautLogFile $log
 */
protected function dataTransferBackup(DNDataTransfer $dataTransfer, DeploynautLogFile $log) {
	$environmentObj = $dataTransfer->Environment();
	$project = $environmentObj->Project();
	$projectName = $project->Name;
	$environmentName = $environmentObj->Name;
	$env = $project->getProcessEnv();
	$project = DNProject::get()->filter('Name', $projectName)->first();
	$name = $projectName . ':' . $environmentName;

	// Associate a new archive with the transfer.
	// Doesn't retrieve a filepath just yet, need to generate the files first.
	$dataArchive = DNDataArchive::create();
	$dataArchive->Mode = $dataTransfer->Mode;
	$dataArchive->AuthorID = $dataTransfer->AuthorID;
	$dataArchive->OriginalEnvironmentID = $dataTransfer->Environment()->ID;
	$dataArchive->EnvironmentID = $dataTransfer->Environment()->ID;
	$dataArchive->IsBackup = $dataTransfer->IsBackupDataTransfer();

	// Generate directory structure with strict permissions (contains very sensitive data)
	$filepathBase = $dataArchive->generateFilepath($dataTransfer);
	mkdir($filepathBase, 0700, true);

	$databasePath = $filepathBase . DIRECTORY_SEPARATOR . 'database.sql';

	// Backup database
	if (in_array($dataTransfer->Mode, array('all', 'db'))) {
		$log->write('Backup of database from "' . $name . '" started');
		$args = array('data_path' => $databasePath);
		$command = $this->getCommand("data:getdb", 'db', $name, $args, $env, $log);
		$command->run(function ($type, $buffer) use ($log) {
			$log->write($buffer);
		});
		if (!$command->isSuccessful()) {
			throw new RuntimeException($command->getErrorOutput());
		}
		$log->write('Backup of database from "' . $name . '" done');
	}

	// Backup assets
	if (in_array($dataTransfer->Mode, array('all', 'assets'))) {
		$log->write('Backup of assets from "' . $name . '" started');
		$args = array('data_path' => $filepathBase);
		$command = $this->getCommand("data:getassets", 'web', $name, $args, $env, $log);
		$command->run(function ($type, $buffer) use ($log) {
			$log->write($buffer);
		});
		if (!$command->isSuccessful()) {
			throw new RuntimeException($command->getErrorOutput());
		}
		$log->write('Backup of assets from "' . $name . '" done');
	}

	$log->write('Creating *.sspak file');
	$sspakFilename = sprintf('%s.sspak', $dataArchive->generateFilename($dataTransfer));
	$sspakCmd = sprintf('cd %s && sspak saveexisting %s 2>&1', $filepathBase, $sspakFilename);
	if ($dataTransfer->Mode == 'db') {
		$sspakCmd .= sprintf(' --db=%s', $databasePath);
	} elseif ($dataTransfer->Mode == 'assets') {
		$sspakCmd .= sprintf(' --assets=%s/assets', $filepathBase);
	} else {
		$sspakCmd .= sprintf(' --db=%s --assets=%s/assets', $databasePath, $filepathBase);
	}

	$process = new Process($sspakCmd);
	$process->setTimeout(3600);
	$process->run();
	if (!$process->isSuccessful()) {
		$log->write('Could not package the backup via sspak');
		throw new RuntimeException($process->getErrorOutput());
	}

	// HACK: find_or_make() expects path relative to assets/
	$sspakFilepath = ltrim(
		str_replace(
			array(ASSETS_PATH, realpath(ASSETS_PATH)),
			'',
			$filepathBase . DIRECTORY_SEPARATOR . $sspakFilename
		),
		DIRECTORY_SEPARATOR
	);

	try {
		$folder = Folder::find_or_make(dirname($sspakFilepath));
		$file = new File();
		$file->Name = $sspakFilename;
		$file->Filename = $sspakFilepath;
		$file->ParentID = $folder->ID;
		$file->write();

		// "Status" will be updated by the job execution
		$dataTransfer->write();

		// Get file hash to ensure consistency.
		// Only do this when first associating the file since hashing large files is expensive.
		$dataArchive->ArchiveFileHash = md5_file($file->FullPath);
		$dataArchive->ArchiveFileID = $file->ID;
		$dataArchive->DataTransfers()->add($dataTransfer);
		$dataArchive->write();
	} catch (Exception $e) {
		$log->write('Failed to add sspak file: ' . $e->getMessage());
		throw new RuntimeException($e->getMessage());
	}

	// Remove any assets and db files lying around, they're no longer needed as they're now part
	// of the sspak file we just generated. Use --force to avoid errors when files don't exist,
	// e.g. when just an assets backup has been requested and no database.sql exists.
	$process = new Process(sprintf('rm -rf %s/assets && rm -f %s', $filepathBase, $databasePath));
	$process->run();
	if (!$process->isSuccessful()) {
		$log->write('Could not delete temporary files');
		throw new RuntimeException($process->getErrorOutput());
	}

	$log->write(sprintf('Creating *.sspak file done: %s', $file->getAbsoluteURL()));
}
/**
 * This is mostly copy-pasted from Anthill/Smoketest.
 *
 * @param \DNEnvironment $environment
 * @param \DeploynautLogFile $log
 * @return bool
 */
protected function smokeTest(\DNEnvironment $environment, \DeploynautLogFile $log) {
	$url = $environment->getBareURL();
	$timeout = 600;
	$tick = 60;

	if (!$url) {
		$log->write('Skipping site accessible check: no URL found.');
		return true;
	}

	$start = time();
	$infoTick = time() + $tick;

	$log->write(sprintf('Waiting for "%s" to become accessible... (timeout: %smin)', $url, $timeout / 60));

	// configure curl so that curl_exec doesn't wait a long time for a response
	$ch = curl_init();
	curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 5);
	curl_setopt($ch, CURLOPT_TIMEOUT, 5);
	curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
	curl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);
	// set a high number of max redirects (but not infinite amount) to avoid a potential infinite loop
	curl_setopt($ch, CURLOPT_MAXREDIRS, 10);
	curl_setopt($ch, CURLOPT_SSL_VERIFYHOST, false);
	curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
	curl_setopt($ch, CURLOPT_IPRESOLVE, CURL_IPRESOLVE_V4);
	curl_setopt($ch, CURLOPT_URL, $url);
	curl_setopt($ch, CURLOPT_USERAGENT, 'Rainforest');

	$success = false;

	// query the site every second. Note that if the URL doesn't respond,
	// curl_exec will take 5 seconds to timeout (see CURLOPT_CONNECTTIMEOUT and CURLOPT_TIMEOUT above)
	do {
		if (time() > $start + $timeout) {
			$log->write(sprintf(' * Failed: check for %s timed out after %smin', $url, $timeout / 60));
			return false;
		}

		$response = curl_exec($ch);

		// check the HTTP response code for HTTP protocols
		$status = curl_getinfo($ch, CURLINFO_HTTP_CODE);
		if ($status && !in_array($status, [500, 501, 502, 503, 504])) {
			$success = true;
		}

		// check for any curl errors, mostly for checking the response state of non-HTTP protocols,
		// but applies to checks of any protocol
		if ($response && !curl_errno($ch)) {
			$success = true;
		}

		// Produce an informational ticker roughly every $tick
		if (time() > $infoTick) {
			$message = [];

			// Collect status information from different sources.
			if ($status) {
				$message[] = sprintf('HTTP status code is %s', $status);
			}
			if (!$response) {
				$message[] = 'response is empty';
			}
			if ($error = curl_error($ch)) {
				$message[] = sprintf('request error: %s', $error);
			}

			$log->write(sprintf(' * Still waiting: %s...', implode(', ', $message)));

			$infoTick = time() + $tick;
		}

		sleep(1);
	} while (!$success);

	curl_close($ch);
	$log->write(' * Success: site is accessible!');
	return true;
}
/**
 * Validate that a specific alert configuration from the configuration YAML is correct.
 *
 * @param string $name
 * @param array $config
 * @param DNProject $project
 * @param DeploynautLogFile $log
 * @return boolean
 */
public function validateAlert($name, $config, $project, $log) {
	// validate we have an environment set for the alert
	if (!isset($config['environment'])) {
		$log->write(sprintf('WARNING: Failed to configure alert "%s". Missing "environment" key in .alerts.yml. Skipped.', $name));
		return false;
	}

	// validate we have an environmentcheck suite name to check
	if (!isset($config['check_url'])) {
		$log->write(sprintf('WARNING: Failed to configure alert "%s". Missing "check_url" key in .alerts.yml. Skipped.', $name));
		return false;
	}

	// validate we have contacts for the alert
	if (!isset($config['contacts'])) {
		$log->write(sprintf('WARNING: Failed to configure alert "%s". Missing "contacts" key in .alerts.yml. Skipped.', $name));
		return false;
	}

	// validate that each value in the config is valid, build up a list of contacts we'll use later
	foreach ($config['contacts'] as $contactEmail) {
		// special case for ops
		if ($contactEmail == 'ops') {
			continue;
		}

		$contact = $project->AlertContacts()->filter('Email', $contactEmail)->first();
		if (!($contact && $contact->exists())) {
			$log->write(sprintf('WARNING: Failed to configure alert "%s". No such contact "%s". Skipped.', $name, $contactEmail));
			return false;
		}
	}

	// validate the environment specified in the alert actually exists
	if (!DNEnvironment::get()->filter('Name', $config['environment'])->first()) {
		$log->write(sprintf('WARNING: Failed to configure alert "%s". Invalid environment "%s" in .alerts.yml. Skipped.', $name, $config['environment']));
		return false;
	}

	return true;
}
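// Illustrative only: the shape of a single alert entry that validateAlert() accepts,
// inferred from the keys it checks above. The alert name, environment, check URL and
// contact address are hypothetical values, not taken from a real .alerts.yml.
$name = 'uat-healthcheck';
$config = array(
	'environment' => 'uat',                             // must match an existing DNEnvironment Name
	'check_url' => 'dev/check/health',                  // environmentcheck suite URL to poll
	'contacts' => array('ops', 'dev-team@example.com'), // 'ops' is allowed without a project AlertContact
);
// $valid = $this->validateAlert($name, $config, $project, $log);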
/**
 * Delete items in this directory until the number of items is <= $count.
 * Delete the oldest files first.
 *
 * @param string $dir The directory to remove items from
 * @param int $count The maximum number of .tar.gz files that can appear in that directory
 * @param DeploynautLogFile $log The log to send removal status messages to
 */
protected function reduceDirSizeTo($dir, $count, DeploynautLogFile $log) {
	$files = glob($dir . '/*.tar.gz');
	if (sizeof($files) > $count) {
		// Sort by modification time, oldest first (return an integer rather than a
		// boolean so usort() gets a proper three-way comparison)
		usort($files, function ($a, $b) {
			return filemtime($a) - filemtime($b);
		});

		for ($i = 0; $i < sizeof($files) - $count; $i++) {
			$log->write("Removing " . $files[$i] . " from package cache");
			unlink($files[$i]);
		}
	}
}
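// Illustrative only: a hypothetical call that keeps at most the 10 newest .tar.gz
// packages in a cache directory (the path is made up for the example).
$this->reduceDirSizeTo('/sites/myproject/package-cache', 10, $log);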
public function ping(\DNEnvironment $environment, \DeploynautLogFile $log, \DNProject $project) {
	$log->write("Ping \"{$project->Name}:{$environment->Name}\"");
}