/**
 * Write harvested OAI-PMH records from a response document to individual
 * XML files on disk.
 *
 * Behaviour depends on the OAI verb stored in the global $arrArgs:
 *  - 'ListRecords': every <record> element in $dom is written to its own
 *    file. The file name is the record's OAI identifier with '/' and ':'
 *    mapped to '_' and '-'; the target folder is derived from the record's
 *    setSpec values via getFolder().
 *  - 'ListIdentifiers': for every <identifier> in $dom a GetRecord request
 *    is issued (via getXML()) and the <metadata> element of the response
 *    is written to a file under the global $targetdir.
 *
 * Files that already exist are never overwritten (already-harvested
 * records are skipped). Any other verb is silently ignored.
 *
 * @param DOMDocument $dom parsed ListRecords/ListIdentifiers response
 * @param string      $url the request URL (used for log output only)
 * @return void
 */
function writexml($dom, $url)
{
    // NOTE: $logHandle and $errorhandle used to be declared here as well,
    // but were never referenced in this function, so they were removed.
    global $arrArgs, $arrArgs2, $targetdir;

    echo 'writexml for ' . $url . "\n";

    if ($arrArgs['verb'] == 'ListRecords') {
        $recordList = $dom->getElementsByTagName('record');
        echo 'writexml: record list: ' . $recordList->length . " elements\n";
        if (!$recordList->length) {
            message('no records found for ' . $url, true);
            return;
        }
        foreach ($recordList as $record) {
            // Build a filesystem-safe file name from the OAI identifier.
            $oaiID = $record->getElementsByTagName('identifier')->item(0)->nodeValue;
            $fileName = strtr(trim($oaiID), '/:', '_-');

            // Collect all setSpec values; getFolder() maps them to a folder.
            $theSets = array();
            foreach ($record->getElementsByTagName('setSpec') as $newSet) {
                $theSets[] = $newSet->nodeValue;
            }
            $theFileName = getFolder($theSets) . $fileName . '.xml';
            echo 'New file: ' . $theFileName . "\n";

            // Never overwrite a previously harvested record.
            if (is_file($theFileName)) {
                continue;
            }
            printMetadata($record, $theFileName, $oaiID);
        }
        return;
    }

    if ($arrArgs['verb'] == 'ListIdentifiers') {
        $idList = $dom->getElementsByTagName('identifier');
        if (!$idList->length) {
            return;
        }
        $urlBase = $arrArgs['oaiURL'] . '?verb=GetRecord&metadataPrefix='
            . $arrArgs2['metadataPrefix'] . '&identifier=';
        foreach ($idList as $id) {
            $theFileName = $targetdir . strtr(trim($id->nodeValue), '/:', '_-') . '.xml';
            if (is_file($theFileName)) {
                continue;
            }
            // Fetch the full record for this identifier.
            $recordxml = getXML($urlBase . trim($id->nodeValue));
            $record = new DOMDocument();
            // loadXML() returns false on a parse failure; previously the
            // failure was ignored and an empty document was searched.
            if (!@$record->loadXML($recordxml)) {
                message('invalid XML received for ' . $id->nodeValue, true);
                continue;
            }
            $metadataList = $record->getElementsByTagName('metadata');
            if ($metadataList->length) {
                printMetadata($metadataList->item(0), $theFileName, $id->nodeValue);
            } else {
                message('no record found ' . $id->nodeValue, true);
            }
        }
    }
}
/**
 * pake task: back up a ProcessMaker workspace (databases + shared files)
 * into a tar (optionally gzipped) archive.
 *
 * DEPRECATED: the first statement throws unconditionally, so everything
 * after it is unreachable. The legacy implementation is kept below for
 * reference only; to resurrect it, the leading throw must be removed.
 *
 * @param mixed $task pake task object (unused by the reachable code)
 * @param array $args CLI arguments: [workspace, optional backup filename],
 *                    plus optional -c/--compress and -o/--overwrite switches
 * @throws Exception always ("Gulliver backup is no longer supported")
 */
function run_workspace_backup($task, $args)
{
    throw new Exception("Gulliver backup is no longer supported, use processmaker command-line instead.");
    // ----- Everything below this point is dead code (see throw above). -----
    try {
        ini_set('display_errors', 'on');
        ini_set('error_reporting', E_ERROR);
        // the environment for poedit always is Development
        define('G_ENVIRONMENT', G_DEV_ENV);
        /* Look for -c and --compress in arguments */
        $compress = array_search('-c', $args);
        if ($compress === false) {
            $compress = array_search('--compress', $args);
        }
        if ($compress !== false) {
            unset($args[$compress]);
            /* We need to reorder the args if we removed the compress switch */
            $args = array_values($args);
            $compress = true;
        }
        /* Look for -c and --compress in arguments */
        // NOTE(review): comment above is stale — this section handles -o/--overwrite.
        $overwrite = array_search('-o', $args);
        if ($overwrite === false) {
            $overwrite = array_search('--overwrite', $args);
        }
        if ($overwrite !== false) {
            unset($args[$overwrite]);
            /* We need to reorder the args if we removed the compress switch */
            $args = array_values($args);
            $overwrite = true;
        }
        // NOTE(review): truthiness test — a 'compress' argument at index 0
        // would not trigger this warning; `!== false` was probably intended.
        if (array_search('compress', $args)) {
            echo pakeColor::colorize("Compress is no longer an option, check if this is what you want\n", 'ERROR');
        }
        if (count($args) > 2 || count($args) == 0) {
            throw new Exception('wrong arguments specified');
        }
        $workspace = $args[0];
        /* Use system gzip if not in Windows */
        // NOTE(review): reset(explode(...)) passes a non-variable by
        // reference — raises a notice/error on modern PHP.
        if ($compress && strtolower(reset(explode(' ', php_uname('s')))) != "windows") {
            /* Find the system gzip */
            exec("whereis -b gzip", $whereisGzip);
            $gzipPaths = explode(' ', $whereisGzip[0]);
            if (isset($gzipPaths[1])) {
                $gzipPath = $gzipPaths[1];
            }
            if (isset($gzipPath)) {
                echo "Using system gzip in {$gzipPath}\n";
            }
        }
        if (isset($args[1])) {
            // Explicit backup filename given as second argument.
            $fileTar = $args[1];
            /* Check if the second argument is an absolute filename. If it is, use
             * it as the backup filename. Otherwise, use it as a filename relative
             * to the backups directory. This makes migration from previous versions
             * easier, which always expects a relative filename, while still accepting
             * absolute filenames.
             */
            if (dirname($fileTar) == '.') {
                printf("Using %s as root. Use an absolute filename to change it.\n", pakeColor::colorize(PATH_TRUNK . 'backups', 'INFO'));
                G::mk_dir(PATH_DATA . 'backups');
                $fileTar = PATH_DATA . 'backups' . PATH_SEP . $fileTar . '.tar';
                if ($compress) {
                    $fileTar .= '.gz';
                }
            }
            printf("Backing up workspace %s to %s\n", pakeColor::colorize($workspace, 'INFO'), pakeColor::colorize($fileTar, 'INFO'));
            // Interactive overwrite confirmation unless -o was passed.
            if (!$overwrite && file_exists($fileTar)) {
                $overwrite = strtolower(prompt('Backup file already exists, do you want to overwrite? [Y/n]'));
                if (array_search(trim($overwrite), array("y", "")) === false) {
                    die;
                }
                $overwrite = true;
            }
        } else {
            // No filename given: default to backups/<workspace>.tar[.gz].
            G::mk_dir(PATH_DATA . 'backups');
            $fileBase = PATH_DATA . 'backups' . PATH_SEP . $workspace . '.tar';
            $fileTar = $fileBase;
            if ($compress) {
                $fileTar .= '.gz';
            }
            printf("Backing up workspace %s to %s\n", pakeColor::colorize($workspace, 'INFO'), pakeColor::colorize($fileTar, 'INFO'));
            /* To avoid confusion, we remove both .tar and .tar.gz */
            if (!$overwrite && (file_exists($fileBase) || file_exists($fileBase . '.gz'))) {
                $overwrite = strtolower(prompt('Backup file already exists, do you want to overwrite? [Y/n]'));
                if (array_search(trim($overwrite), array("y", "")) === false) {
                    die;
                }
                $overwrite = true;
            }
            if (file_exists($fileBase)) {
                unlink($fileBase);
            }
            if (file_exists($fileBase . ".gz")) {
                unlink($fileBase . '.gz');
            }
        }
        /* Remove the backup file before backing up. Previous versions didn't do
         * this, so backup files would increase indefinetely as new data was
         * appended to the tar file instead of replaced. */
        if (file_exists($fileTar)) {
            unlink($fileTar);
        }
        /* If using the system gzip, create the tar using a temporary filename */
        // NOTE(review): tempnam() expects a directory as its first argument;
        // __FILE__ is a file, so this presumably falls back to the system
        // temp dir — confirm intent.
        if (isset($gzipPath)) {
            $gzipFinal = $fileTar;
            $fileTar = tempnam(__FILE__, '');
        }
        $aSerializeData = get_infoOnPM($workspace);
        $dbFile = PATH_DB . $workspace . PATH_SEP .
            'db.php';
        if (!file_exists($dbFile)) {
            throw new Exception("Invalid workspace, the db file does not exist, {$dbFile}");
        }
        // Decrypt the admin DB credentials: [host, user, password].
        $dbOpt = @explode(SYSTEM_HASH, G::decrypt(HASH_INSTALLATION, SYSTEM_HASH));
        G::LoadSystem('dbMaintenance');
        $oDbMaintainer = new DataBaseMaintenance($dbOpt[0], $dbOpt[1], $dbOpt[2]);
        try {
            $oDbMaintainer->connect("mysql");
        } catch (Exception $e) {
            // Best-effort: a failed admin connection is reported but not fatal here.
            echo "Problems contacting the database with the administrator user\n";
            echo "The response was: {$e->getMessage()}\n";
        }
        require_once $dbFile;
        require_once "propel/Propel.php";
        G::LoadSystem('templatePower');
        Propel::init(PATH_CORE . "config/databases.php");
        $configuration = Propel::getConfiguration();
        $connectionDSN = $configuration['datasources']['workflow']['connection'];
        printf("using DSN Connection %s \n", pakeColor::colorize($connectionDSN, 'INFO'));
        $con = Propel::getConnection('workflow');
        // Look up the MySQL data directory (informational).
        $sql = "show variables like 'datadir'";
        $stmt = $con->createStatement();
        $rs = $stmt->executeQuery($sql, ResultSet::FETCHMODE_ASSOC);
        $rs->next();
        $row = $rs->getRow();
        if (!is_array($row)) {
            throw new Exception("unable to execute query in database");
        }
        $dataDir = $row['Value'];
        // NOTE(review): count() on a string returns 1 (TypeError on PHP 8),
        // so this trailing-slash strip is broken — strlen() was presumably
        // intended, and the substr() offsets look wrong for stripping a
        // trailing '/' (should be substr($dataDir, 0, -1)). Dead code; left as-is.
        if ($dataDir[count($dataDir) - 1] == '/') {
            $dataDir = substr($dataDir, count($dataDir) - 1);
        }
        printf("MySQL data dir %s \n", pakeColor::colorize($dataDir, 'INFO'));
        // Record the MySQL server version in the backup metadata.
        $sql = "SELECT VERSION();";
        $stmt = $con->createStatement();
        $rs = $stmt->executeQuery($sql, ResultSet::FETCHMODE_NUM);
        $rs->next();
        $row = $rs->getRow();
        $mysqlVersion = $row[0];
        $aSerializeData['DATABASE'] = $mysqlVersion;
        //new db restore rotines, by Erik <*****@*****.**> on May 17th, 2010
        //set the temporal directory for all tables into wf, rb, and rp databases
        $tmpDir = G::sys_get_temp_dir() . PATH_SEP . 'pmDbBackup' . PATH_SEP;
        //create the db maintenance temporal dir
        G::mk_dir($tmpDir);
        // Serialized workspace metadata travels inside the archive.
        $fileMetadata = $tmpDir .
            'metadata.txt';
        $sMetadata = file_put_contents($fileMetadata, serialize($aSerializeData));
        if ($sMetadata === false) {
            throw new Exception("Metadata file could not be written");
        }
        G::LoadThirdParty('pear/Archive', 'Tar');
        $tar = new Archive_Tar($fileTar);
        // Only let Archive_Tar compress when no system gzip is available.
        if (!isset($gzipPath)) {
            $tar->_compress = $compress;
        }
        /*** WORKFLOW DATABASE BACKUP ***/
        $dbSettings = getDataBaseConfiguration($configuration['datasources']['workflow']['connection']);
        backupDB($dbOpt[0], $dbOpt[1], $dbOpt[2], $dbSettings['dbname'], $tmpDir);
        printf("Copying folder: %s \n", pakeColor::colorize($tmpDir, 'INFO'));
        backupAddTarFolder($tar, $tmpDir . $dbSettings['dbname'] . PATH_SEP, $tmpDir);
        /*** RBAC DATABASE BACKUP ***/
        $dbSettings = getDataBaseConfiguration($configuration['datasources']['rbac']['connection']);
        backupDB($dbOpt[0], $dbOpt[1], $dbOpt[2], $dbSettings['dbname'], $tmpDir);
        printf("Copying folder: %s \n", pakeColor::colorize($tmpDir, 'INFO'));
        backupAddTarFolder($tar, $tmpDir . $dbSettings['dbname'] . PATH_SEP, $tmpDir);
        /*** RP DATABASE BACKUP ***/
        $dbSettings = getDataBaseConfiguration($configuration['datasources']['rp']['connection']);
        backupDB($dbOpt[0], $dbOpt[1], $dbOpt[2], $dbSettings['dbname'], $tmpDir);
        printf("Copying folder: %s \n", pakeColor::colorize($tmpDir, 'INFO'));
        backupAddTarFolder($tar, $tmpDir . $dbSettings['dbname'] . PATH_SEP, $tmpDir);
        // Add the workspace's shared files (sites/<workspace>) to the archive.
        $pathSharedBase = PATH_DATA . 'sites' . PATH_SEP . $workspace . PATH_SEP;
        printf("copying folder: %s \n", pakeColor::colorize($pathSharedBase, 'INFO'));
        backupAddTarFolder($tar, $pathSharedBase, PATH_DATA . 'sites');
        backupAddTarFolder($tar, $fileMetadata, dirname($fileMetadata));
        unlink($fileMetadata);
        // Sum uncompressed sizes for the summary report below.
        $aFiles = $tar->listContent();
        $total = 0;
        foreach ($aFiles as $key => $val) {
            // printf( " %6d %s \n", $val['size'], pakeColor::colorize( $val['filename'], 'INFO') );
            $total += $val['size'];
        }
        /* If using system gzip, compress the temporary tar to the original
         * filename.
         */
        if (isset($gzipPath)) {
            // NOTE(review): $gzipFinal is not shell-escaped — a filename
            // containing shell metacharacters would break this command.
            exec("gzip -c \"{$fileTar}\" > {$gzipFinal}", $output, $ret);
            if ($ret != 0) {
                /* The error message is in stderr, which should be displayed already */
                echo pakeColor::colorize("Error compressing backup", "ERROR") . "\n";
                die(1);
            }
            unlink($fileTar);
            $fileTar = $gzipFinal;
        }
        printMetadata($aSerializeData);
        printf("%20s %s \n", 'Backup File', pakeColor::colorize($fileTar, 'INFO'));
        printf("%20s %s \n", 'Files in Backup', pakeColor::colorize(count($aFiles), 'INFO'));
        printf("%20s %s \n", 'Total Filesize', pakeColor::colorize(sprintf("%5.2f MB", $total / 1024 / 1024), 'INFO'));
        printf("%20s %s \n", 'Backup Filesize', pakeColor::colorize(sprintf("%5.2f MB", filesize($fileTar) / 1024 / 1024), 'INFO'));
    } catch (Exception $e) {
        // NOTE(review): exit(0) reports success to the shell even on failure;
        // a non-zero exit code was presumably intended.
        printf("Error: %s\n", pakeColor::colorize($e->getMessage(), 'ERROR'));
        exit(0);
    }
}