</td>
<td>
    <span>
        <?php
        if (isset($jobinfo['usedspace'])) {
            echo $jobinfo['usedspace'];
        } else {
            echo "--";
        }
        ?>
    </span>
</td> <!-- occupied space tmp -->
<td> <!-- commands -->
    <?php
    if ($status !== JobStatus::$RUNNING) {
        if (is_dir(get_job_exec_dir($job))) {
            ?>
            <a href="<?php echo $rerun_link; ?>">Rerun</a>
            <?php
        }
    }
    ?>
    <a href="<?php echo $showjobinfojs; ?>">Info</a>
/**
 * Executes a script on a remote server via SSH.
 *
 * @param string $study   the case study
 * @param string $jobid   the job id
 * @param string $workdir the working directory on the remote server
 * @param bool   $rerun   whether the execution is a re-run of an existing job
 * @param string $cmd     the script file, with path relative to the study_dir
 * @param string $args    optional arguments appended to the command
 * @return string the PID of the remote process
 */
function execute_script_remote($study, $jobid, $workdir, $rerun, $cmd, $args = "") {
    include 'config.inc.php';

    $jobid_qsub = 'j' . str_replace(".", "_", $jobid);
    $orig_cmd = $cmd;

    // check if the command is a python script
    if (is_python_script($cmd) !== false) {
        $exec_dir_local = get_job_exec_dir($jobid);
        if (!$rerun) {
            // if the job is NOT a re-run, prepend the pipeline_prepend.py file to the python script
            $script_content = file_get_contents($exec_dir_local . "/" . $cmd);
            $script_prepend = file_get_contents("lib/pipeline_prepend.py", true);
            $script_content = $script_prepend . $script_content;
            //error_log("Prepending python script at " . $exec_dir_local . "/" . $cmd);
            file_put_contents($exec_dir_local . "/" . $cmd, $script_content);

            // copy qsub_template.sh to the workdir
            $template = file_get_contents("lib/qsub_template.sh", true);
            file_put_contents($exec_dir_local . "/qsub_template.sh", $template);
        }
        $cmd = $NC_CONFIG["python-bin"] . " " . $cmd;
    } elseif (strpos($cmd, ".sh") !== false) {
        //$cmd = "qsub -cwd -l mf=1.4G -N $jobid_qsub -sync y -o results/nc_stdout.log -e results/nc_stdout.log " . $cmd;
        $cmd = "/bin/bash " . $cmd;
        $args = $jobid_qsub;
    }

    // http://unix.stackexchange.com/a/29495
    // The jobs submitted by nipype via qsub are named
    //     node_name.workflow_name.user_name
    // where user_name is passed to nipype through the environment variable $LOGNAME.
    // We cannot easily customize this behavior, so we set $LOGNAME to
    //     job_id.user_name
    // so that the final job name is
    //     node_name.workflow_name.user_name.job_id
    // and the Neurocloud webapp can more easily tell whether there are still running/queued SGE jobs for this job id.
    $cmdline = create_ssh_command() . " 'cd {$workdir} && chmod +x {$orig_cmd} && { LOGNAME={$jobid_qsub}.\$LOGNAME nohup {$cmd} {$args} >>results/nc_stdout.log 2>&1 & } && echo \$!' ";
    //error_log($cmdline);
    $pid = exec($cmdline);

    $jobinfo = array(
        "study" => $study,
        "jobid" => $jobid,
        "qsub_jobname" => $jobid_qsub,
        "pid" => $pid,
        "start_date" => date("Y-m-d H:i:s"),
        "script" => $orig_cmd,
        "cmdline" => $cmdline,
        "exec_dir" => $workdir,
        "exec_type" => "remote"
    );
    save_job_info($study, $jobid, $jobinfo);
    return $pid;
}
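// For context, a minimal sketch of how execute_script_remote() might be invoked when a
// job is submitted. The study name, job id, script path and remote working directory
// below are hypothetical and are not taken from this file; execute_script_remote() is
// assumed here to be available via neurocloud/lib/common.php.
include_once 'neurocloud/lib/common.php';

$study   = 'MyStudy';                           // hypothetical case study
$jobid   = 'job_20130130_123040';               // hypothetical job id
$script  = 'run_pipeline.py';                   // hypothetical pipeline script
$workdir = "/remote/neurocloud/exec/{$jobid}";  // assumption: remote exec dir layout

// submit as a fresh run (not a re-run) and keep the nohup PID for monitoring
$pid = execute_script_remote($study, $jobid, $workdir, false, $script);
error_log("remote execution started with pid {$pid}");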
<?php
/*
 * delete_results
 * Created on: Jan 30, 2013 12:30:40 PM
 *
 * Copyright 2013 EnginSoft S.p.A.
 * All rights reserved
 */
include_once 'neurocloud/lib/common.php';

$study = $_POST["study"];
$jobid = $_POST["jobid"];
$path = "{$study}/results/{$jobid}";

$jobinfo = get_job_info($study, $jobid);
$usedspace = isset($jobinfo["usedspace"]) ? $jobinfo["usedspace"] : "undefined";
insert_job_log($study, $jobid, "deleted results. Used disk space: {$usedspace}");

if (\OC\Files\Filesystem::is_dir($path)) {
    //rmdirr($path);
    \OC\Files\Filesystem::unlink($path); // from Owncloud 5.0.0, this will recurse on subdirs (delTree)
}

$execdir = get_job_exec_dir($jobid);
if (is_dir($execdir)) {
    rmdirr($execdir);
}
exit;
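// Both delete_results and the rename/delete hook below fall back to rmdirr() to remove
// the local exec directory outside the Owncloud virtual filesystem. Its implementation
// is not part of this excerpt; the helper below is only a minimal sketch of such a
// recursive removal, assuming plain SPL iterators and no special handling of symlinks
// or permissions (the real rmdirr() may differ).
function rmdirr_sketch($dir) {
    if (!is_dir($dir)) {
        return false;
    }
    $it = new RecursiveIteratorIterator(
        new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS),
        RecursiveIteratorIterator::CHILD_FIRST
    );
    foreach ($it as $entry) {
        // remove files first, then the (now empty) directories
        $entry->isDir() ? rmdir($entry->getPathname()) : unlink($entry->getPathname());
    }
    return rmdir($dir);
}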
/**
 * Hook called before a file is renamed or deleted.
 *
 * @param array $info the associative array of info for the file being processed
 */
public static function beforeFileRenameDelete($info) {
    $file = $info['path'];
    if (!$file) {
        $file = $info['oldpath'];
    }
    if (self::$DO_LOG) {
        error_log("path === {$file}");
    }

    $pathsplit = explode("/", $file);
    $len = count($pathsplit);
    $lastelem = $pathsplit[$len - 1];
    // the split path always has an empty first element, so $pathsplit[0] === ""
    $firstelem = $pathsplit[1];

    // is_valid_casestudy() returns null when the first path element is a valid case study
    if (is_valid_casestudy("/" . $firstelem) === null) {
        if (self::$DO_LOG) {
            error_log("{$firstelem} is a valid case study");
        }

        if ($len > 3 && $pathsplit[2] === 'results' && \OC\Files\Filesystem::is_dir($file)) {
            // case 1: trying to delete/rename one of the results directories of a job
            if (is_job_running($firstelem, $lastelem)) {
                // the job is still running: do not allow renaming/deleting its results directory
                if (self::$DO_LOG) {
                    error_log("beforeFileRenameDelete : trying to rename/delete {$file} while the job is still running");
                }
                $info['run'] = false;
            } else {
                if (self::$DO_LOG) {
                    error_log("beforeFileRenameDelete : no job running for {$file}");
                }
                // the user is deleting a results directory: also delete the temp exec directory, if it exists
                $tempdir = get_job_exec_dir($lastelem);
                if (is_dir($tempdir)) {
                    rmdirr($tempdir);
                }
            }
        }

        if ($lastelem === 'data' || $lastelem === 'pipeline' || $lastelem === 'results') {
            // case 2: trying to rename/delete one of the data/pipeline/results directories of a case study
            if (any_jobs_running($firstelem)) {
                if (self::$DO_LOG) {
                    error_log("beforeFileRenameDelete : trying to rename/delete {$file} while jobs are running");
                }
                $info['run'] = false;
            } else {
                if (self::$DO_LOG) {
                    error_log("beforeFileRenameDelete : no jobs running for {$file}");
                }
            }
        }

        if ($len > 3 && $pathsplit[2] === 'data') {
            // case 3: renaming/deleting a file inside the data directory; do not allow it while any job of this study is running
            if (any_jobs_running($firstelem)) {
                if (self::$DO_LOG) {
                    error_log("beforeFileRenameDelete : trying to rename/delete {$file} while jobs are running");
                }
                $info['run'] = false;
            } else {
                if (self::$DO_LOG) {
                    error_log("beforeFileRenameDelete : no jobs running for {$file}");
                }
            }
        }
    }
}
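// This handler blocks a rename/delete by setting $info['run'] = false, which is how
// Owncloud's pre-operation filesystem hooks cancel the operation. The enclosing class
// is not shown in this excerpt; the registration below is only a sketch, assuming a
// hypothetical class name NC_Hooks and the standard 'OC_Filesystem' delete/rename signals.
OCP\Util::connectHook('OC_Filesystem', 'delete', 'NC_Hooks', 'beforeFileRenameDelete');
OCP\Util::connectHook('OC_Filesystem', 'rename', 'NC_Hooks', 'beforeFileRenameDelete');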