/**
 * The type and the sub-type are constant in the batch job, therefore they
 * must be provided when a new batch job is generated.
 *
 * Builds (but does not save) a child job of this job.
 *
 * @param int $type job type for the child
 * @param int|null $subType job sub-type; only set when non-null
 * @param bool $same_root inherit this job's root job id when available
 * @param int|null $dc data center override; null inherits this job's dc
 * @return BatchJob the new, unsaved child job
 */
public function createChild($type, $subType = null, $same_root = true, $dc = null)
{
    $child = new BatchJob();

    $child->setJobType($type);
    if (!is_null($subType)) {
        $child->setJobSubType($subType);
    }

    // Propagate the parent's identifying fields onto the child.
    $child->setParentJobId($this->id);
    $child->setPartnerId($this->partner_id);
    $child->setEntryId($this->entry_id);
    $child->setBulkJobId($this->bulk_job_id);

    // The override is required in the special case of file_sync import jobs,
    // which are created on one dc but run from the other.
    $child->setDc(is_null($dc) ? $this->dc : $dc);

    // Keep the original root when requested and present; otherwise this job
    // becomes the child's root.
    $child->setRootJobId($same_root && $this->root_job_id ? $this->root_job_id : $this->id);

    return $child;
}
/**
 * Populates the given batch job with its type, sub-type and payload, links it
 * into its parent/root/bulk lineage, attaches computed lock info, and hands it
 * to updateBatchJob() with its (possibly defaulted) status.
 *
 * @param BatchJob $batchJob the job to populate
 * @param kJobData $data job payload; also used to compute effort/priority/urgency
 * @param int $type job type
 * @param int $subType job sub-type (may be null)
 * @return BatchJob the updated job as returned by updateBatchJob()
 */
public static function addJob(BatchJob $batchJob, kJobData $data, $type, $subType = null)
{
    $batchJob->setJobType($type);
    $batchJob->setJobSubType($subType);
    $batchJob->setData($data);

    // A job with no parent but with an entry inherits its lineage either from
    // the job currently updating that entry, or from the entry's bulk upload.
    if (!$batchJob->getParentJobId() && $batchJob->getEntryId()) {
        $updatingJob = kBatchManager::getCurrentUpdatingJob();
        if ($updatingJob && $updatingJob->getEntryId() == $batchJob->getEntryId()) {
            $batchJob->setParentJobId($updatingJob->getId());
            $batchJob->setBulkJobId($updatingJob->getBulkJobId());
            $batchJob->setRootJobId($updatingJob->getRootJobId());
        } else {
            // No-filter retrieval: some jobs could be on a deleted entry.
            $entry = entryPeer::retrieveByPKNoFilter($batchJob->getEntryId());
            if ($entry) {
                $batchJob->setRootJobId($entry->getBulkUploadId());
                $batchJob->setBulkJobId($entry->getBulkUploadId());
            }
        }
    }

    // Scheduling hints are derived from the job data itself.
    $lockInfo = new kLockInfoData($batchJob);
    $lockInfo->setEstimatedEffort($data->calculateEstimatedEffort($batchJob));
    $lockInfo->setPriority($data->calculatePriority($batchJob));
    $lockInfo->setUrgency($data->calculateUrgency($batchJob));
    $batchJob->setLockInfo($lockInfo);

    // New jobs start out pending; jobs with an explicit status keep it.
    $status = is_null($batchJob->getStatus())
        ? BatchJob::BATCHJOB_STATUS_PENDING
        : $batchJob->getStatus();

    return self::updateBatchJob($batchJob, $status);
}
/**
 * Builds, saves and returns a pending child job that inherits this job's
 * partner/entry/priority/bulk context.
 *
 * @param bool $same_root share this job's root job id when it has one
 * @param int|null $dc target data center; null means inherit this job's dc
 * @return BatchJob the persisted child job
 */
public function createChild($same_root = true, $dc = null)
{
    $childJob = new BatchJob();
    $childJob->setStatus(self::BATCHJOB_STATUS_PENDING);

    // Copy the parent's identifying context onto the child.
    $childJob->setParentJobId($this->id);
    $childJob->setPartnerId($this->partner_id);
    $childJob->setEntryId($this->entry_id);
    $childJob->setPriority($this->priority);
    $childJob->setSubpId($this->subp_id);
    $childJob->setBulkJobId($this->bulk_job_id);

    // The dc override is required in the special case of file_sync import
    // jobs, which are created on one dc but run from the other.
    if ($dc === null) {
        $dc = $this->dc;
    }
    $childJob->setDc($dc);

    // Chain the child onto the same root when requested and available;
    // otherwise this job becomes the child's root.
    $rootId = ($same_root && $this->root_job_id) ? $this->root_job_id : $this->id;
    $childJob->setRootJobId($rootId);

    $childJob->save();

    return $childJob;
}
/**
 * Builds, saves and returns a pending child job that inherits this job's
 * partner/entry/priority/bulk context.
 *
 * Generalized (backward-compatibly) with an optional $dc override to match
 * the sibling createChild() variant: file_sync import jobs may be created on
 * one dc but run from the other. Omitting $dc preserves the previous
 * behavior of inheriting this job's dc.
 *
 * @param bool $same_root share this job's root job id when it has one
 * @param int|null $dc optional data center override; null inherits this job's dc
 * @return BatchJob the persisted child job
 */
public function createChild($same_root = true, $dc = null)
{
    $child = new BatchJob();
    $child->setStatus(self::BATCHJOB_STATUS_PENDING);

    // Copy the parent's identifying context onto the child.
    $child->setParentJobId($this->id);
    $child->setPartnerId($this->partner_id);
    $child->setEntryId($this->entry_id);
    $child->setPriority($this->priority);
    $child->setSubpId($this->subp_id);
    $child->setBulkJobId($this->bulk_job_id);

    // Default to the parent's dc; callers may override for cross-dc jobs.
    $child->setDc($dc === null ? $this->dc : $dc);

    // Keep the original root when requested and present; otherwise this job
    // becomes the child's root.
    if ($same_root && $this->root_job_id) {
        $child->setRootJobId($this->root_job_id);
    } else {
        $child->setRootJobId($this->id);
    }

    $child->save();

    return $child;
}
/**
 * Populates the given batch job with its type, sub-type and payload, links it
 * into its parent/root/bulk lineage, validates its partner, moves it to
 * pending, and marks it as a twin of an identical pre-existing job if one is
 * found.
 *
 * @param BatchJob $batchJob the job to populate
 * @param $data job payload, also used for duplicate detection
 * @param int $type job type
 * @param int $subType job sub-type (may be null)
 * @return BatchJob the updated job
 * @throws APIException when the job's partner id does not exist
 */
public static function addJob(BatchJob $batchJob, $data, $type, $subType = null)
{
    $batchJob->setJobType($type);
    $batchJob->setJobSubType($subType);
    $batchJob->setData($data);

    // A job with no parent but with an entry inherits its lineage either from
    // the job currently updating that entry, or from the entry's bulk upload.
    if (!$batchJob->getParentJobId() && $batchJob->getEntryId()) {
        $currentJob = kBatchManager::getCurrentUpdatingJob();
        if ($currentJob && $currentJob->getEntryId() == $batchJob->getEntryId()) {
            $batchJob->setParentJobId($currentJob->getId());
            $batchJob->setBulkJobId($currentJob->getBulkJobId());
            $batchJob->setRootJobId($currentJob->getRootJobId());
        } else {
            // No-filter retrieval: some jobs could be on a deleted entry.
            $entry = entryPeer::retrieveByPKNoFilter($batchJob->getEntryId());
            if ($entry) {
                $batchJob->setRootJobId($entry->getBulkUploadId());
                $batchJob->setBulkJobId($entry->getBulkUploadId());
            }
        }
    }

    // validate partner id
    $partnerId = $batchJob->getPartnerId();
    // if(!$partnerId)
    //     throw new APIException(APIErrors::PARTNER_NOT_SET);

    // validate that partner exists
    $partner = PartnerPeer::retrieveByPK($partnerId);
    if (!$partner) {
        KalturaLog::err("Invalid partner id [{$partnerId}]");
        throw new APIException(APIErrors::INVALID_PARTNER_ID, $partnerId);
    }

    // set the priority and work group
    $batchJob->setPriority($partner->getPriority($batchJob->getBulkJobId()));

    $batchJob = self::updateBatchJob($batchJob, BatchJob::BATCHJOB_STATUS_PENDING);

    // Look for an identical job that is not this job itself.
    // Bug fix: the original assigned reset($twinJobs) — the FIRST candidate,
    // which could be $batchJob itself — instead of the non-self match it had
    // just found, and kept looping needlessly. Take the first non-self twin.
    $twinJob = null;
    $twinJobs = BatchJobPeer::retrieveDuplicated($type, $data);
    foreach ($twinJobs as $currentTwinJob) {
        if ($currentTwinJob->getId() != $batchJob->getId()) {
            $twinJob = $currentTwinJob;
            break;
        }
    }

    if (!is_null($twinJob)) {
        $batchJob->setTwinJobId($twinJob->getId());
        if (!kConf::get("batch_ignore_duplication")) {
            // Follow the twin's status instead of executing again.
            $batchJob = self::updateBatchJob($batchJob, $twinJob->getStatus(), $twinJob);
        } else {
            $batchJob->save();
        }
    }

    return $batchJob;
}