/**
 * Queues a pending FLATTEN batch job for the given entry.
 *
 * @param string $puser_id    Partner user id recorded in the job data
 * @param entry  $entry       Entry object (despite the legacy docblock, this is an object, not an id string)
 * @param string $version     Explicit entry version; falls back to $entry->getVersion() when empty
 * @param string $file_format Requested output file format
 * @return BatchJob The saved, pending batch job
 */
public static function addJob($puser_id, $entry, $version, $file_format)
{
    // FIX: the original dereferenced $entry before its null-check and left
    // $email undefined when the check failed. All accessors now run behind
    // the guard, and the JSON fields default to null.
    $entryId = null;
    $entryIntId = null;
    $entryVersion = null;
    $email = null;

    if ($entry) {
        $entryId = $entry->getId();
        $entryIntId = $entry->getIntId();
        // An explicit version wins; otherwise use the entry's current version.
        $entryVersion = $version ? $version : $entry->getVersion();
        $partner = $entry->getPartner();
        $email = $partner->getAdminEmail();
    }

    $data = json_encode(array(
        'puserId' => $puser_id,
        'entryId' => $entryId,
        'entryIntId' => $entryIntId,
        'entryVersion' => $entryVersion,
        'fileFormat' => $file_format,
        'email' => $email,
    ));

    $job = new BatchJob();
    $job->setJobType(BatchJobType::FLATTEN);
    $job->setData($data, true);
    $job->setStatus(BatchJob::BATCHJOB_STATUS_PENDING);
    // Ask the batch system to re-check this job in 10 seconds.
    $job->setCheckAgainTimeout(time() + 10);
    $job->setProgress(0);
    $job->setMessage('Queued');
    $job->setDescription('Queued, waiting to run');
    $job->setUpdatesCount(0);
    $job->setEntryId($entryId);
    $job->setPartnerId($entry->getPartnerId());
    $job->setSubpId($entry->getSubpId());
    $job->save();

    return $job;
}
/**
 * Creates a CONVERT_PROFILE batch job to re-convert an existing entry from its
 * best available flavor asset.
 *
 * Source selection order: original flavor, then the ready web flavor, then the
 * first of any flavors found for the entry.
 *
 * @param string $entry_id              Entry to re-convert
 * @param int    $conversion_profile_id Currently unused by this method body — TODO confirm caller expectation
 * @param int    $job_priority          Unused; priority is no longer supported (see commented line below)
 * @return array Tuple of [entry_id, entry|null, BatchJob|null, error string|null]
 */
private function reconvertEntry($entry_id, $conversion_profile_id, $job_priority)
{
    $entry = entryPeer::retrieveByPK($entry_id);
    $this->error = "";
    // FIX: $error was previously undefined on the success path, producing a
    // PHP notice at the final return. Initialize it explicitly.
    $error = null;

    if (!$entry) {
        $error = "Cannot reconvert entry [{$entry_id}]. Might be a deleted entry";
        return array($entry_id, null, null, $error);
    }

    // Prefer the original source asset, then a ready web flavor, then any flavor.
    $flavorAsset = assetPeer::retrieveOriginalByEntryId($entry_id);
    if (!$flavorAsset) {
        $flavorAsset = assetPeer::retrieveReadyWebByEntryId($entry_id);
        if (!$flavorAsset) {
            $flavorAssets = assetPeer::retrieveFlavorsByEntryId($entry_id);
            if (!$flavorAssets) {
                $error = "Cannot find good enough flavor asset to re-convert from";
                return array($entry_id, $entry, null, $error);
            }
            $flavorAsset = $flavorAssets[0]; // choose the first one
        }
    }

    // Resolve the local file path of the chosen asset; without a ready file
    // sync there is nothing to convert from.
    $syncKey = $flavorAsset->getSyncKey(flavorAsset::FILE_SYNC_FLAVOR_ASSET_SUB_TYPE_ASSET);
    $filePath = kFileSyncUtils::getReadyLocalFilePathForKey($syncKey);
    if (!$filePath) {
        $error = "Cannot find a fileSync for the flavorAsset [" . $flavorAsset->getId() . "]";
        return array($entry_id, $entry, null, $error);
    }

    $dbBatchJob = new BatchJob();
    $dbBatchJob->setEntryId($entry_id);
    $dbBatchJob->setPartnerId($entry->getPartnerId());
    $dbBatchJob->setStatus(BatchJob::BATCHJOB_STATUS_PENDING);
    $dbBatchJob->setDc(kDataCenterMgr::getCurrentDcId());
    //$dbBatchJob->setPriority ( $job_priority ); Not supported anymore
    $dbBatchJob->setObjectId($entry_id);
    $dbBatchJob->setObjectType(BatchJobObjectType::ENTRY);
    $dbBatchJob->setJobType(BatchJobType::CONVERT_PROFILE);
    $dbBatchJob->save();

    // creates a convert profile job
    $convertProfileData = new kConvertProfileJobData();
    $convertProfileData->setFlavorAssetId($flavorAsset->getId());
    $convertProfileData->setInputFileSyncLocalPath($filePath);
    kJobsManager::addJob($dbBatchJob, $convertProfileData, BatchJobType::CONVERT_PROFILE);

    // save again after the addJob
    $dbBatchJob->save();

    return array($entry_id, $entry, $dbBatchJob, $error);
}
/**
 * Queues a DVDCREATOR batch job for the entry identified by the "entry_id"
 * request parameter and adds its wrapped representation to the response.
 * Adds an INVALID_ENTRY_ID error instead when the entry cannot be found.
 */
public function executeImpl($partner_id, $subp_id, $puser_id, $partner_prefix, $puser_kuser)
{
    $entryId = $this->getPM("entry_id");
    $entry = entryPeer::retrieveByPK($entryId);

    // Guard clause: unknown entry -> report the error and stop.
    if (!$entry) {
        $this->addError(APIErrors::INVALID_ENTRY_ID, "entry", $entryId);
        return;
    }

    $batchJob = new BatchJob();
    $batchJob->setJobType(BatchJobType::DVDCREATOR);
    $batchJob->setStatus(BatchJob::BATCHJOB_STATUS_PENDING);
    //$batchJob->setCheckAgainTimeout(time() + 10);
    $batchJob->setEntryId($entryId);
    $batchJob->setPartnerId($entry->getPartnerId());
    $batchJob->setSubpId($entry->getSubpId());
    $batchJob->save();

    $wrapped = objectWrapperBase::getWrapperClass($batchJob, objectWrapperBase::DETAIL_LEVEL_DETAILED);
    // TODO - remove this code when cache works properly when saving objects (in their save method)
    $wrapped->removeFromCache("batch_job", $batchJob->getId());
    $this->addMsg("batchjob", $wrapped);
}
/**
 * The type and the sub-type are constant in the batch job, therefore they must
 * be provided when a new batch job is generated.
 *
 * @param int      $type      Job type for the child
 * @param int|null $subType   Optional job sub-type (skipped when null)
 * @param bool     $same_root Propagate this job's root id when it has one
 * @param int|null $dc        Data-center override; defaults to this job's DC
 * @return BatchJob Unsaved child job inheriting partner/entry/bulk context
 */
public function createChild($type, $subType = null, $same_root = true, $dc = null)
{
    $childJob = new BatchJob();
    $childJob->setJobType($type);
    if (!is_null($subType)) {
        $childJob->setJobSubType($subType);
    }

    // Inherit the parent's identifying context.
    $childJob->setParentJobId($this->id);
    $childJob->setPartnerId($this->partner_id);
    $childJob->setEntryId($this->entry_id);
    $childJob->setBulkJobId($this->bulk_job_id);

    // the condition is required in the special case of file_sync import jobs
    // which are created on one dc but run from the other
    $childJob->setDc(is_null($dc) ? $this->dc : $dc);

    // Keep the original chain root when requested and available; otherwise
    // this job becomes the root of the new chain.
    $childJob->setRootJobId($same_root && $this->root_job_id ? $this->root_job_id : $this->id);

    return $childJob;
}
/**
 * Queues a LIVE_REPORT_EXPORT batch job for the current partner.
 *
 * @param int                            $reportType Report type, used as the job sub-type
 * @param KalturaLiveReportExportParams  $params     Export parameters (entry ids, recipient, tz offset, URL template)
 * @return BatchJob The queued batch job
 */
public static function addExportLiveReportJob($reportType, KalturaLiveReportExportParams $params)
{
    KalturaLog::debug("adding Export Live Report job");

    // Calculate time offset from server time to UTC
    $serverZone = new DateTimeZone(kConf::get('date_default_timezone'));
    $utcZone = new DateTimeZone("UTC");
    $nowUtc = new DateTime("now", $utcZone);
    $serverToUtcSeconds = -1 * $serverZone->getOffset($nowUtc);

    // Create job data
    $jobData = new kLiveReportExportJobData();
    $jobData->entryIds = $params->entryIds;
    // NOTE(review): "recpientEmail" is the property name on the external
    // params class — intentionally left as-is.
    $jobData->recipientEmail = $params->recpientEmail;
    // Convert minutes to seconds
    $jobData->timeZoneOffset = $serverToUtcSeconds - $params->timeZoneOffset * 60;
    $jobData->timeReference = time();
    $jobData->applicationUrlTemplate = $params->applicationUrlTemplate;

    $exportJob = new BatchJob();
    $exportJob->setPartnerId(kCurrentContext::getCurrentPartnerId());
    $exportJob->setJobType(BatchJobType::LIVE_REPORT_EXPORT);
    $exportJob->setJobSubType($reportType);
    $exportJob->setData($jobData);

    return self::addJob($exportJob, $jobData, BatchJobType::LIVE_REPORT_EXPORT, $reportType);
}
/**
 * Queues a METADATA_TRANSFORM batch job to upgrade metadata objects of a
 * profile from an older version, or returns null when no valid metadata
 * objects below the destination version exist.
 *
 * @param int         $partnerId         Partner owning the metadata profile
 * @param int         $metadataProfileId Profile whose metadata should be transformed
 * @param int         $srcVersion        Source profile version
 * @param int         $destVersion       Destination profile version
 * @param string|null $xsl               Optional XSL transform; stored as a file sync on the job
 *
 * @return BatchJob|null
 */
private static function addTransformMetadataJob($partnerId, $metadataProfileId, $srcVersion, $destVersion, $xsl = null)
{
    // check if any metadata objects require the transform
    $criteria = new Criteria();
    $criteria->add(MetadataPeer::METADATA_PROFILE_ID, $metadataProfileId);
    $criteria->add(MetadataPeer::METADATA_PROFILE_VERSION, $destVersion, Criteria::LESS_THAN);
    $criteria->add(MetadataPeer::STATUS, Metadata::STATUS_VALID);

    if (!MetadataPeer::doCount($criteria)) {
        return null;
    }

    $job = new BatchJob();
    $job->setJobType(BatchJobType::METADATA_TRANSFORM);
    $job->setPartnerId($partnerId);
    $job->setObjectId($metadataProfileId);
    $job->setObjectType(kPluginableEnumsManager::apiToCore('BatchJobObjectType', MetadataBatchJobObjectType::METADATA_PROFILE));

    $data = new kTransformMetadataJobData();
    if ($xsl) {
        // The job must be saved first so a sync key can be derived from its id.
        $job->save();
        $key = $job->getSyncKey(BatchJob::FILE_SYNC_BATCHJOB_SUB_TYPE_CONFIG);
        kFileSyncUtils::file_put_contents($key, $xsl);
        $data->setSrcXslPath(kFileSyncUtils::getLocalFilePathForKey($key));
    }
    $data->setMetadataProfileId($metadataProfileId);
    $data->setSrcVersion($srcVersion);
    $data->setDestVersion($destVersion);

    return kJobsManager::addJob($job, $data, BatchJobType::METADATA_TRANSFORM);
}
/**
 * Function adds bulk upload job to the queue
 *
 * @param Partner $partner
 * @param kBulkUploadJobData $jobData
 * @param string $bulkUploadType
 * @throws APIException
 * @return BatchJob
 */
public static function addBulkUploadJob(Partner $partner, kBulkUploadJobData $jobData, $bulkUploadType = null)
{
    // FIX: validate $jobData before it is dereferenced. The original called
    // $jobData->getFilePath() first and only checked is_null($jobData) later,
    // making the check unreachable in the failure case.
    if (is_null($jobData)) {
        throw new APIException(APIErrors::BULK_UPLOAD_BULK_UPLOAD_TYPE_NOT_VALID, $bulkUploadType);
    }

    $job = new BatchJob();
    $job->setPartnerId($partner->getId());
    $job->setJobType(BatchJobType::BULKUPLOAD);
    $job->setJobSubType($bulkUploadType);
    // Saved first so the job id can anchor the bulk-upload file sync.
    $job->save();

    $syncKey = $job->getSyncKey(BatchJob::FILE_SYNC_BATCHJOB_SUB_TYPE_BULKUPLOAD);
    try {
        // Move the uploaded file into the job's file sync location.
        kFileSyncUtils::moveFromFile($jobData->getFilePath(), $syncKey, true);
    } catch (Exception $e) {
        throw new APIException(APIErrors::BULK_UPLOAD_CREATE_CSV_FILE_SYNC_ERROR);
    }
    $filePath = kFileSyncUtils::getLocalFilePathForKey($syncKey);

    if (!$jobData->getBulkUploadObjectType()) {
        $jobData->setBulkUploadObjectType(BulkUploadObjectType::ENTRY);
    }
    $jobData->setFilePath($filePath);

    // Entry bulk uploads without an explicit conversion profile fall back to
    // the partner default, which must exist in the KMC-version-appropriate table.
    if ($jobData->getBulkUploadObjectType() == BulkUploadObjectType::ENTRY && !$jobData->getObjectData()->getConversionProfileId()) {
        $jobData->setConversionProfileId($partner->getDefaultConversionProfileId());
        $kmcVersion = $partner->getKmcVersion();
        $check = null;
        if ($kmcVersion < 2) {
            $check = ConversionProfilePeer::retrieveByPK($jobData->getConversionProfileId());
        } else {
            $check = conversionProfile2Peer::retrieveByPK($jobData->getConversionProfileId());
        }
        if (!$check) {
            throw new APIException(APIErrors::CONVERSION_PROFILE_ID_NOT_FOUND, $jobData->getConversionProfileId());
        }
    }

    return kJobsManager::addJob($job, $jobData, BatchJobType::BULKUPLOAD, kPluginableEnumsManager::apiToCore("BulkUploadType", $bulkUploadType));
}
/**
 * Finalizes and queues a batch job: stamps type/sub-type/data, links root and
 * bulk ids from the entry's bulk upload, validates the partner, assigns
 * priority, moves the job to PENDING, and deduplicates against twin jobs.
 *
 * @param BatchJob $batchJob Job to queue (may already be partially populated)
 * @param mixed    $data     Job-type-specific data object
 * @param int      $type     Job type
 * @param int      $subType  Job sub-type
 * @throws APIException When the job's partner id does not exist
 * @return BatchJob The queued (or twin-linked) job
 */
public static function addJob(BatchJob $batchJob, $data, $type, $subType = null)
{
    $batchJob->setJobType($type);
    $batchJob->setJobSubType($subType);
    $batchJob->setData($data);

    if (!$batchJob->getParentJobId() && $batchJob->getEntryId()) {
        $entry = entryPeer::retrieveByPKNoFilter($batchJob->getEntryId()); // some jobs could be on deleted entry
        // FIX: guard against a missing entry row — a null $entry previously
        // caused a fatal error here despite the comment above anticipating
        // deleted entries.
        if ($entry) {
            $batchJob->setRootJobId($entry->getBulkUploadId());
            $batchJob->setBulkJobId($entry->getBulkUploadId());
        }
    }

    // validate partner id
    $partnerId = $batchJob->getPartnerId();
    // if(!$partnerId)
    // throw new APIException(APIErrors::PARTNER_NOT_SET);

    // validate that partner exists
    $partner = PartnerPeer::retrieveByPK($partnerId);
    if (!$partner) {
        KalturaLog::err("Invalid partner id [{$partnerId}]");
        throw new APIException(APIErrors::INVALID_PARTNER_ID, $partnerId);
    }

    // set the priority and work group
    $batchJob->setPriority($partner->getPriority($batchJob->getBulkJobId()));
    $batchJob = self::updateBatchJob($batchJob, BatchJob::BATCHJOB_STATUS_PENDING);

    // look for identical jobs
    $twinJobs = BatchJobPeer::retrieveDuplicated($type, $data);
    $twinJob = null;
    if (count($twinJobs)) {
        // Any duplicate other than the job itself qualifies it as having a twin;
        // the first duplicate in the list is taken as the twin.
        foreach ($twinJobs as $currentTwinJob) {
            if ($currentTwinJob->getId() != $batchJob->getId()) {
                $twinJob = reset($twinJobs);
            }
        }
    }

    if (!is_null($twinJob)) {
        $batchJob->setTwinJobId($twinJob->getId());
        if (!kConf::get("batch_ignore_duplication")) {
            // Mirror the twin's status instead of running the work twice.
            $batchJob = self::updateBatchJob($batchJob, $twinJob->getStatus(), $twinJob);
        } else {
            $batchJob->save();
        }
    }

    return $batchJob;
}