/**
 * Lists the partner's bulk-upload batch jobs, newest first, with paging.
 *
 * Adds to the response: "count" (total matching jobs), "page_size",
 * "page" and "bulk_uploads" (one summary row per job).
 *
 * Fix: paging inputs are now clamped — previously page=0 or a negative
 * page produced a negative OFFSET, and page_size had no lower bound.
 */
public function executeImpl($partner_id, $subp_id, $puser_id, $partner_prefix, $puser_kuser)
{
    // Clamp the page size to [1, 100] so a bad "page_size" parameter can
    // never produce a zero/negative LIMIT or an unbounded query.
    $limit = (int)$this->getP("page_size", 20);
    $limit = max(1, min($limit, 100));
    // Floor the page at 1 so page=0 (or negative) cannot yield a negative
    // OFFSET.
    $page = max(1, (int)$this->getP("page", 1));
    $offset = ($page - 1) * $limit;

    // Bulk-upload jobs belonging to this partner, newest first.
    $c = new Criteria();
    $c->addAnd(BatchJobPeer::PARTNER_ID, $partner_id);
    $c->addAnd(BatchJobPeer::JOB_TYPE, BatchJobType::BULKUPLOAD);
    $c->addDescendingOrderByColumn(BatchJobPeer::ID);

    // Count before applying limit/offset so "count" reflects the full total.
    $count = BatchJobPeer::doCount($c);
    $c->setLimit($limit);
    $c->setOffset($offset);
    $jobs = BatchJobPeer::doSelect($c);

    $obj = array();
    foreach ($jobs as $job) {
        $jobData = $job->getData();
        // Skip jobs whose data payload is not a bulk-upload payload.
        if (!$jobData instanceof kBulkUploadJobData) {
            continue;
        }
        $bulkResults = BulkUploadResultPeer::retrieveWithEntryByBulkUploadId($job->getId());
        $obj[] = array(
            "uploadedBy" => $jobData->getUploadedBy(),
            "uploadedOn" => $job->getCreatedAt(null),
            "numOfEntries" => count($bulkResults),
            "status" => $job->getStatus(),
            // Expose the failure message only for failed jobs.
            "error" => $job->getStatus() == BatchJob::BATCHJOB_STATUS_FAILED ? $job->getMessage() : '',
            "logFileUrl" => requestUtils::getCdnHost() . "/index.php/extwidget/bulkuploadfile/id/{$job->getId()}/pid/{$job->getPartnerId()}/type/log",
            "csvFileUrl" => requestUtils::getCdnHost() . "/index.php/extwidget/bulkuploadfile/id/{$job->getId()}/pid/{$job->getPartnerId()}/type/csv",
        );
    }

    $this->addMsg("count", $count);
    $this->addMsg("page_size", $limit);
    $this->addMsg("page", $page);
    $this->addMsg("bulk_uploads", $obj);
}
/**
 * Lists flatten/download batch jobs, optionally narrowed by request
 * "filter_*" parameters, with paging.
 *
 * Adds to the response: "count" (total matching jobs), "page_size",
 * "page" and "downloads" (wrapped job list).
 *
 * Fix: removed a duplicated `$offset = ($page - 1) * $limit;`
 * computation and clamped the paging inputs so a bad "page"/"page_size"
 * cannot yield a negative OFFSET or non-positive LIMIT.
 */
public function executeImpl($partner_id, $subp_id, $puser_id, $partner_prefix, $puser_kuser)
{
    // TODO - verify permissions for viewing lists
    // NOTE(review): "detailed" is read but never used below — kept for
    // request-parameter parity; confirm before removing.
    $detailed = $this->getP("detailed", false);

    // Floor the paging inputs so crafted parameters cannot produce a
    // negative OFFSET or a zero/negative LIMIT.
    $limit = max(1, (int)$this->getP("page_size", 10));
    $page = max(1, (int)$this->getP("page", 1));
    $offset = ($page - 1) * $limit;

    // Restrict to "download-like" job types.
    $c = new Criteria();
    $download_types = array(BatchJobType::FLATTEN, BatchJobType::DOWNLOAD);
    $c->add(BatchJobPeer::JOB_TYPE, $download_types, Criteria::IN);

    // Apply the request's "filter_*" parameters on top of the type filter.
    $filter = new BatchJobFilter(true);
    $fields_set = $filter->fillObjectFromRequest($this->getInputParams(), "filter_", null);
    $filter->attachToCriteria($c);

    // Count before applying limit/offset so "count" is the full total.
    $count = BatchJobPeer::doCount($c);
    $c->setLimit($limit);
    if ($offset > 0) {
        $c->setOffset($offset);
    }
    $list = BatchJobPeer::doSelect($c);

    $this->addMsg("count", $count);
    $this->addMsg("page_size", $limit);
    $this->addMsg("page", $page);

    $level = objectWrapperBase::DETAIL_LEVEL_REGULAR;
    $wrapper = objectWrapperBase::getWrapperClass($list, $level);
    $this->addMsg("downloads", $wrapper);
}
/**
 * Returns the batch jobs currently locked by this worker: jobs that have
 * a batch index assigned and match this worker's scheduler id and
 * worker id.
 *
 * @return array BatchJob[] (empty array when nothing is locked)
 */
public function getLockedJobs()
{
    $lockedCriteria = new Criteria();
    // A non-null batch index means the job is occupying a batch slot.
    $lockedCriteria->add(BatchJobPeer::BATCH_INDEX, null, Criteria::ISNOTNULL);
    $lockedCriteria->add(BatchJobPeer::SCHEDULER_ID, $this->scheduler_configured_id);
    $lockedCriteria->add(BatchJobPeer::WORKER_ID, $this->configured_id);

    $connection = myDbHelper::getConnection(myDbHelper::DB_HELPER_CONN_PROPEL2);
    return BatchJobPeer::doSelect($lockedCriteria, $connection);
}
/**
 * Admin overview action: loads the most recent conversion rows and the
 * most recent import batch jobs for display.
 *
 * Request parameters:
 *   conv_count — number of conversions to show (default 10, capped at 40)
 *   impo_count — number of imports to show   (default 10, capped at 40)
 *
 * Fix: removed large blocks of commented-out fixture code, int-cast and
 * floored both counts so a non-numeric or non-positive parameter cannot
 * produce a zero/negative LIMIT.
 */
public function execute()
{
    $this->forceSystemAuthentication();

    // Clamp to [1, 40] so a crafted URL cannot trigger an unbounded query.
    $conversion_count = min(40, max(1, (int)$this->getRequestParameter("conv_count", 10)));
    $c = new Criteria();
    $c->addDescendingOrderByColumn(conversionPeer::ID);
    $c->setLimit($conversion_count);
    $this->conversions = conversionPeer::doSelect($c);
    $this->conversion_count = $conversion_count;

    $import_count = min(40, max(1, (int)$this->getRequestParameter("impo_count", 10)));
    $c = new Criteria();
    $c->addDescendingOrderByColumn(BatchJobPeer::ID);
    $c->setLimit($import_count);
    $this->imports = BatchJobPeer::doSelect($c);
    $this->import_count = $import_count;
}
/**
 * Retrieves every batch job whose parent-job id is this job's id.
 *
 * @return array BatchJob[] direct children (empty array when none)
 */
public function getDirectChildJobs()
{
    $childCriteria = new Criteria();
    $childCriteria->add(BatchJobPeer::PARENT_JOB_ID, $this->id);

    $connection = myDbHelper::getConnection(myDbHelper::DB_HELPER_CONN_PROPEL2);
    return BatchJobPeer::doSelect($childCriteria, $connection);
}
/**
 * list Batch Jobs
 *
 * @action listBatchJobs
 * @param KalturaBatchJobFilter $filter
 * @param KalturaFilterPager $pager
 * @return KalturaBatchJobListResponse
 */
function listBatchJobsAction(KalturaBatchJobFilter $filter = null, KalturaFilterPager $pager = null)
{
    // Default the filter and pager when the caller supplied none.
    if (null === $filter) {
        $filter = new KalturaBatchJobFilter();
    }
    if (null === $pager) {
        $pager = new KalturaFilterPager();
    }

    // Translate the API-level filter into its server-side counterpart and
    // build the selection criteria from it, then apply paging.
    $batchJobFilter = new BatchJobFilter();
    $filter->toObject($batchJobFilter);

    $criteria = new Criteria();
    $batchJobFilter->attachToCriteria($criteria);
    $pager->attachToCriteria($criteria);

    // Route the queries through the propel2 connection.
    myDbHelper::$use_alternative_con = myDbHelper::DB_HELPER_CONN_PROPEL2;
    $list = BatchJobPeer::doSelect($criteria);

    // Clear the pager's limit before counting so totalCount reflects the
    // whole result set rather than just the current page.
    $criteria->setLimit(false);
    $count = BatchJobPeer::doCount($criteria);

    $response = new KalturaBatchJobListResponse();
    $response->objects = KalturaBatchJobArray::fromStatisticsBatchJobArray($list);
    $response->totalCount = $count;
    return $response;
}
/**
 * Long-running worker loop that finalizes FLATTEN batch jobs for the
 * current data center.
 *
 * For every job in status PROCESSED it downloads the flattened file from
 * the remote server, validates it, creates the local file-sync record,
 * optionally emails a download link, and marks the job FINISHED (or
 * FAILED on a missing/too-small file or a deleted entry).
 *
 * NOTE(review): this method never returns on its own — the while(1) loop
 * only ends via self::exitIfDone(); presumably that call terminates the
 * process. TRACE/SET_CONTEXT appear to be logging helpers — confirm.
 *
 * @param string $script_name name under which this worker registers itself
 */
public function myBatchFlattenServer($script_name)
{
    $this->script_name = $script_name;
    $this->register($script_name);
    SET_CONTEXT("FS");
    // NOTE(review): declared but not referenced in this body.
    $MAX_ITERATIONS_DUE_TO_PROPEL_MEMORY_LEAK = 10000000;
    self::initDb();
    list($sleep_between_cycles, $number_of_times_to_skip_writing_sleeping) = self::getSleepParams('app_flatten_');
    // NOTE(review): $last_worker_count / $iteration are initialized but
    // never used below.
    $last_worker_count = 0;
    $iteration = 0;

    // Criteria is built once and reused every cycle: PROCESSED flatten
    // jobs belonging to this data center.
    $c = new Criteria();
    $currentDc = kDataCenterMgr::getCurrentDc();
    $c->add(BatchJobPeer::DC, kDataCenterMgr::getCurrentDcId());
    $c->add(BatchJobPeer::JOB_TYPE, BatchJobType::FLATTEN);
    $c->add(BatchJobPeer::STATUS, BatchJob::BATCHJOB_STATUS_PROCESSED);

    // Counts quiet cycles so the "sleeping" log line is only written once
    // per $number_of_times_to_skip_writing_sleeping cycles.
    $temp_count = 0;
    while (1) {
        self::exitIfDone();
        try {
            sleep($sleep_between_cycles);
            $jobs = BatchJobPeer::doSelect($c);
            foreach ($jobs as $job) {
                // Job payload is stored as JSON; decoded to an assoc array.
                $data = json_decode($job->getData(true), true);
                $entry_id = $data['entryId'];
                $entry_int_id = $data['entryIntId'];
                $entry_version = $data['entryVersion'];
                $file_format = $data['fileFormat'];
                $entry = entryPeer::retrieveByPK($entry_id);
                if (!$entry) {
                    // entry is probably deleted if it is not returned from
                    // retrieveByPK — close the job as failed.
                    $job->setStatus(BatchJob::BATCHJOB_STATUS_FAILED);
                    $job->setDescription("could not retrieve entry, probably deleted");
                    TRACE("could not retrieve entry {$entry_id} , probably deleted");
                    $job->save();
                    continue;
                }

                // Resolve the local target path for the flattened download.
                $fileSyncKey = $entry->getSyncKey(entry::FILE_SYNC_ENTRY_SUB_TYPE_DOWNLOAD, $file_format);
                $fullFinalPath = kFileSyncUtils::getLocalFilePathForKey($fileSyncKey);
                // Strip the format extension to build a wildcard that
                // matches stale files of any format.
                $finalPathNoExt = substr($fullFinalPath, 0, strlen($fullFinalPath) - strlen($file_format));
                myContentStorage::fullMkdir($fullFinalPath);
                $wildcardFinalPath = $finalPathNoExt . "*";
                // Remove any leftovers from previous attempts before
                // downloading the fresh file.
                $older_files = glob($wildcardFinalPath);
                foreach ($older_files as $older_file) {
                    TRACE("removing old file: [{$older_file}]");
                    @unlink($older_file);
                }

                TRACE("Downloading: {$fullFinalPath}");
                kFile::downloadUrlToFile($data["serverUrl"], $fullFinalPath);
                if (!file_exists($fullFinalPath)) {
                    // Download produced nothing — fail the job.
                    TRACE("file doesnt exist: " . $data["serverUrl"]);
                    $job->setDescription("file doesnt exist: " . $data["serverUrl"]);
                    $job->setStatus(BatchJob::BATCHJOB_STATUS_FAILED);
                } else {
                    if (filesize($fullFinalPath) < 100000) {
                        // Heuristic: anything under ~100KB is treated as a
                        // broken flatten result; discard it and fail.
                        @unlink($fullFinalPath);
                        TRACE("file too small: " . $data["serverUrl"]);
                        $job->setDescription("file too small: " . $data["serverUrl"]);
                        $job->setStatus(BatchJob::BATCHJOB_STATUS_FAILED);
                    } else {
                        if ($data['email']) {
                            // Requester asked to be notified — queue a
                            // "flatten ready" mail with the download link.
                            $downloadLink = $entry->getDownloadUrl() . '/format/' . $file_format;
                            kJobsManager::addMailJob(null, $entry_id, $entry->getPartnerId(), self::KALTURAS_FLATTEN_READY, kMailJobData::MAIL_PRIORITY_NORMAL, kConf::get("batch_flatten_video_sender_email"), kConf::get("batch_flatten_video_sender_name"), $data['email'], array($data['email'], $downloadLink));
                        }
                        // Ask the remote server to drop its copy; the fetch
                        // itself triggers the deletion.
                        TRACE("Deleting: " . $data["deleteUrl"]);
                        kFile::downloadUrlToString($data["deleteUrl"]);
                        myNotificationMgr::createNotification(kNotificationJobData::NOTIFICATION_TYPE_ENTRY_UPDATE, $entry);
                        $job->setStatus(BatchJob::BATCHJOB_STATUS_FINISHED);
                        // Register the downloaded file as a FileSync; a
                        // failure here is logged and ignored on purpose.
                        $filePath = kFileSyncUtils::getLocalFilePathForKey($fileSyncKey);
                        if (file_exists($filePath)) {
                            try {
                                kFileSyncUtils::createSyncFileForKey($fileSyncKey);
                            } catch (Exception $ex) {
                                TRACE("ignore ERROR: " . $ex->getMessage());
                            }
                        } else {
                            TRACE("The file [{$filePath}] doesn't exists, not creating FileSync");
                        }
                    }
                }
                // Persist whichever status was set above.
                $job->save();
            }
        } catch (Exception $ex) {
            // Any failure in the cycle: log, reconnect the DB, and count a
            // failure — the loop then continues with the next cycle.
            TRACE("ERROR: " . $ex->getMessage());
            self::initDb(true);
            self::failed();
        }
        if ($temp_count == 0) {
            TRACE("Ended conversion. sleeping for a while (" . $sleep_between_cycles . " seconds). Will write to the log in (" . $sleep_between_cycles * $number_of_times_to_skip_writing_sleeping . ") seconds");
        }
        $temp_count++;
        if ($temp_count >= $number_of_times_to_skip_writing_sleeping) {
            $temp_count = 0;
        }
    }
}
/**
 * Will investigate a single entry (or a single kshow).
 *
 * Admin debugging action: given "kshow_id" or "entry_id" request
 * parameters, gathers every related record — entry, conversions, batch
 * jobs, file syncs (entry-level and per-flavor), file-sync links and
 * track-entry rows — onto the action's properties for the template.
 */
public function execute()
{
    $this->forceSystemAuthentication();
    // Use the propel2 connection and bypass the default entry criteria
    // filter so deleted/hidden entries can still be inspected.
    myDbHelper::$use_alternative_con = myDbHelper::DB_HELPER_CONN_PROPEL2;
    entryPeer::setUseCriteriaFilter(false);
    $this->result = NULL;
    $fast = $this->getRequestParameter("fast", "") != "";
    $this->fast = $fast;
    $kshow_id = $this->getRequestParameter("kshow_id");
    $this->kshow_id = $kshow_id;
    $this->kshow = NULL;
    $entry_id = $this->getRequestParameter("entry_id");
    $this->entry_id = $entry_id;
    $this->entry = NULL;
    $this->error = $this->getRequestParameter("error");
    $this->bg_entry = NULL;

    // ---- kshow mode: investigate a kshow and its entries ----
    if (!empty($kshow_id)) {
        $c = new Criteria();
        $c->add(kshowPeer::ID, $kshow_id);
        $kshows = kshowPeer::doSelect($c);
        // NOTE(review): this fresh kshow instance is never used below.
        $kshow = new kshow();
        if (!$kshows) {
            $this->result = "No kshow [{$kshow_id}] in DB";
            return;
        }
        $kshow_original = $kshows[0];
        $kshow_original->getShowEntry(); // pre fetch
        $kshow_original->getIntro(); // pre fetch
        $this->kshow_original = $kshows[0];
        $this->kshow = new genericObjectWrapper($this->kshow_original, true);
        // Collect the kshow's "special" entries (show, intro, skin
        // background) so they can be excluded from the generic entry list.
        $alredy_exist_entries = array();
        $alredy_exist_entries[] = $kshow_original->getShowEntryId();
        if ($kshow_original->getIntroId()) {
            $alredy_exist_entries[] = $kshow_original->getIntroId();
        }
        $skin_obj = $this->kshow_original->getSkinObj();
        $bg_entry_id = $skin_obj->get("bg_entry_id");
        if ($bg_entry_id) {
            $alredy_exist_entries[] = $bg_entry_id;
            $this->bg_entry = new genericObjectWrapper(entryPeer::retrieveByPK($bg_entry_id), true);
        }
        // Up to 100 other entries of the kshow (joined with their kuser).
        $c = new Criteria();
        $c->add(entryPeer::ID, $alredy_exist_entries, Criteria::NOT_IN);
        $c->setLimit(100);
        $this->kshow_entries = $this->kshow_original->getEntrysJoinKuser($c);
        return; //return "KshowSuccess"
    }

    // ---- entry mode ----
    if (empty($entry_id)) {
        return;
    }
    entryPeer::setUseCriteriaFilter(false);

    // from entry table
    $c = new Criteria();
    $c->add(entryPeer::ID, $entry_id);
    //$entries = entryPeer::doSelectJoinAll ( $c );
    $entries = entryPeer::doSelect($c);
    if (!$entries) {
        $this->result = "No entry [{$entry_id}] in DB";
        return;
    }
    $this->entry = new genericObjectWrapper($entries[0], true);

    // from conversion table
    $c = new Criteria();
    $c->add(conversionPeer::ENTRY_ID, $entry_id);
    $original_conversions = conversionPeer::doSelect($c);

    // find all relevant batches in DB — from batch_job table
    $c = new Criteria();
    $c->add(BatchJobPeer::ENTRY_ID, $entry_id);
    $original_batch_jobs = BatchJobPeer::doSelect($c);
    $this->batch_jobs = $original_batch_jobs;

    // NOTE(review): the next three variables are prepared but not used in
    // this body — possibly leftovers from the disabled file-scan code.
    $entry_patttern = "/" . $entry_id . "\\..*/";
    $getFileData_method = array('kFile', 'getFileData');
    $getFileDataWithContent_method = array('kFile', 'getFileDataWithContent');

    // Entry-level file syncs, ordered by object sub type.
    $c = new Criteria();
    $c->add(FileSyncPeer::OBJECT_TYPE, FileSyncObjectType::ENTRY);
    $c->add(FileSyncPeer::OBJECT_ID, $entry_id);
    $c->addAscendingOrderByColumn(FileSyncPeer::OBJECT_SUB_TYPE);
    $this->file_syncs = FileSyncPeer::doSelect($c);

    // Accumulates linked-file-sync ids from both entry-level and
    // flavor-level syncs, resolved in one query at the end.
    $file_sync_links = array();

    // Flavors of the entry, with their params/outputs/media-infos
    // pre-fetched for the template.
    $flavors = assetPeer::retrieveFlavorsByEntryId($entry_id);
    $flavor_ids = array();
    $this->flavors = array();
    foreach ($flavors as $f) {
        $flavor_ids[] = $f->getId();
        $f->getflavorParamsOutputs();
        $f->getflavorParams();
        $f->getmediaInfos();
        $this->flavors[] = $f;
    }

    // Flavor-level file syncs, ordered by object sub type.
    $c = new Criteria();
    $c->add(FileSyncPeer::OBJECT_TYPE, FileSyncObjectType::FLAVOR_ASSET);
    $c->add(FileSyncPeer::OBJECT_ID, $flavor_ids, Criteria::IN);
    $c->addAscendingOrderByColumn(FileSyncPeer::OBJECT_SUB_TYPE);
    $flavors_file_syncs = FileSyncPeer::doSelect($c);

    // Group the flavor file syncs by flavor id and collect link targets.
    $this->flavors_file_syncs = array();
    foreach ($flavors as $flav) {
        foreach ($flavors_file_syncs as $f) {
            if ($f->getLinkedId()) {
                $file_sync_links[] = $f->getLinkedId();
            }
            if ($f->getObjectId() == $flav->getId()) {
                $this->flavors_file_syncs[$flav->getId()][] = $f;
            }
        }
    }

    // Bucket the entry-level file syncs by sub type as well.
    if ($this->file_syncs) {
        $this->file_syncs_by_sub_type = array();
        foreach ($this->file_syncs as $fs) {
            if ($fs->getLinkedId()) {
                $file_sync_links[] = $fs->getLinkedId();
            }
            $sub_type = $fs->getObjectSubType();
            if (!isset($this->file_syncs_by_sub_type[$sub_type])) {
                // create the array
                $this->file_syncs_by_sub_type[$sub_type] = array();
            }
            $this->file_syncs_by_sub_type[$sub_type][] = $fs;
        }
    } else {
        $this->file_syncs_by_sub_type = array();
    }

    // Resolve every linked file sync collected above.
    $file_sync_criteria = new Criteria();
    $file_sync_criteria->add(FileSyncPeer::ID, $file_sync_links, Criteria::IN);
    $this->file_sync_links = FileSyncPeer::doSelect($file_sync_criteria);

    // Track-entry audit rows for the entry.
    $track_entry_c = new Criteria();
    $track_entry_c->add(TrackEntryPeer::ENTRY_ID, $entry_id);
    $track_entry_list = TrackEntryPeer::doSelect($track_entry_c);
    $more_interesting_track_entries = array();
    foreach ($track_entry_list as $track_entry) {
        if ($track_entry->getTrackEventTypeId() == TrackEntry::TRACK_ENTRY_EVENT_TYPE_ADD_ENTRY) {
            $more_interesting_track_entries[] = $track_entry->getParam3Str();
        }
    }
    $track_entry_list2 = array();
    // The join on PARAM_3_STR (upload events) was deliberately disabled —
    // very heavy query; $track_entry_list2 therefore stays empty.
    // first add the TRACK_ENTRY_EVENT_TYPE_UPLOADED_FILE - they most
    // probably happened before the rest
    $this->track_entry_list = array_merge($track_entry_list2, $track_entry_list);
}
/**
 * Gets an array of BatchJob objects which contain a foreign key that references this object.
 *
 * If this collection has already been initialized with an identical Criteria, it returns the collection.
 * Otherwise if this BatchJobLock has previously been saved, it will retrieve
 * related BatchJobs from storage. If this BatchJobLock is new, it will return
 * an empty collection or the current collection, the criteria is ignored on a new object.
 *
 * @param PropelPDO $con
 * @param Criteria $criteria
 * @return array BatchJob[]
 * @throws PropelException
 */
public function getBatchJobs($criteria = null, PropelPDO $con = null)
{
    // Never mutate a caller-supplied criteria; work on a copy.
    if ($criteria === null) {
        $criteria = new Criteria(BatchJobLockPeer::DATABASE_NAME);
    } elseif ($criteria instanceof Criteria) {
        $criteria = clone $criteria;
    }

    if ($this->collBatchJobs === null) {
        // Collection never loaded yet.
        if ($this->isNew()) {
            // Unsaved object cannot have related rows — empty collection.
            $this->collBatchJobs = array();
        } else {
            // Constrain to jobs referencing this lock and fetch them.
            $criteria->add(BatchJobPeer::BATCH_JOB_LOCK_ID, $this->id);
            BatchJobPeer::addSelectColumns($criteria);
            $this->collBatchJobs = BatchJobPeer::doSelect($criteria, $con);
        }
    } else {
        // criteria has no effect for a new object
        if (!$this->isNew()) {
            // the following code is to determine if a new query is
            // called for. If the criteria is the same as the last
            // one, just return the collection.
            $criteria->add(BatchJobPeer::BATCH_JOB_LOCK_ID, $this->id);
            BatchJobPeer::addSelectColumns($criteria);
            if (!isset($this->lastBatchJobCriteria) || !$this->lastBatchJobCriteria->equals($criteria)) {
                $this->collBatchJobs = BatchJobPeer::doSelect($criteria, $con);
            }
        }
    }
    // Remember the criteria so the next call can detect a repeat query.
    $this->lastBatchJobCriteria = $criteria;
    return $this->collBatchJobs;
}
} else { if (isset($argv[3]) && $argv[3]) { $c->addAnd(BatchJobPeer::UPDATED_AT, $argv[3], Criteria::GREATER_EQUAL); } } } $c->addAnd(BatchJobPeer::JOB_TYPE, BatchJobType::BULKUPLOAD, Criteria::EQUAL); $c->setLimit($countLimitEachLoop); $c->addAscendingOrderByColumn(BatchJobPeer::UPDATED_AT); $batchJobResults = BatchJobPeer::doSelect($c, $con); while ($batchJobResults && count($batchJobResults)) { foreach ($batchJobResults as $batchJob) { /* @var $batchJob BatchJob */ $batchJobLog = new BatchJobLog(); $batchJob->copyInto($batchJobLog, true); $batchJobLog->setJobId($batchJob->getId()); //migrate jobData without unnecessary serialization $batchJobLog->setData($batchJob->getData(true), true); $batchJobData = $batchJob->getData(); //set param_1 for the $batchJobLog $batchJobData = $batchJob->getData(); /* @var $batchJobData kBulkUploadJobData */ $batchJobLog->setParam1($batchJobData->getBulkUploadObjectType() ? $batchJobData->getBulkUploadObjectType() : 1); $batchJobLog->save(); var_dump("Last handled id: " . $batchJob->getId()); } $countLimitEachLoop += $countLimitEachLoop; $c->setOffset($countLimitEachLoop); $batchJobResults = BatchJobPeer::doSelect($c, $con); usleep(100); }
/**
 * Returns the batch jobs of this data center that have exhausted their
 * per-type execution-attempt budget and are not already FATAL.
 *
 * Each job type has its own max-attempts threshold, so one query is
 * issued per core job type.
 *
 * @return array BatchJob[] expired jobs across all core job types
 */
public static function getExpiredJobs()
{
    // Base constraints shared by every job type: not FATAL yet, and
    // belonging to this DC (each DC should clean its own jobs).
    $baseCriteria = new Criteria();
    $baseCriteria->add(BatchJobPeer::STATUS, BatchJob::BATCHJOB_STATUS_FATAL, Criteria::NOT_EQUAL);
    $baseCriteria->add(BatchJobPeer::DC, kDataCenterMgr::getCurrentDcId());

    $expiredJobs = array();
    foreach (kPluginableEnumsManager::coreValues('BatchJobType') as $jobType) {
        // Clone so each type gets its own attempts threshold on a clean copy.
        $typedCriteria = clone $baseCriteria;
        $typedCriteria->add(BatchJobPeer::EXECUTION_ATTEMPTS, BatchJobPeer::getMaxExecutionAttempts($jobType), Criteria::GREATER_THAN);
        $typedCriteria->add(BatchJobPeer::JOB_TYPE, $jobType);
        $found = BatchJobPeer::doSelect($typedCriteria, myDbHelper::getConnection(myDbHelper::DB_HELPER_CONN_PROPEL2));
        $expiredJobs = array_merge($expiredJobs, $found);
    }
    return $expiredJobs;
}
/**
 * Retrieve multiple objects by pkey.
 *
 * @param array $pks List of primary keys
 * @param PropelPDO $con the connection to use
 * @return array matching objects (empty array for an empty key list)
 * @throws PropelException Any exceptions caught during processing will be
 *     rethrown wrapped into a PropelException.
 */
public static function retrieveByPKs($pks, PropelPDO $con = null)
{
    // No keys means no query — return an empty result set immediately.
    if (empty($pks)) {
        return array();
    }

    $criteria = new Criteria(BatchJobPeer::DATABASE_NAME);
    $criteria->add(BatchJobPeer::ID, $pks, Criteria::IN);
    return BatchJobPeer::doSelect($criteria, $con);
}
break; } } if ($passed) { KalturaLog::notice("YouTube Distribution profile [{$youTubeDistributionProfileId}] passed"); } else { KalturaLog::err("YouTube Distribution profile [{$youTubeDistributionProfileId}] failed"); } } $batchJobCriteria = new Criteria(); $batchJobCriteria->add(BatchJobPeer::STATUS, BatchJob::BATCHJOB_STATUS_FINISHED); $batchJobCriteria->add(BatchJobPeer::JOB_TYPE, BatchJobType::IMPORT); $batchJobCriteria->add(BatchJobPeer::DATA, '%sftp%', Criteria::LIKE); $batchJobCriteria->addDescendingOrderByColumn(BatchJobPeer::ID); $batchJobCriteria->setLimit(20); $batchJobs = BatchJobPeer::doSelect($batchJobCriteria); while (count($batchJobs)) { KalturaLog::debug("Import Batch Jobs count [" . count($batchJobs) . "]"); $batchJobId = null; foreach ($batchJobs as $batchJob) { /* @var $batchJob BatchJob */ $batchJobId = $batchJob->getId(); $batchJobPartnerId = $batchJob->getPartnerId(); $jobData = $batchJob->getData(); if (!$jobData instanceof kImportJobData) { continue; } $parsedUrl = parse_url($jobData->getSrcFileUrl()); $host = isset($parsedUrl['host']) ? $parsedUrl['host'] : null; if (!$host) { KalturaLog::err("Import Batch Job [{$batchJobPartnerId}::{$batchJobId}] Missing host");
/**
 * List bulk upload batch jobs
 *
 * @action list
 * @param KalturaFilterPager $pager
 * @return KalturaBulkUploadListResponse
 */
function listAction(KalturaFilterPager $pager = null)
{
    if (null === $pager) {
        $pager = new KalturaFilterPager();
    }

    // Bulk-upload jobs of the current partner only, newest first.
    $criteria = new Criteria();
    $criteria->addAnd(BatchJobPeer::PARTNER_ID, $this->getPartnerId());
    $criteria->addAnd(BatchJobPeer::JOB_TYPE, BatchJobType::BULKUPLOAD);
    $criteria->addDescendingOrderByColumn(BatchJobPeer::ID);

    // Count before paging so totalCount covers the whole result set.
    $totalCount = BatchJobPeer::doCount($criteria);
    $pager->attachToCriteria($criteria);
    $jobs = BatchJobPeer::doSelect($criteria);

    $response = new KalturaBulkUploadListResponse();
    $response->objects = KalturaBulkUploads::fromBatchJobArray($jobs);
    $response->totalCount = $totalCount;
    return $response;
}
/**
 * Adds a DeleteStorage job for the given key, unless an export for the
 * same file sync is still queued — in which case the queued export jobs
 * are aborted instead.
 *
 * @param entry $entry
 * @param StorageProfile $profile
 * @param FileSyncKey $key
 */
protected static function delete(entry $entry, StorageProfile $profile, FileSyncKey $key)
{
    // Without a ready/pending external sync there is nothing to delete.
    $externalFileSync = kFileSyncUtils::getReadyPendingExternalFileSyncForKey($key, $profile->getId());
    if (!$externalFileSync) {
        return;
    }

    // Find export jobs that are still queued (retry or pending) for this
    // exact file sync / entry / profile protocol combination.
    $criteria = new Criteria();
    $criteria->add(BatchJobPeer::OBJECT_ID, $externalFileSync->getId());
    $criteria->add(BatchJobPeer::OBJECT_TYPE, BatchJobObjectType::FILE_SYNC);
    $criteria->add(BatchJobPeer::JOB_TYPE, BatchJobType::STORAGE_EXPORT);
    $criteria->add(BatchJobPeer::JOB_SUB_TYPE, $profile->getProtocol());
    $criteria->add(BatchJobPeer::ENTRY_ID, $entry->getId());
    $queuedStatuses = array(BatchJob::BATCHJOB_STATUS_RETRY, BatchJob::BATCHJOB_STATUS_PENDING);
    $criteria->add(BatchJobPeer::STATUS, $queuedStatuses, Criteria::IN);
    $exportJobs = BatchJobPeer::doSelect($criteria);

    if (!$exportJobs) {
        // No export in flight — safe to schedule the remote delete.
        kJobsManager::addStorageDeleteJob(null, $entry->getId(), $profile, $externalFileSync);
        return;
    }

    // Exports still queued: abort them rather than deleting out from
    // under them.
    foreach ($exportJobs as $exportJob) {
        kJobsManager::abortDbBatchJob($exportJob);
    }
}
/**
 * Cleanup pass for exclusive batch jobs.
 *
 * 1. Jobs that exceeded their execution budget are closed as FATAL.
 * 2. Jobs that are already closed but still hold a batch slot (non-null
 *    batch index) have their scheduler/worker/index/expiration fields
 *    released — but only after being closed for at least 10 minutes, so
 *    a follow-up job (e.g. closure after a successful convert) gets a
 *    chance to take over before the cleanup touches them.
 *
 * @return int number of closed jobs whose lock fields were released
 */
public static function cleanExclusiveJobs()
{
    foreach (kBatchExclusiveLock::getExpiredJobs() as $expiredJob) {
        KalturaLog::log("Cleaning job id[" . $expiredJob->getId() . "]");
        kJobsManager::updateBatchJob($expiredJob, BatchJob::BATCHJOB_STATUS_FATAL);
    }

    $criteria = new Criteria();
    $criteria->add(BatchJobPeer::STATUS, BatchJobPeer::getClosedStatusList(), Criteria::IN);
    $criteria->add(BatchJobPeer::BATCH_INDEX, null, Criteria::ISNOTNULL);
    // Only jobs closed for 10+ minutes (see docblock for the rationale).
    $criteria->add(BatchJobPeer::FINISH_TIME, time() - 600, Criteria::LESS_THAN);

    // MUST be the master DB.
    $closedJobs = BatchJobPeer::doSelect($criteria, myDbHelper::getConnection(myDbHelper::DB_HELPER_CONN_PROPEL2));
    foreach ($closedJobs as $closedJob) {
        KalturaLog::log("Cleaning job id[" . $closedJob->getId() . "]");
        $closedJob->setSchedulerId(null);
        $closedJob->setWorkerId(null);
        $closedJob->setBatchIndex(null);
        $closedJob->setProcessorExpiration(null);
        $closedJob->save();
    }
    return count($closedJobs);
}