/**
 * Page tree indexing type
 *
 * @param array $cfgRec Indexing Configuration Record
 * @param array $session_data Session data for the indexing session, spread over multiple instances of the script. Passed by reference so changes are saved for the next call!
 * @param array $params Parameters from the log queue.
 * @param object $pObj Parent object (from "crawler" extension!)
 * @return void
 */
public function crawler_execute_type4($cfgRec, &$session_data, $params, &$pObj)
{
    // Base page uid:
    $pageUid = (int)$params['url'];
    // Get array of URLs from page:
    $pageRow = BackendUtility::getRecord('pages', $pageUid);
    $res = $pObj->getUrlsForPageRow($pageRow);
    $duplicateTrack = array(); // Registry for duplicates
    $downloadUrls = array(); // Dummy.
    // Submit URLs:
    if (!empty($res)) {
        foreach ($res as $paramSetKey => $vv) {
            $urlList = $pObj->urlListFromUrlArray($vv, $pageRow, $GLOBALS['EXEC_TIME'], 30, 1, 0, $duplicateTrack, $downloadUrls, array('tx_indexedsearch_reindex'));
        }
    }
    // Add subpages to log now:
    if ($params['depth'] < $cfgRec['depth']) {
        // Subpages selected
        $recs = $GLOBALS['TYPO3_DB']->exec_SELECTgetRows(
            'uid,title',
            'pages',
            'pid = ' . $pageUid . BackendUtility::deleteClause('pages')
        );
        // Traverse subpages and add to queue:
        if (!empty($recs)) {
            foreach ($recs as $r) {
                $this->instanceCounter++;
                $url = 'pages:' . $r['uid'] . ': ' . $r['title'];
                $session_data['urlLog'][] = $url;
                // Parameters:
                $nparams = array(
                    'indexConfigUid' => $cfgRec['uid'],
                    'url' => $r['uid'],
                    'procInstructions' => array('[Index Cfg UID#' . $cfgRec['uid'] . ']'),
                    'depth' => $params['depth'] + 1
                );
                $pObj->addQueueEntry_callBack(
                    $cfgRec['set_id'],
                    $nparams,
                    $this->callBack,
                    $cfgRec['pid'],
                    $GLOBALS['EXEC_TIME'] + $this->instanceCounter * $this->secondsPerExternalUrl
                );
            }
        }
    }
}
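The variant below is the same hook migrated to the Doctrine DBAL QueryBuilder API introduced with TYPO3 v8: the $GLOBALS['TYPO3_DB']->exec_SELECTgetRows() call and BackendUtility::deleteClause() are replaced by a QueryBuilder carrying a DeletedRestriction, array() gives way to short array syntax, and the unused $urlList assignment is dropped. The queue-submission logic is otherwise behaviour-identical.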
/**
 * Page tree indexing type
 *
 * @param array $cfgRec Indexing Configuration Record
 * @param array $session_data Session data for the indexing session, spread over multiple instances of the script. Passed by reference so changes are saved for the next call!
 * @param array $params Parameters from the log queue.
 * @param object $pObj Parent object (from "crawler" extension!)
 * @return void
 */
public function crawler_execute_type4($cfgRec, &$session_data, $params, &$pObj)
{
    // Base page uid:
    $pageUid = (int)$params['url'];
    // Get array of URLs from page:
    $pageRow = BackendUtility::getRecord('pages', $pageUid);
    $res = $pObj->getUrlsForPageRow($pageRow);
    $duplicateTrack = []; // Registry for duplicates
    $downloadUrls = []; // Dummy.
    // Submit URLs:
    if (!empty($res)) {
        foreach ($res as $paramSetKey => $vv) {
            $pObj->urlListFromUrlArray($vv, $pageRow, $GLOBALS['EXEC_TIME'], 30, 1, 0, $duplicateTrack, $downloadUrls, ['tx_indexedsearch_reindex']);
        }
    }
    // Add subpages to log now:
    if ($params['depth'] < $cfgRec['depth']) {
        // Subpages selected
        $queryBuilder = GeneralUtility::makeInstance(ConnectionPool::class)->getQueryBuilderForTable('pages');
        // Respect only the "deleted" flag; other enable fields are ignored here:
        $queryBuilder->getRestrictions()
            ->removeAll()
            ->add(GeneralUtility::makeInstance(DeletedRestriction::class));
        $result = $queryBuilder
            ->select('uid', 'title')
            ->from('pages')
            ->where(
                $queryBuilder->expr()->eq('pid', $queryBuilder->createNamedParameter($pageUid, \PDO::PARAM_INT))
            )
            ->execute();
        // Traverse subpages and add to queue:
        while ($row = $result->fetch()) {
            $this->instanceCounter++;
            $url = 'pages:' . $row['uid'] . ': ' . $row['title'];
            $session_data['urlLog'][] = $url;
            // Parameters:
            $nparams = [
                'indexConfigUid' => $cfgRec['uid'],
                'url' => $row['uid'],
                'procInstructions' => ['[Index Cfg UID#' . $cfgRec['uid'] . ']'],
                'depth' => $params['depth'] + 1
            ];
            $pObj->addQueueEntry_callBack(
                $cfgRec['set_id'],
                $nparams,
                $this->callBack,
                $cfgRec['pid'],
                $GLOBALS['EXEC_TIME'] + $this->instanceCounter * $this->secondsPerExternalUrl
            );
        }
    }
}
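Neither variant is self-contained as excerpted: both assume class-level imports. A minimal sketch of the use statements the QueryBuilder variant needs (the legacy variant only uses BackendUtility), assuming the standard TYPO3 core namespaces of a v8/v9 installation:

use TYPO3\CMS\Backend\Utility\BackendUtility;
use TYPO3\CMS\Core\Database\ConnectionPool;
use TYPO3\CMS\Core\Database\Query\Restriction\DeletedRestriction;
use TYPO3\CMS\Core\Utility\GeneralUtility;

Note that on later doctrine/dbal 3.x setups (TYPO3 v11+), ->execute() on a QueryBuilder is replaced by ->executeQuery(), and ->fetch() on the result by ->fetchAssociative(); the code above follows the older API that matches the surrounding source.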