/**
 * Prints all selected pages, specified as an array of page names (strings
 * with namespace identifiers).
 *
 * @param array $pages list of page names to export
 * @param integer $recursion determines how pages are exported recursively:
 *   "0" means that referenced resources are only declared briefly, "1" means
 *   that all referenced resources are also exported recursively (probably
 *   retrieving the whole wiki).
 * @param string|false $revisiondate filter page list by including only pages
 *   that have been changed since this date; format "YmdHis". Defaults to
 *   false, meaning no filtering.
 *
 * @todo Consider dropping the $revisiondate filtering and all associated
 * functionality. Is anybody using this?
 */
public function printPages($pages, $recursion = 1, $revisiondate = false) {
    $linkCache = LinkCache::singleton();
    $this->prepareSerialization();
    $this->delay_flush = 10; // flush only after (fully) printing 11 objects

    // Transform pages into queued short titles.
    foreach ($pages as $page) {
        $title = Title::newFromText($page);
        if (null === $title) {
            continue; // invalid title name given
        }
        // Filter page list by revision date. Bug fix: the original tested
        // `$revisiondate !== ''`, but the default is false, so every call
        // without a filter still performed a revision-timestamp lookup for
        // each page. Any falsy value (false, '', null) now means "no filter".
        if ($revisiondate) {
            $rev = Revision::getTimeStampFromID($title, $title->getLatestRevID());
            if ($rev < $revisiondate) {
                continue;
            }
        }
        $diPage = SMWDIWikiPage::newFromTitle($title);
        // Recursion 1 queues with unlimited depth (-1); otherwise depth 1
        // so referenced resources are only declared briefly.
        $this->queuePage($diPage, $recursion == 1 ? -1 : 1);
    }

    $this->serializer->startSerialization();

    if (count($pages) == 1) {
        // Ensure that ontologies that are retrieved as linked data are not
        // confused with their subject!
        $ontologyuri = SMWExporter::getInstance()->expandURI('&export;') . '/' . urlencode(end($pages));
    } else {
        // Use empty URI, i.e. "location" as URI otherwise.
        $ontologyuri = '';
    }
    $this->serializer->serializeExpData(SMWExporter::getInstance()->getOntologyExpData($ontologyuri));

    // Drain the queue; serializePage() may enqueue further referenced
    // resources while we iterate, so re-check the count each pass.
    while (count($this->element_queue) > 0) {
        $diPage = reset($this->element_queue);
        $this->serializePage($diPage, $diPage->recdepth);
        $this->flush();
        $linkCache->clear(); // avoid potential memory leak
    }
    $this->serializer->finishSerialization();
    $this->flush(true);
}
/**
 * Stores the collected semantic data and clears out any outdated entries
 * for the processed page. It assumes that parsing has happened and that all
 * relevant data is contained in the provided parser output.
 *
 * Optionally, this function also takes care of triggering indirect updates
 * that might be needed for overall database consistency. If the saved page
 * describes a property or data type, the method checks whether the property
 * type, the data type, the allowed values, or the conversion factors have
 * changed. If so, it triggers SMWUpdateJobs for the relevant articles, which
 * then asynchronously update the semantic data in the database.
 *
 * @param $parseroutput ParserOutput object that contains the results of
 *   parsing which will be stored.
 * @param $title Title object specifying the page that should be saved.
 * @param $makejobs Bool stating whether jobs should be created to trigger
 *   further updates if this appears to be necessary after this update.
 *
 * @return bool Always true.
 *
 * @todo FIXME: Some job generations here might create too many jobs at once
 * on a large wiki. Use incremental jobs instead.
 */
public static function storeData($parseroutput, Title $title, $makejobs = true) {
    global $smwgEnableUpdateJobs, $smwgDeclarationProperties, $smwgPageSpecialProperties;

    $semdata = $parseroutput->mSMWData;
    $namespace = $title->getNamespace();
    $processSemantics = smwfIsSemanticsProcessed($namespace);

    if (!isset($semdata)) { // no data at all?
        $semdata = new SMWSemanticData(SMWDIWikiPage::newFromTitle($title));
    }

    if ($processSemantics) {
        // Fill in the configured special properties (_MDAT, _CDAT, ...)
        // that the parser output does not provide on its own.
        $props = array();
        foreach ($smwgPageSpecialProperties as $propId) {
            // Do not calculate the same property again.
            if (array_key_exists($propId, $props)) {
                continue;
            }
            // Remember the property is processed.
            $props[$propId] = true;
            $prop = new SMWDIProperty($propId);
            // Values already set during parsing take precedence.
            if (count($semdata->getPropertyValues($prop)) > 0) {
                continue;
            }

            // Calculate property value.
            $value = null;
            switch ($propId) {
                case '_MDAT': // modification date
                    $timestamp = Revision::getTimeStampFromID($title, $title->getLatestRevID());
                    $value = self::getDataItemFromMWTimestamp($timestamp);
                    break;
                case '_CDAT': // creation date
                    $timestamp = $title->getFirstRevision()->getTimestamp();
                    $value = self::getDataItemFromMWTimestamp($timestamp);
                    break;
                case '_NEWP': // is new page
                    $value = new SMWDIBoolean($title->isNewPage());
                    break;
                case '_LEDT': // last editor
                    $revision = Revision::newFromId($title->getLatestRevID());
                    // Robustness fix: newFromId() may return null (e.g. a
                    // race with deletion); the original dereferenced it
                    // unconditionally and would fatal here.
                    if (!is_null($revision)) {
                        $user = User::newFromId($revision->getUser());
                        $value = SMWDIWikiPage::newFromTitle($user->getUserPage());
                    }
                    break;
            }

            if (!is_null($value)) {
                $semdata->addPropertyObjectValue($prop, $value);
            } // Issue error or warning?
        } // foreach
    } else { // data found, but do all operations as if it was empty
        $semdata = new SMWSemanticData($semdata->getSubject());
    }

    // Check if the semantic data has been changed.
    // Sets the updateflag to true if so.
    // Careful: storage access must happen *before* the storage update;
    // even finding uses of a property fails after its type was changed.
    $updatejobflag = false;
    $jobs = array();

    if ($makejobs && $smwgEnableUpdateJobs && $namespace == SMW_NS_PROPERTY) {
        // If it is a property, then we need to check if the type or the
        // allowed values have been changed.
        $ptype = new SMWDIProperty('_TYPE');
        $oldtype = smwfGetStore()->getPropertyValues($semdata->getSubject(), $ptype);
        $newtype = $semdata->getPropertyValues($ptype);

        if (!self::equalDatavalues($oldtype, $newtype)) {
            $updatejobflag = true;
        } else {
            foreach ($smwgDeclarationProperties as $prop) {
                $pv = new SMWDIProperty($prop);
                $oldvalues = smwfGetStore()->getPropertyValues($semdata->getSubject(), $pv);
                $newvalues = $semdata->getPropertyValues($pv);
                // Bug fix: the original overwrote $updatejobflag on every
                // iteration, so only the LAST declaration property decided
                // whether update jobs run; a change in any earlier one was
                // silently discarded. One changed property is sufficient.
                if (!self::equalDatavalues($oldvalues, $newvalues)) {
                    $updatejobflag = true;
                    break;
                }
            }
        }

        if ($updatejobflag) {
            // Reschedule all pages that use this property.
            $prop = new SMWDIProperty($title->getDBkey());
            $subjects = smwfGetStore()->getAllPropertySubjects($prop);
            foreach ($subjects as $subject) {
                $subjectTitle = $subject->getTitle();
                if (!is_null($subjectTitle)) {
                    // wikia change start - jobqueue migration
                    $task = new \Wikia\Tasks\Tasks\JobWrapperTask();
                    $task->call('SMWUpdateJob', $subjectTitle);
                    $jobs[] = $task;
                    // wikia change end
                }
            }
            wfRunHooks('smwUpdatePropertySubjects', array(&$jobs));

            // Also reschedule pages that previously produced errors (_ERRP)
            // involving this property.
            $subjects = smwfGetStore()->getPropertySubjects(new SMWDIProperty('_ERRP'), $semdata->getSubject());
            foreach ($subjects as $subject) {
                $subjectTitle = $subject->getTitle();
                if (!is_null($subjectTitle)) {
                    // wikia change start - jobqueue migration
                    $task = new \Wikia\Tasks\Tasks\JobWrapperTask();
                    $task->call('SMWUpdateJob', $subjectTitle);
                    $jobs[] = $task;
                    // wikia change end
                }
            }
        }
    } elseif ($makejobs && $smwgEnableUpdateJobs && $namespace == SMW_NS_TYPE) {
        // If it is a type, we need to check if the conversion factors
        // have been changed.
        $pconv = new SMWDIProperty('_CONV');
        $ptype = new SMWDIProperty('_TYPE');

        $oldfactors = smwfGetStore()->getPropertyValues($semdata->getSubject(), $pconv);
        $newfactors = $semdata->getPropertyValues($pconv);
        $updatejobflag = !self::equalDatavalues($oldfactors, $newfactors);

        if ($updatejobflag) {
            $store = smwfGetStore();
            /// FIXME: this will kill large wikis! Use incremental updates!
            $dv = SMWDataValueFactory::newTypeIdValue('__typ', $title->getDBkey());
            $proppages = $store->getPropertySubjects($ptype, $dv);
            foreach ($proppages as $proppage) {
                $propertyTitle = $proppage->getTitle();
                if (!is_null($propertyTitle)) {
                    // wikia change start - jobqueue migration
                    $task = new \Wikia\Tasks\Tasks\JobWrapperTask();
                    $task->call('SMWUpdateJob', $propertyTitle);
                    $jobs[] = $task;
                    // wikia change end
                }
                $prop = new SMWDIProperty($proppage->getDBkey());
                $subjects = $store->getAllPropertySubjects($prop);
                foreach ($subjects as $subject) {
                    $subjectTitle = $subject->getTitle();
                    if (!is_null($subjectTitle)) {
                        // wikia change start - jobqueue migration
                        $task = new \Wikia\Tasks\Tasks\JobWrapperTask();
                        $task->call('SMWUpdateJob', $subjectTitle);
                        $jobs[] = $task;
                        // wikia change end
                    }
                }
                $subjects = smwfGetStore()->getPropertySubjects(new SMWDIProperty('_ERRP'), $prop->getWikiPageValue());
                foreach ($subjects as $subject) {
                    $subjectTitle = $subject->getTitle();
                    if (!is_null($subjectTitle)) {
                        // wikia change start - jobqueue migration
                        $task = new \Wikia\Tasks\Tasks\JobWrapperTask();
                        $task->call('SMWUpdateJob', $subjectTitle);
                        $jobs[] = $task;
                        // wikia change end
                    }
                }
            }
        }
    }

    // Actually store semantic data, or at least clear it if needed
    if ($processSemantics) {
        smwfGetStore()->updateData($semdata);
    } else {
        smwfGetStore()->clearData($semdata->getSubject());
    }

    // Finally trigger relevant update jobs if necessary
    if ($updatejobflag) {
        // wikia change start - jobqueue migration
        \Wikia\Tasks\Tasks\BaseTask::batch($jobs);
        // wikia change end
    }

    return true;
}