/**
 * Fetch the set of available skins.
 *
 * Scans $wgStyleDirectory for *.php files on first use and caches the result
 * in $wgValidSkinNames.
 *
 * @return array Associative array: lower-case skin key => skin name cased as
 *  in the file name.
 */
static function getSkinNames() {
	global $wgValidSkinNames;
	static $skinsInitialised = false;

	if ( !$skinsInitialised || !count( $wgValidSkinNames ) ) {
		# Get a list of available skins
		# Build using the regular expression '^(.*).php$'
		# Array keys are all lower case, array value keep the case used by filename
		// Restored: this call must be balanced with the wfProfileOut() below,
		// otherwise the profiler reports an unmatched section.
		wfProfileIn( __METHOD__ . '-init' );

		global $wgStyleDirectory;

		$skinDir = dir( $wgStyleDirectory );

		if ( $skinDir !== false && $skinDir !== null ) {
			# while code from www.php.net
			while ( false !== ( $file = $skinDir->read() ) ) {
				// Skip non-PHP files, hidden files, and '.dep' includes
				$matches = array();

				if ( preg_match( '/^([^.]*)\\.php$/', $file, $matches ) ) {
					$aSkin = $matches[1];

					// We're still loading core skins via the autodiscovery mechanism... :(
					if ( !in_array( $aSkin, array( 'CologneBlue', 'Modern', 'MonoBook', 'Vector' ) ) ) {
						wfLogWarning( "A skin using autodiscovery mechanism, {$aSkin}, was found in your skins/ directory. " . "The mechanism will be removed in MediaWiki 1.25 and the skin will no longer be recognized. " . "See https://www.mediawiki.org/wiki/Manual:Skin_autodiscovery for information how to fix this." );
					}

					$wgValidSkinNames[strtolower( $aSkin )] = $aSkin;
				}
			}
			$skinDir->close();
		}
		$skinsInitialised = true;
		wfProfileOut( __METHOD__ . '-init' );
	}
	return $wgValidSkinNames;
}
/**
 * @param EntityTitleLookup $titleLookup
 * @param EntityContentFactory $entityContentFactory
 * @param IContextSource $context Wrapped in a DerivativeContext (with a
 *  warning) if it is not already mutable.
 */
public function __construct(
	EntityTitleLookup $titleLookup,
	EntityContentFactory $entityContentFactory,
	IContextSource $context
) {
	$this->titleLookup = $titleLookup;
	$this->entityContentFactory = $entityContentFactory;

	if ( $context instanceof MutableContext ) {
		$this->context = $context;
	} else {
		// Wrap the context so later code can safely mutate it.
		wfLogWarning( '$context is not an instanceof MutableContext.' );
		$this->context = new DerivativeContext( $context );
	}
}
/**
 * Returns the diff carried by this change's info blob.
 *
 * @since 0.1
 *
 * @param string $cache set to 'cache' to cache the unserialized diff.
 *
 * @return Diff An empty Diff if no diff information was stored.
 */
public function getDiff( $cache = 'no' ) {
	$info = $this->getInfo( $cache );

	if ( array_key_exists( 'diff', $info ) ) {
		return $info['diff'];
	}

	// This shouldn't happen, but we should be robust against corrupt, incomplete
	// obsolete instances in the database, etc.
	wfLogWarning( 'Cannot get the diff when it has not been set yet.' );
	return new Diff();
}
/**
 * Lets extensions modify the sidebar via the
 * WikibaseClientOtherProjectsSidebar hook, falling back to the original
 * sidebar if the hook output is invalid.
 *
 * @param ItemId $itemId
 * @param array $sidebar
 *
 * @return array
 */
private function runHook( ItemId $itemId, array $sidebar ) {
	$modified = $sidebar;
	Hooks::run( 'WikibaseClientOtherProjectsSidebar', array( $itemId, &$modified ) );

	if ( $modified === $sidebar ) {
		// No handler changed anything; keep the original.
		return $sidebar;
	}

	$usable = is_array( $modified ) && $this->isValidSidebar( $modified );
	if ( !$usable ) {
		wfLogWarning( 'Other projects sidebar data invalid after hook run.' );
		return $sidebar;
	}

	return $modified;
}
/**
 * Returns the sitelink part of this change's diff.
 *
 * @since 0.3
 *
 * @return Diff An empty Diff if the stored diff is not an ItemDiff.
 */
public function getSiteLinkDiff() {
	$diff = $this->getDiff();

	if ( $diff instanceof ItemDiff ) {
		return $diff->getSiteLinkDiff();
	}

	// This shouldn't happen, but we should be robust against corrupt, incomplete
	// or obsolete instances in the database, etc.
	$cls = $diff === null ? 'null' : get_class( $diff );
	wfLogWarning(
		'Cannot get sitelink diff from ' . $cls . '. Change #' . $this->getId() .
			", type " . $this->getType()
	);
	return new Diff();
}
/**
 * Extracts the entity ID from the 'wbEntityId' JS config var of an entity
 * page's OutputPage.
 *
 * @param OutputPage $out
 *
 * @return EntityId|null Null for non-entity pages, a missing config var,
 *  or an unparsable ID (the latter is logged).
 */
public function getEntityIdFromOutputPage( OutputPage $out ) {
	$contentModel = $out->getTitle()->getContentModel();

	if ( !$this->entityContentFactory->isEntityContentModel( $contentModel ) ) {
		return null;
	}

	$configVars = $out->getJsConfigVars();
	if ( !array_key_exists( 'wbEntityId', $configVars ) ) {
		return null;
	}

	$idString = $configVars['wbEntityId'];

	try {
		return $this->entityIdParser->parse( $idString );
	} catch ( EntityIdParsingException $ex ) {
		wfLogWarning( 'Failed to parse EntityId config var: ' . $idString );
	}

	return null;
}
/**
 * Picks the most specific RDF builder: one registered for the property
 * data type ("PT:<id>") wins over one registered for the data value type
 * ("VT:<type>").
 *
 * @param string|null $dataTypeId
 * @param string $dataValueType
 *
 * @return null|ValueSnakRdfBuilder Null (with a warning logged) if nothing
 *  is registered for either key.
 */
private function getValueBuilder( $dataTypeId, $dataValueType ) {
	if ( $dataTypeId !== null && isset( $this->valueBuilders["PT:{$dataTypeId}"] ) ) {
		return $this->valueBuilders["PT:{$dataTypeId}"];
	}

	if ( isset( $this->valueBuilders["VT:{$dataValueType}"] ) ) {
		return $this->valueBuilders["VT:{$dataValueType}"];
	}

	$message = $dataTypeId !== null
		? __METHOD__ . ": No RDF builder defined for data type {$dataTypeId} nor for value type {$dataValueType}."
		: __METHOD__ . ": No RDF builder defined for value type {$dataValueType}.";
	wfLogWarning( $message );

	return null;
}
/**
 * @since 0.4
 * @see EntityRevisionLookup::getEntityRevision
 *
 * @param EntityId $entityId
 * @param int|string $revisionId The desired revision id, or LATEST_FROM_SLAVE or LATEST_FROM_MASTER.
 *
 * @throws RevisionedUnresolvedRedirectException If the row resolves to a redirect.
 * @throws StorageException If the content blob cannot be unserialized.
 * @return EntityRevision|null Null when no latest revision could be loaded
 *  (only in the LATEST_FROM_* modes; an explicit int revision that is
 *  missing throws BadRevisionException instead).
 */
public function getEntityRevision(EntityId $entityId, $revisionId = self::LATEST_FROM_SLAVE) {
	wfDebugLog(__CLASS__, __FUNCTION__ . ': Looking up entity ' . $entityId . " (revision {$revisionId}).");

	// default changed from false to 0 and then to LATEST_FROM_SLAVE;
	// keep accepting the old sentinel values, but warn.
	if ($revisionId === false || $revisionId === 0) {
		wfWarn('getEntityRevision() called with $revisionId = false or 0, ' . 'use EntityRevisionLookup::LATEST_FROM_SLAVE or EntityRevisionLookup::LATEST_FROM_MASTER instead.');
		$revisionId = self::LATEST_FROM_SLAVE;
	}

	/** @var EntityRevision $entityRevision */
	$entityRevision = null;

	// An int means "this exact revision"; a string is a LATEST_FROM_* mode
	// and resolves to the latest revision's meta data.
	if (is_int($revisionId)) {
		$row = $this->entityMetaDataAccessor->loadRevisionInformationByRevisionId($entityId, $revisionId);
	} else {
		$rows = $this->entityMetaDataAccessor->loadRevisionInformation(array($entityId), $revisionId);
		$row = $rows[$entityId->getSerialization()];
	}

	if ($row) {
		/** @var EntityRedirect $redirect */
		try {
			list($entityRevision, $redirect) = $this->loadEntity($row);
		} catch (MWContentSerializationException $ex) {
			throw new StorageException('Failed to unserialize the content object.', 0, $ex);
		}

		if ($redirect !== null) {
			// The requested entity is a redirect; report target and revision info via exception.
			throw new RevisionedUnresolvedRedirectException($entityId, $redirect->getTargetId(), (int) $row->rev_id, $row->rev_timestamp);
		}

		if ($entityRevision === null) {
			// This only happens when there is a problem with the external store.
			wfLogWarning(__METHOD__ . ': Entity not loaded for ' . $entityId);
		}
	}

	if ($entityRevision !== null && !$entityRevision->getEntity()->getId()->equals($entityId)) {
		// This can happen when giving a revision ID that doesn't belong to the given entity,
		// or some meta data is incorrect.
		$actualEntityId = $entityRevision->getEntity()->getId()->getSerialization();

		// Get the revision id we actually loaded, if none was passed explicitly
		$revisionId = is_int($revisionId) ? $revisionId : $entityRevision->getRevisionId();
		throw new BadRevisionException("Revision {$revisionId} belongs to {$actualEntityId} instead of expected {$entityId}");
	}

	if (is_int($revisionId) && $entityRevision === null) {
		// If a revision ID was specified, but that revision doesn't exist:
		throw new BadRevisionException("No such revision found for {$entityId}: {$revisionId}");
	}

	return $entityRevision;
}
/**
 * Checks the type of a substructure diff, and replaces it if needed.
 * This is needed for backwards compatibility with old versions of
 * MapDiffer: As of commit ff65735a125e, MapDiffer may generate atomic diffs for
 * substructures even in recursive mode (bug 51363).
 *
 * @param array &$operations All change ops; This is a reference, so the
 *        substructure diff can be replaced if need be.
 * @param string $key The key of the substructure
 */
protected function fixSubstructureDiff( array &$operations, $key ) {
	// Nothing to fix when the key is absent or already holds a Diff.
	if ( !isset( $operations[$key] ) || $operations[$key] instanceof Diff ) {
		return;
	}

	$warning = "Invalid substructure diff for key {$key}: " . get_class( $operations[$key] );

	// wfLogWarning may be unavailable outside a full MediaWiki environment.
	if ( function_exists( 'wfLogWarning' ) ) {
		wfLogWarning( $warning );
	} else {
		trigger_error( $warning, E_USER_WARNING );
	}

	// We could look into the atomic diff, see if it uses arrays as values,
	// and construct a new Diff according to these values. But since the
	// actual old behavior of MapDiffer didn't cause that to happen, let's
	// just use an empty diff, which is what MapDiffer should have returned
	// in the actual broken case mentioned in bug 51363.
	$operations[$key] = new Diff( array(), true );
}
/**
 * Search revisions with provided term.
 *
 * Builds an Elasticsearch full-text query against 'revisions.text',
 * attaches a terms aggregation and highlighting config, and executes the
 * search behind a PoolCounter to limit concurrent search load.
 *
 * @param string $term Term to search
 * @return Status Good status wrapping the Elastica result set, or a fatal
 *  'flow-error-search' status on failure.
 */
public function searchText($term) {
	// full-text search
	$queryString = new QueryString($term);
	$queryString->setFields(array('revisions.text'));
	$this->query->setQuery($queryString);

	// add aggregation to determine exact amount of matching search terms
	$terms = $this->getTerms($term);
	$this->query->addAggregation($this->termsAggregation($terms));

	// @todo: abstract-away this config? (core/cirrus also has this - share it somehow?)
	$this->query->setHighlight(array('fields' => array(static::HIGHLIGHT_FIELD => array('type' => 'plain', 'order' => 'score', 'number_of_fragments' => 1, 'fragment_size' => 10000)), 'pre_tags' => array(static::HIGHLIGHT_PRE), 'post_tags' => array(static::HIGHLIGHT_POST)));

	// @todo: support insource: queries (and perhaps others)

	// Scope the search to the Flow index, optionally narrowed to one type.
	$searchable = Connection::getFlowIndex($this->indexBaseName);
	if ($this->type !== false) {
		$searchable = $searchable->getType($this->type);
	}
	$search = $searchable->createSearch($this->query);

	// @todo: PoolCounter config at PoolCounterSettings-eqiad.php
	// @todo: do we want this class to extend from ElasticsearchIntermediary and use its success & failure methods (like CirrusSearch/Searcher does)?
	// Perform the search
	$work = new PoolCounterWorkViaCallback('Flow-Search', "_elasticsearch", array('doWork' => function () use($search) {
		try {
			$result = $search->search();
			return Status::newGood($result);
		} catch (ExceptionInterface $e) {
			// NOTE(review): plain strpos() truthiness would miss a match at
			// offset 0; works here only because the phrase never starts the
			// message — `!== false` would be more robust.
			if (strpos($e->getMessage(), 'dynamic scripting for [groovy] disabled')) {
				// known issue with default ES config, let's display a more helpful message
				return Status::newFatal(new \RawMessage("Couldn't complete search: dynamic scripting needs to be enabled. " . "Please add 'script.disable_dynamic: false' to your elasticsearch.yml"));
			}
			return Status::newFatal('flow-error-search');
		}
	}, 'error' => function (Status $status) {
		// Pool is full / errored: log the first error and fail the search.
		$status = $status->getErrorsArray();
		wfLogWarning('Pool error searching Elasticsearch: ' . $status[0][0]);
		return Status::newFatal('flow-error-search');
	}));

	$result = $work->execute();
	return $result;
}
/**
 * Coalesce consecutive changes by the same user to the same entity into one.
 * A run of changes may be broken if the action performed changes (e.g. deletion
 * instead of update) or if a sitelink pointing to the local wiki was modified.
 *
 * Some types of actions, like deletion, will break runs.
 * Interleaved changes to different items will break runs.
 *
 * @param EntityChange[] $changes
 *
 * @return EntityChange[] grouped changes
 */
private function coalesceRuns(array $changes) {
	$coalesced = array();

	// State of the run currently being accumulated.
	$currentRun = array();
	$currentUser = null;
	$currentEntity = null;
	$currentAction = null;
	$breakNext = false;

	foreach ($changes as $change) {
		try {
			$action = $change->getAction();
			$meta = $change->getMetadata();
			$user = $meta['user_text'];
			$entityId = $change->getEntityId()->__toString();

			// A run breaks when the action, user or entity changes,
			// or when the previous change demanded a break.
			$break = $breakNext || $currentAction !== $action || $currentUser !== $user || $currentEntity !== $entityId;

			$breakNext = false;

			if (!$break && $change instanceof ItemChange) {
				// A modification of the sitelink to this wiki both ends the
				// current run and forces the next change into a new run.
				$siteLinkDiff = $change->getSiteLinkDiff();
				if (isset($siteLinkDiff[$this->localSiteId])) {
					// TODO: don't break if only the link's badges changed
					$break = true;
					$breakNext = true;
				}
			}

			if ($break) {
				// Flush the accumulated run before starting a new one.
				if (!empty($currentRun)) {
					try {
						$coalesced[] = $this->mergeChanges($currentRun);
					} catch (MWException $ex) {
						// Something went wrong while trying to merge the changes.
						// Just keep the original run.
						wfWarn($ex->getMessage());
						$coalesced = array_merge($coalesced, $currentRun);
					}
				}

				$currentRun = array();
				$currentUser = $user;
				$currentEntity = $entityId;
				// An ADD followed by updates coalesces as an UPDATE run.
				$currentAction = $action === EntityChange::ADD ? EntityChange::UPDATE : $action;
			}

			$currentRun[] = $change;
			// skip any change that failed to process in some way (bug T51417)
		} catch (Exception $ex) {
			wfLogWarning(__METHOD__ . ':' . $ex->getMessage());
		}
	}

	// Flush the final run.
	if (!empty($currentRun)) {
		try {
			$coalesced[] = $this->mergeChanges($currentRun);
		} catch (MWException $ex) {
			// Something went wrong while trying to merge the changes.
			// Just keep the original run.
			wfWarn($ex->getMessage());
			$coalesced = array_merge($coalesced, $currentRun);
		}
	}

	return $coalesced;
}
/**
 * Attempts to fix an edit conflict by patching the intended change into the latest revision after
 * checking for conflicts. This modifies $this->newEntity but does not write anything to the
 * database. Saving of the new content may still fail.
 *
 * @return bool True if the conflict could be resolved, false otherwise
 */
public function fixEditConflict() {
	$baseRev = $this->getBaseRevision();
	$latestRev = $this->getLatestRevision();

	if (!$latestRev) {
		// Typo fixed: "thw" -> "the".
		wfLogWarning('Failed to load latest revision of entity ' . $this->newEntity->getId() . '! ' . 'This may indicate entries missing from the wb_entities_per_page table.');
		return false;
	}

	$entityDiffer = new EntityDiffer();
	$entityPatcher = new EntityPatcher();

	// calculate patch against base revision
	// NOTE: will fail if $baseRev or $base are null, which they may be if
	// this gets called at an inappropriate time. The data flow in this class
	// should be improved.
	$patch = $entityDiffer->diffEntities($baseRev->getEntity(), $this->newEntity);

	if ($patch->isEmpty()) {
		// There is nothing to change, so just keep the current content as it is.
		$this->newEntity = $latestRev->getEntity()->copy();
		return true;
	}

	// apply the patch( base -> new ) to the latest revision.
	$patchedLatest = $latestRev->getEntity()->copy();
	$entityPatcher->patchEntity($patchedLatest, $patch);

	// detect conflicts against latest revision: operations dropped by the
	// patcher show up as a smaller clean patch.
	$cleanPatch = $entityDiffer->diffEntities($latestRev->getEntity(), $patchedLatest);

	$conflicts = $patch->count() - $cleanPatch->count();

	if ($conflicts > 0) {
		// patch doesn't apply cleanly
		if ($this->userWasLastToEdit($this->user, $this->newEntity->getId(), $this->getBaseRevisionId())) {
			// it's a self-conflict
			if ($cleanPatch->count() === 0) {
				// patch collapsed, possibly because of diff operation change from base to latest
				return false;
			} else {
				// we still have a working patch, try to apply
				$this->status->warning('wikibase-self-conflict-patched');
			}
		} else {
			// there are unresolvable conflicts.
			return false;
		}
	} else {
		// can apply cleanly
		$this->status->warning('wikibase-conflict-patched');
	}

	// remember the patched entity as the actual new entity to save
	$this->newEntity = $patchedLatest;

	return true;
}
/**
 * Utility method for transformResourceFilePath().
 *
 * Caller is responsible for ensuring the file exists. Emits a PHP warning otherwise.
 *
 * @since 1.27
 * @param string $remotePath URL path that points to $localPath
 * @param string $localPath File directory exposed at $remotePath
 * @param string $file Path to target file relative to $localPath
 * @return string URL with a 5-character content-hash query string appended
 */
public static function transformFilePath( $remotePath, $localPath, $file ) {
	$checksum = md5_file( $localPath . '/' . $file );

	if ( $checksum === false ) {
		// Missing/unreadable file: log it and fall back to an empty token.
		wfLogWarning( __METHOD__ . ": Failed to hash {$localPath}/{$file}" );
		$checksum = '';
	}

	// Short hash prefix acts as a cache-busting version parameter.
	return $remotePath . '/' . $file . '?' . substr( $checksum, 0, 5 );
}
/**
 * Re-converts revision text according to its flags.
 *
 * @param mixed $text Stored text blob (note: not actually passed by reference)
 * @param array $flags Compression flags ('gzip', 'object', 'utf-8'/'utf8')
 * @return string|bool Decompressed text, or false on failure
 */
public static function decompressRevisionText($text, $flags) {
	if (in_array('gzip', $flags)) {
		# Deal with optional compression of archived pages.
		# This can be done periodically via maintenance/compressOld.php, and
		# as pages are saved if $wgCompressRevisions is set.
		$text = gzinflate($text);

		if ($text === false) {
			wfLogWarning(__METHOD__ . ': gzinflate() failed');
			return false;
		}
	}

	if (in_array('object', $flags)) {
		# Generic compressed storage: the blob is a serialized wrapper object
		# exposing the actual text via getText().
		$obj = unserialize($text);
		if (!is_object($obj)) {
			// Invalid object
			return false;
		}
		$text = $obj->getText();
	}

	global $wgLegacyEncoding;
	if ($text !== false && $wgLegacyEncoding && !in_array('utf-8', $flags) && !in_array('utf8', $flags)) {
		# Old revisions kept around in a legacy encoding?
		# Upconvert on demand.
		# ("utf8" checked for compatibility with some broken
		# conversion scripts 2008-12-30)
		global $wgContLang;
		$text = $wgContLang->iconv($wgLegacyEncoding, 'UTF-8', $text);
	}
	return $text;
}
/**
 * Discovers legacy skins by scanning $wgStyleDirectory for *.php files.
 *
 * Core skins found this way are skipped (with a warning to remove the stale
 * files); any other match is registered in $this->legacySkins with a
 * deprecation warning. Results are cached per request.
 *
 * @return array Associative array: lower-case skin key => skin name cased
 *  as in the file name.
 */
private function getLegacySkinNames() {
	static $skinsInitialised = false;

	if ( !$skinsInitialised || !count( $this->legacySkins ) ) {
		# Get a list of available skins
		# Build using the regular expression '^(.*).php$'
		# Array keys are all lower case, array value keep the case used by filename
		// Restored: must be balanced with the wfProfileOut() below,
		// otherwise the profiler reports an unmatched section.
		wfProfileIn( __METHOD__ . '-init' );

		global $wgStyleDirectory;

		$skinDir = dir( $wgStyleDirectory );

		if ( $skinDir !== false && $skinDir !== null ) {
			# while code from www.php.net
			while ( false !== ( $file = $skinDir->read() ) ) {
				// Skip non-PHP files, hidden files, and '.dep' includes
				$matches = array();

				if ( preg_match( '/^([^.]*)\\.php$/', $file, $matches ) ) {
					$aSkin = $matches[1];

					// Explicitly disallow loading core skins via the autodiscovery mechanism.
					//
					// They should be loaded already (in a non-autodicovery way), but old files might still
					// exist on the server because our MW version upgrade process is widely documented as
					// requiring just copying over all files, without removing old ones.
					//
					// This is one of the reasons we should have never used autodiscovery in the first
					// place. This hack can be safely removed when autodiscovery is gone.
					if ( in_array( $aSkin, array( 'CologneBlue', 'Modern', 'MonoBook', 'Vector' ) ) ) {
						// Missing space between sentences fixed ("future." / "See ...").
						wfLogWarning( "An old copy of the {$aSkin} skin was found in your skins/ directory. " . "You should remove it to avoid problems in the future. " . "See https://www.mediawiki.org/wiki/Manual:Skin_autodiscovery for details." );
						continue;
					}

					wfLogWarning( "A skin using autodiscovery mechanism, {$aSkin}, was found in your skins/ directory. " . "The mechanism will be removed in MediaWiki 1.25 and the skin will no longer be recognized. " . "See https://www.mediawiki.org/wiki/Manual:Skin_autodiscovery for information how to fix this." );
					$this->legacySkins[strtolower( $aSkin )] = $aSkin;
				}
			}
			$skinDir->close();
		}
		$skinsInitialised = true;
		wfProfileOut( __METHOD__ . '-init' );
	}
	return $this->legacySkins;
}
/**
 * Find the object with a given name and return it (or NULL)
 *
 * @param string $name Special page name, may be localised and/or an alias
 * @return SpecialPage|null SpecialPage object or null if the page doesn't exist
 */
public static function getPage($name) {
	// Map a localised name or alias to the canonical special page name.
	list($realName, ) = self::resolveAlias($name);

	// Only successfully built pages are effectively served from cache:
	// isset() is false for entries cached as null, so those are retried.
	if (isset(self::$pageObjectCache[$realName])) {
		return self::$pageObjectCache[$realName];
	}

	$specialPageList = self::getPageList();

	if (isset($specialPageList[$realName])) {
		$rec = $specialPageList[$realName];

		// The registry entry may be a factory callback, a class name,
		// a legacy constructor array, or an already-built SpecialPage.
		if (is_callable($rec)) {
			// Use callback to instantiate the special page
			$page = call_user_func($rec);
		} elseif (is_string($rec)) {
			$className = $rec;
			$page = new $className();
		} elseif (is_array($rec)) {
			$className = array_shift($rec);
			// @deprecated, officially since 1.18, unofficially since forever
			wfDeprecated("Array syntax for \$wgSpecialPages is deprecated ({$className}), " . "define a subclass of SpecialPage instead.", '1.18');
			$page = ObjectFactory::getObjectFromSpec(['class' => $className, 'args' => $rec, 'closure_expansion' => false]);
		} elseif ($rec instanceof SpecialPage) {
			$page = $rec;
			// XXX: we should deep clone here
		} else {
			$page = null;
		}

		self::$pageObjectCache[$realName] = $page;
		if ($page instanceof SpecialPage) {
			return $page;
		} else {
			// It's not a classname, nor a callback, nor a legacy constructor array,
			// nor a special page object. Give up.
			wfLogWarning("Cannot instantiate special page {$realName}: bad spec!");
			return null;
		}
	} else {
		return null;
	}
}
/**
 * Rebuilds the store by re-running edit updates on a batch of entity pages.
 *
 * @see Store::rebuild
 *
 * @since 0.1
 */
public function rebuild() {
	$dbw = wfGetDB(DB_MASTER);

	$contentModels = WikibaseRepo::getDefaultInstance()->getEntityContentFactory()->getEntityContentModels();

	// TODO: refactor selection code out (relevant for other stores)
	$pageRows = $dbw->select(
		array('page'),
		array('page_id', 'page_latest'),
		array('page_content_model' => $contentModels),
		__METHOD__,
		array('LIMIT' => 1000)
	);

	foreach ($pageRows as $row) {
		$wikiPage = WikiPage::newFromID($row->page_id);
		$latestRevision = Revision::newFromId($row->page_latest);

		try {
			$wikiPage->doEditUpdates($latestRevision, $GLOBALS['wgUser']);
		} catch (DBQueryError $e) {
			// Keep going; log which page/revision failed.
			wfLogWarning('editUpdateFailed for ' . $wikiPage->getId() . ' on revision ' . $latestRevision->getId() . ': ' . $e->getMessage());
		}
	}
}
/**
 * Dispatches an exception to the configured callback, or logs a warning
 * when no callback is set.
 *
 * @param Exception $ex
 */
private function handleException( Exception $ex ) {
	if ( !$this->exceptionCallback ) {
		wfLogWarning( $ex->getMessage() );
		return;
	}

	call_user_func( $this->exceptionCallback, $ex );
}
/**
 * Propagates a client-wiki page move to the repository by scheduling an
 * UpdateRepoOnMove job, when propagation is enabled and applicable.
 *
 * @param Title $oldTitle
 * @param Title $newTitle
 * @param User $user
 *
 * @return bool Always true (hook handler convention).
 */
private function doTitleMoveComplete( Title $oldTitle, Title $newTitle, User $user ) {
	$applicable = $this->isWikibaseEnabled( $newTitle->getNamespace() )
		&& $this->propagateChangesToRepo === true;

	if ( !$applicable ) {
		return true;
	}

	$updateRepo = new UpdateRepoOnMove( $this->repoDatabase, $this->siteLinkLookup, $user, $this->siteGlobalID, $oldTitle, $newTitle );

	if ( !$this->shouldBePushed( $updateRepo ) ) {
		return true;
	}

	try {
		$updateRepo->injectJob( $this->jobQueueGroup );

		// To be able to find out about this in the SpecialMovepageAfterMove hook
		$newTitle->wikibasePushedMoveToRepo = true;
	} catch ( MWException $e ) {
		// This is not a reason to let an exception bubble up, we just
		// show a message to the user that the Wikibase item needs to be
		// manually updated.
		wfLogWarning( $e->getMessage() );

		wfDebugLog( 'UpdateRepo', "OnMove: Failed to inject job: " . $e->getMessage() );
	}

	return true;
}
/**
 * Records the badges of the item's sitelink to this wiki as ParserOutput
 * properties named "wikibase-badge-<badge id>".
 *
 * @param ItemId $itemId
 * @param ParserOutput $out
 */
private function setBadgesProperty( ItemId $itemId, ParserOutput $out ) {
	/** @var Item $item */
	$item = $this->entityLookup->getEntity( $itemId );

	$linked = $item && $item->getSiteLinkList()->hasLinkWithSiteId( $this->siteId );
	if ( !$linked ) {
		// Probably some sort of race condition or data inconsistency, better log a warning
		wfLogWarning( 'According to a SiteLinkLookup ' . $itemId->getSerialization() . ' is linked to ' . $this->siteId . ' while it is not or it does not exist.' );
		return;
	}

	$badges = $item->getSiteLinkList()->getBySiteId( $this->siteId )->getBadges();
	foreach ( $badges as $badge ) {
		$out->setProperty( 'wikibase-badge-' . $badge->getSerialization(), true );
	}
}
/**
 * @see PropertyInfoStore::getPropertyInfo
 *
 * @param PropertyId $propertyId
 *
 * @return array|null Null when no row exists or the stored blob could not
 *  be decoded (the latter is logged).
 * @throws InvalidArgumentException
 * @throws DBError
 */
public function getPropertyInfo( PropertyId $propertyId ) {
	$dbr = $this->getConnection( DB_SLAVE );
	$blob = $dbr->selectField(
		$this->tableName,
		'pi_info',
		array( 'pi_property_id' => $propertyId->getNumericId() ),
		__METHOD__
	);
	$this->releaseConnection( $dbr );

	if ( $blob === false ) {
		// No row for this property.
		return null;
	}

	$info = $this->decodeBlob( $blob );
	if ( $info === null ) {
		wfLogWarning( "failed to decode property info blob for " . $propertyId . ": " . substr( $blob, 0, 200 ) );
	}

	return $info;
}
/**
 * Reports the exception to wfLogWarning.
 *
 * @see ExceptionHandler::handleException()
 *
 * @param Exception $exception
 * @param string $errorCode
 * @param string $explanation
 */
public function handleException( Exception $exception, $errorCode, $explanation ) {
	// Format: "[<code>]: <explanation> (<exception message>)"
	$message = '[' . $errorCode . ']: ' . $explanation . ' (' . $exception->getMessage() . ')';

	// 2 is presumably wfLogWarning's caller offset, attributing the
	// warning to our caller rather than this wrapper — confirm against
	// the wfLogWarning() signature.
	wfLogWarning( $message, 2 );
}
/**
 * @param string $name
 *
 * @return User|bool The local User, or false if it doesn't exist or is
 *  not logged in.
 */
private function getUser( $name ) {
	$user = User::newFromName( $name );

	if ( $user && $user->isLoggedIn() ) {
		return $user;
	}

	// This should never happen as we check with CentralAuth
	// that the user actually does exist
	wfLogWarning( "User {$name} doesn't exist while CentralAuth pretends it does" );

	return false;
}
/**
 * Unserializes the info field using json_decode.
 * This may be overridden by subclasses to implement special handling
 * for information in the info field.
 *
 * @since 0.4
 *
 * @param string $str
 *
 * @return array the info array; empty on failure (logged).
 */
public function unserializeInfo( $str ) {
	// Guard against empty blobs: $str[0] on '' emits a PHP notice
	// (an error as of PHP 7.4), and neither decoder can succeed anyway.
	if ( $str === null || $str === '' ) {
		wfLogWarning( "Failed to unserializeInfo of id: " . $this->getObjectId() );
		return array();
	}

	if ( $str[0] === '{' ) {
		// json
		$info = json_decode( $str, true );
	} else {
		// we may still have legacy stuff in the database for a while!
		// NOTE: unserialize() on stored blobs can instantiate arbitrary
		// classes; tolerable only because this data is written by the wiki
		// itself, never taken from user input.
		$info = unserialize( $str );
	}

	if ( !is_array( $info ) ) {
		wfLogWarning( "Failed to unserializeInfo of id: " . $this->getObjectId() );
		return array();
	}

	return $info;
}
/**
 * Make directory, and make all parent directories if they don't exist
 *
 * @param string $dir Full path to directory to create
 * @param int $mode Chmod value to use, default is $wgDirectoryMode
 * @param string $caller Optional caller param for debugging.
 * @throws MWException When given a FileBackend storage path.
 * @return bool Whether the directory exists (or was just created).
 */
function wfMkdirParents( $dir, $mode = null, $caller = null ) {
	global $wgDirectoryMode;

	if ( FileBackend::isStoragePath( $dir ) ) {
		// sanity
		throw new MWException( __FUNCTION__ . " given storage path '{$dir}'." );
	}

	if ( $caller !== null ) {
		wfDebug( "{$caller}: called wfMkdirParents({$dir})\n" );
	}

	// Nothing to do for an empty path or an already existing directory.
	if ( strval( $dir ) === '' || is_dir( $dir ) ) {
		return true;
	}

	$dir = str_replace( array( '\\', '/' ), DIRECTORY_SEPARATOR, $dir );

	$mode = $mode === null ? $wgDirectoryMode : $mode;

	// Turn off the normal warning, we're doing our own below
	MediaWiki\suppressWarnings();
	$created = mkdir( $dir, $mode, true ); // recursive, creates parents
	MediaWiki\restoreWarnings();

	if ( !$created ) {
		// directory may have been created on another request since we last checked
		if ( is_dir( $dir ) ) {
			return true;
		}

		// PHP doesn't report the path in its warning message, so add our own to aid in diagnosis.
		wfLogWarning( sprintf( "failed to mkdir \"%s\" mode 0%o", $dir, $mode ) );
	}

	return $created;
}
/**
 * Emits the message as a PHP warning and forwards it to the message reporter.
 *
 * @param string $message
 */
private function warn( $message ) {
	wfLogWarning( $message );

	$this->messageReporter->reportMessage( $message );
}
/**
 * Back-end article deletion
 * Deletes the article with database consistency, writes logs, purges caches
 *
 * @since 1.19
 *
 * @param string $reason Delete reason for deletion log
 * @param bool $suppress Suppress all revisions and log the deletion in
 *   the suppression log instead of the deletion log
 * @param int $u1 Unused
 * @param bool $u2 Unused
 * @param array|string &$error Array of errors to append to
 * @param User $user The deleting user
 * @param array $tags Tags to apply to the deletion action
 * @return Status Status object; if successful, $status->value is the log_id of the
 *   deletion log entry. If the page couldn't be deleted because it wasn't
 *   found, $status is a non-fatal 'cannotdelete' error
 */
public function doDeleteArticleReal($reason, $suppress = false, $u1 = null, $u2 = null, &$error = '', User $user = null, $tags = []) {
	global $wgUser, $wgContentHandlerUseDB;

	wfDebug(__METHOD__ . "\n");

	$status = Status::newGood();

	// A page with an empty DB key cannot exist; bail out early.
	if ($this->mTitle->getDBkey() === '') {
		$status->error('cannotdelete', wfEscapeWikiText($this->getTitle()->getPrefixedText()));
		return $status;
	}

	// Fall back to the global request user if no deleting user was given.
	$user = is_null($user) ? $wgUser : $user;
	if (!Hooks::run('ArticleDelete', [&$this, &$user, &$reason, &$error, &$status, $suppress])) {
		if ($status->isOK()) {
			// Hook aborted but didn't set a fatal status
			$status->fatal('delete-hook-aborted');
		}
		return $status;
	}

	$dbw = wfGetDB(DB_MASTER);
	$dbw->startAtomic(__METHOD__);

	$this->loadPageData(self::READ_LATEST);
	$id = $this->getId();
	// T98706: lock the page from various other updates but avoid using
	// WikiPage::READ_LOCKING as that will carry over the FOR UPDATE to
	// the revisions queries (which also JOIN on user). Only lock the page
	// row and CAS check on page_latest to see if the trx snapshot matches.
	$lockedLatest = $this->lockAndGetLatest();
	if ($id == 0 || $this->getLatest() != $lockedLatest) {
		$dbw->endAtomic(__METHOD__);
		// Page not there or trx snapshot is stale
		$status->error('cannotdelete', wfEscapeWikiText($this->getTitle()->getPrefixedText()));
		return $status;
	}

	// Given the lock above, we can be confident in the title and page ID values
	$namespace = $this->getTitle()->getNamespace();
	$dbKey = $this->getTitle()->getDBkey();

	// At this point we are now committed to returning an OK
	// status unless some DB query error or other exception comes up.
	// This way callers don't have to call rollback() if $status is bad
	// unless they actually try to catch exceptions (which is rare).

	// we need to remember the old content so we can use it to generate all deletion updates.
	$revision = $this->getRevision();
	try {
		$content = $this->getContent(Revision::RAW);
	} catch (Exception $ex) {
		wfLogWarning(__METHOD__ . ': failed to load content during deletion! ' . $ex->getMessage());
		$content = null;
	}

	$fields = Revision::selectFields();
	$bitfield = false;

	// Bitfields to further suppress the content
	if ($suppress) {
		$bitfield = Revision::SUPPRESSED_ALL;
		$fields = array_diff($fields, ['rev_deleted']);
	}

	// For now, shunt the revision data into the archive table.
	// Text is *not* removed from the text table; bulk storage
	// is left intact to avoid breaking block-compression or
	// immutable storage schemes.
	// In the future, we may keep revisions and mark them with
	// the rev_deleted field, which is reserved for this purpose.

	// Get all of the page revisions
	$res = $dbw->select('revision', $fields, ['rev_page' => $id], __METHOD__, 'FOR UPDATE');

	// Build their equivalent archive rows
	$rowsInsert = [];
	foreach ($res as $row) {
		$rowInsert = ['ar_namespace' => $namespace, 'ar_title' => $dbKey, 'ar_comment' => $row->rev_comment, 'ar_user' => $row->rev_user, 'ar_user_text' => $row->rev_user_text, 'ar_timestamp' => $row->rev_timestamp, 'ar_minor_edit' => $row->rev_minor_edit, 'ar_rev_id' => $row->rev_id, 'ar_parent_id' => $row->rev_parent_id, 'ar_text_id' => $row->rev_text_id, 'ar_text' => '', 'ar_flags' => '', 'ar_len' => $row->rev_len, 'ar_page_id' => $id, 'ar_deleted' => $suppress ? $bitfield : $row->rev_deleted, 'ar_sha1' => $row->rev_sha1];
		if ($wgContentHandlerUseDB) {
			$rowInsert['ar_content_model'] = $row->rev_content_model;
			$rowInsert['ar_content_format'] = $row->rev_content_format;
		}
		$rowsInsert[] = $rowInsert;
	}

	// Copy them into the archive table
	$dbw->insert('archive', $rowsInsert, __METHOD__);

	// Save this so we can pass it to the ArticleDeleteComplete hook.
	$archivedRevisionCount = $dbw->affectedRows();

	// Clone the title and wikiPage, so we have the information we need when
	// we log and run the ArticleDeleteComplete hook.
	$logTitle = clone $this->mTitle;
	$wikiPageBeforeDelete = clone $this;

	// Now that it's safely backed up, delete it
	$dbw->delete('page', ['page_id' => $id], __METHOD__);
	$dbw->delete('revision', ['rev_page' => $id], __METHOD__);

	// Log the deletion, if the page was suppressed, put it in the suppression log instead
	$logtype = $suppress ? 'suppress' : 'delete';

	$logEntry = new ManualLogEntry($logtype, 'delete');
	$logEntry->setPerformer($user);
	$logEntry->setTarget($logTitle);
	$logEntry->setComment($reason);
	$logEntry->setTags($tags);
	$logid = $logEntry->insert();

	$dbw->onTransactionPreCommitOrIdle(function () use($dbw, $logEntry, $logid) {
		// Bug 56776: avoid deadlocks (especially from FileDeleteForm)
		$logEntry->publish($logid);
	}, __METHOD__);

	$dbw->endAtomic(__METHOD__);

	$this->doDeleteUpdates($id, $content, $revision);

	Hooks::run('ArticleDeleteComplete', [&$wikiPageBeforeDelete, &$user, $reason, $id, $content, $logEntry, $archivedRevisionCount]);
	$status->value = $logid;

	// Show log excerpt on 404 pages rather than just a link
	$cache = ObjectCache::getMainStashInstance();
	$key = wfMemcKey('page-recent-delete', md5($logTitle->getPrefixedText()));
	$cache->set($key, 1, $cache::TTL_DAY);

	return $status;
}
/**
 * Get entity from prefixed ID (e.g. "Q23") and return it as serialized array.
 *
 * @since 0.5
 *
 * @param string $prefixedEntityId
 *
 * @return array|null Null if the entity does not exist or is an
 *  unresolved (double) redirect.
 */
public function getEntity( $prefixedEntityId ) {
	$prefixedEntityId = trim( $prefixedEntityId );

	$entityId = $this->entityIdParser->parse( $prefixedEntityId );

	// Record the usage even if the entity turns out to be missing.
	$this->usageAccumulator->addAllUsage( $entityId );

	try {
		$entityObject = $this->entityLookup->getEntity( $entityId );
	} catch ( RevisionedUnresolvedRedirectException $ex ) {
		// We probably hit a double redirect.
		// Log message fixed to name the exception class actually caught here.
		wfLogWarning(
			'Encountered a RevisionedUnresolvedRedirectException when trying to load ' . $prefixedEntityId
		);

		return null;
	}

	if ( $entityObject === null ) {
		return null;
	}

	$entityArr = $this->newClientEntitySerializer()->serialize( $entityObject );

	// Renumber the entity as Lua uses 1-based array indexing
	$this->renumber( $entityArr );
	$entityArr['schemaVersion'] = 2;

	return $entityArr;
}
/**
 * Get the list of module CSS to include on this page
 *
 * Additionally logs a warning for every style module that does not declare
 * its position explicitly (T97420).
 *
 * @param bool $filter
 * @param string|null $position
 *
 * @return array Array of module names
 */
public function getModuleStyles( $filter = false, $position = null ) {
	// T97420
	$resourceLoader = $this->getResourceLoader();

	foreach ( $this->mModuleStyles as $moduleName ) {
		$module = $resourceLoader->getModule( $moduleName );

		$implicitPosition = $module instanceof ResourceLoaderModule && $module->isPositionDefault();
		if ( $implicitPosition ) {
			$warning = __METHOD__ . ': style module should define its position explicitly: ' . $moduleName . ' ' . get_class( $module );
			wfDebugLog( 'resourceloader', $warning );
			wfLogWarning( $warning );
		}
	}

	return $this->getModules( $filter, $position, 'mModuleStyles' );
}
/**
 * Fetches some basic entity information from a set of entity IDs.
 *
 * @param ParserOutput $parserOutput
 *
 * @return EntityInfo
 */
private function getEntityInfo( ParserOutput $parserOutput ) {
	/**
	 * Set in ReferencedEntitiesDataUpdater.
	 *
	 * @see ReferencedEntitiesDataUpdater::updateParserOutput
	 * @fixme Use ReferencedEntitiesDataUpdater::getEntityIds instead.
	 */
	$entityIds = $parserOutput->getExtensionData( 'referenced-entities' );

	if ( !is_array( $entityIds ) ) {
		// Be robust against missing/corrupt extension data.
		wfLogWarning( '$entityIds from ParserOutput "referenced-entities" extension data' . ' expected to be an array' );
		$entityIds = array();
	}

	$builder = $this->entityInfoBuilderFactory->newEntityInfoBuilder( $entityIds );

	$builder->resolveRedirects();
	$builder->removeMissing();
	$builder->collectTerms(
		array( 'label', 'description' ),
		$this->languageFallbackChain->getFetchLanguageCodes()
	);
	$builder->collectDataTypes();
	$builder->retainEntityInfo( $entityIds );

	return $builder->getEntityInfo();
}