/**
	 * Enable expansion of local URLs.
	 *
	 * In order to output stand-alone content with all absolute links, it is
	 * necessary to expand local URLs. MediaWiki tries to do this in a few
	 * places by sniffing the 'action' GET request parameter, but that
	 * approach fails in many cases. This function tries to remedy that.
	 *
	 * This function pre-expands all base URL fragments used by MediaWiki,
	 * and also enables URL expansion in the Wikilog::GetLocalURL hook.
	 * The original values of all URLs are saved when $enable = true, and
	 * restored when $enable = false.
	 *
	 * The proper way to use this function is:
	 * @code
	 *   $saveExpUrls = WikilogParser::expandLocalUrls();
	 *   # ...code that uses $wgParser in order to parse articles...
	 *   WikilogParser::expandLocalUrls( $saveExpUrls );
	 * @endcode
	 *
	 * @note Using this function changes the behavior of Parser. When enabled,
	 *   parsed content should be cached under a different key.
	 */
	public static function expandLocalUrls( $enable = true ) {
		global $wgScriptPath, $wgUploadPath, $wgStylePath, $wgMathPath, $wgLocalFileRepo;
		static $originalPaths = null;

		$prev = self::$expandingUrls;

		if ( $enable ) {
			if ( !self::$expandingUrls ) {
				self::$expandingUrls = true;

				# Save original values.
				$originalPaths = array( $wgScriptPath, $wgUploadPath,
					$wgStylePath, $wgMathPath, $wgLocalFileRepo['url'] );

				# Expand paths.
				$wgScriptPath = wfExpandUrl( $wgScriptPath );
				$wgUploadPath = wfExpandUrl( $wgUploadPath );
				$wgStylePath  = wfExpandUrl( $wgStylePath  );
				$wgMathPath   = wfExpandUrl( $wgMathPath   );
				$wgLocalFileRepo['url'] = wfExpandUrl( $wgLocalFileRepo['url'] );

				# Destroy existing RepoGroup, if any.
				RepoGroup::destroySingleton();
			}
		} else {
			if ( self::$expandingUrls ) {
				self::$expandingUrls = false;

				# Restore original values.
				list( $wgScriptPath, $wgUploadPath, $wgStylePath, $wgMathPath,
					$wgLocalFileRepo['url'] ) = $originalPaths;

				# Destroy existing RepoGroup, if any.
				RepoGroup::destroySingleton();
			}
		}

		return $prev;
	}
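
The hook side that this flag drives is not shown above. As a minimal sketch, assuming
the flag is checked from a 'GetLocalURL' handler on this same class (the doc comment
mentions the Wikilog::GetLocalURL hook; the method name and placement here are
illustrative, not the extension's exact code):

	public static function getLocalURL( $title, &$url, $query ) {
		# While expandLocalUrls() has the flag turned on, rewrite the
		# relative URL produced by Title::getLocalURL() into an absolute one.
		if ( self::$expandingUrls ) {
			$url = wfExpandUrl( $url );
		}
		return true;	# allow other handlers to run
	}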
Example #2
	/**
	 * WikilogFeed constructor.
	 *
	 * @param $title Title  Feed title and URL.
	 * @param $format string  Feed format ('atom' or 'rss').
	 * @param $query WikilogQuery  Query options.
	 * @param $limit integer  Number of items to generate.
	 */
	public function __construct( Title $title, $format, WikilogQuery $query, $limit )
	{
		global $wgUser;

		$this->mTitle = $title;
		$this->mFormat = $format;
		$this->mQuery = $query;
		$this->mLimit = $limit;
		$this->mDb = wfGetDB( DB_SLAVE );
		$this->mIndexField = $this->getIndexField();

		# Retrieve copyright notice.
		$skin = $wgUser->getSkin();
		$saveExpUrls = WikilogParser::expandLocalUrls();
		$this->mCopyright = $skin->getCopyright( 'normal' );
		WikilogParser::expandLocalUrls( $saveExpUrls );
	}
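
A hedged usage sketch of the constructor documented above; $query stands for a
prepared WikilogQuery instance whose construction is outside this excerpt:

	# Build an Atom feed for the given page, limited to 20 items.
	$feed = new WikilogFeed( $title, 'atom', $query, 20 );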
Example #3
	/**
	 * Retrieves an article parsed output either from parser cache or by
	 * parsing it again. If parsing again, stores it back into parser cache.
	 *
	 * @param $title Title  Article title object.
	 * @param $feed boolean  Whether the result should be part of a feed.
	 * @return array  Two-element array containing the article and its parser output.
	 *
	 * @note MW 1.16+ provides Article::getParserOptions() and
	 *   Article::getParserOutput(), which could be used here in the future.
	 *   The problem is that getParserOutput() uses ParserCache exclusively,
	 *   which means that only ParserOptions control the key used to store
	 *   the output in the cache, and there is no hook yet in
	 *   ParserCache::getKey() to set these extra bits (the
	 *   'PageRenderingCache' hook is not useful here; it is in the wrong
	 *   place and has no access to the parser options). This is certainly
	 *   something that should be fixed in the future.  FIXME
	 *
	 * @note This function makes a clone of the parser if
	 *   $wgWikilogCloneParser is set, but cloning the parser is not
	 *   officially supported. The problem here is that we need a different
	 *   parser that we could mess up without interfering with normal page
	 *   rendering, and we can't create a new instance because of too many
	 *   broken extensions around. Check self::parserSanityCheck().
	 */
	public static function parsedArticle( Title $title, $feed = false ) {
		global $wgWikilogCloneParser;
		global $wgUser, $wgEnableParserCache;
		global $wgParser, $wgParserConf;

		static $parser = null;

		$article = new Article( $title );

		# First try the parser cache.
		$useParserCache = $wgEnableParserCache &&
			intval( $wgUser->getOption( 'stubthreshold' ) ) == 0 &&
			$article->exists();
		$parserCache = ParserCache::singleton();

		# Parser options.
		$parserOpt = ParserOptions::newFromUser( $wgUser );
		$parserOpt->setTidy( true );
		if ( $feed ) {
			$parserOpt->setEditSection( false );
			$parserOpt->addExtraKey( "WikilogFeed" );
		} else {
			$parserOpt->enableLimitReport();
		}

		if ( $useParserCache ) {
			# Look for the parsed article output in the parser cache.
			$parserOutput = $parserCache->get( $article, $parserOpt );

			# On success, return the object retrieved from the cache.
			if ( $parserOutput ) {
				return array( $article, $parserOutput );
			}
		}

		# Enable some feed-specific behavior.
		if ( $feed ) {
			$saveFeedParse = WikilogParser::enableFeedParsing();
			$saveExpUrls = WikilogParser::expandLocalUrls();
		}

		# Get a parser instance, if not already cached.
		if ( is_null( $parser ) ) {
			if ( !StubObject::isRealObject( $wgParser ) ) {
				$wgParser->_unstub();
			}
			if ( $wgWikilogCloneParser ) {
				$parser = clone $wgParser;
			} else {
				$class = $wgParserConf['class'];
				$parser = new $class( $wgParserConf );
			}
		}
		$parser->startExternalParse( $title, $parserOpt, Parser::OT_HTML );

		# Parse article.
		$arttext = $article->fetchContent();
		$parserOutput = $parser->parse( $arttext, $title, $parserOpt );

		# Save in parser cache.
		if ( $useParserCache && $parserOutput->getCacheTime() != -1 ) {
			$parserCache->save( $parserOutput, $article, $parserOpt );
		}

		# Restore default behavior.
		if ( $feed ) {
			WikilogParser::enableFeedParsing( $saveFeedParse );
			WikilogParser::expandLocalUrls( $saveExpUrls );
		}

		return array( $article, $parserOutput );
	}
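
A short usage sketch of the function above, as a feed generator might call it
(variable names are illustrative; ParserOutput::getText() is the standard
accessor for the rendered HTML):

	list( $article, $parserOutput ) = WikilogParser::parsedArticle( $title, true );
	$html = $parserOutput->getText();	# HTML with absolute URLs, suitable for a feed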
Example #4
	/**
	 * Extension setup function.
	 */
	static function ExtensionInit() {
		global $wgWikilogStylePath, $wgWikilogNamespaces;
		global $wgScriptPath, $wgNamespacesWithSubpages;

		# Set default style path, if not set.
		if ( !$wgWikilogStylePath ) {
			$wgWikilogStylePath = "$wgScriptPath/extensions/Wikilog/style";
		}

		# Find assigned namespaces and make sure they (and their talk
		# namespaces) allow subpages.
		foreach ( $wgWikilogNamespaces as $ns ) {
			$wgNamespacesWithSubpages[$ns] = true;
			$wgNamespacesWithSubpages[$ns ^ 1] = true;	# associated talk namespace
		}

		# Work around bug in MediaWiki 1.13 when '?action=render'.
		# https://bugzilla.wikimedia.org/show_bug.cgi?id=15512
		global $wgRequest;
		if ( $wgRequest->getVal( 'action' ) == 'render' ) {
			WikilogParser::expandLocalUrls();
		}
	}
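
For completeness, a hedged wiring sketch for LocalSettings.php; the entry-point
file name and the 'Wikilog' class holding ExtensionInit() are assumptions, as
neither appears in this excerpt:

	require_once( "$IP/extensions/Wikilog/Wikilog.php" );	# assumed entry point
	$wgExtensionFunctions[] = array( 'Wikilog', 'ExtensionInit' );	# run setup at startup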