Code example #1
 function dump($history)
 {
     # This shouldn't happen if on console... ;)
     header('Content-type: text/html; charset=UTF-8');
     # Notice messages will foul up your XML output even if they're
     # relatively harmless.
     ini_set('display_errors', false);
     $this->startTime = wfTime();
     $dbr =& wfGetDB(DB_SLAVE);
     $this->maxCount = $dbr->selectField('page', 'MAX(page_id)', '', 'BackupDumper::dump');
     $this->startTime = wfTime();
     $db =& $this->backupDb();
     $exporter = new WikiExporter($db, $history, MW_EXPORT_STREAM);
     $exporter->setPageCallback(array(&$this, 'reportPage'));
     $exporter->setRevisionCallback(array(&$this, 'revCount'));
     $exporter->openStream();
     if (is_null($this->pages)) {
         $exporter->allPages();
     } else {
         $exporter->pagesByName($this->pages);
     }
     $exporter->closeStream();
     $this->report(true);
 }
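For orientation, here is a minimal sketch of how a dump method like this one is usually driven from a command-line maintenance script, in the style of MediaWiki's old dumpBackup.php. The commandLine.inc bootstrap, the BackupDumper class name, and the MW_EXPORT_FULL constant are assumptions from that era of the codebase, not part of the example above.

 // Hypothetical CLI driver, modeled on the old dumpBackup.php maintenance script.
 // Assumption: commandLine.inc bootstraps MediaWiki for command-line use.
 require_once 'commandLine.inc';

 $dumper = new BackupDumper();
 // Assumption: MW_EXPORT_FULL selects every revision of every page; dump()
 // above forwards this value to WikiExporter as its $history argument.
 $dumper->dump(MW_EXPORT_FULL);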
Code example #2
File: SpecialExport.php Project: yusufchang/app
 /**
  * Do the actual page exporting
  *
  * @param $page String: user input on what page(s) to export
  * @param $history Mixed: one of the WikiExporter history export constants
  * @param $list_authors Boolean: Whether to add distinct author list (when
  *                      not returning full history)
  * @param $exportall Boolean: Whether to export everything
  */
 private function doExport($page, $history, $list_authors, $exportall)
 {
     // If we are grabbing everything, enable full history and ignore the rest
     if ($exportall) {
         $history = WikiExporter::FULL;
     } else {
         $pageSet = array();
         // Inverted index of all pages to look up
         // Split up and normalize input
         foreach (explode("\n", $page) as $pageName) {
             $pageName = trim($pageName);
             $title = Title::newFromText($pageName);
             if ($title && $title->getInterwiki() == '' && $title->getText() !== '') {
                 // Only record each page once!
                 $pageSet[$title->getPrefixedText()] = true;
             }
         }
         // Set of original pages to pass on to further manipulation...
         $inputPages = array_keys($pageSet);
         // Look up any linked pages if asked...
         if ($this->templates) {
             $pageSet = $this->getTemplates($inputPages, $pageSet);
         }
         $linkDepth = $this->pageLinkDepth;
         if ($linkDepth) {
             $pageSet = $this->getPageLinks($inputPages, $pageSet, $linkDepth);
         }
          /*
           // Enable this when we can do something useful exporting/importing image information. :)
           if ($this->images) {
               $pageSet = $this->getImages($inputPages, $pageSet);
           }
          */
         $pages = array_keys($pageSet);
         // Normalize titles to the same format and remove dupes, see bug 17374
         foreach ($pages as $k => $v) {
             $pages[$k] = str_replace(" ", "_", $v);
         }
         $pages = array_unique($pages);
     }
     /* Ok, let's get to it... */
     if ($history == WikiExporter::CURRENT) {
         $lb = false;
         $db = wfGetDB(DB_SLAVE);
         $buffer = WikiExporter::BUFFER;
     } else {
         // Use an unbuffered query; histories may be very long!
         $lb = wfGetLBFactory()->newMainLB();
         $db = $lb->getConnection(DB_SLAVE);
         $buffer = WikiExporter::STREAM;
         // This might take a while... :D
         wfSuppressWarnings();
         set_time_limit(0);
         wfRestoreWarnings();
     }
     $exporter = new WikiExporter($db, $history, $buffer);
     $exporter->list_authors = $list_authors;
     $exporter->openStream();
     if ($exportall) {
         $exporter->allPages();
     } else {
         foreach ($pages as $page) {
              /*
               if ($wgExportMaxHistory && !$this->curonly) {
                   $title = Title::newFromText($page);
                   if ($title) {
                       $count = Revision::countByTitle($db, $title);
                       if ($count > $wgExportMaxHistory) {
                           wfDebug(__FUNCTION__ .
                               ": Skipped $page, $count revisions too big\n");
                           continue;
                       }
                   }
               }
              */
              # Bug 8824: Only export pages the user can read
              $title = Title::newFromText($page);
              if (is_null($title)) {
                  # TODO: perhaps output an <error> tag or something.
                  continue;
              }
              if (!$title->userCan('read', $this->getUser())) {
                  # TODO: perhaps output an <error> tag or something.
                  continue;
              }
             $exporter->pageByTitle($title);
         }
     }
     $exporter->closeStream();
     if ($lb) {
         $lb->closeAll();
     }
 }
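The interesting part of this version is the page-set handling at the top: the user input is split on newlines, trimmed, deduplicated through an inverted index keyed by title, and finally normalized to underscore form. Below is a standalone sketch of that step with the Title lookups replaced by plain string handling; the sample input is invented for illustration.

 // Standalone illustration of the normalization in doExport(); Title::newFromText()
 // is deliberately left out, so this is plain PHP rather than MediaWiki API code.
 $input = "Main Page\nMain_Page\n Help:Contents \n\n";

 $pageSet = array();
 foreach (explode("\n", $input) as $pageName) {
     $pageName = trim($pageName);
     if ($pageName !== '') {
         // Inverted index: each title becomes a key, so repeats collapse to one entry.
         $pageSet[$pageName] = true;
     }
 }

 $pages = array_keys($pageSet);
 foreach ($pages as $k => $v) {
     // Normalize to the underscore form used by page URLs and database keys.
     $pages[$k] = str_replace(' ', '_', $v);
 }
 $pages = array_unique($pages);
 // $pages now holds 'Main_Page' and 'Help:Contents' exactly once each.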
Code example #3
File: SpecialExport.php Project: paladox/mediawiki
 /**
  * Do the actual page exporting
  *
  * @param string $page User input on what page(s) to export
  * @param int $history One of the WikiExporter history export constants
  * @param bool $list_authors Whether to add distinct author list (when
  *   not returning full history)
  * @param bool $exportall Whether to export everything
  */
 private function doExport($page, $history, $list_authors, $exportall)
 {
     // If we are grabbing everything, enable full history and ignore the rest
     if ($exportall) {
         $history = WikiExporter::FULL;
     } else {
         $pageSet = [];
         // Inverted index of all pages to look up
         // Split up and normalize input
         foreach (explode("\n", $page) as $pageName) {
             $pageName = trim($pageName);
             $title = Title::newFromText($pageName);
             if ($title && !$title->isExternal() && $title->getText() !== '') {
                 // Only record each page once!
                 $pageSet[$title->getPrefixedText()] = true;
             }
         }
         // Set of original pages to pass on to further manipulation...
         $inputPages = array_keys($pageSet);
         // Look up any linked pages if asked...
         if ($this->templates) {
             $pageSet = $this->getTemplates($inputPages, $pageSet);
         }
         $linkDepth = $this->pageLinkDepth;
         if ($linkDepth) {
             $pageSet = $this->getPageLinks($inputPages, $pageSet, $linkDepth);
         }
         $pages = array_keys($pageSet);
         // Normalize titles to the same format and remove dupes, see bug 17374
         foreach ($pages as $k => $v) {
             $pages[$k] = str_replace(" ", "_", $v);
         }
         $pages = array_unique($pages);
     }
     /* Ok, let's get to it... */
     if ($history == WikiExporter::CURRENT) {
         $lb = false;
         $db = wfGetDB(DB_REPLICA);
         $buffer = WikiExporter::BUFFER;
     } else {
         // Use an unbuffered query; histories may be very long!
         $lb = wfGetLBFactory()->newMainLB();
         $db = $lb->getConnection(DB_REPLICA);
         $buffer = WikiExporter::STREAM;
         // This might take a while... :D
         MediaWiki\suppressWarnings();
         set_time_limit(0);
         MediaWiki\restoreWarnings();
     }
     $exporter = new WikiExporter($db, $history, $buffer);
     $exporter->list_authors = $list_authors;
     $exporter->openStream();
     if ($exportall) {
         $exporter->allPages();
     } else {
         foreach ($pages as $page) {
             # Bug 8824: Only export pages the user can read
             $title = Title::newFromText($page);
             if (is_null($title)) {
                 // @todo Perhaps output an <error> tag or something.
                 continue;
             }
             if (!$title->userCan('read', $this->getUser())) {
                 // @todo Perhaps output an <error> tag or something.
                 continue;
             }
             $exporter->pageByTitle($title);
         }
     }
     $exporter->closeStream();
     if ($lb) {
         $lb->closeAll();
     }
 }
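For comparison, a minimal sketch of how this private method might be reached from the special page itself, assuming we are inside SpecialExport::execute() after the export form has been submitted. The WebRequest field names ('pages', 'curonly', 'listauthors', 'exportall') are assumptions for illustration rather than values taken from the code above.

 // Minimal sketch of a caller inside SpecialExport::execute(); form field names are assumed.
 $request = $this->getRequest();

 $page = $request->getText('pages');                 // newline-separated titles from the form
 $history = $request->getCheck('curonly')
     ? WikiExporter::CURRENT                         // current revisions only (buffered query)
     : WikiExporter::FULL;                           // full history (streamed query)
 $list_authors = $request->getCheck('listauthors');  // assumed checkbox name
 $exportall = $request->getCheck('exportall');       // assumed checkbox name

 // doExport() streams the XML through WikiExporter, so the caller is expected to
 // have sent the appropriate output headers before this point.
 $this->doExport($page, $history, $list_authors, $exportall);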