// Full-featured help text: covers log dumps, stable versions, page lists,
// revision ranges, and upload/file inclusion options.
$dumper->progress( <<<ENDS
This script dumps the wiki page or logging database into an
XML interchange wrapper format for export or backup.

XML output is sent to stdout; progress reports are sent to stderr.

WARNING: this is not a full database dump! It is merely for public export
         of your wiki. For full backup, see our online help at:
         https://www.mediawiki.org/wiki/Backup

Usage: php dumpBackup.php <action> [<options>]
Actions:
  --full      Dump all revisions of every page.
  --current   Dump only the latest revision of every page.
  --logs      Dump all log events.
  --stable    Stable versions of pages?
  --pagelist=<file>
              Where <file> is a list of page titles to be dumped
  --revrange  Dump specified range of revisions, requires
              revstart and revend options.
Options:
  --quiet     Don't dump status reports to stderr.
  --report=n  Report position and speed after every n pages processed.
              (Default: 100)
  --server=h  Force reading from MySQL server h
  --start=n   Start from page_id or log_id n
  --end=n     Stop before page_id or log_id n (exclusive)
  --revstart=n  Start from rev_id n
  --revend=n  Stop before rev_id n (exclusive)
  --skip-header  Don't output the <mediawiki> header
  --skip-footer  Don't output the </mediawiki> footer
  --stub      Don't perform old_text lookups; for 2-pass dump
  --uploads   Include upload records without files
  --include-files  Include files within the XML stream
  --conf=<file>  Use the specified configuration file (LocalSettings.php)
  --wiki=<wiki>  Only back up the specified <wiki>

Fancy stuff: (Works? Add examples please.)
  --plugin=<class>[:<file>]   Load a dump plugin class
  --output=<type>:<file>      Begin a filtered output stream;
                              <type>s: file, gzip, bzip2, 7zip
  --filter=<type>[:<options>] Add a filter on an output branch
ENDS
);
} else {
	// Abbreviated help text: page dumps only (no log/pagelist/revrange
	// options in this branch).
	$dumper->progress( <<<END
This script dumps the wiki page database into an
XML interchange wrapper format for export or backup.

XML output is sent to stdout; progress reports are sent to stderr.

Usage: php dumpBackup.php <action> [<options>]
Actions:
  --full      Dump complete history of every page.
  --current   Includes only the latest revision of each page.

Options:
  --quiet     Don't dump status reports to stderr.
  --report=n  Report position and speed after every n pages processed.
              (Default: 100)
  --server=h  Force reading from MySQL server h
  --start=n   Start from page_id n
  --end=n     Stop before page_id n (exclusive)
  --skip-header  Don't output the <mediawiki> header
  --skip-footer  Don't output the </mediawiki> footer
  --stub      Don't perform old_text lookups; for 2-pass dump

Fancy stuff:
  --plugin=<class>[:<file>]   Load a dump plugin class
  --output=<type>:<file>      Begin a filtered output stream;
                              <type>s: file, gzip, bzip2, 7zip
  --filter=<type>[:<options>] Add a filter on an output branch
END
);
}
// Load the --pagelist file: one page title per line. This code runs inside
// the pagelist branch whose opening `if` is above the visible region; the
// lone `}` below closes it. The read happens before chdir() restores the
// previous working directory, so a relative path is resolved correctly.
$pages = file( $options['pagelist'] );
chdir( $olddir ); // restore cwd before reporting errors or dumping
if ( $pages === false ) {
	print "Unable to open file {$options['pagelist']}\n";
	// Exit non-zero so shell callers can detect the failure
	// (a bare `exit;` would report success status 0 even on error).
	exit( 1 );
}
$pages = array_map( 'trim', $pages );
// create_function() is deprecated since PHP 7.2 and removed in PHP 8.0;
// an anonymous function is the direct replacement. The explicit !== ''
// comparison keeps a page titled "0" (a bare array_filter() would drop
// every falsy value).
$dumper->pages = array_filter(
	$pages,
	static function ( $x ) {
		return $x !== '';
	}
);
} // end of --pagelist handling

// Dispatch on the requested action; with no action, print a usage summary.
if ( isset( $options['full'] ) ) {
	$dumper->dump( MW_EXPORT_FULL );
} elseif ( isset( $options['current'] ) ) {
	$dumper->dump( MW_EXPORT_CURRENT );
} else {
	// No action specified: show usage and exit without dumping anything.
	$dumper->progress( <<<END
This script dumps the wiki page database into an
XML interchange wrapper format for export or backup.

XML output is sent to stdout; progress reports are sent to stderr.

Usage: php dumpBackup.php <action> [<options>]
Actions:
  --full      Dump complete history of every page.
  --current   Includes only the latest revision of each page.

Options:
  --quiet     Don't dump status reports to stderr.
  --report=n  Report position and speed after every n pages processed.
              (Default: 100)
END
);
}