public function execute()
 {
     global $wgLang, $wgParser;
     $provided = $this->getArg(0);
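     // getNsIndex() maps a (localized) namespace name to its numeric index, returning false for unknown names.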
     $namespace = $wgLang->getNsIndex($provided);
     if (!$namespace) {
         $this->error("Invalid namespace provided: {$provided}");
         return;
     }
     $namespaceName = $wgLang->getNsText($namespace);
     if (!MWNamespace::hasSubpages($namespace)) {
         $this->error("Subpages are not enabled in the {$namespaceName} namespace.");
         $this->error("In order to convert this namespace to Flow, you must enable subpages using:");
         $this->error("\$wgNamespacesWithSubpages[{$namespace}] = true;");
         return;
     }
     $noConvertTemplates = explode(',', $this->getOption('no-convert-templates', ''));
     if ($noConvertTemplates === array('')) {
         // explode( ',', '' ) returns array( '' )
         $noConvertTemplates = array();
     }
     // Convert to Title objects
     foreach ($noConvertTemplates as &$template) {
         $title = Title::newFromText($template, NS_TEMPLATE);
         if (!$title) {
             $this->error("Invalid template name: {$template}");
             return;
         }
         $template = $title;
     }
     // @todo send to prod logger?
     $logger = new MaintenanceDebugLogger($this);
     $dbw = wfGetDB(DB_MASTER);
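     // Build the wikitext-to-Flow converter: ConversionStrategy controls how the existing wikitext pages are
     // interpreted (templates left unconverted, archive title pattern), and the conversion edits are made by
     // the occupation controller's talk page manager user.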
     $converter = new \Flow\Import\Converter(
         $dbw,
         Flow\Container::get('importer'),
         $logger,
         FlowHooks::getOccupationController()->getTalkpageManager(),
         new Flow\Import\Wikitext\ConversionStrategy(
             $wgParser,
             new Flow\Import\NullImportSourceStore(),
             $logger,
             $noConvertTemplates,
             $this->getOption('archive-pattern', null)
         )
     );
     $logger->info("Starting conversion of {$namespaceName} namespace");
     // Iterate over all existing pages of the namespace.
     $it = new NamespaceIterator($dbw, $namespace);
     // NamespaceIterator is an IteratorAggregate. Get an Iterator
     // so we can wrap that.
     $it = $it->getIterator();
     $converter->convertAll($it);
     $logger->info("Finished conversion of {$namespaceName} namespace");
 }
 public function execute()
 {
     $cacheDir = $this->getOption('cacheremoteapidir');
     if (!is_dir($cacheDir)) {
         if (!mkdir($cacheDir)) {
             throw new Flow\Exception\FlowException('Provided dir for caching remote api calls could not be created.');
         }
     }
     if (!is_writable($cacheDir)) {
         throw new Flow\Exception\FlowException('Provided dir for caching remote api calls is not writable.');
     }
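     // Back end for the remote wiki's API; responses are cached in the directory validated above.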
     $api = new RemoteApiBackend($this->getOption('remoteapi'), $cacheDir);
     $importer = Flow\Container::get('importer');
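     // Users from the remote wiki may not exist locally, so let the importer attribute posts to unknown usernames.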
     $importer->setAllowUnknownUsernames(true);
     $talkPageManagerUser = \FlowHooks::getOccupationController()->getTalkpageManager();
     $srcPageName = $this->getOption('srcpage');
     if ($this->hasOption('dstpage')) {
         $dstPageName = $this->getOption('dstpage');
     } else {
         $dstPageName = $srcPageName;
     }
     $dstTitle = Title::newFromText($dstPageName);
     $source = new LiquidThreadsApiImportSource($api, $srcPageName, $talkPageManagerUser);
     $logFilename = $this->getOption('logfile');
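     // The source store records which LQT items have already been imported, so an interrupted run can be resumed.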
     $sourceStore = new FileImportSourceStore($logFilename);
     $logger = new MaintenanceDebugLogger($this);
     if ($this->getOption('debug')) {
         $logger->setMaximumLevel(LogLevel::DEBUG);
     } else {
         $logger->setMaximumLevel(LogLevel::INFO);
     }
     $importer->setLogger($logger);
     $api->setLogger($logger);
     $logger->info("Starting LQT conversion of page {$srcPageName}");
     $importer->import($source, $dstTitle, $sourceStore);
     $logger->info("Finished LQT conversion of page {$srcPageName}");
 }
<?php

require_once "{$IP}/maintenance/commandLine.inc";
require_once "{$IP}/extensions/Flow/FlowActions.php";
$moderationChangeTypes = array('hide-post', 'hide-topic', 'delete-post', 'delete-topic', 'suppress-post', 'suppress-topic', 'lock-topic', 'restore-post', 'restore-topic');
$csvOutput = fopen('repair_results_from_parent_' . wfWikiId() . '.csv', 'w');
if (!$csvOutput) {
    die("Could not open results file\n");
}
fputcsv($csvOutput, array("uuid", "esurl", "flags"));
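// Each row names a repairable revision (UUID), the external store URL holding usable content, and the rev_flags to set.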
$dbr = Flow\Container::get('db.factory')->getDB(DB_SLAVE);
$it = new EchoBatchRowIterator($dbr, 'flow_revision', array('rev_id'), 10);
$it->addConditions(array('rev_user_wiki' => wfWikiId()));
$it->setFetchColumns(array('rev_content', 'rev_change_type', 'rev_parent_id'));
$totalNullContentWithParent = 0;
$totalNullParentContent = 0;
$totalBadQueryResult = 0;
$totalMatched = 0;
foreach ($it as $batch) {
    foreach ($batch as $rev) {
        $item = ExternalStore::fetchFromURL($rev->rev_content);
        if ($item) {
            // contains valid data
            continue;
        }
        $changeType = $rev->rev_change_type;
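        // Entries in $wgFlowActions can be string aliases for another action; follow them to the canonical change type.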
        while (is_string($wgFlowActions[$changeType])) {
            $changeType = $wgFlowActions[$changeType];
        }
        if (!in_array($changeType, $moderationChangeTypes)) {
            // doesn't inherit content
<?php

$c = new Flow\Container();
// MediaWiki core objects
if (defined('RUN_MAINTENANCE_IF_MAIN')) {
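    // Maintenance scripts get a fresh User rather than the global request user.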
    $c['user'] = new User();
} else {
    $c['user'] = isset($GLOBALS['wgUser']) ? $GLOBALS['wgUser'] : new User();
}
$c['output'] = $GLOBALS['wgOut'];
$c['request'] = $GLOBALS['wgRequest'];
$c['memcache'] = function ($c) {
    global $wgFlowUseMemcache, $wgMemc;
    if ($wgFlowUseMemcache) {
        return $wgMemc;
    } else {
        return new \HashBagOStuff();
    }
};
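// Bumping $wgFlowCacheVersion invalidates previously cached Flow data.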
$c['cache.version'] = $GLOBALS['wgFlowCacheVersion'];
// Flow config
$c['flow_actions'] = function ($c) {
    global $wgFlowActions;
    return new Flow\FlowActions($wgFlowActions);
};
// Always returns the correct database for flow storage
$c['db.factory'] = function ($c) {
    global $wgFlowDefaultWikiDb, $wgFlowCluster;
    return new Flow\DbFactory($wgFlowDefaultWikiDb, $wgFlowCluster);
};
// Database access layer, kept separate from the main implementation
<?php

require_once "{$IP}/maintenance/commandLine.inc";
require_once "{$IP}/extensions/Flow/FlowActions.php";
$moderationChangeTypes = array('hide-post', 'hide-topic', 'delete-post', 'delete-topic', 'suppress-post', 'suppress-topic', 'lock-topic', 'restore-post', 'restore-topic');
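// Change types whose content (the topic title) is stored as plain text rather than parsed HTML.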
$plaintextChangeTypes = array('edit-title', 'new-topic');
$csvOutput = fopen('repair_results_' . wfWikiId() . '.csv', 'w');
if (!$csvOutput) {
    die("Could not open results file\n");
}
fputcsv($csvOutput, array("uuid", "esurl", "flags"));
$it = new EchoBatchRowIterator(Flow\Container::get('db.factory')->getDB(DB_SLAVE), 'flow_revision', array('rev_id'), 10);
$it->addConditions(array('rev_user_wiki' => wfWikiId()));
$it->setFetchColumns(array('rev_content', 'rev_content_length', 'rev_change_type', 'rev_parent_id'));
$dbr = wfGetDB(DB_SLAVE);
$totalMissingConsidered = 0;
$totalCompleteMatch = 0;
$totalMultipleMatches = 0;
$totalResolvedMultipleMatches = 0;
$totalNoMatch = 0;
$totalNoChangeRevisions = 0;
$totalMatchButInvalid = 0;
foreach ($it as $batch) {
    foreach ($batch as $rev) {
        $item = ExternalStore::fetchFromURL($rev->rev_content);
        if ($item) {
            // contains valid data
            continue;
        }
        ++$totalMissingConsidered;
        $uuid = Flow\Model\UUID::create($rev->rev_id);
<?php

require_once "{$IP}/maintenance/commandLine.inc";
require_once "{$IP}/extensions/Flow/FlowActions.php";
if (!isset($argv[1])) {
    die("Usage: {$argv[0]} <csv>\n\n");
}
if (!is_file($argv[1])) {
    die('Provided CSV file does not exist');
}
$csv = fopen($argv[1], "r");
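// The CSV must carry the header written by the repair scripts above (uuid, esurl, flags).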
if (fgetcsv($csv) !== array('uuid', 'esurl', 'flags')) {
    die('Provided CSV file does not have the expected header');
}
$fixed = 0;
$dbw = Flow\Container::get('db.factory')->getDB(DB_MASTER);
while ($row = fgetcsv($csv)) {
    if (count($row) !== 3) {
        var_dump($row);
        die('All rows in CSV file must have 3 entries');
    }
    list($uuid, $esUrl, $flags) = $row;
    if (!$uuid || !$esUrl || !$flags) {
        var_dump($row);
        die('All rows in CSV file must have a uuid, an external store url and flags');
    }
    $uuid = Flow\Model\UUID::create($uuid);
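    // Point the revision at the recovered external store URL and restore its content flags.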
    $dbw->update('flow_revision', array('rev_content' => $esUrl, 'rev_flags' => $flags), array('rev_id' => $uuid->getBinary()));
    ++$fixed;
}
echo "Updated {$fixed} revisions\n\n";