/**
 * Migrate the 'narayamDisable' user preference to 'narayamEnable' (with the
 * value inverted to 0) in batches of $this->mBatchSize, invalidating each
 * affected user's cache and waiting for replication between batches.
 */
function execute() {
	$dbw = wfGetDB( DB_MASTER );
	$table = 'user_properties';
	$oldPropName = 'narayamDisable';
	$newPropName = 'narayamEnable';
	$this->output( "Changing {$oldPropName} to {$newPropName}\n" );

	while ( true ) {
		// FOR UPDATE needs an open transaction to hold the row locks.
		$dbw->begin();
		$res = $dbw->select(
			$table,
			array( 'up_user' ),
			array( 'up_property' => $oldPropName, 'up_value' => 1 ),
			__METHOD__,
			array( 'LIMIT' => $this->mBatchSize, 'FOR UPDATE' )
		);
		if ( !$res->numRows() ) {
			// Bug fix: close the transaction opened above instead of
			// breaking out of the loop with it still dangling.
			$dbw->commit();
			break;
		}

		$ids = array();
		foreach ( $res as $row ) {
			$ids[] = $row->up_user;
		}

		$dbw->update(
			$table,
			array( 'up_property' => $newPropName, 'up_value' => 0 ),
			array( 'up_property' => $oldPropName, 'up_user' => $ids ),
			__METHOD__
		);
		$dbw->commit();

		// Purge cached preferences for every migrated user.
		foreach ( $ids as $id ) {
			$user = User::newFromID( $id );
			if ( $user ) {
				$user->invalidateCache();
			}
		}
		wfWaitForSlaves( 10 );
	}
	$this->output( "Old preference {$oldPropName} was migrated to {$newPropName}\n" );
}
/**
 * Process pending Echo email batches for all users that have notifications
 * waiting, walking user ids upward in batches of $this->batchSize.
 */
public function execute() {
	global $wgEchoCluster;

	$this->output( "Started processing... \n" );

	$lastUserId = 0;
	$rowsSeen = $this->batchSize;

	// A short batch means we have reached the end of the user list.
	while ( $rowsSeen === $this->batchSize ) {
		$rowsSeen = 0;
		$advanced = false;

		$res = MWEchoEmailBatch::getUsersToNotify( $lastUserId, $this->batchSize );
		foreach ( $res as $row ) {
			$rowsSeen++;
			$userId = intval( $row->eeb_user_id );
			if ( !$userId || $userId <= $lastUserId ) {
				continue;
			}
			$emailBatch = MWEchoEmailBatch::newFromUserId( $userId );
			if ( $emailBatch ) {
				$this->output( "processing user_Id " . $userId . " \n" );
				$emailBatch->process();
			}
			$lastUserId = $userId;
			$advanced = true;
		}

		wfWaitForSlaves( false, false, $wgEchoCluster );
		// This is required since we are updating user properties in main wikidb
		wfWaitForSlaves();

		// double check to make sure that the id is updated
		if ( !$advanced ) {
			break;
		}
	}

	$this->output( "Completed \n" );
}
/**
 * Unsuppress the username on revisions of a hard-coded CentralAuth user
 * across every wiki where the account is attached.
 */
public function execute() {
	$userName = '******'; // <- targer username
	$user = new CentralAuthUser( $userName );
	if ( !$user->exists() ) {
		echo "Cannot unsuppress non-existent user {$userName}!\n";
		exit( 0 );
	}
	$userName = $user->getName(); // sanity

	// Walk every wiki with an attached local account.
	foreach ( $user->listAttached() as $wiki ) {
		$lb = wfGetLB( $wiki );
		$dbw = $lb->getConnection( DB_MASTER, array(), $wiki );

		# Get local ID like $user->localUserData( $wiki ) does
		$localUserId = $dbw->selectField(
			'user', 'user_id', array( 'user_name' => $userName ), __METHOD__
		);

		$delUserBit = Revision::DELETED_USER;
		$hiddenCount = $dbw->selectField(
			'revision',
			'COUNT(*)',
			array( 'rev_user' => $localUserId, "rev_deleted & $delUserBit != 0" ),
			__METHOD__
		);
		echo "$hiddenCount edits have the username hidden on \"$wiki\"\n";

		# Unsuppress username on edits
		if ( $hiddenCount > 0 ) {
			echo "Unsuppressed edits of attached account (local id $localUserId) on \"$wiki\"...";
			IPBlockForm::unsuppressUserName( $userName, $localUserId, $dbw );
			echo "done!\n\n";
		}

		$lb->reuseConnection( $dbw ); // not really needed

		# Don't lag too bad
		wfWaitForSlaves( 5 );
	}
}
/**
 * Populate logging.log_user_text from the user table in batches, then
 * record completion in the updatelog table.
 *
 * @return bool True when the population (and updatelog marker) succeeded
 */
public function execute() {
	$db = wfGetDB( DB_MASTER );
	$start = $db->selectField( 'logging', 'MIN(log_id)', false, __METHOD__ );
	if ( !$start ) {
		$this->output( "Nothing to do.\n" );
		return true;
	}
	$end = $db->selectField( 'logging', 'MAX(log_id)', false, __METHOD__ );

	# Do remaining chunk
	$end += $this->mBatchSize - 1;
	$blockStart = $start;
	$blockEnd = $start + $this->mBatchSize - 1;
	while ( $blockEnd <= $end ) {
		$this->output( "...doing log_id from {$blockStart} to {$blockEnd}\n" );
		$cond = "log_id BETWEEN {$blockStart} AND {$blockEnd} AND log_user = user_id";
		$res = $db->select(
			array( 'logging', 'user' ),
			array( 'log_id', 'user_name' ),
			$cond,
			__METHOD__
		);

		// Consistency fix: pass __METHOD__ to begin()/commit(), matching
		// the doDBUpdates() variant of this same loop in this file.
		$db->begin( __METHOD__ );
		foreach ( $res as $row ) {
			$db->update(
				'logging',
				array( 'log_user_text' => $row->user_name ),
				array( 'log_id' => $row->log_id ),
				__METHOD__
			);
		}
		$db->commit( __METHOD__ );

		$blockStart += $this->mBatchSize;
		$blockEnd += $this->mBatchSize;
		wfWaitForSlaves( 5 );
	}

	// Mark this one-time update as done (IGNORE: marker may already exist).
	if ( $db->insert( 'updatelog', array( 'ul_key' => 'populate log_usertext' ), __METHOD__, 'IGNORE' ) ) {
		$this->output( "log_usertext population complete.\n" );
		return true;
	} else {
		$this->output( "Could not insert log_usertext population row.\n" );
		return false;
	}
}
/**
 * Populate logging.log_user_text from the user table, one id-range batch
 * per transaction.
 *
 * @return bool Always true (done)
 */
protected function doDBUpdates() {
	$db = $this->getDB( DB_MASTER );
	$minId = $db->selectField( 'logging', 'MIN(log_id)', false, __METHOD__ );
	if ( !$minId ) {
		$this->output( "Nothing to do.\n" );
		return true;
	}
	$maxId = $db->selectField( 'logging', 'MAX(log_id)', false, __METHOD__ );

	# Do remaining chunk
	$maxId += $this->mBatchSize - 1;

	for ( $blockStart = $minId, $blockEnd = $minId + $this->mBatchSize - 1;
		$blockEnd <= $maxId;
		$blockStart += $this->mBatchSize, $blockEnd += $this->mBatchSize
	) {
		$this->output( "...doing log_id from {$blockStart} to {$blockEnd}\n" );
		$rows = $db->select(
			array( 'logging', 'user' ),
			array( 'log_id', 'user_name' ),
			"log_id BETWEEN {$blockStart} AND {$blockEnd} AND log_user = user_id",
			__METHOD__
		);
		$db->begin( __METHOD__ );
		foreach ( $rows as $logRow ) {
			$db->update(
				'logging',
				array( 'log_user_text' => $logRow->user_name ),
				array( 'log_id' => $logRow->log_id ),
				__METHOD__
			);
		}
		$db->commit( __METHOD__ );
		wfWaitForSlaves();
	}
	$this->output( "Done populating log_user_text field.\n" );
	return true;
}
/**
 * Delete rows of $table whose $ts_column is older than $maxAge seconds,
 * one timestamp-bounded batch per transaction.
 *
 * @param string $table
 * @param string $ts_column Timestamp column to compare against
 * @param int $maxAge Maximum row age in seconds
 * @return int Number of rows deleted
 */
protected function prune( $table, $ts_column, $maxAge ) {
	$dbw = wfGetDB( DB_MASTER );
	$expiredCond = "{$ts_column} < " .
		$dbw->addQuotes( $dbw->timestamp( time() - $maxAge ) );

	$pruned = 0;
	while ( true ) {
		// Get the first $this->mBatchSize (or less) items
		$res = $dbw->select(
			$table,
			$ts_column,
			$expiredCond,
			__METHOD__,
			array( 'ORDER BY' => "{$ts_column} ASC", 'LIMIT' => $this->mBatchSize )
		);
		if ( !$res->numRows() ) {
			break; // all cleared
		}

		// The first and last timestamps of the batch bound the delete range.
		$batchFirst = $dbw->addQuotes( $res->fetchObject()->{$ts_column} );
		$res->seek( $res->numRows() - 1 );
		$batchLast = $dbw->addQuotes( $res->fetchObject()->{$ts_column} );
		$res->free();

		// Do the actual delete...
		$dbw->begin();
		$dbw->delete(
			$table,
			array( "{$ts_column} BETWEEN {$batchFirst} AND {$batchLast}" ),
			__METHOD__
		);
		$pruned += $dbw->affectedRows();
		$dbw->commit();
		wfWaitForSlaves();
	}
	return $pruned;
}
/**
 * Purge rows from module_deps and msg_resource that refer to modules no
 * longer registered with ResourceLoader, deleting in batches of
 * --batchsize (default 500).
 */
public function execute() {
	$dbw = wfGetDB( DB_MASTER );
	$rl = new ResourceLoader( ConfigFactory::getDefaultInstance()->makeConfig( 'main' ) );
	$moduleNames = $rl->getModuleNames();
	// SQL list of quoted module names, '' when nothing is registered.
	$moduleList = implode( ', ', array_map( array( $dbw, 'addQuotes' ), $moduleNames ) );
	$limit = max( 1, intval( $this->getOption( 'batchsize', 500 ) ) );

	// Refactor: the two tables were handled by byte-identical copy-pasted
	// loops; factored into one helper.
	$this->output( "Cleaning up module_deps table...\n" );
	$this->purgeUnknownModules( $dbw, 'module_deps', 'md_module', $moduleList, $limit );

	$this->output( "Cleaning up msg_resource table...\n" );
	$this->purgeUnknownModules( $dbw, 'msg_resource', 'mr_resource', $moduleList, $limit );
}

/**
 * Delete rows of $table whose $column is not in $moduleList, $limit rows
 * per batch, waiting for slaves between batches.
 *
 * @param DatabaseBase $dbw Master connection
 * @param string $table Unprefixed table name
 * @param string $column Module-name column of that table
 * @param string $moduleList SQL fragment listing quoted module names;
 *   empty string means "no modules registered" and every row is purged
 * @param int $limit Maximum rows per DELETE batch
 */
private function purgeUnknownModules( $dbw, $table, $column, $moduleList, $limit ) {
	$tableName = $dbw->tableName( $table );
	$i = 1;
	do {
		// $dbw->delete() doesn't support LIMIT :(
		$where = $moduleList ? "{$column} NOT IN ({$moduleList})" : '1=1';
		$dbw->query( "DELETE FROM {$tableName} WHERE {$where} LIMIT {$limit}", __METHOD__ );
		$numRows = $dbw->affectedRows();
		$this->output( "Batch {$i}: {$numRows} rows\n" );
		$i++;
		wfWaitForSlaves();
	} while ( $numRows > 0 );
	$this->output( "done\n" );
}
/**
 * Refresh the links of every page holding a Template-namespace link whose
 * title starts with $synonym.
 *
 * NOTE(review): loop progress depends on RefreshLinks::fixLinksFromArticle()
 * removing the matching templatelinks rows; a page that still links to the
 * synonym after a refresh would be re-selected on the next pass.
 *
 * @param string $synonym Template title prefix to match
 */
public function processSynonym( $synonym ) {
	$dbr = wfGetDB( DB_SLAVE );
	$pCount = 0; // pages actually refreshed
	$vCount = 0; // templatelinks rows visited (may count a page repeatedly)
	$this->output( "Fixing pages with template links to $synonym ...\n" );
	while ( true ) {
		// One batch of prefix-matching links, in (title, from) order.
		$res = $dbr->select(
			'templatelinks',
			array( 'tl_title', 'tl_from' ),
			array(
				'tl_namespace' => NS_TEMPLATE,
				'tl_title ' . $dbr->buildLike( $synonym, $dbr->anyString() )
			),
			__METHOD__,
			array(
				'ORDER BY' => array( 'tl_title', 'tl_from' ),
				'LIMIT' => $this->batchsize
			)
		);
		if ( $dbr->numRows( $res ) == 0 ) {
			// No more rows, we're done
			break;
		}
		// Deduplication map is per batch only; it is reset each iteration.
		$processed = array();
		foreach ( $res as $row ) {
			$vCount++;
			if ( isset( $processed[$row->tl_from] ) ) {
				// We've already processed this page, skip it
				continue;
			}
			RefreshLinks::fixLinksFromArticle( $row->tl_from );
			$processed[$row->tl_from] = true;
			$pCount++;
		}
		$this->output( "{$pCount}/{$vCount} pages processed\n" );
		wfWaitForSlaves();
	}
}
/**
 * Bulk-load rows from a CSV file into the import target table.
 *
 * The file must begin with the exact header
 * "pid1,qid1,pid2,count,probability,context". Rows are accumulated and
 * flushed roughly every $batchSize rows.
 *
 * NOTE(review): the flush fires when the row counter hits a multiple of the
 * batch size *before* the current row is appended, so real insert batches
 * hold batchSize-1 rows after the first flush; every row is still inserted.
 * Also note the commit precedes the insert of the current accumulator.
 *
 * @param resource $fileHandle Open handle positioned at the CSV header
 * @param DatabaseBase $db
 * @param ImportContext $importContext
 * @throws UnexpectedValueException If the CSV header does not match
 */
private function doImport( $fileHandle, DatabaseBase $db, ImportContext $importContext ) {
	$accumulator = array();
	$batchSize = $importContext->getBatchSize();
	$i = 0;
	$header = fgetcsv( $fileHandle, 0, $importContext->getCsvDelimiter() ); //this is to get the csv-header
	$expectedHeader = array( 'pid1', 'qid1', 'pid2', 'count', 'probability', 'context' );
	if ( $header != $expectedHeader ) {
		throw new UnexpectedValueException( "provided csv-file does not match the expected format:\n" .
			join( ',', $expectedHeader ) );
	}
	while ( true ) {
		$data = fgetcsv( $fileHandle, 0, $importContext->getCsvDelimiter() );
		// Flush on end-of-file or when the row counter wraps a batch.
		if ( $data == false || ++$i % $batchSize == 0 ) {
			$db->commit( __METHOD__, 'flush' );
			wfWaitForSlaves();
			$db->insert( $importContext->getTargetTableName(), $accumulator );
			if ( !$importContext->isQuiet() ) {
				print "{$i} rows inserted\n";
			}
			$accumulator = array();
			if ( $data == false ) {
				// End of file: remaining rows flushed above, stop.
				break;
			}
		}
		// Non-numeric qid1 values are normalised to 0.
		$qid1 = is_numeric( $data[1] ) ? $data[1] : 0;
		$accumulator[] = array(
			'pid1' => $data[0],
			'qid1' => $qid1,
			'pid2' => $data[2],
			'count' => $data[3],
			'probability' => $data[4],
			'context' => $data[5]
		);
	}
}
/**
 * Populate the *_from_namespace columns of pagelinks, templatelinks and
 * imagelinks from page.page_namespace, scanning page_id in batches.
 *
 * @return bool True when done, false when the page table is empty
 */
public function doDBUpdates() {
	$force = $this->getOption( 'force' );
	$db = $this->getDB( DB_MASTER );
	$this->output( "Updating *_from_namespace fields in links tables.\n" );

	// Optional continuation point supplied by the caller.
	$start = $this->getOption( 'lastUpdatedId' );
	if ( !$start ) {
		$start = $db->selectField( 'page', 'MIN(page_id)', false, __METHOD__ );
	}
	if ( !$start ) {
		$this->output( "Nothing to do." );
		return false;
	}
	$end = $db->selectField( 'page', 'MAX(page_id)', false, __METHOD__ );

	# Do remaining chunk
	$end += $this->mBatchSize - 1;
	$blockStart = $start;
	$blockEnd = $start + $this->mBatchSize - 1;

	while ( $blockEnd <= $end ) {
		$this->output( "...doing page_id from {$blockStart} to {$blockEnd}\n" );
		$cond = "page_id BETWEEN {$blockStart} AND {$blockEnd}";
		$res = $db->select( 'page', array( 'page_id', 'page_namespace' ), $cond, __METHOD__ );
		foreach ( $res as $row ) {
			$db->update(
				'pagelinks',
				array( 'pl_from_namespace' => $row->page_namespace ),
				array( 'pl_from' => $row->page_id ),
				__METHOD__
			);
			$db->update(
				'templatelinks',
				array( 'tl_from_namespace' => $row->page_namespace ),
				array( 'tl_from' => $row->page_id ),
				__METHOD__
			);
			$db->update(
				'imagelinks',
				array( 'il_from_namespace' => $row->page_namespace ),
				array( 'il_from' => $row->page_id ),
				__METHOD__
			);
		}
		// Bug fix: step by a full batch. The previous "+= mBatchSize - 1"
		// made consecutive BETWEEN ranges overlap by one page_id, so the
		// boundary row was re-updated every iteration (the other batch
		// loops in this file step by mBatchSize).
		$blockStart += $this->mBatchSize;
		$blockEnd += $this->mBatchSize;
		wfWaitForSlaves();
	}
	return true;
}
/**
 * Move every user from one user group to another, updating user_groups in
 * id-range batches. Arguments: <oldGroup> <newGroup>.
 */
public function execute() {
	$count = 0;
	$oldGroup = $this->getArg( 0 );
	$newGroup = $this->getArg( 1 );
	$dbw = wfGetDB( DB_MASTER );

	// Consistency fix: use __METHOD__ as the fname everywhere (the
	// original mixed __FUNCTION__ and omitted it entirely on update()).
	$start = $dbw->selectField(
		'user_groups', 'MIN(ug_user)',
		array( 'ug_group' => $oldGroup ), __METHOD__
	);
	$end = $dbw->selectField(
		'user_groups', 'MAX(ug_user)',
		array( 'ug_group' => $oldGroup ), __METHOD__
	);
	if ( $start === null ) {
		$this->error( "Nothing to do - no users in the '{$oldGroup}' group", true );
	}

	# Do remaining chunk
	$end += $this->mBatchSize - 1;
	$blockStart = $start;
	$blockEnd = $start + $this->mBatchSize - 1;

	// Migrate users over in batches...
	while ( $blockEnd <= $end ) {
		$this->output( "Doing users {$blockStart} to {$blockEnd}\n" );
		$dbw->begin();
		$dbw->update(
			'user_groups',
			array( 'ug_group' => $newGroup ),
			array(
				'ug_group' => $oldGroup,
				"ug_user BETWEEN {$blockStart} AND {$blockEnd}"
			),
			__METHOD__ // fix: fname was missing from this call
		);
		$count += $dbw->affectedRows();
		$dbw->commit();
		$blockStart += $this->mBatchSize;
		$blockEnd += $this->mBatchSize;
		wfWaitForSlaves( 5 );
	}
	$this->output( "Done! {$count} user(s) in group '{$oldGroup}' are now in '{$newGroup}' instead.\n" );
}
/**
 * Move rows 1..$maxID of the text table into external storage on $cluster,
 * rewriting old_text to the storage URL and appending the "external" flag.
 *
 * Rows already flagged "external" are skipped via the NOT LIKE condition;
 * serialized stub objects (STUB_HEADER prefix) are handed to resolveStub()
 * instead of being moved.
 *
 * @param string $cluster External storage cluster name
 * @param int $maxID Highest old_id to consider
 */
function moveToExternal( $cluster, $maxID ) {
	$fname = 'moveToExternal';
	// Legacy reference assignment (pre-5.x idiom); harmless today.
	$dbw =& wfGetDB( DB_MASTER );
	print "Moving {$maxID} text rows to external storage\n";
	$ext = new ExternalStoreDB();
	for ( $id = 1; $id <= $maxID; $id++ ) {
		// Periodic progress output + replication-lag throttle.
		if ( !( $id % REPORTING_INTERVAL ) ) {
			print "{$id}\n";
			wfWaitForSlaves( 5 );
		}
		$row = $dbw->selectRow(
			'text',
			array( 'old_flags', 'old_text' ),
			array( 'old_id' => $id, "old_flags NOT LIKE '%external%'" ),
			$fname
		);
		if ( !$row ) {
			# Non-existent or already done
			continue;
		}
		# Resolve stubs
		$flags = explode( ',', $row->old_flags );
		if ( in_array( 'object', $flags )
			&& substr( $row->old_text, 0, strlen( STUB_HEADER ) ) === STUB_HEADER
		) {
			resolveStub( $id, $row->old_text, $row->old_flags );
			continue;
		}
		$url = $ext->store( $cluster, $row->old_text );
		if ( !$url ) {
			print "Error writing to external storage\n";
			exit;
		}
		// Append the "external" flag to whatever flags the row had.
		if ( $row->old_flags === '' ) {
			$flags = 'external';
		} else {
			$flags = "{$row->old_flags},external";
		}
		$dbw->update(
			'text',
			array( 'old_flags' => $flags, 'old_text' => $url ),
			array( 'old_id' => $id ),
			$fname
		);
	}
}
/**
 * Scan the user table for syntactically invalid email addresses on
 * unconfirmed accounts and (with --commit) blank them out.
 */
public function execute() {
	$this->commit = $this->hasOption( 'commit' );
	$dbr = $this->getDB( DB_SLAVE );
	$dbw = $this->getDB( DB_MASTER );

	$lastId = 0;
	do {
		// Next batch of unconfirmed accounts with a non-empty email.
		$res = $dbr->select(
			'user',
			array( 'user_id', 'user_email' ),
			array(
				'user_id > ' . $dbr->addQuotes( $lastId ),
				'user_email != ""',
				'user_email_authenticated IS NULL'
			),
			__METHOD__,
			array( 'LIMIT' => $this->mBatchSize )
		);
		$rowsSeen = $res->numRows();

		$badIds = array();
		foreach ( $res as $userRow ) {
			if ( !Sanitizer::validateEmail( trim( $userRow->user_email ) ) ) {
				$this->output( "Found bad email: {$userRow->user_email} for user #{$userRow->user_id}\n" );
				$badIds[] = $userRow->user_id;
			}
			if ( $userRow->user_id > $lastId ) {
				$lastId = $userRow->user_id;
			}
		}

		if ( $badIds ) {
			$badCount = count( $badIds );
			if ( $this->commit ) {
				$this->output( "Removing {$badCount} emails from the database.\n" );
				$dbw->update(
					'user',
					array( 'user_email' => '' ),
					array( 'user_id' => $badIds ),
					__METHOD__
				);
				foreach ( $badIds as $badId ) {
					User::newFromId( $badId )->invalidateCache();
				}
				wfWaitForSlaves();
			} else {
				$this->output( "Would have removed {$badCount} emails from the database.\n" );
			}
		}
	} while ( $rowsSeen !== 0 );
	$this->output( "Done.\n" );
}
/**
 * Backfill recentchanges.rc_source for rows where it is still empty, in
 * rc_id ranges of mBatchSize, using the SET fragment produced by
 * buildUpdateCondition().
 *
 * @return bool True when done, false when the rc_source column is missing
 */
protected function doDBUpdates() {
	$dbw = $this->getDB( DB_MASTER );
	if ( !$dbw->fieldExists( 'recentchanges', 'rc_source' ) ) {
		$this->error( 'rc_source field in recentchanges table does not exist.' );
		// Bug fix: abort here. error() alone does not stop execution, so
		// the batch loop used to run against the missing column.
		return false;
	}

	$start = $dbw->selectField( 'recentchanges', 'MIN(rc_id)', false, __METHOD__ );
	if ( !$start ) {
		$this->output( "Nothing to do.\n" );
		return true;
	}
	$end = $dbw->selectField( 'recentchanges', 'MAX(rc_id)', false, __METHOD__ );
	$end += $this->mBatchSize - 1;
	$blockStart = $start;
	$blockEnd = $start + $this->mBatchSize - 1;

	// Raw "SET ..." SQL fragment; passed as a literal array element below.
	$updatedValues = $this->buildUpdateCondition( $dbw );

	while ( $blockEnd <= $end ) {
		// Fix: $cond was previously computed but never used; the range was
		// duplicated inline in the update call.
		$cond = "rc_id BETWEEN {$blockStart} AND {$blockEnd}";
		$dbw->update(
			'recentchanges',
			[ $updatedValues ],
			[ "rc_source = ''", $cond ],
			__METHOD__
		);
		$this->output( "." );
		wfWaitForSlaves();
		$blockStart += $this->mBatchSize;
		$blockEnd += $this->mBatchSize;
	}
	$this->output( "\nDone.\n" );
}
/**
 * Delete all MediaWiki-namespace pages whose latest revision was made by
 * 'MediaWiki default'. The deleting user is temporarily added to the bot
 * group so the deletions are hidden in RecentChanges.
 */
public function execute() {
	$user = '******';
	$reason = 'No longer required';

	$this->output( "Checking existence of old default messages..." );
	$dbr = wfGetDB( DB_SLAVE );
	$res = $dbr->select(
		array( 'page', 'revision' ),
		array( 'page_namespace', 'page_title' ),
		array(
			'page_namespace' => NS_MEDIAWIKI,
			'page_latest=rev_id',
			'rev_user_text' => 'MediaWiki default'
		)
	);

	if ( $dbr->numRows( $res ) == 0 ) {
		# No more messages left
		$this->output( "done.\n" );
		return;
	}

	# Deletions will be made by $user temporarly added to the bot group
	# in order to hide it in RecentChanges.
	global $wgUser;
	$wgUser = User::newFromName( $user );
	// Bug fix: newFromName() returns false for an invalid name; the old
	// code called addGroup() on it unconditionally and fataled.
	if ( !$wgUser ) {
		$this->error( "Invalid username", true );
	}
	$wgUser->addGroup( 'bot' );

	# Handle deletion
	$this->output( "\n...deleting old default messages (this may take a long time!)...", 'msg' );
	$dbw = wfGetDB( DB_MASTER );

	foreach ( $res as $row ) {
		if ( function_exists( 'wfWaitForSlaves' ) ) {
			wfWaitForSlaves( 5 );
		}
		$dbw->ping();
		$title = Title::makeTitle( $row->page_namespace, $row->page_title );
		$article = new Article( $title );
		// Delete each page in its own transaction.
		$dbw->begin();
		$article->doDeleteArticle( $reason );
		$dbw->commit();
	}
	$this->output( 'done!', 'msg' );
}
/**
 * Delete all MediaWiki-namespace pages whose latest revision was made by
 * 'MediaWiki default'. The deleting user is temporarily added to the bot
 * group so the deletions stay out of RecentChanges.
 */
public function execute() {
	global $wgUser;

	$this->output( "Checking existence of old default messages..." );
	$dbr = $this->getDB( DB_REPLICA );
	$res = $dbr->select(
		[ 'page', 'revision' ],
		[ 'page_namespace', 'page_title' ],
		[
			'page_namespace' => NS_MEDIAWIKI,
			'page_latest=rev_id',
			'rev_user_text' => 'MediaWiki default'
		]
	);

	if ( $dbr->numRows( $res ) == 0 ) {
		# No more messages left
		$this->output( "done.\n" );
		return;
	}

	# Deletions will be made by $user temporarly added to the bot group
	# in order to hide it in RecentChanges.
	$deleter = User::newFromName( 'MediaWiki default' );
	if ( !$deleter ) {
		$this->error( "Invalid username", true );
	}
	$deleter->addGroup( 'bot' );
	$wgUser = $deleter;

	# Handle deletion
	$this->output( "\n...deleting old default messages (this may take a long time!)...", 'msg' );
	$dbw = $this->getDB( DB_MASTER );

	foreach ( $res as $row ) {
		wfWaitForSlaves();
		$dbw->ping();
		$title = Title::makeTitle( $row->page_namespace, $row->page_title );
		$page = WikiPage::factory( $title );
		$error = ''; // Passed by ref
		// FIXME: Deletion failures should be reported, not silently ignored.
		$page->doDeleteArticle( 'No longer required', false, 0, true, $error, $deleter );
	}
	$this->output( "done!\n", 'msg' );
}
/**
 * Delete echo_event and echo_notification rows whose event_type is in the
 * configured invalid set, in batches of $this->batchSize.
 */
public function execute() {
	if ( !$this->invalidEventType ) {
		$this->output( "There is nothing to process\n" );
		return;
	}

	global $wgEchoCluster;
	$dbw = MWEchoDbFactory::getDB( DB_MASTER );
	$dbr = MWEchoDbFactory::getDB( DB_SLAVE );

	$rowsSeen = $this->batchSize;
	// A short batch means no invalid events remain on the replica.
	while ( $rowsSeen == $this->batchSize ) {
		$res = $dbr->select(
			array( 'echo_event' ),
			array( 'event_id' ),
			array( 'event_type' => $this->invalidEventType ),
			__METHOD__,
			array( 'LIMIT' => $this->batchSize )
		);

		$rowsSeen = 0;
		$eventIds = array();
		foreach ( $res as $row ) {
			if ( !in_array( $row->event_id, $eventIds ) ) {
				$eventIds[] = $row->event_id;
			}
			$rowsSeen++;
		}

		if ( $eventIds ) {
			// Remove the events and their notifications atomically.
			$dbw->begin();
			$dbw->delete( 'echo_event', array( 'event_id' => $eventIds ), __METHOD__ );
			$dbw->delete( 'echo_notification', array( 'notification_event' => $eventIds ), __METHOD__ );
			$dbw->commit();

			$this->output( "processing " . count( $eventIds ) . " invalid events\n" );
			wfWaitForSlaves( false, false, $wgEchoCluster );
		}

		// Cleanup is not necessary for
		// 1. echo_email_batch, invalid notification is removed during the cron
	}
}
/**
 * Backfill user.user_registration with each user's first edit timestamp
 * for rows where it is NULL, paging by user_id.
 */
public function execute() {
	$dbw = $this->getDB( DB_MASTER );
	$lastId = 0;
	do {
		// Get user IDs which need fixing
		$res = $dbw->select(
			'user',
			'user_id',
			array(
				'user_id > ' . $dbw->addQuotes( $lastId ),
				'user_registration IS NULL'
			),
			__METHOD__,
			array( 'LIMIT' => $this->mBatchSize, 'ORDER BY' => 'user_id' )
		);
		foreach ( $res as $row ) {
			$userId = $row->user_id;
			$lastId = $userId;
			// Get first edit time
			$firstEdit = $dbw->selectField(
				'revision',
				'MIN(rev_timestamp)',
				array( 'rev_user' => $userId ),
				__METHOD__
			);
			// Update
			if ( $firstEdit !== null ) {
				$dbw->update(
					'user',
					array( 'user_registration' => $firstEdit ),
					array( 'user_id' => $userId ),
					__METHOD__
				);
				$user = User::newFromId( $userId );
				$user->invalidateCache();
				$this->output( "Set registration for #{$userId} to {$firstEdit}\n" );
			} else {
				$this->output( "Could not find registration for #{$userId} NULL\n" );
			}
		}
		$this->output( "Waiting for slaves..." );
		wfWaitForSlaves();
		$this->output( " done.\n" );
	} while ( $res->numRows() >= $this->mBatchSize );
}
/**
 * Reset user_token for every user (or only users with all-NUL tokens when
 * --nulls is given), walking user_id in fixed-size windows.
 */
public function execute() {
	$this->nullsOnly = $this->getOption( 'nulls' );

	if ( !$this->getOption( 'nowarn' ) ) {
		// Give the operator a chance to bail out before a destructive run.
		if ( $this->nullsOnly ) {
			$this->output( "The script is about to reset the user_token " .
				"for USERS WITH NULL TOKENS in the database.\n" );
		} else {
			$this->output( "The script is about to reset the user_token for ALL USERS in the database.\n" );
			$this->output( "This may log some of them out and is not necessary unless you believe your\n" );
			$this->output( "user table has been compromised.\n" );
		}
		$this->output( "\n" );
		$this->output( "Abort with control-c in the next five seconds " .
			"(skip this countdown with --nowarn) ... " );
		wfCountDown( 5 );
	}

	// We list user by user_id from one of the slave database
	$dbr = $this->getDB( DB_SLAVE );

	$conds = array();
	if ( $this->nullsOnly ) {
		// Have to build this by hand, because \ is escaped in helper functions
		$conds = array( 'user_token = \'' . str_repeat( '\\0', 32 ) . '\'' );
	}

	$maxid = $dbr->selectField( 'user', 'MAX(user_id)', array(), __METHOD__ );

	$min = 0;
	$max = $this->mBatchSize;
	do {
		$result = $dbr->select(
			'user',
			array( 'user_id' ),
			array_merge(
				$conds,
				array(
					'user_id > ' . $dbr->addQuotes( $min ),
					'user_id <= ' . $dbr->addQuotes( $max )
				)
			),
			__METHOD__
		);
		foreach ( $result as $row ) {
			$this->updateUser( $row->user_id );
		}
		$min = $max;
		$max = $min + $this->mBatchSize;
		wfWaitForSlaves();
	} while ( $min <= $maxid );
}
/**
 * For every logged-in user who has the 'vector-noexperiments' property set,
 * write the preference named by --pref to the value given by --value.
 */
function execute() {
	$dbw = wfGetDB( DB_MASTER );
	$batchSize = 100;
	$total = 0;
	$lastUserID = 0;
	while ( true ) {
		$res = $dbw->select(
			'user_properties',
			array( 'up_user' ),
			array(
				'up_property' => 'vector-noexperiments',
				"up_user > {$lastUserID}"
			),
			__METHOD__,
			array( 'LIMIT' => $batchSize )
		);
		if ( !$res->numRows() ) {
			$dbw->commit();
			break;
		}
		$total += $res->numRows();

		$userIds = array();
		foreach ( $res as $row ) {
			$userIds[] = $row->up_user;
		}
		// Continue after the highest id seen in this batch.
		$lastUserID = max( $userIds );

		foreach ( $userIds as $userId ) {
			$user = User::newFromId( $userId );
			if ( !$user->isLoggedIn() ) {
				continue;
			}
			$user->setOption( $this->getOption( 'pref' ), $this->getOption( 'value' ) );
			$user->saveSettings();
		}
		echo "{$total}\n";
		wfWaitForSlaves(); // Must be wfWaitForSlaves_masterPos(); on 1.17wmf1
	}
	echo "Done\n";
}
/**
 * Rebuild the titlekey table for every page, in page_id order, batches of
 * mBatchSize. --start resumes after a given page_id.
 */
function execute() {
	$start = $this->getOption( 'start', 0 );
	$this->output( "Rebuilding titlekey table...\n" );

	$dbr = $this->getDB( DB_SLAVE );
	$maxId = $dbr->selectField( 'page', 'MAX(page_id)', '', __METHOD__ );

	$lastId = 0;
	while ( $start <= $maxId ) {
		if ( $start != 0 ) {
			$this->output( "... {$start}...\n" );
		}
		$result = $dbr->select(
			'page',
			array( 'page_id', 'page_namespace', 'page_title' ),
			array( 'page_id > ' . intval( $start ) ),
			__METHOD__,
			array( 'ORDER BY' => 'page_id', 'LIMIT' => $this->mBatchSize )
		);
		if ( !$result->numRows() ) {
			// Nothing past the cursor (page_ids can be sparse near the end).
			break;
		}
		$titles = array();
		foreach ( $result as $row ) {
			$titles[$row->page_id] = Title::makeTitle( $row->page_namespace, $row->page_title );
			$lastId = $row->page_id;
		}
		$result->free();
		TitleKey::setBatchKeys( $titles );
		// Bug fix: advance the cursor to the highest page_id actually seen.
		// The old fixed stride ($start += mBatchSize) re-selected and
		// re-keyed the same rows whenever page_ids were sparse, since each
		// batch could reach far beyond $start + mBatchSize.
		$start = $lastId;
		wfWaitForSlaves( 20 );
	}
	if ( $lastId ) {
		$this->output( "... {$lastId} ok.\n" );
	} else {
		$this->output( "... no pages.\n" );
	}
}
/**
 * Convert legacy 'makebot' grant/revoke log and recentchanges entries into
 * the standard 'rights' log format.
 */
public function execute() {
	$dbw = wfGetDb( DB_MASTER );
	// Determining what groups the account was in before the change
	// would be difficult and unnecessary 99.9% of the time, so we just
	// assume the account was in no other groups
	$params = array( 'grant' => "\nbot", 'revoke' => "bot\n" );

	$logrows = $dbw->select(
		'logging',
		array( 'log_id', 'log_action' ),
		array( 'log_type' => 'makebot', 'log_action' => array( 'grant', 'revoke' ) ),
		__METHOD__
	);
	$count = $logrows->numRows();
	$this->output( "Updating {$count} entries in the logging table\n" );

	$sinceWait = 0;
	foreach ( $logrows as $row ) {
		$dbw->update(
			'logging',
			array(
				'log_action' => 'rights',
				'log_type' => 'rights',
				'log_params' => $params[$row->log_action]
			),
			array( 'log_id' => $row->log_id ),
			__METHOD__
		);
		$sinceWait++;
		// Throttle every 100 updates to keep replication lag down.
		if ( $sinceWait == 100 ) {
			wfWaitForSlaves( 5 );
			$sinceWait = 0;
		}
	}

	$rcrows = $dbw->select(
		'recentchanges',
		array( 'rc_id', 'rc_log_action' ),
		array( 'rc_log_type' => 'makebot', 'rc_log_action' => array( 'grant', 'revoke' ) ),
		__METHOD__
	);
	$count = $rcrows->numRows();
	$this->output( "Updating {$count} entries in the recentchanges table\n" );
	foreach ( $rcrows as $row ) {
		$dbw->update(
			'recentchanges',
			array(
				'rc_log_action' => 'rights',
				'rc_log_type' => 'rights',
				'rc_params' => $params[$row->rc_log_action]
			),
			array( 'rc_id' => $row->rc_id ),
			__METHOD__
		);
	}
	$this->output( "Done!\n" );
}
/**
 * Insert a batch of rows into log_search (IGNORE on duplicates), then wait
 * for replication to catch up.
 *
 * @param array $rows Rows ready for DatabaseBase::insert()
 */
private function insert( array $rows ) {
	$count = count( $rows );
	$this->output( "Inserting {$count} rows into log_search\n" );

	wfGetDB( DB_MASTER )->insert( 'log_search', $rows, __METHOD__, 'IGNORE' );
	wfWaitForSlaves();
}
/**
 * Apply a batch of row updates to $this->table inside one transaction,
 * then wait for the cluster's replicas to catch up.
 *
 * @param array $updates Array of arrays each containing two keys:
 *   'primaryKey' — map of column names to values sufficient to uniquely
 *   identify the row; 'changes' — map of column names to the new values
 *   to apply to that row.
 */
public function write( array $updates ) {
	$this->db->begin();
	foreach ( $updates as $rowUpdate ) {
		$this->db->update(
			$this->table,
			$rowUpdate['changes'],
			$rowUpdate['primaryKey'],
			__METHOD__
		);
	}
	$this->db->commit();
	wfWaitForSlaves( false, false, $this->clusterName );
}
/**
 * Fix fi_img_timestamp values in flaggedimages that are off by one second
 * relative to the actual image/oldimage upload timestamp (bug 28348),
 * scanning fi_rev_id ranges of mBatchSize.
 *
 * @param int|null $start fi_rev_id to resume from; null starts at MIN(fi_rev_id)
 */
protected function update_images_bug_28348( $start = null ) {
	$this->output( "Correcting fi_img_timestamp column in flaggedimages\n" );
	$db = wfGetDB( DB_MASTER );
	if ( $start === null ) {
		$start = $db->selectField( 'flaggedimages', 'MIN(fi_rev_id)', false, __METHOD__ );
	}
	$end = $db->selectField( 'flaggedimages', 'MAX(fi_rev_id)', false, __METHOD__ );
	if ( is_null( $start ) || is_null( $end ) ) {
		$this->output( "...flaggedimages table seems to be empty.\n" );
		return;
	}
	# Do remaining chunk
	$end += $this->mBatchSize - 1;
	$blockStart = $start;
	$blockEnd = $start + $this->mBatchSize - 1;
	$count = $changed = 0;
	while ( $blockEnd <= $end ) {
		$this->output( "...doing fi_rev_id from {$blockStart} to {$blockEnd}\n" );
		// Only rows whose (sha1, timestamp) pair matches neither image nor
		// oldimage are candidates (both LEFT JOINs came back NULL).
		$cond = "fi_rev_id BETWEEN {$blockStart} AND {$blockEnd} AND fi_img_timestamp IS NOT NULL" .
			" AND img_name IS NULL AND oi_name IS NULL"; // optimize
		$res = $db->select(
			array( 'flaggedimages', 'image', 'oldimage' ),
			'*',
			$cond,
			__FUNCTION__,
			array(),
			array(
				'image' => array( 'LEFT JOIN',
					'img_sha1 = fi_img_sha1 AND img_timestamp = fi_img_timestamp' ),
				'oldimage' => array( 'LEFT JOIN',
					'oi_sha1 = fi_img_sha1 AND oi_timestamp = fi_img_timestamp' )
			)
		);
		$db->begin();
		# Go through and clean up missing items, as well as correct fr_quality...
		foreach ( $res as $row ) {
			$count++;
			$fi_img_timestamp = trim( $row->fi_img_timestamp ); // clear pad garbage
			if ( !$fi_img_timestamp ) {
				continue; // nothing to check
			}
			$time = wfTimestamp( TS_MW, $fi_img_timestamp );
			$sha1 = $row->fi_img_sha1;
			# Check if the specified file exists...
			$file = RepoGroup::singleton()->findFileFromKey( $sha1, array( 'time' => $time ) );
			if ( !$file ) { // doesn't exist?
				// Retry with the timestamp bumped by one second.
				$time = wfTimestamp( TS_MW, wfTimestamp( TS_UNIX, $time ) + 1 );
				# Check if the fi_img_timestamp value is off by 1 second...
				$file = RepoGroup::singleton()->findFileFromKey( $sha1, array( 'time' => $time ) );
				if ( $file ) {
					$this->output( "fixed file {$row->fi_name} reference in rev ID {$row->fi_rev_id}\n" );
					# Fix the fi_img_timestamp value...
					$db->update(
						'flaggedimages',
						array( 'fi_img_timestamp' => $db->timestamp( $time ) ),
						array( 'fi_rev_id' => $row->fi_rev_id, 'fi_name' => $row->fi_name ),
						__METHOD__
					);
					$changed++;
				}
			}
		}
		$db->commit();
		$db->freeResult( $res );
		$blockStart += $this->mBatchSize;
		$blockEnd += $this->mBatchSize;
		wfWaitForSlaves( 5 );
	}
	$this->output( "fi_img_timestamp column fixes complete ... {$count} rows [{$changed} changed]\n" );
}
/**
 * Populate image.img_sha1 for rows where it is empty, or for one file when
 * --file is given. With --method=pipe the UPDATE statements are streamed
 * into a mysql child process instead of going through $dbw.
 *
 * @return bool Whether to log the update as done
 */
public function doDBUpdates() {
	$method = $this->getOption( 'method', 'normal' );
	$file = $this->getOption( 'file' );

	$t = -microtime( true ); // wall-clock timer; completed at the bottom
	$dbw = wfGetDB( DB_MASTER );
	if ( $file ) {
		$res = $dbw->select(
			'image', array( 'img_name' ), array( 'img_name' => $file ), __METHOD__ );
		if ( !$res ) {
			$this->error( "No such file: {$file}", true );
			return false;
		}
		$this->output( "Populating img_sha1 field for specified files\n" );
	} else {
		$res = $dbw->select(
			'image', array( 'img_name' ), array( 'img_sha1' => '' ), __METHOD__ );
		$this->output( "Populating img_sha1 field\n" );
	}

	$imageTable = $dbw->tableName( 'image' );

	if ( $method == 'pipe' ) {
		// @todo FIXME: Kill this and replace with a second unbuffered DB connection.
		global $wgDBuser, $wgDBserver, $wgDBpassword, $wgDBname;
		// NOTE(review): wfEscapeShellArg() joins multiple arguments with a
		// space, so this presumably yields "-p'<pass>' '<dbname>'" on the
		// command line — confirm the database name actually gets selected.
		$cmd = 'mysql -u' . wfEscapeShellArg( $wgDBuser ) .
			' -h' . wfEscapeShellArg( $wgDBserver ) .
			' -p' . wfEscapeShellArg( $wgDBpassword, $wgDBname );
		$this->output( "Using pipe method\n" );
		$pipe = popen( $cmd, 'w' );
	}

	$numRows = $res->numRows();
	$i = 0;
	foreach ( $res as $row ) {
		// Progress line + replication throttle once per batch.
		if ( $i % $this->mBatchSize == 0 ) {
			$this->output( sprintf( "Done %d of %d, %5.3f%% \r", $i, $numRows, $i / $numRows * 100 ) );
			wfWaitForSlaves();
		}
		// NOTE(review): $file (the --file option) is clobbered here with a
		// File object; see the return-value note below.
		$file = wfLocalFile( $row->img_name );
		if ( !$file ) {
			continue;
		}
		$sha1 = $file->getRepo()->getFileSha1( $file->getPath() );
		if ( strval( $sha1 ) !== '' ) {
			$sql = "UPDATE {$imageTable} SET img_sha1=" . $dbw->addQuotes( $sha1 ) .
				" WHERE img_name=" . $dbw->addQuotes( $row->img_name );
			if ( $method == 'pipe' ) {
				fwrite( $pipe, "{$sql};\n" );
			} else {
				$dbw->query( $sql, __METHOD__ );
			}
		}
		$i++;
	}
	if ( $method == 'pipe' ) {
		fflush( $pipe );
		pclose( $pipe );
	}
	$t += microtime( true );
	$this->output( sprintf( "\nDone %d files in %.1f seconds\n", $numRows, $t ) );

	// NOTE(review): because the loop above reassigns $file, this can return
	// false even when --file was not passed — verify the intended behavior.
	return !$file; // we only updated *some* files, don't log
}
/**
 * Drive refreshBatch() until it signals completion, waiting for replicas
 * between batches.
 *
 * @return bool Always true (done)
 */
protected function doDBUpdates() {
	$dbw = wfGetDB( DB_MASTER );
	// refreshBatch() returns the next continuation value, or null when done.
	for ( $continue = 0; $continue !== null; ) {
		$continue = $this->refreshBatch( $dbw, $continue );
		wfWaitForSlaves();
	}
	return true;
}
/**
 * Rebuild the job table: rename it to job_explosion_tmp, recreate a fresh
 * job table from patch-job.sql, then copy rows back in batches of 1000 —
 * dropping htmlCacheUpdate jobs that target the categorylinks table and
 * letting auto-increment assign new job_ids.
 *
 * The temporary table is deliberately left behind (see the commented-out
 * DROP at the end) so the operation can be inspected afterwards.
 */
public function execute() {
	global $IP;
	$dbw = wfGetDB( DB_MASTER );
	if ( $dbw->tableExists( 'job_explosion_tmp' ) ) {
		echo "Temporary table already exists!\n" .
			"To restart, drop the job table and rename job_explosion_tmp back to job.\n";
		exit( 1 );
	}
	$batchSize = 1000;
	$jobTable = $dbw->tableName( 'job' );
	$jobTmpTable = $dbw->tableName( 'job_explosion_tmp' );
	// Swap the live table out, then create a fresh empty one.
	$dbw->query( "RENAME TABLE $jobTable TO $jobTmpTable" );
	$dbw->sourceFile( "$IP/maintenance/archives/patch-job.sql" );
	$start = 0;
	$numBatchesDone = 0;
	$newId = 1; // NOTE(review): unused in the visible code
	while ( true ) {
		// Next batch past the job_id cursor, skipping categorylinks
		// htmlCacheUpdate jobs (matched inside the serialized job_params).
		$res = $dbw->select(
			'job_explosion_tmp',
			'*',
			array(
				'job_id > ' . $dbw->addQuotes( $start ),
				"NOT ( job_cmd = 'htmlCacheUpdate' AND " .
					"job_params LIKE '%s:13:\"categorylinks\"%' )"
			),
			__METHOD__,
			array( 'LIMIT' => $batchSize )
		);
		if ( !$res->numRows() ) {
			break;
		}
		$insertBatch = array();
		foreach ( $res as $row ) {
			$start = $row->job_id;
			$insertRow = array();
			foreach ( (array)$row as $name => $value ) {
				$insertRow[$name] = $value;
			}
			unset( $insertRow['job_id'] ); // use autoincrement to avoid key conflicts
			$insertBatch[] = $insertRow;
		}
		$dbw->insert( 'job', $insertBatch, __METHOD__ );
		$numBatchesDone++;
		wfWaitForSlaves( 2 );
		// Progress: newline every 1000 batches, carriage return every 10.
		if ( $numBatchesDone % 1000 == 0 ) {
			echo "$start\n";
		} elseif ( $numBatchesDone % 10 == 0 ) {
			echo "$start\r";
		}
	}
	#$dbw->query( "DROP TABLE $jobTmpTable" );
}
/**
 * Rebuild this wiki's GlobalUsage data: walk page/imagelinks in
 * (page_id, il_to) order and record links to images that do not exist
 * locally. Supports continuation via --start-page / --start-image and lag
 * throttling via --maxlag.
 */
public function execute() {
	global $wgGlobalUsageDatabase;
	$dbr = wfGetDB( DB_SLAVE );
	$dbw = wfGetDB( DB_MASTER, array(), $wgGlobalUsageDatabase );
	$gu = new GlobalUsage( wfWikiId(), $dbw );
	$lastPageId = intval( $this->getOption( 'start-page', 0 ) );
	$lastIlTo = $this->getOption( 'start-image' );
	$limit = 500;
	$maxlag = intval( $this->getOption( 'maxlag', 5 ) );
	do {
		$this->output( "Querying links after (page_id, il_to) = ({$lastPageId}, {$lastIlTo})\n" );
		# Query all pages and any imagelinks associated with that
		$quotedLastIlTo = $dbr->addQuotes( $lastIlTo );
		$res = $dbr->select(
			array( 'page', 'imagelinks', 'image' ),
			array( 'page_id', 'page_namespace', 'page_title', 'il_to', 'img_name' ),
			// Keyset continuation on the compound (page_id, il_to) cursor.
			"(page_id = {$lastPageId} AND il_to > {$quotedLastIlTo})" .
				" OR page_id > {$lastPageId}",
			__METHOD__,
			array(
				'ORDER BY' => $dbr->implicitOrderBy() ? 'page_id' : 'page_id, il_to',
				'LIMIT' => $limit
			),
			array(
				'imagelinks' => array( 'LEFT JOIN', 'page_id = il_from' ),
				'image' => array( 'LEFT JOIN', 'il_to = img_name' )
			)
		);
		# Build up a tree per pages
		$pages = array();
		$lastRow = null;
		foreach ( $res as $row ) {
			if ( !isset( $pages[$row->page_id] ) ) {
				$pages[$row->page_id] = array();
			}
			# Add the imagelinks entry to the pages array if the image
			# does not exist locally
			if ( !is_null( $row->il_to ) && is_null( $row->img_name ) ) {
				$pages[$row->page_id][$row->il_to] = $row;
			}
			$lastRow = $row;
		}
		# Insert the imagelinks data to the global table
		foreach ( $pages as $pageId => $rows ) {
			# Delete all original links if this page is not a continuation
			# of last iteration.
			if ( $pageId != $lastPageId ) {
				$gu->deleteLinksFromPage( $pageId );
			}
			if ( $rows ) {
				$title = Title::newFromRow( reset( $rows ) );
				$images = array_keys( $rows );
				# Since we have a pretty accurate page_id, don't specify
				# Title::GAID_FOR_UPDATE
				$gu->insertLinks( $title, $images, 0 );
			}
		}
		if ( $lastRow ) {
			# We've processed some rows in this iteration, so save
			# continuation variables
			$lastPageId = $lastRow->page_id;
			$lastIlTo = $lastRow->il_to;
			# Be nice to the database
			$dbw->commit();
			wfWaitForSlaves( $maxlag, $wgGlobalUsageDatabase );
		}
	} while ( !is_null( $lastRow ) );
}
/**
 * Auto-review the current (latest) revision of every page in the review
 * namespaces on behalf of $user, skipping pages whose latest revision is
 * already flagged. Scans page_id in batches of mBatchSize.
 *
 * @param User $user Reviewer; must exist and hold the 'review' right
 */
protected function autoreview_current( User $user ) {
	$this->output( "Auto-reviewing all current page versions...\n" );
	if ( !$user->getID() ) {
		$this->output( "Invalid user specified.\n" );
		return;
	} elseif ( !$user->isAllowed( 'review' ) ) {
		$this->output( "User specified (id: {$user->getID()}) does not have \"review\" rights.\n" );
		return;
	}

	$db = wfGetDB( DB_MASTER );

	// NOTE(review): the original output statement here was corrupted
	// (string juxtaposition around a redacted token); reconstructed as the
	// obvious "print the reviewer's name" form.
	$this->output( "Reviewer username: " . $user->getName() . "\n" );

	$start = $db->selectField( 'page', 'MIN(page_id)', false, __METHOD__ );
	$end = $db->selectField( 'page', 'MAX(page_id)', false, __METHOD__ );
	if ( is_null( $start ) || is_null( $end ) ) {
		$this->output( "...page table seems to be empty.\n" );
		return;
	}
	# Do remaining chunk
	$end += $this->mBatchSize - 1;
	$blockStart = $start;
	$blockEnd = $start + $this->mBatchSize - 1;
	$count = 0;
	$changed = 0;
	$flags = FlaggedRevs::quickTags( FR_CHECKED ); // Assume basic level

	while ( $blockEnd <= $end ) {
		$this->output( "...doing page_id from {$blockStart} to {$blockEnd}\n" );
		$res = $db->select(
			array( 'page', 'revision' ),
			'*',
			array(
				"page_id BETWEEN {$blockStart} AND {$blockEnd}",
				'page_namespace' => FlaggedRevs::getReviewNamespaces(),
				'rev_id = page_latest'
			),
			__METHOD__
		);
		# Go through and autoreview the current version of every page...
		foreach ( $res as $row ) {
			$title = Title::newFromRow( $row );
			$rev = Revision::newFromRow( $row );
			# Is it already reviewed?
			$frev = FlaggedRevision::newFromTitle( $title, $row->page_latest, FR_MASTER );
			# Rev should exist, but to be safe...
			if ( !$frev && $rev ) {
				$article = new Article( $title );
				$db->begin();
				FlaggedRevs::autoReviewEdit( $article, $user, $rev, $flags, true );
				FlaggedRevs::HTMLCacheUpdates( $article->getTitle() );
				$db->commit();
				$changed++;
			}
			$count++;
		}
		$db->freeResult( $res );
		// Bug fix: advance by a full batch. The previous "+= mBatchSize - 1"
		// made consecutive BETWEEN ranges overlap by one page_id, so
		// boundary pages were scanned twice.
		$blockStart += $this->mBatchSize;
		$blockEnd += $this->mBatchSize;
		// XXX: Don't let deferred jobs array get absurdly large (bug 24375)
		DeferredUpdates::doUpdates( 'commit' );
		wfWaitForSlaves( 5 );
	}
	$this->output( "Auto-reviewing of all pages complete ..." .
		"{$count} rows [{$changed} changed]\n" );
}