/**
 * Inserts the tree-related columns of one or more revision rows into the
 * join table, and registers brand-new root revisions with the tree repository.
 *
 * @param array $rows A single row (field => value map) or a list of such rows
 * @return array The rows that were inserted, or an empty array on failure
 */
protected function insertRelated(array $rows) {
    // Normalize: a single row (whose first element is a scalar) becomes a
    // one-element list of rows.
    if (!is_array(reset($rows))) {
        $rows = array($rows);
    }
    // Extract only the tree-prefixed columns for the join-table insert.
    $trees = array();
    foreach ($rows as $key => $row) {
        $trees[$key] = $this->splitUpdate($row, 'tree');
    }
    $dbw = $this->dbFactory->getDB(DB_MASTER);
    $res = $dbw->insert(
        $this->joinTable(),
        $this->preprocessNestedSqlArray($trees),
        __METHOD__
    );

    // If this is a brand new root revision it needs to be added to the tree
    // If it has a rev_parent_id then its already a part of the tree
    if ($res) {
        foreach ($rows as $row) {
            if ($row['rev_parent_id'] === null) {
                // NOTE: once $res turns false, && short-circuits and the
                // remaining root revisions are not inserted into the tree.
                $res = $res && $this->treeRepo->insert(
                    UUID::create($row['tree_rev_descendant_id']),
                    UUID::create($row['tree_parent_id'])
                );
            }
        }
    }

    if (!$res) {
        return array();
    }

    return $rows;
}
/**
 * Processes one batch of flow_revision rows that still carry an IP in the
 * {$columnPrefix}_ip column even though a user id is set, nulls out that
 * column, and purges the affected objects from cache.
 *
 * @param string $columnPrefix Column prefix, e.g. "rev_user"
 * @param DatabaseBase $dbw Master database connection
 * @param string|null $continue Exclusive rev_id lower bound for this batch
 * @return string|null Last processed rev_id (binary), or null when no more
 *   rows matched
 */
public function updateRevision($columnPrefix, DatabaseBase $dbw, $continue = null) {
    $rows = $dbw->select(
        'flow_revision',
        array('rev_id', 'rev_type'),
        array(
            'rev_id > ' . $dbw->addQuotes($continue),
            "{$columnPrefix}_id > 0",
            "{$columnPrefix}_ip IS NOT NULL",
        ),
        __METHOD__,
        array('LIMIT' => $this->mBatchSize, 'ORDER BY' => 'rev_id')
    );

    $processedIds = array();
    $loadedObjects = array();
    foreach ($rows as $row) {
        $revId = UUID::create($row->rev_id);
        $revType = self::$types[$row->rev_type];
        $manager = $this->storage->getStorage($revType);
        $revision = $manager->get($revId);
        if (!$revision) {
            $this->error(__METHOD__ . ": Failed loading {$revType}: " . $revId->getAlphadecimal());
            continue;
        }
        // Merge into the manager so the later cache purge sees the object.
        $manager->merge($revision);
        $processedIds[] = $row->rev_id;
        $loadedObjects[] = $revision;
    }

    if (!$processedIds) {
        return null;
    }

    // Null out the IP column for everything we could load.
    $dbw->update(
        'flow_revision',
        array("{$columnPrefix}_ip" => null),
        array('rev_id' => $processedIds),
        __METHOD__
    );
    foreach ($loadedObjects as $revision) {
        $this->storage->cachePurge($revision);
    }

    $this->completeCount += count($processedIds);

    return end($processedIds);
}
/**
 * Common case 4: alphadecimal from cache to binary and timestamp.
 */
public function case4($alpha) {
    // Clone so the conversions below don't touch UUID's internal object cache.
    $uuid = clone UUID::create($alpha);
    $uuid->getTimestampObj();
    $uuid->getBinary();
}
/**
 * Walks every non-root post revision (tree_parent_id NOT NULL) in batches
 * and replays the recorder's onAfterInsert handler for each post that can
 * still be loaded from storage.
 *
 * @param object $recorder Listener exposing onAfterInsert($revision, array, array)
 * @throws \MWException On a failed batch select
 */
protected function processPosts($recorder) {
    $storage = Container::get('storage.post');
    // Seed $count so the loop runs at least once; it keeps going until a
    // batch comes back smaller than the batch size (i.e. the last page).
    $count = $this->mBatchSize;
    $id = '';
    $dbr = Container::get('db.factory')->getDB(DB_SLAVE);
    while ($count === $this->mBatchSize) {
        $count = 0;
        $res = $dbr->select(
            array('flow_tree_revision'),
            array('tree_rev_id'),
            // resume after the last id seen in the previous batch
            array('tree_parent_id IS NOT NULL', 'tree_rev_id > ' . $dbr->addQuotes($id)),
            __METHOD__,
            array('ORDER BY' => 'tree_rev_id ASC', 'LIMIT' => $this->mBatchSize)
        );
        if (!$res) {
            throw new \MWException('SQL error in maintenance script ' . __METHOD__);
        }
        foreach ($res as $row) {
            $count++;
            $id = $row->tree_rev_id;
            $uuid = UUID::create($id);
            $alpha = $uuid->getAlphadecimal();
            $post = $storage->get($uuid);
            if ($post) {
                echo "Processing post {$alpha}\n";
                $recorder->onAfterInsert($post, array(), array('workflow' => $post->getCollection()->getWorkflow()));
            }
        }
    }
}
/**
 * Retrieves a single post and the related topic title.
 *
 * @param UUID|string $postId The uid of the post being requested
 * @return PostRevision[]|null[] associative array with 'root' and 'post' keys. Array
 *   values may be null if not found.
 * @throws InvalidDataException
 */
public function getWithRoot($postId) {
    $postId = UUID::create($postId);
    $rootId = $this->treeRepo->findRoot($postId);

    $found = $this->storage->findMulti(
        'PostRevision',
        array(
            array('rev_type_id' => $postId),
            array('rev_type_id' => $rootId),
        ),
        array('sort' => 'rev_id', 'order' => 'DESC', 'limit' => 1)
    );

    $res = array('post' => null, 'root' => null);
    if (!$found) {
        return $res;
    }

    foreach ($found as $result) {
        // limit = 1 means each query group holds a single revision
        $revision = reset($result);
        if ($postId->equals($revision->getPostId())) {
            $res['post'] = $revision;
        } elseif ($rootId->equals($revision->getPostId())) {
            $res['root'] = $revision;
        } else {
            throw new InvalidDataException('Unmatched: ' . $revision->getPostId()->getAlphadecimal());
        }
    }

    // When the requested post is itself the root, the loop above only ever
    // fills the 'post' slot; mirror it into 'root'.
    if ($postId->equals($rootId)) {
        $res['root'] = $res['post'];
    }

    return $res;
}
/**
 * Bumps on-wiki edit counts for one batch of countable Flow revisions.
 *
 * @param DatabaseBase $dbr
 * @param UUID $continue Exclusive lower bound for rev_id
 * @param array $countableActions rev_change_type values that count as edits
 * @param UUID $stop Inclusive upper bound for rev_id
 * @return UUID|false UUID to continue from in the next batch, or false when
 *   there is no more data
 */
public function refreshBatch(DatabaseBase $dbr, UUID $continue, $countableActions, UUID $stop) {
    $rows = $dbr->select(
        'flow_revision',
        array('rev_id', 'rev_user_id'),
        array(
            'rev_id > ' . $dbr->addQuotes($continue->getBinary()),
            'rev_id <= ' . $dbr->addQuotes($stop->getBinary()),
            'rev_user_id > 0',
            'rev_user_wiki' => wfWikiID(),
            'rev_change_type' => $countableActions,
        ),
        __METHOD__,
        array('ORDER BY' => 'rev_id ASC', 'LIMIT' => $this->mBatchSize)
    );

    // end of data
    if (!$rows || $rows->numRows() === 0) {
        return false;
    }

    foreach ($rows as $row) {
        // User::incEditCount only allows for edit count to be increased 1
        // at a time. It'd be better to immediately be able to increase the
        // edit count by the exact number it should be increased with, but
        // I'd rather re-use existing code, especially in a run-once script,
        // where performance is not the most important thing ;)
        $account = User::newFromId($row->rev_user_id);
        $account->incEditCount();

        // save updates so we can print them when the script is done running
        $accountId = $account->getId();
        if (!isset($this->updates[$accountId])) {
            $this->updates[$accountId] = 0;
        }
        $this->updates[$accountId]++;

        // set value for next batch to continue at
        $continue = $row->rev_id;
    }

    return UUID::create($continue);
}
/**
 * Builds the API representation of this board's history.
 *
 * @param array $options
 * @return array
 */
public function renderApi(array $options) {
    global $wgRequest;

    // A workflow that was never persisted has no history to show.
    if ($this->workflow->isNew()) {
        return array(
            'type' => $this->getName(),
            'revisions' => array(),
            'links' => array(),
        );
    }

    /** @var BoardHistoryQuery $query */
    $query = Container::get('query.board-history');
    /** @var RevisionFormatter $formatter */
    $formatter = Container::get('formatter.revision');
    $formatter->setIncludeHistoryProperties(true);

    list($limit, ) = $wgRequest->getLimitOffset();
    // don't use offset from getLimitOffset - that assumes an int, which our
    // UUIDs are not
    $rawOffset = $wgRequest->getText('offset');
    $offset = $rawOffset ? UUID::create($rawOffset) : null;

    $pager = new HistoryPager($query, $this->workflow->getId());
    $pager->setLimit($limit);
    $pager->setOffset($offset);
    $pager->doQuery();

    $revisions = array();
    foreach ($pager->getResult() as $row) {
        $serialized = $formatter->formatApi($row, $this->context, 'history');
        if ($serialized) {
            $revisions[$serialized['revisionId']] = $serialized;
        }
    }

    return array(
        'type' => $this->getName(),
        'revisions' => $revisions,
        'navbar' => $pager->getNavigationBar(),
        'links' => array(),
    );
}
/**
 * Builds a Header from a storage row, attaching its owning workflow id.
 *
 * @param string[] $row
 * @param Header|null $obj
 * @return Header
 */
public static function fromStorageRow(array $row, $obj = null) {
    /** @var Header $header */
    $header = parent::fromStorageRow($row, $obj);
    // For headers the rev_type_id column holds the workflow id.
    $header->workflowId = UUID::create($row['rev_type_id']);
    return $header;
}
/**
 * Computes the corrected workflow_last_update_timestamp for one workflow row.
 *
 * @param stdClass $row
 * @return array Column => value map to update; empty when already correct
 * @throws TimestampException
 * @throws \Flow\Exception\FlowException
 * @throws \Flow\Exception\InvalidInputException
 */
public function update($row) {
    $uuid = UUID::create($row->workflow_id);

    if ($row->workflow_type == 'discussion') {
        $revision = $this->storage->get('Header', $uuid);
    } elseif ($row->workflow_type == 'topic') {
        // fetch topic (has same id as workflow) via RootPostLoader so
        // all children are populated
        $revision = $this->rootPostLoader->get($uuid);
    } else {
        throw new FlowException('Unknown workflow type: ' . $row->workflow_type);
    }

    if (!$revision) {
        return array();
    }

    $timestamp = $this->getUpdateTimestamp($revision)->getTimestamp(TS_MW);
    if ($timestamp === $row->workflow_last_update_timestamp) {
        // correct update timestamp already, nothing to update
        return array();
    }

    return array('workflow_last_update_timestamp' => $timestamp);
}
/**
 * Accepts a result set as sent out to the CategoryViewer::doCategoryQuery
 * hook and preloads post revisions and workflows for all topic pages in it.
 *
 * @param ResultWrapper|array $rows
 */
public function loadMetadataBatch($rows) {
    $neededPosts = array();
    $neededWorkflows = array();
    foreach ($rows as $row) {
        if ($row->page_namespace != NS_TOPIC) {
            continue;
        }
        $uuid = UUID::create(strtolower($row->page_title));
        if (!$uuid) {
            continue;
        }
        $alpha = $uuid->getAlphadecimal();
        $neededPosts[$alpha] = array('rev_type_id' => $uuid);
        $neededWorkflows[$alpha] = $uuid;
    }

    if (!$neededPosts) {
        return;
    }

    $this->posts = $this->storage->findMulti(
        'PostRevision',
        $neededPosts,
        array('sort' => 'rev_id', 'order' => 'DESC', 'limit' => 1)
    );

    $workflows = $this->storage->getMulti('Workflow', $neededWorkflows);
    // @todo fixme: these should have come back with the appropriate array
    // key since we passed it in above, but didn't.
    foreach ($workflows as $workflow) {
        $this->workflows[$workflow->getId()->getAlphadecimal()] = $workflow;
    }
}
/**
 * Instantiates a URLReference object from a storage row.
 *
 * @param string[] $row Associative array of ref_* columns (the body indexes
 *   $row as an array, not as the \StdClass the old docblock claimed)
 * @return URLReference
 */
public static function fromStorageRow($row) {
    $workflow = UUID::create($row['ref_src_workflow_id']);
    $objectType = $row['ref_src_object_type'];
    $objectId = UUID::create($row['ref_src_object_id']);
    $url = $row['ref_target'];
    $type = $row['ref_type'];
    $srcTitle = Title::makeTitle($row['ref_src_namespace'], $row['ref_src_title']);
    return new URLReference($workflow, $srcTitle, $objectType, $objectId, $type, $url);
}
/**
 * A Parsoid-style relative href (./File:...) should produce a WikiReference
 * with a title-based target identifier.
 */
public function testAcceptsParsoidHrefs() {
    // The factory only needs an id and an owning title from the workflow.
    $workflow = $this->getMock('Flow\\Model\\Workflow');
    $workflow->expects($this->any())
        ->method('getId')
        ->will($this->returnValue(UUID::create()));
    $workflow->expects($this->any())
        ->method('getArticleTitle')
        ->will($this->returnValue(Title::newMainPage()));

    $factory = new ReferenceFactory($workflow, 'foo', UUID::create());
    $ref = $factory->createWikiReference('file', './File:Foo.jpg');

    $this->assertInstanceOf('Flow\\Model\\WikiReference', $ref);
    $this->assertEquals('title:File:Foo.jpg', $ref->getTargetIdentifier());
}
/**
 * Factory: builds a first-revision summary attached to the given post.
 *
 * @param Title $title
 * @param PostRevision $post
 * @param User $user
 * @param string $content
 * @param string $format wikitext|html
 * @param string $changeType
 * @return PostSummary
 */
public static function create(Title $title, PostRevision $post, User $user, $content, $format, $changeType) {
    $summary = new self();
    $summary->revId = UUID::create();
    $summary->user = UserTuple::newFromUser($user);
    // First revision of the summary: no predecessor.
    $summary->prevRevision = null;
    $summary->changeType = $changeType;
    $summary->summaryTargetId = $post->getPostId();
    $summary->setContent($content, $format, $title);
    return $summary;
}
/**
 * Runs every registered search index updater, (re)indexing revisions in
 * batches, optionally constrained by the fromId/toId/namespace/limit options.
 *
 * Fix: the maintenance timeout was being set via setTimeout2(), a
 * non-existent method name; the Connection timeout setter is setTimeout().
 */
public function execute() {
    global $wgFlowSearchMaintenanceTimeout;

    // Set the timeout for maintenance actions
    Connection::getSingleton()->setTimeout($wgFlowSearchMaintenanceTimeout);

    /** @var Updater[] $updaters */
    $updaters = Container::get('searchindex.updaters');
    foreach ($updaters as $updaterType => $updater) {
        $fromId = $this->getOption('fromId', null);
        $fromId = $fromId ? UUID::create($fromId) : null;
        $toId = $this->getOption('toId', null);
        $toId = $toId ? UUID::create($toId) : null;
        $namespace = $this->getOption('namespace', null);
        $numRevisionsToIndex = $this->getOption('limit', null);

        $total = 0;
        while (true) {
            // if a limit was provided, we should make sure to not fetch
            // more revisions than asked for
            $options = array('LIMIT' => $this->mBatchSize);
            if ($numRevisionsToIndex) {
                $options['LIMIT'] = min($numRevisionsToIndex, $this->mBatchSize);

                // since we do this in batches, we'll subtract the size of
                // each batch until $numRevisionsToIndex is reached
                $numRevisionsToIndex -= $this->mBatchSize;
                if ($options['LIMIT'] <= 0) {
                    break;
                }
            }

            $conditions = $updater->buildQueryConditions($fromId, $toId, $namespace);
            $revisions = $updater->getRevisions($conditions, $options);

            // stop if we're all out of revisions
            if (!$revisions) {
                break;
            }

            $total += $updater->updateRevisions($revisions, null, null);
            $this->output("Indexed {$total} {$updaterType} document(s)\n");

            // prepare for next batch, starting at the next id
            // prevFromId will default to around unix epoch - there can be
            // no data before that
            $prevFromId = $fromId ?: UUID::getComparisonUUID('1');
            $fromId = $this->getNextFromId($revisions);

            // make sure we don't get stuck in an infinite loop
            $diff = $prevFromId->getTimestampObj()->diff($fromId->getTimestampObj());
            // invert will be 1 if the diff is a negative time period from
            // $prevFromId to $fromId, which means that the new $timestamp is
            // more recent than our current $result
            if ($diff->invert) {
                $this->error(
                    'Got stuck in an infinite loop.' . "\n" .
                    'workflow_last_update_timestamp is likely incorrect ' .
                    'for some workflows.' . "\n" .
                    'Run maintenance/FlowFixWorkflowLastUpdateTimestamp.php ' .
                    'to automatically fix those.',
                    1
                );
            }

            // prevent memory from being filled up
            Container::get('storage')->clear();
        }
    }
}
/**
 * Data provider: (message, assertion closure, action, workflowId, revId,
 * postId) tuples for checkUser-related tests.
 */
public static function checkUserProvider() {
    $topicId = UUID::create();
    $revId = UUID::create();
    $postId = UUID::create();

    // Both cases assert the same thing: a non-null result carrying links.
    $assertHasLinks = function ($test, $message, $result) {
        $test->assertNotNull($result);
        $test->assertArrayHasKey('links', $result, $message);
    };

    return array(
        array(
            'With only a topicId reply should not fail',
            $assertHasLinks,
            'reply',
            $topicId,
            $revId,
            null,
        ),
        array(
            'With topicId and postId should not fail',
            $assertHasLinks,
            'reply',
            $topicId,
            $revId,
            $postId,
        ),
    );
}
/**
 * {@inheritDoc}
 */
public function getRevisions(array $conditions = array(), array $options = array()) {
    $dbr = $this->dbFactory->getDB(DB_SLAVE);

    // get the current (=most recent, =max) revision id for all headers
    $rows = $dbr->select(
        array('flow_revision', 'flow_workflow'),
        array('rev_id' => 'MAX(rev_id)'),
        $conditions,
        __METHOD__,
        array('ORDER BY' => 'rev_id ASC', 'GROUP BY' => 'rev_type_id') + $options,
        array(
            'flow_workflow' => array(
                'INNER JOIN',
                array('workflow_id = rev_type_id', 'rev_type' => 'header'),
            ),
        )
    );

    $uuids = array();
    foreach ($rows as $row) {
        $uuids[] = UUID::create($row->rev_id);
    }

    /** @var ManagerGroup $storage */
    $storage = Container::get('storage');
    return $storage->getStorage('Header')->getMulti($uuids);
}
/**
 * End-to-end API test of the TOC-only (toconly) view-topiclist output:
 * verifies an empty-but-occupied board, "newest" ordering, and "updated"
 * ordering after replies change the topics' update order.
 */
public function testTocOnly() {
    // Create three topics and capture, per topic, the pieces of the
    // view-topic response that the expected TOC output is built from.
    $topicData = array();
    for ($i = 0; $i < 3; $i++) {
        $title = self::TITLE_PREFIX . $i;
        $topic = $this->createTopic($title);
        $data = $this->doApiRequest(array('page' => $topic['topic-page'], 'action' => 'flow', 'submodule' => 'view-topic'));

        $topicData[$i]['response'] = $data[0]['flow']['view-topic']['result']['topic'];
        $topicData[$i]['page'] = $topic['topic-page'];
        $topicData[$i]['id'] = $topic['topic-id'];
        $topicData[$i]['revisionId'] = $topic['topic-revision-id'];
        $actualRevision = $topicData[$i]['response']['revisions'][$topicData[$i]['revisionId']];
        $topicData[$i]['expectedRevision'] = array('content' => array('content' => $title, 'format' => 'plaintext'), 'last_updated' => $actualRevision['last_updated']);
    }

    $flowQaTitle = Title::newFromText('Talk:Flow_QA');

    // Response fragments common to every TOC-only request made below.
    $expectedCommonResponse = array('flow' => array('view-topiclist' => array('result' => array('topiclist' => array('submitted' => array('savesortby' => false, 'offset-dir' => 'fwd', 'offset-id' => null, 'offset' => null, 'limit' => 2, 'toconly' => true, 'include-offset' => false, 'format' => 'fixed-html'), 'errors' => array(), 'type' => 'topiclist')), 'status' => 'ok')));

    // An occupied but empty Flow board: empty roots/posts/revisions.
    $expectedEmptyPageResponse = array_merge_recursive(array('flow' => array('view-topiclist' => array('result' => array('topiclist' => array('submitted' => array('sortby' => 'user'), 'sortby' => 'newest', 'roots' => array(), 'posts' => array(), 'revisions' => array(), 'links' => array('pagination' => array())))))), $expectedCommonResponse);

    $actualEmptyPageResponse = $this->doApiRequest(array('action' => 'flow', 'page' => 'Talk:Intentionally blank', 'submodule' => 'view-topiclist', 'vtllimit' => 2, 'vtltoconly' => true, 'vtlformat' => 'fixed-html'));
    $actualEmptyPageResponse = $actualEmptyPageResponse[0];
    $this->assertEquals($expectedEmptyPageResponse, $actualEmptyPageResponse, 'TOC-only output for an empty, but occupied, Flow board');

    // "newest" order: the two most recently created topics (2, then 1),
    // with id-based forward pagination.
    $expectedNewestResponse = array_merge_recursive(array('flow' => array('view-topiclist' => array('result' => array('topiclist' => array('submitted' => array('sortby' => 'newest'), 'sortby' => 'newest', 'roots' => array($topicData[2]['id'], $topicData[1]['id']), 'posts' => array($topicData[2]['id'] => $topicData[2]['response']['posts'][$topicData[2]['id']], $topicData[1]['id'] => $topicData[1]['response']['posts'][$topicData[1]['id']]), 'revisions' => array($topicData[2]['revisionId'] => $topicData[2]['expectedRevision'], $topicData[1]['revisionId'] => $topicData[1]['expectedRevision']), 'links' => array('pagination' => array('fwd' => array('url' => $flowQaTitle->getLinkURL(array('topiclist_offset-dir' => 'fwd', 'topiclist_limit' => '2', 'topiclist_offset-id' => $topicData[1]['id'], 'topiclist_sortby' => 'newest')), 'title' => 'fwd', 'text' => 'fwd')))))))), $expectedCommonResponse);

    $actualNewestResponse = $this->doApiRequest(array('action' => 'flow', 'page' => 'Talk:Flow QA', 'submodule' => 'view-topiclist', 'vtllimit' => 2, 'vtlsortby' => 'newest', 'vtltoconly' => true, 'vtlformat' => 'fixed-html'));
    $actualNewestResponse = $actualNewestResponse[0];
    $this->assertEquals($expectedNewestResponse, $actualNewestResponse, 'TOC-only output for "newest" order');

    // Make it so update order is chronologically (1, 0, 2)
    // We then expect it to be returned reverse chronologically (2, 0)
    $updateList = array(1, 0, 2);
    foreach ($updateList as $updateListInd => $topicDataInd) {
        $replyResponse = $this->doApiRequest(array('action' => 'flow', 'page' => $topicData[$topicDataInd]['page'], 'submodule' => 'reply', 'token' => $this->getEditToken(), 'repreplyTo' => $topicData[$topicDataInd]['id'], 'repcontent' => "Reply to topic {$topicDataInd}"));

        // This is because we use timestamps with second granularity.
        // Without this, the timestamp can be exactly the same for two
        // topics, which means the ordering is undefined (and thus
        // untestable). This was causing failures on Jenkins.
        //
        // Possible improvement: Make a simple class for getting the current
        // time that normally calls wfTimestampNow. Have an alternative
        // implementation for tests that can be controlled by an API like
        // http://sinonjs.org/ (which we use on the client side).
        // Pimple can be in charge of which is used.
        if ($updateListInd !== count($updateList) - 1) {
            sleep(1);
        }

        $newPostId = $replyResponse[0]['flow']['reply']['committed']['topic']['post-id'];
        $topicData[$topicDataInd]['updateTimestamp'] = UUID::create($newPostId)->getTimestamp();
        $topicData[$topicDataInd]['expectedRevision']['last_updated'] = wfTimestamp(TS_UNIX, $topicData[$topicDataInd]['updateTimestamp']) * 1000;
    }

    // "updated" order: topics 2 and 0, with timestamp-based pagination.
    $expectedUpdatedResponse = array_merge_recursive(array('flow' => array('view-topiclist' => array('result' => array('topiclist' => array('submitted' => array('sortby' => 'updated'), 'sortby' => 'updated', 'roots' => array($topicData[2]['id'], $topicData[0]['id']), 'posts' => array($topicData[2]['id'] => $topicData[2]['response']['posts'][$topicData[2]['id']], $topicData[0]['id'] => $topicData[0]['response']['posts'][$topicData[0]['id']]), 'revisions' => array($topicData[2]['revisionId'] => $topicData[2]['expectedRevision'], $topicData[0]['revisionId'] => $topicData[0]['expectedRevision']), 'links' => array('pagination' => array('fwd' => array('url' => $flowQaTitle->getLinkURL(array('topiclist_offset-dir' => 'fwd', 'topiclist_limit' => '2', 'topiclist_offset' => $topicData[0]['updateTimestamp'], 'topiclist_sortby' => 'updated')), 'title' => 'fwd', 'text' => 'fwd')))))))), $expectedCommonResponse);

    $actualUpdatedResponse = $this->doApiRequest(array('action' => 'flow', 'page' => 'Talk:Flow QA', 'submodule' => 'view-topiclist', 'vtllimit' => 2, 'vtlsortby' => 'updated', 'vtltoconly' => true, 'vtlformat' => 'fixed-html'));
    $actualUpdatedResponse = $actualUpdatedResponse[0];
    $this->assertEquals($expectedUpdatedResponse, $actualUpdatedResponse, 'TOC-only output for "updated" order');
}
/**
 * Hydrates a TopicListEntry from a storage row.
 *
 * @param array $row
 * @param TopicListEntry|null $obj
 * @return TopicListEntry
 * @throws DataModelException When $obj is of an unexpected type
 */
public static function fromStorageRow(array $row, $obj = null) {
    if ($obj !== null && !$obj instanceof self) {
        throw new DataModelException('Wrong obj type: ' . get_class($obj), 'process-data');
    }
    $entry = $obj === null ? new self() : $obj;

    $entry->topicListId = UUID::create($row['topic_list_id']);
    $entry->topicId = UUID::create($row['topic_id']);
    // Optional column: only present when the query joined against workflows.
    if (isset($row['workflow_last_update_timestamp'])) {
        $entry->topicWorkflowLastUpdated = $row['workflow_last_update_timestamp'];
    }

    return $entry;
}
/**
 * Unserializes a Content object of the type supported by this ContentHandler.
 *
 * @since 1.21
 *
 * @param string $blob Serialized form of the content
 * @param string $format The format used for serialization
 *
 * @return Content The Content object created by deserializing $blob
 */
public function unserializeContent($blob, $format = null) {
    $info = FormatJson::decode($blob, true);

    if (!$info) {
        // For transition from wikitext-type pages
        // Make a plain content object and then when we get a chance
        // we can insert a proper object.
        return $this->makeEmptyContent();
    }

    $uuid = isset($info['flow-workflow']) ? UUID::create($info['flow-workflow']) : null;
    return new BoardContent(CONTENT_MODEL_FLOW_BOARD, $uuid);
}
/**
 * Shared helper: runs RevisionStorage::update() against a mocked DB and
 * asserts exactly the expected column/value pairs are written.
 *
 * @param array $old Row state before the update
 * @param array $new Row state after the update
 * @param array $expectedUpdateValues Columns expected in the UPDATE call
 * @param bool $isContentUpdatingAllowed Toggles content-update permission
 */
protected function helperToTestUpdating($old, $new, $expectedUpdateValues, $isContentUpdatingAllowed) {
    $dbw = $this->getMockBuilder('DatabaseMysql')
        ->disableOriginalConstructor()
        ->getMock();
    $factory = $this->getMockBuilder('Flow\\DbFactory')
        ->disableOriginalConstructor()
        ->getMock();
    $factory->expects($this->any())
        ->method('getDB')
        ->will($this->returnValue($dbw));

    // Both rows share the same revision id.
    $id = UUID::create();
    $old['rev_id'] = $id->getBinary();
    $new['rev_id'] = $id->getBinary();

    $dbw->expects($this->once())
        ->method('update')
        ->with(
            $this->equalTo('flow_revision'),
            $this->equalTo($expectedUpdateValues),
            $this->equalTo(array('rev_id' => $id->getBinary()))
        )
        ->will($this->returnValue(true));
    $dbw->expects($this->any())
        ->method('affectedRows')
        ->will($this->returnValue(1));

    // Header is bare bones implementation, sufficient for testing
    // the parent class.
    $storage = new HeaderRevisionStorage($factory, $this->MOCK_EXTERNAL_STORE_CONFIG);
    $this->setWhetherContentUpdatingAllowed($storage, $isContentUpdatingAllowed);
    $storage->update($old, $new);
}
/**
 * Runs the history query with a one-row over-fetch to detect whether more
 * pages exist, then fills in the pager navigation properties (first/last
 * shown, is-first/is-last, past-the-end index).
 *
 * @throws InvalidDataException When the history could not be loaded
 */
public function doQuery() {
    $direction = $this->mIsBackwards ? 'rev' : 'fwd';

    // over-fetch so we can figure out if there's anything after what we're showing
    $this->mResult = $this->query->getResults($this->id, $this->getLimit() + 1, $this->mOffset, $direction);
    if (!$this->mResult) {
        throw new InvalidDataException('Unable to load history for ' . $this->id->getAlphadecimal(), 'fail-load-history');
    }
    $this->mQueryDone = true;

    // we over-fetched, now get rid of redundant value for our "real" data
    $overfetched = null;
    if (count($this->mResult) > $this->getLimit()) {
        // when traversing history reverse, the overfetched entry will be at
        // the beginning of the list; in normal mode it'll be last
        if ($this->mIsBackwards) {
            $overfetched = array_shift($this->mResult);
        } else {
            $overfetched = array_pop($this->mResult);
        }
    }

    // set some properties that'll be used to generate navigation bar
    $this->mLastShown = $this->mResult[count($this->mResult) - 1]->revision->getRevisionId()->getAlphadecimal();
    $this->mFirstShown = $this->mResult[0]->revision->getRevisionId()->getAlphadecimal();

    /*
     * By overfetching, we've already figured out if there's additional
     * entries at the next page (according to the current direction). Now
     * go fetch 1 more in the other direction (the one we likely came from,
     * when navigating)
     */
    $nextOffset = $this->mIsBackwards ? $this->mFirstShown : $this->mLastShown;
    $nextOffset = UUID::create($nextOffset);
    $reverseDirection = $this->mIsBackwards ? 'fwd' : 'rev';
    $this->mIsLast = !$overfetched;
    $this->mIsFirst = !$this->mOffset || count($this->query->getResults($this->id, 1, $nextOffset, $reverseDirection)) === 0;

    if ($this->mIsBackwards) {
        // swap values if we're going backwards
        list($this->mIsFirst, $this->mIsLast) = array($this->mIsLast, $this->mIsFirst);

        // id of the overfetched entry, used to build new links starting at
        // this offset
        if ($overfetched) {
            $this->mPastTheEndIndex = $overfetched->revision->getRevisionId()->getAlphadecimal();
        }
    }
}
/**
 * Replays the moderation logging listener over historic moderation
 * revisions (batched, within start/stop UUID bounds from script options)
 * so that moderation log entries exist for all of them.
 *
 * @return bool true on completion; per-revision failures are reported only
 */
protected function doDBUpdates() {
    $container = Container::getContainer();
    $dbFactory = $container['db.factory'];
    $dbw = $dbFactory->getDb(DB_MASTER);
    $storage = $container['storage'];
    $moderationLoggingListener = $container['storage.post.listeners.moderation_logging'];

    $rowIterator = new EchoBatchRowIterator($dbw, 'flow_revision', 'rev_id', $this->mBatchSize);
    $rowIterator->setFetchColumns(array('rev_id', 'rev_type'));
    // Fetch rows that are a moderation action
    $rowIterator->addConditions(array('rev_change_type' => ModerationLoggingListener::getModerationChangeTypes(), 'rev_user_wiki' => wfWikiID()));

    // Bound the scan by the script's --start / --stop options (exclusive).
    $start = $this->getOption('start');
    $startId = UUID::create($start);
    $rowIterator->addConditions(array('rev_id > ' . $dbw->addQuotes($startId->getBinary())));
    $stop = $this->getOption('stop');
    $stopId = UUID::create($stop);
    $rowIterator->addConditions(array('rev_id < ' . $dbw->addQuotes($stopId->getBinary())));

    $total = $fail = 0;
    foreach ($rowIterator as $batch) {
        // One transaction per batch.
        $dbw->begin();
        foreach ($batch as $row) {
            $total++;
            $objectManager = $storage->getStorage($row->rev_type);
            $revId = UUID::create($row->rev_id);
            $obj = $objectManager->get($revId);
            if (!$obj) {
                $this->error('Could not load revision: ' . $revId->getAlphadecimal());
                $fail++;
                continue;
            }
            $workflow = $obj->getCollection()->getWorkflow();
            $moderationLoggingListener->onAfterInsert($obj, array(), array('workflow' => $workflow));
        }
        $dbw->commit();
        // Release memory and let replication catch up between batches.
        $storage->clear();
        $dbFactory->waitForSlaves();
    }

    $this->output("Processed a total of {$total} moderation revisions.\n");
    if ($fail !== 0) {
        $this->error("Errors were encountered while processing {$fail} of them.\n");
    }
    return true;
}
/**
 * @dataProvider referenceExtractorProvider
 */
public function testReferenceExtractor($description, $wikitext, $expectedClass, $expectedType, $expectedTarget, $page = 'UTPage') {
    $referenceExtractor = Container::get('reference.extractor');

    // The factory only needs an id and a title from the workflow mock.
    $workflow = $this->getMock('Flow\\Model\\Workflow');
    $workflow->expects($this->any())
        ->method('getId')
        ->will($this->returnValue(UUID::create()));
    $workflow->expects($this->any())
        ->method('getArticleTitle')
        ->will($this->returnValue(Title::newMainPage()));
    $factory = new ReferenceFactory($workflow, 'foo', UUID::create());

    // extractReferences() and the extractor map are not public; reach in
    // via reflection.
    $reflMethod = new ReflectionMethod($referenceExtractor, 'extractReferences');
    $reflMethod->setAccessible(true);
    $reflProperty = new \ReflectionProperty($referenceExtractor, 'extractors');
    $reflProperty->setAccessible(true);
    $extractors = $reflProperty->getValue($referenceExtractor);

    $html = Utils::convert('wt', 'html', $wikitext, Title::newFromText($page));
    $result = $reflMethod->invoke($referenceExtractor, $factory, $extractors['post'], $html);
    $this->assertCount(1, $result, $html);

    $reference = reset($result);
    $this->assertInstanceOf($expectedClass, $reference, $description);
    $this->assertEquals($expectedType, $reference->getType(), $description);
    $this->assertEquals($expectedTarget, $reference->getTargetIdentifier(), $description);
}
/**
 * Repairs the workflow <-> page association for one flow_workflow row:
 * creates the missing on-wiki page for workflows stored with page id 0,
 * and returns an update fixing workflow_page_id when it disagrees with
 * the actual article id.
 *
 * @param stdClass $row flow_workflow row
 * @return array Column => value map to update, or empty array
 * @throws Exception When no valid Title can be built from the row
 */
public function update($row) {
    $title = Title::makeTitleSafe($row->workflow_namespace, $row->workflow_title_text);
    if ($title === null) {
        throw new Exception(sprintf('Could not create title for %s at %s:%s', UUID::create($row->workflow_id)->getAlphadecimal(), $this->lang->getNsText($row->workflow_namespace) ?: $row->workflow_namespace, $row->workflow_title_text));
    }

    // at some point, we failed to create page entries for new workflows: only
    // create that page if the workflow was stored with a 0 page id (otherwise,
    // we could mistake the $title for a deleted page)
    if ($row->workflow_page_id === 0 && $title->getArticleID() === 0) {
        // build workflow object (yes, loading them piecemeal is suboptimal, but
        // this is just a one-time script; considering the alternative is
        // creating a derivative EchoBatchRowIterator that returns workflows,
        // it doesn't really matter)
        $storage = Container::get('storage');
        $workflow = $storage->get('Workflow', UUID::create($row->workflow_id));

        try {
            /** @var OccupationController $occupationController */
            $occupationController = Container::get('occupation_controller');
            $occupationController->allowCreation($title, $occupationController->getTalkpageManager());
            $occupationController->ensureFlowRevision(new Article($title), $workflow);

            // force article id to be refetched from db
            $title->getArticleID(Title::GAID_FOR_UPDATE);
        } catch (\Exception $e) {
            // catch all exception to keep going with the rest we want to
            // iterate over, we'll report on the failed entries at the end
            $this->failed[] = $row;
        }
    }

    // re-associate the workflow with the correct page; only if a page exists
    if ($title->getArticleID() !== 0 && $title->getArticleID() !== (int) $row->workflow_page_id) {
        // This makes the assumption the page has not moved or been deleted?
        ++$this->fixedCount;
        return array('workflow_page_id' => $title->getArticleID());
    } elseif (!$row->workflow_page_id) {
        // No id exists for this workflow?
        $this->failed[] = $row;
    }

    return array();
}
/**
 * Data provider: (uuids, watchlist db iterator, expected status map)
 * tuples, including a case where the queried uuid list is empty.
 */
public function provideDataGetWatchStatus() {
    $tests = array();

    // 10 randomized test cases, each with 10 uuids.
    for ($case = 0; $case < 10; $case++) {
        $uuids = array();
        $dbResult = array();
        $result = array();
        for ($n = 0; $n < 10; $n++) {
            $uuid = UUID::create()->getAlphadecimal();
            // Randomly mark roughly half the uuids as watched in the
            // simulated query result.
            if (rand(0, 1)) {
                $dbResult[] = (object) array('wl_title' => $uuid);
                $result[$uuid] = true;
            } else {
                $result[$uuid] = false;
            }
            $uuids[] = $uuid;
        }
        $dbResult = new \ArrayObject($dbResult);
        $tests[] = array($uuids, $dbResult->getIterator(), $result);
    }

    // attach empty uuids array to query
    $uuids = $dbResult = $result = array();
    for ($n = 0; $n < 10; $n++) {
        $uuid = UUID::create()->getAlphadecimal();
        $dbResult[] = (object) array('wl_title' => $uuid);
    }
    $dbResult = new \ArrayObject($dbResult);
    $tests[] = array($uuids, $dbResult->getIterator(), $result);

    return $tests;
}
/**
 * Diff would format against two revisions
 */
public function formatApi(FormatterRow $newRow, FormatterRow $oldRow, IContextSource $ctx) {
    $oldRes = $this->revisionViewFormatter->formatApi($oldRow, $ctx);
    $newRes = $this->revisionViewFormatter->formatApi($newRow, $ctx);

    $differenceEngine = new \DifferenceEngine();
    $differenceEngine->setContent(
        new \TextContent($oldRow->revision->getContent('wikitext')),
        new \TextContent($newRow->revision->getContent('wikitext'))
    );

    // No "previous" link when the old side is the very first revision.
    $prevLink = null;
    if (!$oldRow->revision->isFirstRevision()) {
        $prevLink = $this->urlGenerator
            ->diffLink($oldRow->revision, $ctx->getTitle(), UUID::create($oldRes['workflowId']))
            ->getLocalURL();
    }

    // this is probably a network request which typically goes in the query
    // half, but we don't have to worry about batching because we only show
    // one diff at a time so just do it.
    $nextLink = null;
    $nextRevision = $newRow->revision->getCollection()->getNextRevision($newRow->revision);
    if ($nextRevision !== null) {
        $nextLink = $this->urlGenerator
            ->diffLink($nextRevision, $ctx->getTitle(), UUID::create($newRes['workflowId']))
            ->getLocalURL();
    }

    return array(
        'new' => $newRes,
        'old' => $oldRes,
        'diff_content' => $differenceEngine->getDiffBody(),
        'links' => array(
            'previous' => $prevLink,
            'next' => $nextLink,
        ),
    );
}
/**
 * Data provider: (storage mock, query map, paging options, expected offset
 * key) tuples for paging-link generation.
 */
public function provideDataMakePagingLink() {
    // Paging by topic id: three entries, page limit of two.
    $pagedByTopicId = array(
        $this->mockStorage(
            array($this->mockTopicListEntry(), $this->mockTopicListEntry(), $this->mockTopicListEntry()),
            UUID::create(),
            array('topic_id')
        ),
        array('topic_list_id' => '123456'),
        array('pager-limit' => 2, 'order' => 'desc', 'sort' => 'topic_id'),
        'offset-id',
    );

    // Paging by last-updated timestamp: two entries, page limit of one.
    $pagedByUpdated = array(
        $this->mockStorage(
            array($this->mockTopicListEntry(), $this->mockTopicListEntry()),
            UUID::create(),
            array('workflow_last_update_timestamp')
        ),
        array('topic_list_id' => '123456'),
        array('pager-limit' => 1, 'order' => 'desc', 'sort' => 'workflow_last_update_timestamp', 'sortby' => 'updated'),
        'offset',
    );

    return array($pagedByTopicId, $pagedByUpdated);
}
/**
 * Render the data for API request
 *
 * Fix: the compare-postsummary-revisions branch read the old revision id
 * from $options['newRevision'], so an explicitly requested old/new diff
 * always compared the new revision against itself; it now reads
 * $options['oldRevision'].
 *
 * @param array $options
 * @return array
 * @throws InvalidInputException When no new revision is given for comparison
 */
public function renderApi(array $options) {
    $output = array('type' => $this->getName());

    switch ($this->action) {
        case 'view-topic-summary':
            // @Todo - duplicated logic in other single view block
            if (isset($options['revId']) && $options['revId']) {
                /** @var PostSummaryViewQuery $query */
                $query = Container::get('query.postsummary.view');
                $row = $query->getSingleViewResult($options['revId']);
                if (!$this->permissions->isAllowed($row->revision, 'view-topic-summary')) {
                    $this->addError('permissions', $this->context->msg('flow-error-not-allowed'));
                    break;
                }

                /** @var RevisionViewFormatter $formatter */
                $formatter = Container::get('formatter.revisionview');
                $output['revision'] = $formatter->formatApi($row, $this->context);
            } else {
                $format = isset($options['format']) ? $options['format'] : 'fixed-html';
                $output += $this->renderNewestTopicSummary($format);
            }
            break;

        case 'edit-topic-summary':
            // default to wikitext for no-JS
            $format = isset($options['format']) ? $options['format'] : 'wikitext';
            $output += $this->renderNewestTopicSummary($format);
            break;

        case 'undo-edit-topic-summary':
            $output = $this->renderUndoApi($options) + $output;
            break;

        case 'compare-postsummary-revisions':
            // @Todo - duplicated logic in other diff view block
            if (!isset($options['newRevision'])) {
                throw new InvalidInputException('A revision must be provided for comparison', 'revision-comparison');
            }

            $oldRevision = null;
            if (isset($options['oldRevision'])) {
                // was: $options['newRevision'] — diffed a revision with itself
                $oldRevision = $options['oldRevision'];
            }
            list($new, $old) = Container::get('query.postsummary.view')->getDiffViewResult(
                UUID::create($options['newRevision']),
                UUID::create($oldRevision)
            );
            if (
                !$this->permissions->isAllowed($new->revision, 'view-topic-summary') ||
                !$this->permissions->isAllowed($old->revision, 'view-topic-summary')
            ) {
                $this->addError('permissions', $this->context->msg('flow-error-not-allowed'));
                break;
            }
            $output['revision'] = Container::get('formatter.revision.diff.view')->formatApi($new, $old, $this->context);
            break;
    }

    if ($this->wasSubmitted()) {
        $output += array('submitted' => $this->submitted, 'errors' => $this->errors);
    } else {
        $output += array('submitted' => array(), 'errors' => $this->errors);
    }

    return $output;
}
/**
 * Walk every flow_revision row for the current wiki in batches and rewrite
 * each revision's content-length fields via updateRevision(), persisting the
 * result through the storage layer.
 *
 * One DB transaction per batch; cache and slave lag are handled after each
 * batch. Rows with unknown types or unloadable revisions are counted as
 * failures and skipped, not fatal.
 *
 * @return bool Always true (maintenance-script convention for "update ran")
 * @throws \Exception Re-thrown if persisting an updated revision fails
 */
public function doDBUpdates() {
	// Can't be done in constructor, happens too early in
	// boot process
	$this->dbFactory = Container::get('db.factory');
	$this->storage = Container::get('storage');

	// Since this is a one-shot maintenance script just reach in via reflection
	// to change lengths (the properties are private on AbstractRevision)
	$this->contentLengthProperty = new ReflectionProperty('Flow\\Model\\AbstractRevision', 'contentLength');
	$this->contentLengthProperty->setAccessible(true);
	$this->previousContentLengthProperty = new ReflectionProperty('Flow\\Model\\AbstractRevision', 'previousContentLength');
	$this->previousContentLengthProperty->setAccessible(true);

	$dbw = $this->dbFactory->getDb(DB_MASTER);

	// Walk through the flow_revision table in primary-key batches
	$it = new EchoBatchRowIterator($dbw, 'flow_revision', 'rev_id', $this->mBatchSize);
	// Only fetch rows created by users from the current wiki.
	$it->addConditions(array('rev_user_wiki' => wfWikiId()));
	// We only need the id and type field
	$it->setFetchColumns(array('rev_id', 'rev_type'));

	// $total = rows seen, $fail = rows skipped due to unknown type / load failure
	$total = $fail = 0;
	foreach ($it as $batch) {
		// One transaction per batch keeps lock hold times bounded
		$dbw->begin();
		foreach ($batch as $row) {
			$total++;
			// rev_type must map to a known storage class; otherwise skip
			if (!isset(self::$revisionTypes[$row->rev_type])) {
				$this->output('Unknown revision type: ' . $row->rev_type);
				$fail++;
				continue;
			}
			$om = $this->storage->getStorage(self::$revisionTypes[$row->rev_type]);
			$revId = UUID::create($row->rev_id);
			$obj = $om->get($revId);
			if (!$obj) {
				$this->output('Could not load revision: ' . $revId->getAlphadecimal());
				$fail++;
				continue;
			}
			// First revisions have no predecessor; otherwise the previous
			// revision is required to compute the previous content length.
			if ($obj->isFirstRevision()) {
				$previous = null;
			} else {
				$previous = $om->get($obj->getPrevRevisionId());
				if (!$previous) {
					$this->output('Could not locate previous revision: ' . $obj->getPrevRevisionId()->getAlphadecimal());
					$fail++;
					continue;
				}
			}
			// Mutates $obj's length fields (via the reflection handles above)
			$this->updateRevision($obj, $previous);
			try {
				$om->put($obj);
			} catch (\Exception $e) {
				$this->error('Failed to update revision ' . $obj->getRevisionId()->getAlphadecimal() . ': ' . $e->getMessage() . "\n" . 'Please make sure rev_content, rev_content_length, rev_flags & rev_previous_content_length are part of RevisionStorage::$allowedUpdateColumns.');
				throw $e;
			}
			// Progress dot per successfully updated revision
			$this->output('.');
		}
		$dbw->commit();
		// Drop in-process caches and let replication catch up between batches
		$this->storage->clear();
		$this->dbFactory->waitForSlaves();
	}
	return true;
}
/** * Get the URL of a UUID for a workflow. * @return string|null */ protected function getWorkflowUrl() { try { $rootId = UUID::create($this->uuid); /** @var ObjectManager $om */ $om = Container::get('storage.workflow'); $workflow = $om->get($rootId); if ($workflow instanceof Workflow) { /** @var UrlGenerator $urlGenerator */ $urlGenerator = Container::get('url_generator'); return $urlGenerator->workflowLink(null, $rootId)->getFullUrl(); } else { return null; } } catch (FlowException $e) { return null; // The UUID is invalid or has no root post. } }