public function refreshBatch(DatabaseBase $dbr, UUID $continue, $countableActions, UUID $stop) {
    $rows = $dbr->select(
        'flow_revision',
        array('rev_id', 'rev_user_id'),
        array(
            'rev_id > ' . $dbr->addQuotes($continue->getBinary()),
            'rev_id <= ' . $dbr->addQuotes($stop->getBinary()),
            'rev_user_id > 0',
            'rev_user_wiki' => wfWikiID(),
            'rev_change_type' => $countableActions
        ),
        __METHOD__,
        array('ORDER BY' => 'rev_id ASC', 'LIMIT' => $this->mBatchSize)
    );

    // end of data
    if (!$rows || $rows->numRows() === 0) {
        return false;
    }

    foreach ($rows as $row) {
        // User::incEditCount only allows for edit count to be increased 1
        // at a time. It'd be better to immediately be able to increase the
        // edit count by the exact number it should be increased with, but
        // I'd rather re-use existing code, especially in a run-once script,
        // where performance is not the most important thing ;)
        $user = User::newFromId($row->rev_user_id);
        $user->incEditCount();

        // save updates so we can print them when the script is done running
        if (!isset($this->updates[$user->getId()])) {
            $this->updates[$user->getId()] = 0;
        }
        $this->updates[$user->getId()]++;

        // set value for next batch to continue at
        $continue = $row->rev_id;
    }

    return UUID::create($continue);
}
public function getResult(UUID $uuid) {
    $alpha = $uuid->getAlphadecimal();

    // Minimal set of data needed for the CategoryViewFormatter
    $row = new FormatterRow();

    if (!isset($this->posts[$alpha])) {
        throw new FlowException("A required post has not been loaded: {$alpha}");
    }
    $row->revision = reset($this->posts[$alpha]);

    if (!isset($this->workflows[$alpha])) {
        throw new FlowException("A required workflow has not been loaded: {$alpha}");
    }
    $row->workflow = $this->workflows[$alpha];

    return $row;
}
public function renderApi(array $options) {
    global $wgRequest;

    if ($this->workflow->isNew()) {
        return array(
            'type' => $this->getName(),
            'revisions' => array(),
            'links' => array()
        );
    }

    /** @var BoardHistoryQuery $query */
    $query = Container::get('query.board-history');
    /** @var RevisionFormatter $formatter */
    $formatter = Container::get('formatter.revision');
    $formatter->setIncludeHistoryProperties(true);

    list($limit, ) = $wgRequest->getLimitOffset();
    // don't use offset from getLimitOffset - that assumes an int, which our
    // UUIDs are not
    $offset = $wgRequest->getText('offset');
    $offset = $offset ? UUID::create($offset) : null;

    $pager = new HistoryPager($query, $this->workflow->getId());
    $pager->setLimit($limit);
    $pager->setOffset($offset);
    $pager->doQuery();
    $history = $pager->getResult();

    $revisions = array();
    foreach ($history as $row) {
        $serialized = $formatter->formatApi($row, $this->context, 'history');
        if ($serialized) {
            $revisions[$serialized['revisionId']] = $serialized;
        }
    }

    return array(
        'type' => $this->getName(),
        'revisions' => $revisions,
        'navbar' => $pager->getNavigationBar(),
        'links' => array()
    );
}
/**
 * @param string[] $row
 * @param Header|null $obj
 * @return Header
 */
public static function fromStorageRow(array $row, $obj = null) {
    /** @var $obj Header */
    $obj = parent::fromStorageRow($row, $obj);
    $obj->workflowId = UUID::create($row['rev_type_id']);
    return $obj;
}
protected function processPosts($recorder) {
    $storage = Container::get('storage.post');
    $count = $this->mBatchSize;
    $id = '';
    $dbr = Container::get('db.factory')->getDB(DB_SLAVE);

    while ($count === $this->mBatchSize) {
        $count = 0;
        $res = $dbr->select(
            array('flow_tree_revision'),
            array('tree_rev_id'),
            array(
                'tree_parent_id IS NOT NULL',
                'tree_rev_id > ' . $dbr->addQuotes($id)
            ),
            __METHOD__,
            array('ORDER BY' => 'tree_rev_id ASC', 'LIMIT' => $this->mBatchSize)
        );

        if (!$res) {
            throw new \MWException('SQL error in maintenance script ' . __METHOD__);
        }

        foreach ($res as $row) {
            $count++;
            $id = $row->tree_rev_id;
            $uuid = UUID::create($id);
            $alpha = $uuid->getAlphadecimal();
            $post = $storage->get($uuid);
            if ($post) {
                echo "Processing post {$alpha}\n";
                $recorder->onAfterInsert(
                    $post,
                    array(),
                    array('workflow' => $post->getCollection()->getWorkflow())
                );
            }
        }
    }
}
protected function insertRelated(array $rows) {
    if (!is_array(reset($rows))) {
        $rows = array($rows);
    }

    $trees = array();
    foreach ($rows as $key => $row) {
        $trees[$key] = $this->splitUpdate($row, 'tree');
    }

    $dbw = $this->dbFactory->getDB(DB_MASTER);
    $res = $dbw->insert(
        $this->joinTable(),
        $this->preprocessNestedSqlArray($trees),
        __METHOD__
    );

    // If this is a brand new root revision it needs to be added to the tree.
    // If it has a rev_parent_id then it's already part of the tree.
    if ($res) {
        foreach ($rows as $row) {
            if ($row['rev_parent_id'] === null) {
                $res = $res && $this->treeRepo->insert(
                    UUID::create($row['tree_rev_descendant_id']),
                    UUID::create($row['tree_parent_id'])
                );
            }
        }
    }

    if (!$res) {
        return array();
    }

    return $rows;
}
public function updateRevision($columnPrefix, DatabaseBase $dbw, $continue = null) {
    $rows = $dbw->select(
        'flow_revision',
        array('rev_id', 'rev_type'),
        array(
            'rev_id > ' . $dbw->addQuotes($continue),
            "{$columnPrefix}_id > 0",
            "{$columnPrefix}_ip IS NOT NULL"
        ),
        __METHOD__,
        array('LIMIT' => $this->mBatchSize, 'ORDER BY' => 'rev_id')
    );

    $ids = $objs = array();
    foreach ($rows as $row) {
        $id = UUID::create($row->rev_id);
        $type = self::$types[$row->rev_type];
        $om = $this->storage->getStorage($type);
        $obj = $om->get($id);
        if ($obj) {
            $om->merge($obj);
            $ids[] = $row->rev_id;
            $objs[] = $obj;
        } else {
            $this->error(__METHOD__ . ": Failed loading {$type}: " . $id->getAlphadecimal());
        }
    }
    if (!$ids) {
        return null;
    }

    $dbw->update(
        'flow_revision',
        array("{$columnPrefix}_ip" => null),
        array('rev_id' => $ids),
        __METHOD__
    );
    foreach ($objs as $obj) {
        $this->storage->cachePurge($obj);
    }

    $this->completeCount += count($ids);

    return end($ids);
}
/**
 * Common case 4: alphadecimal from cache to binary and timestamp.
 */
public function case4($alpha) {
    // clone to avoid internal object caching
    $id = clone UUID::create($alpha);
    $id->getTimestampObj();
    $id->getBinary();
}
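// Usage sketch (an assumption, not part of the original benchmark): round-trip a
// freshly generated id through its alphadecimal form to exercise the same
// conversions as case4 above. Only methods shown elsewhere in this file are used.
$alpha = UUID::create()->getAlphadecimal();
$id = clone UUID::create($alpha); // clone to bypass internal object caching, as in case4
$timestamp = $id->getTimestampObj(); // creation time encoded in the id
$binary = $id->getBinary();          // binary form, the database representation of the id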
/**
 * @return PostRevision[]
 * @throws DataModelException
 */
public function getChildren() {
    if ($this->children === null) {
        throw new DataModelException(
            'Children not loaded for post: ' . $this->postId->getAlphadecimal(),
            'process-data'
        );
    }
    return $this->children;
}
/**
 * Retrieves a single post and the related topic title.
 *
 * @param UUID|string $postId The uid of the post being requested
 * @return PostRevision[]|null[] associative array with 'root' and 'post' keys. Array
 *  values may be null if not found.
 * @throws InvalidDataException
 */
public function getWithRoot($postId) {
    $postId = UUID::create($postId);
    $rootId = $this->treeRepo->findRoot($postId);
    $found = $this->storage->findMulti(
        'PostRevision',
        array(
            array('rev_type_id' => $postId),
            array('rev_type_id' => $rootId)
        ),
        array('sort' => 'rev_id', 'order' => 'DESC', 'limit' => 1)
    );

    $res = array('post' => null, 'root' => null);
    if (!$found) {
        return $res;
    }

    foreach ($found as $result) {
        // limit = 1 means single result
        $post = reset($result);
        if ($postId->equals($post->getPostId())) {
            $res['post'] = $post;
        } elseif ($rootId->equals($post->getPostId())) {
            $res['root'] = $post;
        } else {
            throw new InvalidDataException('Unmatched: ' . $post->getPostId()->getAlphadecimal());
        }
    }

    // The above doesn't catch this condition
    if ($postId->equals($rootId)) {
        $res['root'] = $res['post'];
    }

    return $res;
}
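// Usage sketch (assumed caller code, not from the original source): $loader stands
// in for an instance of the class defining getWithRoot() above, and $postId may be
// a UUID object or its alphadecimal string. Both array values can be null when
// nothing was found.
$found = $loader->getWithRoot($postId);
if ($found['post'] !== null && $found['root'] !== null) {
    // when the requested post is the topic title itself, 'post' and 'root' match
    $isTopicTitle = $found['post']->getPostId()->equals($found['root']->getPostId());
}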
/**
 * @param UUID|null $fromId
 * @param UUID|null $toId
 * @param int|null $namespace
 * @return array
 */
public function buildQueryConditions(UUID $fromId = null, UUID $toId = null, $namespace = null) {
    $dbr = $this->dbFactory->getDB(DB_SLAVE);

    $conditions = array();

    // only find entries in a given range
    if ($fromId !== null) {
        $conditions[] = 'rev_id >= ' . $dbr->addQuotes($fromId->getBinary());
    }
    if ($toId !== null) {
        $conditions[] = 'rev_id <= ' . $dbr->addQuotes($toId->getBinary());
    }

    // find only within requested wiki/namespace
    $conditions['workflow_wiki'] = wfWikiId();
    if ($namespace !== null) {
        $conditions['workflow_namespace'] = $namespace;
    }

    return $conditions;
}
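// Usage sketch (assumption, not original code): restrict a query to revisions newer
// than an existing UUID bound, on the current wiki, in the Talk namespace. $builder
// and $fromId are assumed to exist; NS_TALK is the core MediaWiki namespace constant.
$conditions = $builder->buildQueryConditions($fromId, null, NS_TALK);
// $conditions now holds a 'rev_id >= ...' clause plus workflow_wiki / workflow_namespace keys.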
public function doQuery() {
    $direction = $this->mIsBackwards ? 'rev' : 'fwd';

    // over-fetch so we can figure out if there's anything after what we're showing
    $this->mResult = $this->query->getResults($this->id, $this->getLimit() + 1, $this->mOffset, $direction);
    if (!$this->mResult) {
        throw new InvalidDataException(
            'Unable to load history for ' . $this->id->getAlphadecimal(),
            'fail-load-history'
        );
    }
    $this->mQueryDone = true;

    // we over-fetched, now get rid of the redundant value for our "real" data
    $overfetched = null;
    if (count($this->mResult) > $this->getLimit()) {
        // when traversing history in reverse, the overfetched entry will be at
        // the beginning of the list; in normal mode it'll be last
        if ($this->mIsBackwards) {
            $overfetched = array_shift($this->mResult);
        } else {
            $overfetched = array_pop($this->mResult);
        }
    }

    // set some properties that'll be used to generate the navigation bar
    $this->mLastShown = $this->mResult[count($this->mResult) - 1]->revision->getRevisionId()->getAlphadecimal();
    $this->mFirstShown = $this->mResult[0]->revision->getRevisionId()->getAlphadecimal();

    /*
     * By overfetching, we've already figured out if there are additional
     * entries at the next page (according to the current direction). Now
     * go fetch 1 more in the other direction (the one we likely came from,
     * when navigating)
     */
    $nextOffset = $this->mIsBackwards ? $this->mFirstShown : $this->mLastShown;
    $nextOffset = UUID::create($nextOffset);
    $reverseDirection = $this->mIsBackwards ? 'fwd' : 'rev';
    $this->mIsLast = !$overfetched;
    $this->mIsFirst = !$this->mOffset ||
        count($this->query->getResults($this->id, 1, $nextOffset, $reverseDirection)) === 0;

    if ($this->mIsBackwards) {
        // swap values if we're going backwards
        list($this->mIsFirst, $this->mIsLast) = array($this->mIsLast, $this->mIsFirst);

        // id of the overfetched entry, used to build new links starting at
        // this offset
        if ($overfetched) {
            $this->mPastTheEndIndex = $overfetched->revision->getRevisionId()->getAlphadecimal();
        }
    }
}
public function testAcceptsParsoidHrefs() {
    $workflow = $this->getMock('Flow\\Model\\Workflow');
    $workflow->expects($this->any())
        ->method('getId')
        ->will($this->returnValue(UUID::create()));
    $workflow->expects($this->any())
        ->method('getArticleTitle')
        ->will($this->returnValue(Title::newMainPage()));

    $factory = new ReferenceFactory($workflow, 'foo', UUID::create());

    $ref = $factory->createWikiReference('file', './File:Foo.jpg');
    $this->assertInstanceOf('Flow\\Model\\WikiReference', $ref);
    $this->assertEquals('title:File:Foo.jpg', $ref->getTargetIdentifier());
}
/**
 * Instantiates a URLReference object from a storage row.
 *
 * @param string[] $row
 * @return URLReference
 */
public static function fromStorageRow($row) {
    $workflow = UUID::create($row['ref_src_workflow_id']);
    $objectType = $row['ref_src_object_type'];
    $objectId = UUID::create($row['ref_src_object_id']);
    $url = $row['ref_target'];
    $type = $row['ref_type'];
    $srcTitle = Title::makeTitle($row['ref_src_namespace'], $row['ref_src_title']);

    return new URLReference($workflow, $srcTitle, $objectType, $objectId, $type, $url);
}
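// Hedged sketch (assumed values, not original code): the keys below mirror the row
// consumed by fromStorageRow() above; the concrete values are placeholders, and
// $workflowId / $postId stand in for ids produced elsewhere by the storage layer.
$reference = URLReference::fromStorageRow(array(
    'ref_src_workflow_id' => $workflowId,    // assumed existing workflow id
    'ref_src_object_type' => 'post',         // assumed object type
    'ref_src_object_id'   => $postId,        // assumed existing post id
    'ref_target'          => 'https://example.org/',
    'ref_type'            => 'link',         // assumed reference type
    'ref_src_namespace'   => NS_MAIN,
    'ref_src_title'       => 'Example_page'
));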
/**
 * @param Title $title
 * @param PostRevision $post
 * @param User $user
 * @param string $content
 * @param string $format wikitext|html
 * @param string $changeType
 * @return PostSummary
 */
public static function create(Title $title, PostRevision $post, User $user, $content, $format, $changeType) {
    $obj = new self();
    $obj->revId = UUID::create();
    $obj->user = UserTuple::newFromUser($user);
    $obj->prevRevision = null;
    $obj->changeType = $changeType;
    $obj->summaryTargetId = $post->getPostId();
    $obj->setContent($content, $format, $title);
    return $obj;
}
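// Hedged sketch (assumption): creating a first summary revision for a topic.
// $title, $topicTitlePost and $user are assumed to be loaded elsewhere, and the
// change-type string is illustrative rather than taken from Flow's definitions.
$summary = PostSummary::create(
    $title,
    $topicTitlePost,
    $user,
    'One-line summary of the discussion so far.',
    'wikitext',
    'create-topic-summary' // assumed change type
);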
protected function findTopicListHistory(array $queries, array $options = array()) {
    $queries = $this->preprocessSqlArray(reset($queries));

    $res = $this->dbFactory->getDB(DB_SLAVE)->select(
        array('flow_topic_list', 'flow_tree_node', 'flow_tree_revision', 'flow_revision'),
        array('*'),
        array(
            'topic_id = tree_ancestor_id',
            'tree_descendant_id = tree_rev_descendant_id',
            'tree_rev_id = rev_id'
        ) + $queries,
        __METHOD__,
        $options
    );

    $retval = array();
    if ($res) {
        foreach ($res as $row) {
            $row = UUID::convertUUIDs((array) $row, 'alphadecimal');
            $retval[$row['rev_id']] = $row;
        }
    }

    return $retval;
}
public static function checkUserProvider() {
    $topicId = UUID::create();
    $revId = UUID::create();
    $postId = UUID::create();

    return array(
        array(
            'With only a topicId reply should not fail',
            function ($test, $message, $result) {
                $test->assertNotNull($result);
                $test->assertArrayHasKey('links', $result, $message);
            },
            'reply', $topicId, $revId, null
        ),
        array(
            'With topicId and postId should not fail',
            function ($test, $message, $result) {
                $test->assertNotNull($result);
                $test->assertArrayHasKey('links', $result, $message);
            },
            'reply', $topicId, $revId, $postId
        )
    );
}
/**
 * {@inheritDoc}
 */
public function getRevisions(array $conditions = array(), array $options = array()) {
    $dbr = $this->dbFactory->getDB(DB_SLAVE);

    // get the current (=most recent, =max) revision id for all headers
    $rows = $dbr->select(
        array('flow_revision', 'flow_workflow'),
        array('rev_id' => 'MAX(rev_id)'),
        $conditions,
        __METHOD__,
        array('ORDER BY' => 'rev_id ASC', 'GROUP BY' => 'rev_type_id') + $options,
        array(
            'flow_workflow' => array(
                'INNER JOIN',
                array('workflow_id = rev_type_id', 'rev_type' => 'header')
            )
        )
    );

    $uuids = array();
    foreach ($rows as $row) {
        $uuids[] = UUID::create($row->rev_id);
    }

    /** @var ManagerGroup $storage */
    $storage = Container::get('storage');
    return $storage->getStorage('Header')->getMulti($uuids);
}
public function testTocOnly() {
    $topicData = array();
    for ($i = 0; $i < 3; $i++) {
        $title = self::TITLE_PREFIX . $i;
        $topic = $this->createTopic($title);

        $data = $this->doApiRequest(array(
            'page' => $topic['topic-page'],
            'action' => 'flow',
            'submodule' => 'view-topic'
        ));

        $topicData[$i]['response'] = $data[0]['flow']['view-topic']['result']['topic'];
        $topicData[$i]['page'] = $topic['topic-page'];
        $topicData[$i]['id'] = $topic['topic-id'];
        $topicData[$i]['revisionId'] = $topic['topic-revision-id'];

        $actualRevision = $topicData[$i]['response']['revisions'][$topicData[$i]['revisionId']];
        $topicData[$i]['expectedRevision'] = array(
            'content' => array(
                'content' => $title,
                'format' => 'plaintext'
            ),
            'last_updated' => $actualRevision['last_updated']
        );
    }

    $flowQaTitle = Title::newFromText('Talk:Flow_QA');

    $expectedCommonResponse = array(
        'flow' => array(
            'view-topiclist' => array(
                'result' => array(
                    'topiclist' => array(
                        'submitted' => array(
                            'savesortby' => false,
                            'offset-dir' => 'fwd',
                            'offset-id' => null,
                            'offset' => null,
                            'limit' => 2,
                            'toconly' => true,
                            'include-offset' => false,
                            'format' => 'fixed-html'
                        ),
                        'errors' => array(),
                        'type' => 'topiclist'
                    )
                ),
                'status' => 'ok'
            )
        )
    );

    $expectedEmptyPageResponse = array_merge_recursive(array(
        'flow' => array(
            'view-topiclist' => array(
                'result' => array(
                    'topiclist' => array(
                        'submitted' => array(
                            'sortby' => 'user'
                        ),
                        'sortby' => 'newest',
                        'roots' => array(),
                        'posts' => array(),
                        'revisions' => array(),
                        'links' => array(
                            'pagination' => array()
                        )
                    )
                )
            )
        )
    ), $expectedCommonResponse);

    $actualEmptyPageResponse = $this->doApiRequest(array(
        'action' => 'flow',
        'page' => 'Talk:Intentionally blank',
        'submodule' => 'view-topiclist',
        'vtllimit' => 2,
        'vtltoconly' => true,
        'vtlformat' => 'fixed-html'
    ));
    $actualEmptyPageResponse = $actualEmptyPageResponse[0];

    $this->assertEquals(
        $expectedEmptyPageResponse,
        $actualEmptyPageResponse,
        'TOC-only output for an empty, but occupied, Flow board'
    );

    $expectedNewestResponse = array_merge_recursive(array(
        'flow' => array(
            'view-topiclist' => array(
                'result' => array(
                    'topiclist' => array(
                        'submitted' => array(
                            'sortby' => 'newest'
                        ),
                        'sortby' => 'newest',
                        'roots' => array(
                            $topicData[2]['id'],
                            $topicData[1]['id']
                        ),
                        'posts' => array(
                            $topicData[2]['id'] => $topicData[2]['response']['posts'][$topicData[2]['id']],
                            $topicData[1]['id'] => $topicData[1]['response']['posts'][$topicData[1]['id']]
                        ),
                        'revisions' => array(
                            $topicData[2]['revisionId'] => $topicData[2]['expectedRevision'],
                            $topicData[1]['revisionId'] => $topicData[1]['expectedRevision']
                        ),
                        'links' => array(
                            'pagination' => array(
                                'fwd' => array(
                                    'url' => $flowQaTitle->getLinkURL(array(
                                        'topiclist_offset-dir' => 'fwd',
                                        'topiclist_limit' => '2',
                                        'topiclist_offset-id' => $topicData[1]['id'],
                                        'topiclist_sortby' => 'newest'
                                    )),
                                    'title' => 'fwd',
                                    'text' => 'fwd'
                                )
                            )
                        )
                    )
                )
            )
        )
    ), $expectedCommonResponse);

    $actualNewestResponse = $this->doApiRequest(array(
        'action' => 'flow',
        'page' => 'Talk:Flow QA',
        'submodule' => 'view-topiclist',
        'vtllimit' => 2,
        'vtlsortby' => 'newest',
        'vtltoconly' => true,
        'vtlformat' => 'fixed-html'
    ));
    $actualNewestResponse = $actualNewestResponse[0];

    $this->assertEquals(
        $expectedNewestResponse,
        $actualNewestResponse,
        'TOC-only output for "newest" order'
    );

    // Make it so update order is chronologically (1, 0, 2)
    // We then expect it to be returned reverse chronologically (2, 0)
    $updateList = array(1, 0, 2);
    foreach ($updateList as $updateListInd => $topicDataInd) {
        $replyResponse = $this->doApiRequest(array(
            'action' => 'flow',
            'page' => $topicData[$topicDataInd]['page'],
            'submodule' => 'reply',
            'token' => $this->getEditToken(),
            'repreplyTo' => $topicData[$topicDataInd]['id'],
            'repcontent' => "Reply to topic {$topicDataInd}"
        ));

        // This is because we use timestamps with second granularity.
        // Without this, the timestamp can be exactly the same
        // for two topics, which means the ordering is undefined (and thus
        // untestable). This was causing failures on Jenkins.
        //
        // Possible improvement: Make a simple class for getting the current
        // time that normally calls wfTimestampNow. Have an alternative
        // implementation for tests that can be controlled by an API like
        // http://sinonjs.org/ (which we use on the client side).
        // Pimple can be in charge of which is used.
        if ($updateListInd !== count($updateList) - 1) {
            sleep(1);
        }

        $newPostId = $replyResponse[0]['flow']['reply']['committed']['topic']['post-id'];
        $topicData[$topicDataInd]['updateTimestamp'] = UUID::create($newPostId)->getTimestamp();
        $topicData[$topicDataInd]['expectedRevision']['last_updated'] =
            wfTimestamp(TS_UNIX, $topicData[$topicDataInd]['updateTimestamp']) * 1000;
    }

    $expectedUpdatedResponse = array_merge_recursive(array(
        'flow' => array(
            'view-topiclist' => array(
                'result' => array(
                    'topiclist' => array(
                        'submitted' => array(
                            'sortby' => 'updated'
                        ),
                        'sortby' => 'updated',
                        'roots' => array(
                            $topicData[2]['id'],
                            $topicData[0]['id']
                        ),
                        'posts' => array(
                            $topicData[2]['id'] => $topicData[2]['response']['posts'][$topicData[2]['id']],
                            $topicData[0]['id'] => $topicData[0]['response']['posts'][$topicData[0]['id']]
                        ),
                        'revisions' => array(
                            $topicData[2]['revisionId'] => $topicData[2]['expectedRevision'],
                            $topicData[0]['revisionId'] => $topicData[0]['expectedRevision']
                        ),
                        'links' => array(
                            'pagination' => array(
                                'fwd' => array(
                                    'url' => $flowQaTitle->getLinkURL(array(
                                        'topiclist_offset-dir' => 'fwd',
                                        'topiclist_limit' => '2',
                                        'topiclist_offset' => $topicData[0]['updateTimestamp'],
                                        'topiclist_sortby' => 'updated'
                                    )),
                                    'title' => 'fwd',
                                    'text' => 'fwd'
                                )
                            )
                        )
                    )
                )
            )
        )
    ), $expectedCommonResponse);

    $actualUpdatedResponse = $this->doApiRequest(array(
        'action' => 'flow',
        'page' => 'Talk:Flow QA',
        'submodule' => 'view-topiclist',
        'vtllimit' => 2,
        'vtlsortby' => 'updated',
        'vtltoconly' => true,
        'vtlformat' => 'fixed-html'
    ));
    $actualUpdatedResponse = $actualUpdatedResponse[0];

    $this->assertEquals(
        $expectedUpdatedResponse,
        $actualUpdatedResponse,
        'TOC-only output for "updated" order'
    );
}
/**
 * Adds a moderation activity item to the log under the appropriate action
 *
 * @param PostRevision $post
 * @param string $action The action we'll be logging
 * @param string $reason Comment, reason for the moderation
 * @param UUID $workflowId Workflow being worked on
 * @return int|null The id of the newly inserted log entry, or null if the action cannot be logged
 */
public function log(PostRevision $post, $action, $reason, UUID $workflowId) {
    if (!$this->canLog($post, $action)) {
        return null;
    }

    $params = array('topicId' => $workflowId->getAlphadecimal());
    if (!$post->isTopicTitle()) {
        $params['postId'] = $post->getPostId()->getAlphadecimal();
    }

    $logType = $this->getLogType($post, $action);

    // reasonably likely this is already loaded in-process and just returns that object
    /** @var Workflow $workflow */
    $workflow = Container::get('storage.workflow')->get($workflowId);
    if ($workflow) {
        $title = $workflow->getArticleTitle();
    } else {
        $title = false;
    }
    $error = false;
    if (!$title) {
        // We don't want to fail logging due to this, so repoint it at Main_Page, which
        // will probably be noticed; also log it below once we know the logId
        $title = Title::newMainPage();
        $error = true;
    }

    // insert logging entry
    $logEntry = new ManualLogEntry($logType, "flow-{$action}");
    $logEntry->setTarget($title);
    $logEntry->setPerformer($post->getUserTuple()->createUser());
    $logEntry->setParameters($params);
    $logEntry->setComment($reason);
    $logEntry->setTimestamp($post->getModerationTimestamp());

    $logId = $logEntry->insert();

    if ($error) {
        wfDebugLog(
            'Flow',
            __METHOD__ . ': Could not map workflowId to workflow object for ' .
                $workflowId->getAlphadecimal() . " log entry {$logId} defaulted to Main_Page"
        );
    }

    return $logId;
}
/**
 * @param array $row
 * @param TopicListEntry|null $obj
 * @return TopicListEntry
 * @throws DataModelException
 */
public static function fromStorageRow(array $row, $obj = null) {
    if ($obj === null) {
        $obj = new self();
    } elseif (!$obj instanceof self) {
        throw new DataModelException('Wrong obj type: ' . get_class($obj), 'process-data');
    }
    $obj->topicListId = UUID::create($row['topic_list_id']);
    $obj->topicId = UUID::create($row['topic_id']);
    if (isset($row['workflow_last_update_timestamp'])) {
        $obj->topicWorkflowLastUpdated = $row['workflow_last_update_timestamp'];
    }
    return $obj;
}
/**
 * Unserializes a Content object of the type supported by this ContentHandler.
 *
 * @since 1.21
 *
 * @param string $blob Serialized form of the content
 * @param string $format The format used for serialization
 *
 * @return Content The Content object created by deserializing $blob
 */
public function unserializeContent($blob, $format = null) {
    $info = FormatJson::decode($blob, true);
    $uuid = null;
    if (!$info) {
        // For transition from wikitext-type pages
        // Make a plain content object and then when we get a chance
        // we can insert a proper object.
        return $this->makeEmptyContent();
    } elseif (isset($info['flow-workflow'])) {
        $uuid = UUID::create($info['flow-workflow']);
    }

    return new BoardContent(CONTENT_MODEL_FLOW_BOARD, $uuid);
}
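// Hedged sketch (assumption, not taken from the extension's tests): the blob decoded
// above is JSON carrying the board's workflow id under the 'flow-workflow' key.
// $handler and $workflowId are assumed to exist in the calling context.
$blob = '{"flow-workflow":"' . $workflowId->getAlphadecimal() . '"}';
$content = $handler->unserializeContent($blob);
// Anything that fails to decode as JSON falls back to makeEmptyContent() instead.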
/**
 * Tests that a PostRevision::fromStorageRow & ::toStorageRow roundtrip
 * returns the same DB data.
 */
public function testRoundtrip() {
    $row = $this->generateRow();
    $object = PostRevision::fromStorageRow($row);

    // toStorageRow will add a bogus column 'rev_content_url' - that's ok.
    // It'll be caught in code to distinguish between external content and
    // content to be saved in rev_content, and, before inserting into DB,
    // it'll be unset. We'll ignore this column here.
    $roundtripRow = PostRevision::toStorageRow($object);
    unset($roundtripRow['rev_content_url']);

    // Due to our desire to store alphadecimal values in cache and binary values on
    // disk we need to perform uuid conversion before comparing
    $roundtripRow = UUID::convertUUIDs($roundtripRow, 'binary');

    $this->assertEquals($row, $roundtripRow);
}
/**
 * At the moment, does three things:
 * 1. Finds UUID objects and returns their database representation.
 * 2. Checks for unarmoured raw SQL and errors out if it exists.
 * 3. Finds armoured raw SQL and expands it out.
 *
 * @param array $data Query conditions for DatabaseBase::select
 * @return array query conditions escaped for use
 * @throws DataModelException
 */
protected function preprocessSqlArray(array $data) {
    // Assuming that all databases have the same escaping settings.
    $db = $this->dbFactory->getDB(DB_SLAVE);

    $data = UUID::convertUUIDs($data, 'binary');

    foreach ($data as $key => $value) {
        if ($value instanceof RawSql) {
            $data[$key] = $value->getSql($db);
        } elseif (is_numeric($key)) {
            throw new DataModelException("Unescaped raw SQL found in " . __METHOD__, 'process-data');
        } elseif (!preg_match('/^[A-Za-z0-9\\._]+$/', $key)) {
            throw new DataModelException("Dangerous SQL field name '{$key}' found in " . __METHOD__, 'process-data');
        }
    }

    return $data;
}
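// Hedged sketch (not original code): the key step above is UUID::convertUUIDs()
// with 'binary', which rewrites UUID-valued conditions into their on-disk form
// before they reach DatabaseBase::select.
$conditions = array('rev_type_id' => UUID::create());
$binaryConditions = UUID::convertUUIDs($conditions, 'binary');
// $binaryConditions['rev_type_id'] now holds the binary representation of the id.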
protected function helperToTestUpdating($old, $new, $expectedUpdateValues, $isContentUpdatingAllowed) {
    $dbw = $this->getMockBuilder('DatabaseMysql')
        ->disableOriginalConstructor()
        ->getMock();
    $factory = $this->getMockBuilder('Flow\\DbFactory')
        ->disableOriginalConstructor()
        ->getMock();
    $factory->expects($this->any())
        ->method('getDB')
        ->will($this->returnValue($dbw));
    $id = UUID::create();

    $old['rev_id'] = $id->getBinary();
    $new['rev_id'] = $id->getBinary();

    $dbw->expects($this->once())
        ->method('update')
        ->with(
            $this->equalTo('flow_revision'),
            $this->equalTo($expectedUpdateValues),
            $this->equalTo(array('rev_id' => $id->getBinary()))
        )
        ->will($this->returnValue(true));
    $dbw->expects($this->any())
        ->method('affectedRows')
        ->will($this->returnValue(1));

    // Header is a bare-bones implementation, sufficient for testing
    // the parent class.
    $storage = new HeaderRevisionStorage($factory, $this->MOCK_EXTERNAL_STORE_CONFIG);

    $this->setWhetherContentUpdatingAllowed($storage, $isContentUpdatingAllowed);
    $storage->update($old, $new);
}
/**
 * Query the topic list ordered by the last-updated field. The sort field is in a
 * different table, so we need to override the parent find() method slightly to
 * achieve this.
 */
public function find(array $attributes, array $options = array()) {
    $attributes = $this->preprocessSqlArray($attributes);

    if (!$this->validateOptions($options)) {
        throw new \MWException("Validation error in database options");
    }

    $res = $this->dbFactory->getDB(DB_MASTER)->select(
        array($this->table, 'flow_workflow'),
        'topic_list_id, topic_id, workflow_last_update_timestamp',
        array_merge($attributes, array('topic_id = workflow_id')),
        __METHOD__ . " ({$this->table})",
        $options
    );

    if (!$res) {
        // TODO: This should probably not silently fail on database errors.
        return null;
    }

    $result = array();
    foreach ($res as $row) {
        $result[] = UUID::convertUUIDs((array) $row, 'alphadecimal');
    }

    return $result;
}
protected function doDBUpdates() {
    $container = Container::getContainer();
    $dbFactory = $container['db.factory'];
    $dbw = $dbFactory->getDb(DB_MASTER);
    $storage = $container['storage'];
    $moderationLoggingListener = $container['storage.post.listeners.moderation_logging'];

    $rowIterator = new EchoBatchRowIterator(
        $dbw,
        'flow_revision',
        'rev_id',
        $this->mBatchSize
    );
    $rowIterator->setFetchColumns(array('rev_id', 'rev_type'));
    // Fetch rows that are a moderation action
    $rowIterator->addConditions(array(
        'rev_change_type' => ModerationLoggingListener::getModerationChangeTypes(),
        'rev_user_wiki' => wfWikiID()
    ));

    $start = $this->getOption('start');
    $startId = UUID::create($start);
    $rowIterator->addConditions(array(
        'rev_id > ' . $dbw->addQuotes($startId->getBinary())
    ));

    $stop = $this->getOption('stop');
    $stopId = UUID::create($stop);
    $rowIterator->addConditions(array(
        'rev_id < ' . $dbw->addQuotes($stopId->getBinary())
    ));

    $total = $fail = 0;
    foreach ($rowIterator as $batch) {
        $dbw->begin();
        foreach ($batch as $row) {
            $total++;
            $objectManager = $storage->getStorage($row->rev_type);
            $revId = UUID::create($row->rev_id);
            $obj = $objectManager->get($revId);
            if (!$obj) {
                $this->error('Could not load revision: ' . $revId->getAlphadecimal());
                $fail++;
                continue;
            }

            $workflow = $obj->getCollection()->getWorkflow();
            $moderationLoggingListener->onAfterInsert($obj, array(), array('workflow' => $workflow));
        }
        $dbw->commit();
        $storage->clear();
        $dbFactory->waitForSlaves();
    }

    $this->output("Processed a total of {$total} moderation revisions.\n");
    if ($fail !== 0) {
        $this->error("Errors were encountered while processing {$fail} of them.\n");
    }

    return true;
}
/**
 * @dataProvider referenceExtractorProvider
 */
public function testReferenceExtractor($description, $wikitext, $expectedClass, $expectedType, $expectedTarget, $page = 'UTPage') {
    $referenceExtractor = Container::get('reference.extractor');

    $workflow = $this->getMock('Flow\\Model\\Workflow');
    $workflow->expects($this->any())
        ->method('getId')
        ->will($this->returnValue(UUID::create()));
    $workflow->expects($this->any())
        ->method('getArticleTitle')
        ->will($this->returnValue(Title::newMainPage()));

    $factory = new ReferenceFactory($workflow, 'foo', UUID::create());

    $reflMethod = new ReflectionMethod($referenceExtractor, 'extractReferences');
    $reflMethod->setAccessible(true);

    $reflProperty = new \ReflectionProperty($referenceExtractor, 'extractors');
    $reflProperty->setAccessible(true);
    $extractors = $reflProperty->getValue($referenceExtractor);

    $html = Utils::convert('wt', 'html', $wikitext, Title::newFromText($page));
    $result = $reflMethod->invoke($referenceExtractor, $factory, $extractors['post'], $html);

    $this->assertCount(1, $result, $html);

    $result = reset($result);
    $this->assertInstanceOf($expectedClass, $result, $description);
    $this->assertEquals($expectedType, $result->getType(), $description);
    $this->assertEquals($expectedTarget, $result->getTargetIdentifier(), $description);
}
public function update($row) {
    $title = Title::makeTitleSafe($row->workflow_namespace, $row->workflow_title_text);
    if ($title === null) {
        throw new Exception(sprintf(
            'Could not create title for %s at %s:%s',
            UUID::create($row->workflow_id)->getAlphadecimal(),
            $this->lang->getNsText($row->workflow_namespace) ?: $row->workflow_namespace,
            $row->workflow_title_text
        ));
    }

    // at some point, we failed to create page entries for new workflows: only
    // create that page if the workflow was stored with a 0 page id (otherwise,
    // we could mistake the $title for a deleted page)
    if ($row->workflow_page_id === 0 && $title->getArticleID() === 0) {
        // build workflow object (yes, loading them piecemeal is suboptimal, but
        // this is just a one-time script; considering the alternative is
        // creating a derivative EchoBatchRowIterator that returns workflows,
        // it doesn't really matter)
        $storage = Container::get('storage');
        $workflow = $storage->get('Workflow', UUID::create($row->workflow_id));

        try {
            /** @var OccupationController $occupationController */
            $occupationController = Container::get('occupation_controller');
            $occupationController->allowCreation($title, $occupationController->getTalkpageManager());
            $occupationController->ensureFlowRevision(new Article($title), $workflow);

            // force article id to be refetched from db
            $title->getArticleID(Title::GAID_FOR_UPDATE);
        } catch (\Exception $e) {
            // catch all exceptions to keep going with the rest we want to
            // iterate over; we'll report on the failed entries at the end
            $this->failed[] = $row;
        }
    }

    // re-associate the workflow with the correct page; only if a page exists
    if ($title->getArticleID() !== 0 && $title->getArticleID() !== (int) $row->workflow_page_id) {
        // This makes the assumption the page has not moved or been deleted?
        ++$this->fixedCount;
        return array('workflow_page_id' => $title->getArticleID());
    } elseif (!$row->workflow_page_id) {
        // No id exists for this workflow?
        $this->failed[] = $row;
    }

    return array();
}
public function provideDataGetWatchStatus() {
    // number of test cases
    $testCount = 10;
    $tests = array();
    while ($testCount > 0) {
        $testCount--;

        // number of uuids per test case
        $uuidCount = 10;
        $uuids = $dbResult = $result = array();
        while ($uuidCount > 0) {
            $uuidCount--;
            $uuid = UUID::create()->getAlphadecimal();
            $rand = rand(0, 1);
            // put in the query result
            if ($rand) {
                $dbResult[] = (object) array('wl_title' => $uuid);
                $result[$uuid] = true;
            } else {
                $result[$uuid] = false;
            }
            $uuids[] = $uuid;
        }
        $dbResult = new \ArrayObject($dbResult);
        $tests[] = array($uuids, $dbResult->getIterator(), $result);
    }

    // attach an empty uuids array to the query
    $uuids = $dbResult = $result = array();
    $emptyCount = 10;
    while ($emptyCount > 0) {
        $emptyCount--;
        $uuid = UUID::create()->getAlphadecimal();
        $dbResult[] = (object) array('wl_title' => $uuid);
    }
    $dbResult = new \ArrayObject($dbResult);
    $tests[] = array($uuids, $dbResult->getIterator(), $result);

    return $tests;
}