/**
 * @param array $updates Array of arrays each containing two keys, 'primaryKey'
 *   and 'changes'. 'primaryKey' must contain a map of column names to values
 *   sufficient to uniquely identify the row. 'changes' must contain a map of
 *   column names to update values to apply to the row.
 */
public function write(array $updates) {
    $this->db->begin();
    foreach ($updates as $update) {
        $this->db->update(
            $this->table,
            $update['changes'],
            $update['primaryKey'],
            __METHOD__
        );
    }
    $this->db->commit();

    wfGetLBFactory()->waitForReplication();
}
/**
 * @param array $updates Array of arrays each containing two keys, 'primaryKey'
 *   and 'changes'. 'primaryKey' must contain a map of column names to values
 *   sufficient to uniquely identify the row. 'changes' must contain a map of
 *   column names to update values to apply to the row.
 */
public function write(array $updates) {
    $this->db->begin();
    foreach ($updates as $update) {
        $this->db->update(
            $this->table,
            $update['changes'],
            $update['primaryKey'],
            __METHOD__
        );
    }
    $this->db->commit();

    wfWaitForSlaves(false, false, $this->clusterName);
}
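The two write() variants above differ only in how they wait for replication after the commit: the first uses the newer LBFactory::waitForReplication(), the second the older wfWaitForSlaves() helper with an explicit cluster name. A minimal sketch of a caller, assuming the surrounding class is instantiated as $writer; the page_id/page_touched column names are purely illustrative:

// Hypothetical usage: each entry pairs a unique row selector ('primaryKey')
// with the column values to set ('changes').
$now = wfTimestampNow();
$writer->write(array(
    array(
        'primaryKey' => array('page_id' => 101),
        'changes' => array('page_touched' => $now),
    ),
    array(
        'primaryKey' => array('page_id' => 102),
        'changes' => array('page_touched' => $now),
    ),
));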
/**
 * Begin a transaction on a DB
 *
 * This method makes it clear that begin() is called from a maintenance script,
 * which has outermost scope. This is safe, unlike $dbw->begin() called in other places.
 *
 * @param IDatabase $dbw
 * @param string $fname Caller name
 * @since 1.27
 */
protected function beginTransaction(IDatabase $dbw, $fname) {
    $dbw->begin($fname);
}
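A minimal sketch of how a maintenance script might use this wrapper, assuming the class also provides the matching commitTransaction() wrapper (MediaWiki's Maintenance class has both since 1.27); the table and conditions are illustrative only:

// Inside a Maintenance subclass
public function execute() {
    $dbw = $this->getDB(DB_MASTER);
    $this->beginTransaction($dbw, __METHOD__);
    // Illustrative write; any batch of updates could go here.
    $dbw->update('some_table', array('flag' => 1), array('flag' => 0), __METHOD__);
    $this->commitTransaction($dbw, __METHOD__);
}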
/**
 * This function should *not* be called outside of JobQueueDB
 *
 * @param IDatabase $dbw
 * @param array $jobs
 * @param int $flags
 * @param string $method
 * @throws DBError
 * @return void
 */
public function doBatchPushInternal(IDatabase $dbw, array $jobs, $flags, $method) {
    if (!count($jobs)) {
        return;
    }

    $rowSet = array(); // (sha1 => job) map for jobs that are de-duplicated
    $rowList = array(); // list of jobs for jobs that are not de-duplicated
    foreach ($jobs as $job) {
        $row = $this->insertFields($job);
        if ($job->ignoreDuplicates()) {
            $rowSet[$row['job_sha1']] = $row;
        } else {
            $rowList[] = $row;
        }
    }

    if ($flags & self::QOS_ATOMIC) {
        $dbw->begin($method); // wrap all the job additions in one transaction
    }
    try {
        // Strip out any duplicate jobs that are already in the queue...
        if (count($rowSet)) {
            $res = $dbw->select(
                'job',
                'job_sha1',
                array('job_sha1' => array_keys($rowSet), 'job_token' => ''),
                $method
            );
            foreach ($res as $row) {
                wfDebug("Job with hash '{$row->job_sha1}' is a duplicate.\n");
                unset($rowSet[$row->job_sha1]); // already enqueued
            }
        }
        // Build the full list of job rows to insert
        $rows = array_merge($rowList, array_values($rowSet));
        // Insert the job rows in chunks to avoid slave lag...
        foreach (array_chunk($rows, 50) as $rowBatch) {
            $dbw->insert('job', $rowBatch, $method);
        }
        JobQueue::incrStats('inserts', $this->type, count($rows));
        JobQueue::incrStats(
            'dupe_inserts',
            $this->type,
            count($rowSet) + count($rowList) - count($rows)
        );
    } catch (DBError $e) {
        if ($flags & self::QOS_ATOMIC) {
            $dbw->rollback($method);
        }
        throw $e;
    }
    if ($flags & self::QOS_ATOMIC) {
        $dbw->commit($method);
    }

    return;
}
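Since the docblock forbids calling this from outside JobQueueDB, here is a sketch (not the verbatim MediaWiki code) of how a caller inside that class could defer the batch until the connection has no pending transaction, using IDatabase::onTransactionIdle(); getMasterDB() is assumed to return the queue's master handle:

protected function doBatchPush(array $jobs, $flags) {
    $dbw = $this->getMasterDB();
    $method = __METHOD__;
    // Defer the writes until the handle is idle so the job inserts do not
    // get tangled up in an unrelated transaction on the same connection.
    $dbw->onTransactionIdle(function () use ($dbw, $jobs, $flags, $method) {
        $this->doBatchPushInternal($dbw, $jobs, $flags, $method);
    });
}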
private function badLockingMethodExplicit(IDatabase $db) {
    $lock = $db->getScopedLockAndFlush('meow', __METHOD__, 1);
    $db->begin(__METHOD__);
    throw new RunTimeException("Uh oh!");
}
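As the name suggests, this is the anti-pattern: an explicit transaction is opened while the scoped lock is held, and the exception unwinds with that transaction still open, so the lock's flush-on-release callback fires while work is pending. For contrast, a minimal sketch (not taken from the source) of the safe ordering, where the transaction is committed before $lock goes out of scope; the lock key, table, and update are hypothetical:

private function lockingMethodSketch(IDatabase $db) {
    // Acquire the named lock; any prior transaction state is flushed first.
    $lock = $db->getScopedLockAndFlush('meow', __METHOD__, 1);

    $db->begin(__METHOD__);
    $db->update('hypothetical_table', array('hits = hits + 1'), array('id' => 1), __METHOD__);
    $db->commit(__METHOD__);

    // $lock is released here, after the commit, so its release callback
    // never runs while a transaction is still open.
}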