Example #1
 public function finishWrite()
 {
     if ($this->readOnly) {
         return;
     } elseif (is_null($this->currentLang)) {
         throw new MWException(__CLASS__ . ': must call startWrite() before finishWrite()');
     }
     $this->dbw->startAtomic(__METHOD__);
     try {
         $this->dbw->delete('l10n_cache', ['lc_lang' => $this->currentLang], __METHOD__);
         foreach (array_chunk($this->batch, 500) as $rows) {
             $this->dbw->insert('l10n_cache', $rows, __METHOD__);
         }
         $this->writesDone = true;
     } catch (DBQueryError $e) {
         if ($this->dbw->wasReadOnlyError()) {
             $this->readOnly = true; // stay read-only to avoid site downtime
         } else {
             throw $e;
         }
     }
     $this->dbw->endAtomic(__METHOD__);
     $this->currentLang = null;
     $this->batch = [];
 }
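Example #1 (apparently the localisation cache store's finishWrite(), judging by the l10n_cache table) replaces all cached rows for one language inside a single atomic section, so readers never observe a half-written cache, and it degrades to read-only mode on a read-only error instead of taking the site down. A minimal sketch of just the delete-and-reinsert-in-one-atomic-section pattern follows; the table name my_cache and the helper replaceRows() are illustrative placeholders, not MediaWiki APIs:
 /**
  * Sketch: delete and reinsert as one atomic unit, mirroring Example #1.
  * 'my_cache' and this function are hypothetical; the IDatabase calls
  * (startAtomic, delete, insert, endAtomic) are the ones shown above.
  */
 function replaceRows(IDatabase $dbw, $lang, array $rows)
 {
     $dbw->startAtomic(__METHOD__);
     $dbw->delete('my_cache', ['lc_lang' => $lang], __METHOD__);
     foreach (array_chunk($rows, 500) as $batch) {
         // Chunked inserts keep individual statements small
         $dbw->insert('my_cache', $batch, __METHOD__);
     }
     $dbw->endAtomic(__METHOD__);
 }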
Example #2
 protected function initConnection($lockDb, IDatabase $db)
 {
     # Let this transaction see lock rows from other transactions
     $db->query("SET SESSION TRANSACTION ISOLATION LEVEL READ UNCOMMITTED;");
     # Do everything in a transaction as it all gets rolled back eventually
     $db->startAtomic(__CLASS__);
 }
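Example #2 prepares a lock-manager connection: READ UNCOMMITTED lets this session see lock rows inserted by other transactions that have not yet committed, and startAtomic(__CLASS__) opens a transaction whose writes are meant to be discarded rather than committed, as the comment notes. Below is a sketch of the matching teardown under that assumption; releaseLocks() is a hypothetical name, while rollback() is the same IDatabase call used in Example #3:
 // Hypothetical counterpart to initConnection(): drop every lock row
 // written during this session by rolling the transaction back rather
 // than committing it.
 protected function releaseLocks(IDatabase $db)
 {
     $db->rollback(__CLASS__);
 }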
Example #3
 /**
  * This function should *not* be called outside of JobQueueDB
  *
  * @param IDatabase $dbw
  * @param IJobSpecification[] $jobs
  * @param int $flags
  * @param string $method
  * @throws DBError
  * @return void
  */
 public function doBatchPushInternal(IDatabase $dbw, array $jobs, $flags, $method)
 {
     if (!count($jobs)) {
         return;
     }
     $rowSet = array(); // (sha1 => row) map for jobs that are de-duplicated
     $rowList = array(); // list of rows for jobs that are not de-duplicated
     foreach ($jobs as $job) {
         $row = $this->insertFields($job);
         if ($job->ignoreDuplicates()) {
             $rowSet[$row['job_sha1']] = $row;
         } else {
             $rowList[] = $row;
         }
     }
     if ($flags & self::QOS_ATOMIC) {
         // Wrap all the job additions in one transaction
         $dbw->startAtomic($method);
     }
     try {
         // Strip out any duplicate jobs that are already in the queue...
         if (count($rowSet)) {
             $res = $dbw->select('job', 'job_sha1', array('job_sha1' => array_keys($rowSet), 'job_token' => ''), $method);
             foreach ($res as $row) {
                 wfDebug("Job with hash '{$row->job_sha1}' is a duplicate.\n");
                 unset($rowSet[$row->job_sha1]); // already enqueued
             }
         }
         // Build the full list of job rows to insert
         $rows = array_merge($rowList, array_values($rowSet));
         // Insert the job rows in chunks to avoid slave lag...
         foreach (array_chunk($rows, 50) as $rowBatch) {
             $dbw->insert('job', $rowBatch, $method);
         }
         JobQueue::incrStats('inserts', $this->type, count($rows));
         JobQueue::incrStats('dupe_inserts', $this->type, count($rowSet) + count($rowList) - count($rows));
     } catch (DBError $e) {
         if ($flags & self::QOS_ATOMIC) {
             $dbw->rollback($method);
         }
         throw $e;
     }
     if ($flags & self::QOS_ATOMIC) {
         $dbw->endAtomic($method);
     }
     return;
 }
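Example #3 shows the conditional form of the pattern: the atomic section is opened only when the caller requested QOS_ATOMIC, and on a DBError the whole section is rolled back before the exception is re-thrown, so either every chunk of job rows is inserted or none is. A stripped-down sketch of that control flow, with the de-duplication and chunked inserts replaced by an arbitrary callable (a hypothetical helper, not part of JobQueueDB):
 function runAtomically(IDatabase $dbw, $flags, $atomicFlag, callable $doWork)
 {
     if ($flags & $atomicFlag) {
         $dbw->startAtomic(__METHOD__); // one transaction for the whole batch
     }
     try {
         $doWork($dbw);
     } catch (DBError $e) {
         if ($flags & $atomicFlag) {
             $dbw->rollback(__METHOD__); // discard the partial batch
         }
         throw $e;
     }
     if ($flags & $atomicFlag) {
         $dbw->endAtomic(__METHOD__);
     }
 }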