/**
 * Process one queued PuSH distribution batch.
 *
 * Pops up to self::ROLLING_BATCH subscriber callbacks off the batch,
 * delivering the feed update to each; whatever is left over is re-queued
 * as a fresh bulk-distribution job so no single queue item runs too long.
 *
 * @param array $data queue payload: 'topic' (string), 'atom' (string),
 *                    'pushCallbacks' (array of callback URLs)
 * @return bool true (item handled; failures are logged, not retried here)
 */
function handle($data)
{
    $topic = $data['topic'];
    $atom = $data['atom'];
    $pushCallbacks = $data['pushCallbacks'];
    assert(is_string($atom));
    assert(is_string($topic));
    assert(is_array($pushCallbacks));

    // Set up distribution for the first n subscribing sites...
    // If we encounter an uncatchable error, queue handling should
    // automatically re-run the batch, which could lead to some dupe
    // distributions.
    //
    // Worst case is if one of these hubprep entries dies too many
    // times and gets dropped; the rest of the batch won't get processed.
    try {
        for ($sent = 0; count($pushCallbacks) && $sent < self::ROLLING_BATCH; $sent++) {
            $callback = array_shift($pushCallbacks);
            $target = HubSub::staticGet($topic, $callback);
            if ($target) {
                $target->distribute($atom);
            } else {
                // Subscription row vanished between queueing and delivery.
                common_log(LOG_ERR, "Skipping PuSH delivery for deleted(?) consumer {$callback} on {$topic}");
            }
        }
    } catch (Exception $e) {
        common_log(LOG_ERR, "Exception during PuSH batch out: "
            . $e->getMessage()
            . " prepping {$topic} to {$callback}");
    }

    // And re-queue the rest of the batch!
    if (count($pushCallbacks) > 0) {
        $requeue = new HubSub();
        $requeue->topic = $topic;
        $requeue->bulkDistribute($atom, $pushCallbacks);
    }

    return true;
}
/**
 * Queue up direct feed update pushes to subscribers on our internal hub.
 * If there are a large number of subscriber sites, intermediate bulk
 * distribution triggers may be queued.
 *
 * @param string $atom update feed, containing only new/changed items
 * @param HubSub $sub open query of subscribers
 */
function pushFeedInternal($atom, $sub)
{
    common_log(LOG_INFO, "Preparing {$sub->N} PuSH distribution(s) for {$sub->topic}");
    $n = 0;
    $batch = array();
    while ($sub->fetch()) {
        $n++;
        if ($n < self::MAX_UNBATCHED) {
            // Small subscriber counts: deliver inline, no batching overhead.
            $sub->distribute($atom);
        } else {
            // Past the unbatched threshold: collect callbacks and queue
            // them in bulk chunks of BATCH_SIZE.
            $batch[] = $sub->callback;
            if (count($batch) >= self::BATCH_SIZE) {
                $sub->bulkDistribute($atom, $batch);
                $batch = array();
            }
        }
    }
    // Flush any final partial batch. (Was `count($batch) >= 0`, which is
    // always true and queued a pointless bulk job with an empty callback
    // list whenever the batch was empty.)
    if (count($batch) > 0) {
        $sub->bulkDistribute($atom, $batch);
    }
}