static function getStream($tag, $offset = 0, $limit = 20)
{
    $ids = Notice::stream(array('Notice_tag', '_streamDirect'),
                          array($tag),
                          'notice_tag:notice_ids:' . common_keyize($tag),
                          $offset, $limit);

    return Notice::getStreamByIds($ids);
}
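// Usage sketch (illustrative; not from the original source). The stream
// methods in this section all share one pattern: Notice::stream() takes a
// callback that can fetch ids straight from the database, arguments for that
// callback, and a cache key, and returns notice ids from cache when it can;
// the ids are then hydrated with Notice::getStreamByIds(). For the tag
// stream above, a caller might do something like:

$notices = Notice_tag::getStream('statusnet', 0, 20); // first 20 for #statusnet
while ($notices->fetch()) {
    print $notices->content . "\n"; // each row behaves like a Notice
}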
function getNotices($offset, $limit, $since_id = null, $max_id = null)
{
    $ids = Notice::stream(array($this, '_streamDirect'),
                          array(),
                          'user_group:notice_ids:' . $this->id,
                          $offset, $limit, $since_id, $max_id);

    return Notice::getStreamByIds($ids);
}
/**
 * Wrapper for Inbox::stream() and Notice::getStreamByIds() returning
 * additional items up to the limit if we were short due to deleted
 * notices still being listed in the inbox.
 *
 * The fast path (when no items are deleted) should be just as fast; the
 * offset parameter is applied *before* lookups for maximum efficiency.
 *
 * This means offset-based paging may show duplicates, but similar behavior
 * already exists when new notices are posted between page views, so we
 * think people will be ok with this until id-based paging is introduced
 * to the user interface.
 *
 * @param int $user_id
 * @param int $offset skip past the most recent N notices (after since_id checks)
 * @param int $limit
 * @param mixed $since_id return only notices after but not including this id
 * @param mixed $max_id return only notices up to and including this id
 * @param mixed $own ignored? (see @fixme below)
 * @return array of Notice objects
 *
 * @todo consider repacking the inbox when this happens?
 * @fixme reimplement $own if we need it?
 */
function streamNotices($user_id, $offset, $limit, $since_id, $max_id, $own = false)
{
    $ids = self::stream($user_id, $offset, self::MAX_NOTICES, $since_id, $max_id, $own);

    // Do a bulk lookup for the first $limit items.
    // Fast path when nothing's deleted.
    $firstChunk = array_slice($ids, 0, $limit);
    $notices = Notice::getStreamByIds($firstChunk);

    $wanted = count($firstChunk); // raw entry count in the inbox up to our $limit

    if ($notices->N >= $wanted) {
        return $notices;
    }

    // There were deleted notices; we'll need to look for more.
    assert($notices instanceof ArrayWrapper);
    $items = $notices->_items;

    // Pull the remaining ids one at a time, skipping any that turn out to
    // be deleted, until we've filled the gap or run out of candidates.
    $remainder = array_slice($ids, $limit);

    while (count($items) < $wanted && count($remainder) > 0) {
        $notice = Notice::staticGet(array_shift($remainder));
        if ($notice) {
            $items[] = $notice;
        }
    }

    return new ArrayWrapper($items);
}
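// Usage sketch (illustrative; $user_id and $page are hypothetical inputs,
// and this assumes streamNotices() can be called statically on Inbox, as
// its use of self::stream() suggests). Offset-based paging through an
// inbox with the deletion-tolerant wrapper above; per its docblock, pages
// read after deletions may show duplicates.

$page    = 2;
$notices = Inbox::streamNotices($user_id,
                                ($page - 1) * NOTICES_PER_PAGE,
                                NOTICES_PER_PAGE,
                                0, 0);
while ($notices->fetch()) {
    // rows come from the bulk lookup or, if ids were stale, the backfill
}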
function repeatsOfMe($offset = 0, $limit = 20, $since_id = null, $max_id = null)
{
    $ids = Notice::stream(array($this, '_repeatsOfMeDirect'),
                          array(),
                          'user:repeats_of_me:' . $this->id,
                          $offset, $limit, $since_id, $max_id);

    return Notice::getStreamByIds($ids);
}
function getNotices($offset = 0, $limit = NOTICES_PER_PAGE, $since_id = 0, $max_id = 0)
{
    // XXX: I'm not sure this is going to be any faster. It probably isn't.
    $ids = Notice::stream(array($this, '_streamDirect'),
                          array(),
                          'profile:notice_ids:' . $this->id,
                          $offset, $limit, $since_id, $max_id);

    return Notice::getStreamByIds($ids);
}
function repeatStream($limit = 100)
{
    $cache = common_memcache();

    if (empty($cache)) {
        // No cache available; hit the database directly.
        $ids = $this->_repeatStreamDirect($limit);
    } else {
        $idstr = $cache->get(common_cache_key('notice:repeats:' . $this->id));
        if ($idstr !== false) {
            // Cache hit; ids are stored as a comma-separated string.
            $ids = explode(',', $idstr);
        } else {
            // Cache miss; fetch and cache up to 100 ids.
            $ids = $this->_repeatStreamDirect(100);
            $cache->set(common_cache_key('notice:repeats:' . $this->id),
                        implode(',', $ids));
        }
        if ($limit < 100) {
            // We cache a max of 100, so slice down to the requested limit.
            $ids = array_slice($ids, 0, $limit);
        }
    }

    return Notice::getStreamByIds($ids);
}
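// Usage sketch (illustrative; assumes a loaded Notice $notice): listing
// recent repeats of a notice.

$repeats = $notice->repeatStream(10); // at most 10 of the cached ids
while ($repeats->fetch()) {
    // each row is a Notice whose repeat_of points at $notice
}

// Note the design tradeoff above: only the newest 100 repeat ids are ever
// cached, so when memcache is available, requests with $limit > 100 still
// yield at most 100 items.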
/**
 * Stream of notices linking to this URL
 *
 * @param integer $offset   Offset to show; default is 0
 * @param integer $limit    Limit of notices to show
 * @param integer $since_id Since this notice
 * @param integer $max_id   Before this notice
 *
 * @return Notice stream of notices that link to this file
 */
function stream($offset = 0, $limit = NOTICES_PER_PAGE, $since_id = 0, $max_id = 0)
{
    $ids = Notice::stream(array($this, '_streamDirect'),
                          array(),
                          'file:notice-ids:' . $this->url,
                          $offset, $limit, $since_id, $max_id);

    return Notice::getStreamByIds($ids);
}
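// Usage sketch (illustrative; the URL and $last_seen_id are hypothetical).
// Fetching only notices that link to a known attachment and are newer than
// one we've already seen, via the since_id parameter:

$last_seen_id = 12345; // hypothetical high-water mark from a previous fetch
$file = File::staticGet('url', 'http://example.com/attachment'); // assumed lookup
if (!empty($file)) {
    $newer = $file->stream(0, NOTICES_PER_PAGE, $last_seen_id);
}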