/**
 * Commit the consumed offset of a topic partition to ZooKeeper.
 *
 * @param string        $groupId
 * @param string        $topic
 * @param int           $brokerId
 * @param int           $partition
 * @param \Kafka\Offset $offset
 */
function commitOffset($groupId, $topic, $brokerId, $partition, \Kafka\Offset $offset)
{
    $this->zkConnect();
    $path = "/consumers/{$groupId}/offsets/{$topic}";
    if (!$this->zk->exists($path)) {
        $this->createPermaNode($path);
    }
    if (!$this->zk->exists("{$path}/{$brokerId}-{$partition}")) {
        // first commit for this broker-partition: create a permanent znode holding the offset
        $this->createPermaNode("{$path}/{$brokerId}-{$partition}", $offset->__toString());
    } else {
        // subsequent commits overwrite the stored value
        $this->zk->set("{$path}/{$brokerId}-{$partition}", $offset->__toString());
    }
}
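// Usage sketch (illustrative, not part of the original source): it assumes the method
// above lives on a ZooKeeper-backed registry object, here called $registry, and that
// \Kafka\Offset can be constructed from a string value. $registry and the literal
// values are placeholders for this example.
$offset = new \Kafka\Offset("12345");
$registry->commitOffset("my-group", "my-topic", 0, 1, $offset);
// After the call, /consumers/my-group/offsets/my-topic/0-1 holds "12345"; the
// permanent znode is created on the first commit and overwritten on later ones.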
/**
 * Called when the end of a partition is reached during a fetch.
 *
 * @param \Kafka\Protocol\Fetch\Partition $partition
 * @access public
 * @return void
 */
public function onPartitionEof($partition)
{
    $partitionId = $partition->key();
    $topicName   = $partition->getTopicName();
    // resume one past the last consumed message on the next fetch
    $offset      = $partition->getMessageOffset() + 1;

    $offsetObject = new Offset($this->client, $this->group, $topicName, $partitionId);
    $offsetObject->setOffset($offset);
}
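// Illustrative sketch only; $consumer and $partition are placeholder names. A fetch
// loop would call the handler above once a partition has been drained, so that the
// consumer group resumes one message past the last one it read:
$consumer->onPartitionEof($partition);
// Equivalent bookkeeping using only the accessors shown above: getMessageOffset()
// is the offset of the last fetched message, so +1 is where the next fetch starts.
$resumeAt = $partition->getMessageOffset() + 1;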
/**
 * Send an offsets request and read back the list of available offsets.
 *
 * @param  string $topic
 * @param  int    $partition
 * @param  mixed  $time          offset literal or unix timestamp (seconds)
 * @param  int    $maxNumOffsets
 * @return array|false           array of Offset objects, or false on failure
 */
public function offsets($topic, $partition = 0, $time = \Kafka\Kafka::OFFSETS_LATEST, $maxNumOffsets = 2)
{
    $data  = pack('n', \Kafka\Kafka::REQUEST_KEY_OFFSETS);
    $data .= pack('n', strlen($topic)) . $topic;
    $data .= pack('N', $partition);
    if (is_string($time) || $time < 0) {
        // convert literal (e.g. OFFSETS_LATEST) to an Offset
        $offset = new Offset($time);
    } else {
        // make a 64-bit unix timestamp offset: adding the second-resolution
        // timestamp 1000 times yields the value in milliseconds
        $offset = new Offset();
        for ($i = 0; $i < 1000; $i++) {
            $offset->addInt($time);
        }
    }
    $data .= $offset->getData();
    $data .= pack('N', $maxNumOffsets);
    $this->send($data);

    if ($this->hasIncomingData()) {
        $h = unpack('N', $this->read(4));
        $offsetsLength = array_shift($h);
        if ($offsetsLength > 0) {
            $offsets = array_fill(0, $offsetsLength, null);
            for ($i = 0; $i < $offsetsLength; $i++) {
                $offset = new Offset();
                $offset->setData($this->read(8));
                $offsets[$i] = $offset;
            }
            // only return once the response has been fully consumed
            if (!$this->hasIncomingData()) {
                return $offsets;
            }
        }
    }

    return false;
}
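// Usage sketch (assumed, not from the original source): $channel stands for whatever
// object exposes the offsets() method above, e.g. an open broker connection. Asking
// for the latest offsets of partition 0 of "my-topic" yields at most two Offset
// objects, or false if the response could not be read.
$result = $channel->offsets("my-topic", 0, \Kafka\Kafka::OFFSETS_LATEST, 2);
if ($result !== false) {
    foreach ($result as $offset) {
        echo $offset->__toString(), "\n"; // each entry is a 64-bit offset value
    }
}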