Skip to content
This repository was archived by the owner on Jun 10, 2022. It is now read-only.

fix: offset + 1 #233

Closed
wants to merge 11 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion composer.json
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
"sort-packages": true
},
"require": {
"php": "^5.6",
"php": ">=5.6",
"amphp/amp": "^v1.2.2",
"psr/log": "^1.0.2"
},
Expand Down
18 changes: 16 additions & 2 deletions src/Broker.php
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,6 @@

class Broker
{
use SingletonTrait;

private $groupBrokerId = null;

private $topics = [];
Expand All @@ -23,6 +21,22 @@ class Broker

private $config;

private static $instance = [];

/**
 * Return the shared Broker instance registered under the given name.
 *
 * A fresh instance is created and cached in the registry on first
 * request for a given name; later calls with the same name return
 * the cached object. (Multiton variant of the old SingletonTrait.)
 *
 * @param string $instance_name registry key for the instance
 * @return static
 */
public static function getInstance($instance_name = 'default')
{
    if (! isset(self::$instance[$instance_name])) {
        // lazily create and register the instance on first access
        self::$instance[$instance_name] = new static();
    }

    return self::$instance[$instance_name];
}


public function setProcess(callable $process)
{
$this->process = $process;
Expand Down
34 changes: 22 additions & 12 deletions src/CommonSocket.php
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@

abstract class CommonSocket
{
use LoggerTrait;
use \Psr\Log\LoggerAwareTrait;

const READ_MAX_LENGTH = 5242880; // read socket max length 5MB

Expand Down Expand Up @@ -273,23 +275,28 @@ public function readBlocking($len)
throw new \Kafka\Exception('Invalid length given, it should be lesser than or equals to ' . self:: READ_MAX_LENGTH);
}

//http://php.net/manual/en/function.stream-select.php

try_again:

$null = null;
$read = [$this->stream];
$readable = $this->select($read, $this->recvTimeoutSec, $this->recvTimeoutUsec);
if ($readable === false) {
$this->close();
throw new \Kafka\Exception('Could not read ' . $len . ' bytes from stream (not readable)');
if($this->isSocketDead()) {
$res = $this->getMetaData();
$this->debug(json_encode($res));
$this->debug('socket dead, reconnecting...');
$this->reconnect();
}
if ($readable === 0) { // select timeout
$read = [$this->stream];
$readable = $this->select($read, null, $this->recvTimeoutUsec);

if ($readable == false) {
$res = $this->getMetaData();
$this->close();
if (! empty($res['timed_out'])) {
throw new \Kafka\Exception('Timed out reading ' . $len . ' bytes from stream');
} else {
throw new \Kafka\Exception('Could not read ' . $len . ' bytes from stream (not readable)');
}
$this->debug(json_encode($res));
$this->debug('select read socket failed try_again');
goto try_again;
}


$remainingBytes = $len;
$data = $chunk = '';
while ($remainingBytes > 0) {
Expand Down Expand Up @@ -378,4 +385,7 @@ public function writeBlocking($buf)
* @return void
*/
abstract public function close();

abstract protected function isSocketDead();
abstract protected function reconnect();
}
15 changes: 6 additions & 9 deletions src/Consumer/Process.php
Original file line number Diff line number Diff line change
Expand Up @@ -490,7 +490,7 @@ public function succFetchOffset($result)
foreach ($consumerOffsets as $topic => $value) {
foreach ($value as $partId => $offset) {
if (isset($lastOffsets[$topic][$partId]) && $lastOffsets[$topic][$partId] > $offset) {
$consumerOffsets[$topic][$partId] = $offset + 1;
$consumerOffsets[$topic][$partId] = $offset;
}
}
}
Expand Down Expand Up @@ -557,23 +557,20 @@ public function succFetch($result, $fd)
continue;
}

$consumerOffset = $assign->getConsumerOffset($topic['topicName'], $part['partition']);
if ($consumerOffset === false) {
$offset = $assign->getConsumerOffset($topic['topicName'], $part['partition']);
if ($offset === false) {
return; // current is rejoin....
}
foreach ($part['messages'] as $message) {
$this->messages[$topic['topicName']][$part['partition']][] = $message;
//if ($this->consumer != null) {
// call_user_func($this->consumer, $topic['topicName'], $part['partition'], $message);
//}
$commitOffset = $message['offset'];
$offset = $message['offset'] + 1;
}

$commitOffset = isset($commitOffset) ? $commitOffset : $consumerOffset - 1;
$consumerOffset = $commitOffset + 1;

$assign->setConsumerOffset($topic['topicName'], $part['partition'], $consumerOffset);
$assign->setCommitOffset($topic['topicName'], $part['partition'], $commitOffset);
$assign->setConsumerOffset($topic['topicName'], $part['partition'], $offset);
$assign->setCommitOffset($topic['topicName'], $part['partition'], $offset);
}
}
$this->state->succRun(\Kafka\Consumer\State::REQUEST_FETCH, $fd);
Expand Down
5 changes: 5 additions & 0 deletions src/ConsumerConfig.php
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,11 @@
/**
* @method string|false getGroupId()
* @method array|false getTopics()
* @method setMaxBytes
* @method setMaxWaitTime
* @method getMaxWaitTime
* @method getMaxBytes
* @method getOffsetReset
*/
class ConsumerConfig extends Config
{
Expand Down
10 changes: 5 additions & 5 deletions src/Producer/Process.php
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ public function init()
\Kafka\Protocol::init($config->getBrokerVersion(), $this->logger);

// init process request
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$broker->setConfig($config);
$broker->setProcess(function ($data, $fd) {
$this->processRequest($data, $fd);
Expand Down Expand Up @@ -124,7 +124,7 @@ public function syncMeta()
}

shuffle($brokerHost);
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
foreach ($brokerHost as $host) {
$socket = $broker->getMetaConnect($host);
if ($socket) {
Expand Down Expand Up @@ -160,7 +160,7 @@ protected function processRequest($data, $fd)
$this->error('Get metadata is fail, brokers or topics is null.');
$this->state->failRun(\Kafka\Producer\State::REQUEST_METADATA);
} else {
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$isChange = $broker->setData($result['topics'], $result['brokers']);
$this->state->succRun(\Kafka\Producer\State::REQUEST_METADATA, $isChange);
}
Expand All @@ -177,7 +177,7 @@ protected function processRequest($data, $fd)
protected function produce()
{
$context = [];
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$requiredAck = \Kafka\ProducerConfig::getInstance()->getRequiredAck();
$timeout = \Kafka\ProducerConfig::getInstance()->getTimeout();

Expand Down Expand Up @@ -260,7 +260,7 @@ protected function stateConvert($errorCode, $context = null)
protected function convertMessage($data)
{
$sendData = [];
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$topicInfos = $broker->getTopics();
foreach ($data as $value) {
if (! isset($value['topic']) || ! trim($value['topic'])) {
Expand Down
10 changes: 5 additions & 5 deletions src/Producer/SyncProcess.php
Original file line number Diff line number Diff line change
Expand Up @@ -12,15 +12,15 @@ public function __construct()
$config = \Kafka\ProducerConfig::getInstance();
\Kafka\Protocol::init($config->getBrokerVersion(), $this->logger);
// init broker
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$broker->setConfig($config);

$this->syncMeta();
}

public function send($data)
{
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$requiredAck = \Kafka\ProducerConfig::getInstance()->getRequiredAck();
$timeout = \Kafka\ProducerConfig::getInstance()->getTimeout();

Expand Down Expand Up @@ -80,7 +80,7 @@ public function syncMeta()
}

shuffle($brokerHost);
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
foreach ($brokerHost as $host) {
$socket = $broker->getMetaConnect($host, true);
if ($socket) {
Expand All @@ -95,7 +95,7 @@ public function syncMeta()
if (! isset($result['brokers']) || ! isset($result['topics'])) {
throw new \Kafka\Exception('Get metadata is fail, brokers or topics is null.');
} else {
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$broker->setData($result['topics'], $result['brokers']);
}
return;
Expand All @@ -113,7 +113,7 @@ public function syncMeta()
protected function convertMessage($data)
{
$sendData = [];
$broker = \Kafka\Broker::getInstance();
$broker = \Kafka\Broker::getInstance(__CLASS__);
$topicInfos = $broker->getTopics();
foreach ($data as $value) {
if (! isset($value['topic']) || ! trim($value['topic'])) {
Expand Down
23 changes: 23 additions & 0 deletions src/SocketSync.php
Original file line number Diff line number Diff line change
Expand Up @@ -100,4 +100,27 @@ public function rewind()
rewind($this->stream);
}
}

/**
 * Re-establish the socket connection.
 *
 * Tears down the current stream via close() and then opens a new
 * one via connect(); any state held by the old stream is discarded.
 *
 * @access public
 * @return void
 */
public function reconnect()
{
    $this->close();
    $this->connect();
}


/**
 * Check whether the underlying stream is no longer usable.
 *
 * The stream counts as dead when it is not a valid resource any more,
 * or when it reports end-of-file. feof() is error-suppressed because
 * it can emit a warning on a stream that was closed remotely.
 *
 * @return bool true when the socket can no longer be used
 */
public function isSocketDead()
{
    if (! is_resource($this->stream)) {
        return true;
    }

    return @feof($this->stream);
}
}