1. Go to this page and download the jobcloud/php-kafka-lib library. Choose the download type: require.
2. Extract the ZIP file and open the index.php.
3. Add this code to the index.php.
<?php
require_once('vendor/autoload.php');
/* Start to develop here. Best regards https://php-download.com/ */
jobcloud / php-kafka-lib example snippets
use Jobcloud\Kafka\Message\KafkaProducerMessage;
use Jobcloud\Kafka\Producer\KafkaProducerBuilder;

// Build a producer that talks to a single local broker.
$producerBuilder = KafkaProducerBuilder::create();
$producerBuilder = $producerBuilder->withAdditionalBroker('localhost:9092');
$producer = $producerBuilder->build();

// Assemble a message for topic "test-topic", partition 0.
$message = KafkaProducerMessage::create('test-topic', 0);
$message = $message
    ->withKey('asdf-asdf-asfd-asdf')
    ->withBody('some test message payload')
    ->withHeaders([ 'key' => 'value' ]);

$producer->produce($message);

// Shut the producer down: flush queued messages, giving up after 20 seconds.
$result = $producer->flush(20000);
use Jobcloud\Kafka\Message\KafkaProducerMessage;
use Jobcloud\Kafka\Producer\KafkaProducerBuilder;
use Jobcloud\Kafka\Exception\KafkaProducerTransactionRetryException;
use Jobcloud\Kafka\Exception\KafkaProducerTransactionAbortException;
use Jobcloud\Kafka\Exception\KafkaProducerTransactionFatalException;

// Producer against a single local broker.
$producer = KafkaProducerBuilder::create()
    ->withAdditionalBroker('localhost:9092')
    ->build();

// Message for topic "test-topic", partition 0.
$message = KafkaProducerMessage::create('test-topic', 0)
    ->withKey('asdf-asdf-asfd-asdf')
    ->withBody('some test message payload')
    ->withHeaders([ 'key' => 'value' ]);

// Produce inside a transaction; each transaction call gets a 10s timeout.
try {
    $producer->beginTransaction(10000);
    $producer->produce($message);
    $producer->commitTransaction(10000);
} catch (KafkaProducerTransactionRetryException $exception) {
    // Transient failure: the call that failed (beginTransaction or commitTransaction) can be retried.
} catch (KafkaProducerTransactionAbortException $exception) {
    // You need to call $producer->abortTransaction(10000); and try again.
} catch (KafkaProducerTransactionFatalException $exception) {
    // Something went very wrong: re-create the producer, otherwise you could jeopardize the idempotency guarantees.
}

// Shut the producer down: flush queued messages, giving up after 20 seconds.
$result = $producer->flush(20000);
use FlixTech\AvroSerializer\Objects\RecordSerializer;
use Jobcloud\Kafka\Message\KafkaProducerMessage;
use Jobcloud\Kafka\Message\Encoder\AvroEncoder;
use Jobcloud\Kafka\Message\Registry\AvroSchemaRegistry;
use Jobcloud\Kafka\Producer\KafkaProducerBuilder;
use Jobcloud\Kafka\Message\KafkaAvroSchema;
use FlixTech\SchemaRegistryApi\Registry\CachedRegistry;
use FlixTech\SchemaRegistryApi\Registry\BlockingRegistry;
use FlixTech\SchemaRegistryApi\Registry\PromisingRegistry;
use FlixTech\SchemaRegistryApi\Registry\Cache\AvroObjectCacheAdapter;
use GuzzleHttp\Client;

// Schema-registry client: the promising (async) registry is wrapped in a blocking
// adapter, and results are cached in-memory to avoid repeated registry round trips.
$cachedRegistry = new CachedRegistry(
    new BlockingRegistry(
        new PromisingRegistry(
            new Client(['base_uri' => 'jobcloud-kafka-schema-registry:9081'])
        )
    ),
    new AvroObjectCacheAdapter()
);

$registry = new AvroSchemaRegistry($cachedRegistry);
$recordSerializer = new RecordSerializer($cachedRegistry);

// If no version is defined, the latest version will be used.
// If no schema definition is defined, the appropriate version will be fetched from the registry.
$registry->addBodySchemaMappingForTopic(
    'test-topic',
    new KafkaAvroSchema('bodySchemaName' /*, int $version, AvroSchema $definition */)
);
$registry->addKeySchemaMappingForTopic(
    'test-topic',
    new KafkaAvroSchema('keySchemaName' /*, int $version, AvroSchema $definition */)
);

// If you are only encoding key or value, you can pass that mode as an additional third argument.
// Per default both key and body will get encoded.
$encoder = new AvroEncoder($registry, $recordSerializer /*, AvroEncoderInterface::ENCODE_BODY */);

$producer = KafkaProducerBuilder::create()
    ->withAdditionalBroker('kafka:9092')
    ->withEncoder($encoder)
    ->build();

// Body is an array matching the registered body schema; the key is encoded with the key schema.
$message = KafkaProducerMessage::create('test-topic', 0)
    ->withKey('asdf-asdf-asfd-asdf')
    ->withBody(['name' => 'someName'])
    ->withHeaders([ 'key' => 'value' ]);

$producer->produce($message);

// Shutdown producer, flush messages that are in queue. Give up after 20s
$result = $producer->flush(20000);
use Jobcloud\Kafka\Consumer\KafkaConsumerBuilder;
use Jobcloud\Kafka\Exception\KafkaConsumerConsumeException;
use Jobcloud\Kafka\Exception\KafkaConsumerEndOfPartitionException;
use Jobcloud\Kafka\Exception\KafkaConsumerTimeoutException;

// Extra librdkafka settings passed straight through to the consumer.
$additionalConfig = [
    'compression.codec' => 'lz4',
    'auto.commit.interval.ms' => 500
];

$consumer = KafkaConsumerBuilder::create()
    ->withAdditionalConfig($additionalConfig)
    ->withAdditionalBroker('kafka:9092')
    ->withConsumerGroup('testGroup')
    ->withAdditionalSubscription('test-topic')
    ->build();

$consumer->subscribe();

// Consume forever; commit each message after it has been processed.
while (true) {
    try {
        $message = $consumer->consume();

        // your business logic

        $consumer->commit($message);
    } catch (KafkaConsumerTimeoutException $exception) {
        // No messages were read within the given time.
    } catch (KafkaConsumerEndOfPartitionException $exception) {
        // Only occurs if enable.partition.eof is true (default: false).
    } catch (KafkaConsumerConsumeException $exception) {
        // Consuming failed.
    }
}
use Jobcloud\Kafka\Consumer\KafkaConsumerBuilder;
use Jobcloud\Kafka\Exception\KafkaConsumerConsumeException;
use Jobcloud\Kafka\Exception\KafkaConsumerEndOfPartitionException;
use Jobcloud\Kafka\Exception\KafkaConsumerTimeoutException;

// Extra librdkafka settings passed straight through to the consumer.
$additionalConfig = [
    'compression.codec' => 'lz4',
    'auto.commit.interval.ms' => 500
];

// Same as the previous example, but explicitly selecting the low-level consumer type.
$consumer = KafkaConsumerBuilder::create()
    ->withAdditionalConfig($additionalConfig)
    ->withAdditionalBroker('kafka:9092')
    ->withConsumerGroup('testGroup')
    ->withAdditionalSubscription('test-topic')
    ->withConsumerType(KafkaConsumerBuilder::CONSUMER_TYPE_LOW_LEVEL)
    ->build();

$consumer->subscribe();

// Consume forever; commit each message after it has been processed.
while (true) {
    try {
        $message = $consumer->consume();

        // your business logic

        $consumer->commit($message);
    } catch (KafkaConsumerTimeoutException $exception) {
        // No messages were read within the given time.
    } catch (KafkaConsumerEndOfPartitionException $exception) {
        // Only occurs if enable.partition.eof is true (default: false).
    } catch (KafkaConsumerConsumeException $exception) {
        // Consuming failed.
    }
}
use FlixTech\AvroSerializer\Objects\RecordSerializer;
use Jobcloud\Kafka\Consumer\KafkaConsumerBuilder;
use Jobcloud\Kafka\Exception\KafkaConsumerConsumeException;
use Jobcloud\Kafka\Exception\KafkaConsumerEndOfPartitionException;
use Jobcloud\Kafka\Exception\KafkaConsumerTimeoutException;
use Jobcloud\Kafka\Message\Decoder\AvroDecoder;
use Jobcloud\Kafka\Message\KafkaAvroSchema;
use Jobcloud\Kafka\Message\Registry\AvroSchemaRegistry;
use FlixTech\SchemaRegistryApi\Registry\CachedRegistry;
use FlixTech\SchemaRegistryApi\Registry\BlockingRegistry;
use FlixTech\SchemaRegistryApi\Registry\PromisingRegistry;
use FlixTech\SchemaRegistryApi\Registry\Cache\AvroObjectCacheAdapter;
use GuzzleHttp\Client;

// Schema-registry client: the promising (async) registry is wrapped in a blocking
// adapter, and results are cached in-memory to avoid repeated registry round trips.
$cachedRegistry = new CachedRegistry(
    new BlockingRegistry(
        new PromisingRegistry(
            new Client(['base_uri' => 'jobcloud-kafka-schema-registry:9081'])
        )
    ),
    new AvroObjectCacheAdapter()
);

$registry = new AvroSchemaRegistry($cachedRegistry);
$recordSerializer = new RecordSerializer($cachedRegistry);

// If no version is defined, the latest version will be used.
// If no schema definition is defined, the appropriate version will be fetched from the registry.
$registry->addBodySchemaMappingForTopic(
    'test-topic',
    new KafkaAvroSchema('bodySchema' , 9 /* , AvroSchema $definition */)
);
$registry->addKeySchemaMappingForTopic(
    'test-topic',
    new KafkaAvroSchema('keySchema' , 9 /* , AvroSchema $definition */)
);

// If you are only encoding / decoding key or value, only register the schema(s) you need.
// It is advised against doing that though, some tools might not play
// nice if you don't fully encode your message.
$decoder = new AvroDecoder($registry, $recordSerializer);

$consumer = KafkaConsumerBuilder::create()
    ->withAdditionalConfig(
        [
            'compression.codec' => 'lz4',
            'auto.commit.interval.ms' => 500
        ]
    )
    ->withDecoder($decoder)
    ->withAdditionalBroker('kafka:9092')
    ->withConsumerGroup('testGroup')
    ->withAdditionalSubscription('test-topic')
    ->build();

$consumer->subscribe();

// Consume forever; commit each message after it has been processed.
while (true) {
    try {
        $message = $consumer->consume();

        // your business logic

        $consumer->commit($message);
    } catch (KafkaConsumerTimeoutException $exception) {
        // No messages were read within the given time.
    } catch (KafkaConsumerEndOfPartitionException $exception) {
        // Only occurs if enable.partition.eof is true (default: false).
    } catch (KafkaConsumerConsumeException $exception) {
        // Consuming failed.
    }
}
composer
composer
Loading please wait ...
Before you can download the PHP files, the dependencies must be resolved. This can take a few minutes; please be patient.