public class KafkaCluster.Usage extends Object
| Constructor and Description |
|---|
Usage() |
| Modifier and Type | Method and Description |
|---|---|
<K,V> void |
consume(String groupId,
String clientId,
org.apache.kafka.clients.consumer.OffsetResetStrategy autoOffsetReset,
org.apache.kafka.common.serialization.Deserializer<K> keyDeserializer,
org.apache.kafka.common.serialization.Deserializer<V> valueDeserializer,
BooleanSupplier continuation,
org.apache.kafka.clients.consumer.OffsetCommitCallback offsetCommitCallback,
Runnable completion,
Collection<String> topics,
Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<K,V>> consumerFunction)
Use the supplied function to asynchronously consume messages from the cluster.
|
void |
consumeDocuments(BooleanSupplier continuation,
Runnable completion,
Collection<String> topics,
Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<String,io.debezium.document.Document>> consumerFunction)
Asynchronously consume all messages from the cluster.
|
void |
consumeDocuments(String topicName,
int count,
long timeout,
TimeUnit unit,
Runnable completion)
Asynchronously consume all messages on the given topic from the cluster.
|
void |
consumeDocuments(String topicName,
int count,
long timeout,
TimeUnit unit,
Runnable completion,
BiPredicate<String,io.debezium.document.Document> consumer)
Asynchronously consume all messages on the given topic from the cluster.
|
void |
consumeIntegers(BooleanSupplier continuation,
Runnable completion,
Collection<String> topics,
Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<String,Integer>> consumerFunction)
Asynchronously consume all messages from the cluster.
|
void |
consumeIntegers(String topicName,
int count,
long timeout,
TimeUnit unit,
Runnable completion)
Asynchronously consume all messages on the given topic from the cluster.
|
void |
consumeIntegers(String topicName,
int count,
long timeout,
TimeUnit unit,
Runnable completion,
BiPredicate<String,Integer> consumer)
Asynchronously consume all messages on the given topic from the cluster.
|
void |
consumeStrings(BooleanSupplier continuation,
Runnable completion,
Collection<String> topics,
Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<String,String>> consumerFunction)
Asynchronously consume all messages from the cluster.
|
void |
consumeStrings(String topicName,
int count,
long timeout,
TimeUnit unit,
Runnable completion)
Asynchronously consume all messages on the given topic from the cluster.
|
void |
consumeStrings(String topicName,
int count,
long timeout,
TimeUnit unit,
Runnable completion,
BiPredicate<String,String> consumer)
Asynchronously consume all messages on the given topic from the cluster.
|
protected BooleanSupplier |
continueIfNotExpired(BooleanSupplier continuation,
long timeout,
TimeUnit unit) |
<K,V> KafkaCluster.InteractiveConsumer<K,V> |
createConsumer(String groupId,
String clientId,
Set<String> topicNames,
org.apache.kafka.common.serialization.Deserializer<K> keyDeserializer,
org.apache.kafka.common.serialization.Deserializer<V> valueDeserializer,
Runnable completion)
Create a simple consumer that can be used to read messages from the cluster. |
KafkaCluster.InteractiveConsumer<String,io.debezium.document.Document> |
createConsumer(String groupId,
String clientId,
Set<String> topicNames,
Runnable completion)
Create a simple consumer that can be used to read messages from the cluster. |
<K,V> KafkaCluster.InteractiveConsumer<K,V> |
createConsumer(String groupId,
String clientId,
String topicName,
org.apache.kafka.common.serialization.Deserializer<K> keyDeserializer,
org.apache.kafka.common.serialization.Deserializer<V> valueDeserializer,
Runnable completion)
Create a simple consumer that can be used to read messages from the cluster. |
KafkaCluster.InteractiveConsumer<String,io.debezium.document.Document> |
createConsumer(String groupId,
String clientId,
String topicName,
Runnable completion)
Create a simple consumer that can be used to read messages from the cluster. |
KafkaCluster.InteractiveProducer<String,io.debezium.document.Document> |
createProducer(String producerName)
Create a simple producer that can be used to write Document messages to the
cluster. |
<K,V> KafkaCluster.InteractiveProducer<K,V> |
createProducer(String producerName,
org.apache.kafka.common.serialization.Serializer<K> keySerializer,
org.apache.kafka.common.serialization.Serializer<V> valueSerializer)
Create a simple producer that can be used to write messages to the cluster. |
Properties |
getConsumerProperties(String groupId,
String clientId,
org.apache.kafka.clients.consumer.OffsetResetStrategy autoOffsetReset)
Get a new set of properties for consumers that want to talk to this server.
|
Properties |
getProducerProperties(String clientId)
Get a new set of properties for producers that want to talk to this server.
|
<K,V> void |
produce(String producerName,
Consumer<KafkaCluster.InteractiveProducer<String,io.debezium.document.Document>> producer)
Use the supplied function to asynchronously produce
Document messages and write them to the cluster. |
<K,V> void |
produce(String producerName,
int messageCount,
org.apache.kafka.common.serialization.Serializer<K> keySerializer,
org.apache.kafka.common.serialization.Serializer<V> valueSerializer,
Runnable completionCallback,
Supplier<org.apache.kafka.clients.producer.ProducerRecord<K,V>> messageSupplier)
Use the supplied function to asynchronously produce messages and write them to the cluster.
|
<K,V> void |
produce(String producerName,
org.apache.kafka.common.serialization.Serializer<K> keySerializer,
org.apache.kafka.common.serialization.Serializer<V> valueSerializer,
Consumer<KafkaCluster.InteractiveProducer<K,V>> producer)
Use the supplied function to asynchronously produce messages and write them to the cluster.
|
void |
produceDocuments(int messageCount,
Runnable completionCallback,
Supplier<org.apache.kafka.clients.producer.ProducerRecord<String,io.debezium.document.Document>> messageSupplier)
Use the supplied function to asynchronously produce messages with String keys and
Document values, and write
them to the cluster. |
void |
produceDocuments(String topic,
int messageCount,
Runnable completionCallback,
Supplier<io.debezium.document.Document> valueSupplier)
Asynchronously produce messages with monotonically increasing String keys and values obtained from the supplied
function, and write them to the cluster.
|
void |
produceIntegers(int messageCount,
Runnable completionCallback,
Supplier<org.apache.kafka.clients.producer.ProducerRecord<String,Integer>> messageSupplier)
Use the supplied function to asynchronously produce messages with String keys and Integer values, and write them to the
cluster.
|
void |
produceIntegers(String topic,
int messageCount,
int initialValue,
Runnable completionCallback)
Asynchronously produce messages with String keys and sequential Integer values, and write them to the cluster.
|
void |
produceStrings(int messageCount,
Runnable completionCallback,
Supplier<org.apache.kafka.clients.producer.ProducerRecord<String,String>> messageSupplier)
Use the supplied function to asynchronously produce messages with String keys and values, and write them to the
cluster.
|
void |
produceStrings(String topic,
int messageCount,
Runnable completionCallback,
Supplier<String> valueSupplier)
Asynchronously produce messages with monotonically increasing String keys and values obtained from the supplied
function, and write them to the cluster.
|
public Properties getConsumerProperties(String groupId, String clientId, org.apache.kafka.clients.consumer.OffsetResetStrategy autoOffsetReset)
groupId - the group ID for the consumer; may not be nullclientId - the optional identifier for the client; may be null if not neededautoOffsetReset - how to pick a starting offset when there is no initial offset in ZooKeeper or if an offset is
out of range; may be null for the default to be usedgetProducerProperties(String)public Properties getProducerProperties(String clientId)
clientId - the optional identifier for the client; may be null if not neededgetConsumerProperties(String, String, OffsetResetStrategy)public <K,V> KafkaCluster.InteractiveProducer<K,V> createProducer(String producerName, org.apache.kafka.common.serialization.Serializer<K> keySerializer, org.apache.kafka.common.serialization.Serializer<V> valueSerializer)
simple producer that can be used to write messages to the cluster.producerName - the name of the producer; may not be nullkeySerializer - the serializer for the keys; may not be nullvalueSerializer - the serializer for the values; may not be nullpublic KafkaCluster.InteractiveProducer<String,io.debezium.document.Document> createProducer(String producerName)
simple producer that can be used to write Document messages to the
cluster.producerName - the name of the producer; may not be nullpublic <K,V> KafkaCluster.InteractiveConsumer<K,V> createConsumer(String groupId, String clientId, String topicName, org.apache.kafka.common.serialization.Deserializer<K> keyDeserializer, org.apache.kafka.common.serialization.Deserializer<V> valueDeserializer, Runnable completion)
simple consumer that can be used to read messages from the cluster.groupId - the name of the group; may not be nullclientId - the name of the client; may not be nulltopicName - the name of the topic to read; may not be null and may not be emptykeyDeserializer - the deserializer for the keys; may not be nullvalueDeserializer - the deserializer for the values; may not be nullcompletion - the function to call when the consumer terminates; may be nullpublic <K,V> KafkaCluster.InteractiveConsumer<K,V> createConsumer(String groupId, String clientId, Set<String> topicNames, org.apache.kafka.common.serialization.Deserializer<K> keyDeserializer, org.apache.kafka.common.serialization.Deserializer<V> valueDeserializer, Runnable completion)
simple consumer that can be used to read messages from the cluster.groupId - the name of the group; may not be nullclientId - the name of the client; may not be nulltopicNames - the names of the topics to read; may not be null and may not be emptykeyDeserializer - the deserializer for the keys; may not be nullvalueDeserializer - the deserializer for the values; may not be nullcompletion - the function to call when the consumer terminates; may be nullpublic KafkaCluster.InteractiveConsumer<String,io.debezium.document.Document> createConsumer(String groupId, String clientId, String topicName, Runnable completion)
simple consumer that can be used to read messages from the cluster.groupId - the name of the group; may not be nullclientId - the name of the client; may not be nulltopicName - the name of the topic to read; may not be null and may not be emptycompletion - the function to call when the consumer terminates; may be nullpublic KafkaCluster.InteractiveConsumer<String,io.debezium.document.Document> createConsumer(String groupId, String clientId, Set<String> topicNames, Runnable completion)
simple consumer that can be used to read messages from the cluster.groupId - the name of the group; may not be nullclientId - the name of the client; may not be nulltopicNames - the names of the topics to read; may not be null and may not be emptycompletion - the function to call when the consumer terminates; may be nullpublic <K,V> void produce(String producerName, Consumer<KafkaCluster.InteractiveProducer<String,io.debezium.document.Document>> producer)
Document messages and write them to the cluster.producerName - the name of the producer; may not be nullproducer - the function that will asynchronously produce messages and write them to the cluster; may not be nullpublic <K,V> void produce(String producerName, org.apache.kafka.common.serialization.Serializer<K> keySerializer, org.apache.kafka.common.serialization.Serializer<V> valueSerializer, Consumer<KafkaCluster.InteractiveProducer<K,V>> producer)
producerName - the name of the producer; may not be nullkeySerializer - the serializer for the keys; may not be nullvalueSerializer - the serializer for the values; may not be nullproducer - the function that will asynchronously produce messages and write them to the cluster; may not be nullpublic <K,V> void produce(String producerName, int messageCount, org.apache.kafka.common.serialization.Serializer<K> keySerializer, org.apache.kafka.common.serialization.Serializer<V> valueSerializer, Runnable completionCallback, Supplier<org.apache.kafka.clients.producer.ProducerRecord<K,V>> messageSupplier)
producerName - the name of the producer; may not be nullmessageCount - the number of messages to produce; must be positivekeySerializer - the serializer for the keys; may not be nullvalueSerializer - the serializer for the values; may not be nullcompletionCallback - the function to be called when the producer is completed; may be nullmessageSupplier - the function to produce messages; may not be nullpublic void produceStrings(int messageCount,
Runnable completionCallback,
Supplier<org.apache.kafka.clients.producer.ProducerRecord<String,String>> messageSupplier)
messageCount - the number of messages to produce; must be positivecompletionCallback - the function to be called when the producer is completed; may be nullmessageSupplier - the function to produce messages; may not be nullpublic void produceDocuments(int messageCount,
Runnable completionCallback,
Supplier<org.apache.kafka.clients.producer.ProducerRecord<String,io.debezium.document.Document>> messageSupplier)
Document values, and write
them to the cluster.messageCount - the number of messages to produce; must be positivecompletionCallback - the function to be called when the producer is completed; may be nullmessageSupplier - the function to produce messages; may not be nullpublic void produceIntegers(int messageCount,
Runnable completionCallback,
Supplier<org.apache.kafka.clients.producer.ProducerRecord<String,Integer>> messageSupplier)
messageCount - the number of messages to produce; must be positivecompletionCallback - the function to be called when the producer is completed; may be nullmessageSupplier - the function to produce messages; may not be nullpublic void produceIntegers(String topic, int messageCount, int initialValue, Runnable completionCallback)
topic - the name of the topic to which the messages should be written; may not be nullmessageCount - the number of messages to produce; must be positiveinitialValue - the first integer value to producecompletionCallback - the function to be called when the producer is completed; may be nullpublic void produceStrings(String topic, int messageCount, Runnable completionCallback, Supplier<String> valueSupplier)
topic - the name of the topic to which the messages should be written; may not be nullmessageCount - the number of messages to produce; must be positivecompletionCallback - the function to be called when the producer is completed; may be nullvalueSupplier - the value supplier; may not be nullpublic void produceDocuments(String topic, int messageCount, Runnable completionCallback, Supplier<io.debezium.document.Document> valueSupplier)
topic - the name of the topic to which the messages should be written; may not be nullmessageCount - the number of messages to produce; must be positivecompletionCallback - the function to be called when the producer is completed; may be nullvalueSupplier - the value supplier; may not be nullpublic <K,V> void consume(String groupId, String clientId, org.apache.kafka.clients.consumer.OffsetResetStrategy autoOffsetReset, org.apache.kafka.common.serialization.Deserializer<K> keyDeserializer, org.apache.kafka.common.serialization.Deserializer<V> valueDeserializer, BooleanSupplier continuation, org.apache.kafka.clients.consumer.OffsetCommitCallback offsetCommitCallback, Runnable completion, Collection<String> topics, Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<K,V>> consumerFunction)
groupId - the name of the group; may not be nullclientId - the name of the client; may not be nullautoOffsetReset - how to pick a starting offset when there is no initial offset in ZooKeeper or if an offset is
out of range; may be null for the default to be usedkeyDeserializer - the deserializer for the keys; may not be nullvalueDeserializer - the deserializer for the values; may not be nullcontinuation - the function that determines if the consumer should continue; may not be nulloffsetCommitCallback - the callback that should be used after committing offsets; may be null if offsets are
not to be committedcompletion - the function to call when the consumer terminates; may be nulltopics - the set of topics to consume; may not be null or emptyconsumerFunction - the function to consume the messages; may not be nullpublic void consumeDocuments(BooleanSupplier continuation, Runnable completion, Collection<String> topics, Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<String,io.debezium.document.Document>> consumerFunction)
continuation - the function that determines if the consumer should continue; may not be nullcompletion - the function to call when all messages have been consumed; may be nulltopics - the set of topics to consume; may not be null or emptyconsumerFunction - the function to consume the messages; may not be nullpublic void consumeStrings(BooleanSupplier continuation, Runnable completion, Collection<String> topics, Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<String,String>> consumerFunction)
continuation - the function that determines if the consumer should continue; may not be nullcompletion - the function to call when all messages have been consumed; may be nulltopics - the set of topics to consume; may not be null or emptyconsumerFunction - the function to consume the messages; may not be nullpublic void consumeIntegers(BooleanSupplier continuation, Runnable completion, Collection<String> topics, Consumer<org.apache.kafka.clients.consumer.ConsumerRecord<String,Integer>> consumerFunction)
continuation - the function that determines if the consumer should continue; may not be nullcompletion - the function to call when all messages have been consumed; may be nulltopics - the set of topics to consume; may not be null or emptyconsumerFunction - the function to consume the messages; may not be nullpublic void consumeStrings(String topicName, int count, long timeout, TimeUnit unit, Runnable completion, BiPredicate<String,String> consumer)
topicName - the name of the topic; may not be nullcount - the expected number of messages to read before terminating; must be positivetimeout - the maximum time that this consumer should run before terminating; must be positiveunit - the unit of time for the timeout; may not be nullcompletion - the function to call when all messages have been consumed; may be nullconsumer - the function to consume the messages; may not be nullpublic void consumeDocuments(String topicName, int count, long timeout, TimeUnit unit, Runnable completion, BiPredicate<String,io.debezium.document.Document> consumer)
topicName - the name of the topic; may not be nullcount - the expected number of messages to read before terminating; must be positivetimeout - the maximum time that this consumer should run before terminating; must be positiveunit - the unit of time for the timeout; may not be nullcompletion - the function to call when all messages have been consumed; may be nullconsumer - the function to consume the messages; may not be nullpublic void consumeIntegers(String topicName, int count, long timeout, TimeUnit unit, Runnable completion, BiPredicate<String,Integer> consumer)
topicName - the name of the topic; may not be nullcount - the expected number of messages to read before terminating; must be positivetimeout - the maximum time that this consumer should run before terminating; must be positiveunit - the unit of time for the timeout; may not be nullcompletion - the function to call when all messages have been consumed; may be nullconsumer - the function to consume the messages; may not be nullpublic void consumeStrings(String topicName, int count, long timeout, TimeUnit unit, Runnable completion)
topicName - the name of the topic; may not be nullcount - the expected number of messages to read before terminating; must be positivetimeout - the maximum time that this consumer should run before terminating; must be positiveunit - the unit of time for the timeout; may not be nullcompletion - the function to call when all messages have been consumed; may be nullpublic void consumeDocuments(String topicName, int count, long timeout, TimeUnit unit, Runnable completion)
topicName - the name of the topic; may not be nullcount - the expected number of messages to read before terminating; must be positivetimeout - the maximum time that this consumer should run before terminating; must be positiveunit - the unit of time for the timeout; may not be nullcompletion - the function to call when all messages have been consumed; may be nullpublic void consumeIntegers(String topicName, int count, long timeout, TimeUnit unit, Runnable completion)
topicName - the name of the topic; may not be nullcount - the expected number of messages to read before terminating; must be positivetimeout - the maximum time that this consumer should run before terminating; must be positiveunit - the unit of time for the timeout; may not be nullcompletion - the function to call when all messages have been consumed; may be nullprotected BooleanSupplier continueIfNotExpired(BooleanSupplier continuation, long timeout, TimeUnit unit)
Copyright © 2021 JBoss by Red Hat. All rights reserved.