Package io.debezium.connector.mongodb
Class ShardedIncrementalSnapshotIT
java.lang.Object
io.debezium.embedded.AbstractConnectorTest
io.debezium.connector.mongodb.AbstractShardedMongoConnectorIT
io.debezium.connector.mongodb.ShardedIncrementalSnapshotIT
- All Implemented Interfaces:
Testing
-
Nested Class Summary
Nested classes/interfaces inherited from class io.debezium.embedded.AbstractConnectorTest
AbstractConnectorTest.SourceRecords. Nested classes/interfaces inherited from interface io.debezium.util.Testing
Testing.Debug, Testing.Files, Testing.InterruptableFunction, Testing.Network, Testing.Print, Testing.Timer -
Field Summary
Fields (Modifier and Type / Field / Description):
private static final int CHUNK_SIZE;
private static final String COLLECTION_NAME;
private static final String DATABASE_NAME;
private static final String DOCUMENT_ID;
private static final String FULL_COLLECTION_NAME;
private static final int MAXIMUM_NO_RECORDS_CONSUMES;
protected static final int ROW_COUNT;
private static final String SIGNAL_COLLECTION_NAME.
Fields inherited from class io.debezium.connector.mongodb.AbstractShardedMongoConnectorIT
DEFAULT_COLLECTION, DEFAULT_DATABASE, DEFAULT_SHARDING_KEY, mongo. Fields inherited from class io.debezium.embedded.AbstractConnectorTest
consumedLines, engine, logger, logTestName, OFFSET_STORE_PATH, pollTimeoutInMs, skipTestRule -
Constructor Summary
Constructors -
Method Summary
Modifier and Type / Method / Description:
protected Configuration.Builder config();
protected Class&lt;io.debezium.connector.mongodb.MongoDbConnector&gt; (method name lost in extraction);
consumeMixedWithIncrementalSnapshot(int recordCount, String topicName);
consumeMixedWithIncrementalSnapshot(int recordCount, Function&lt;org.apache.kafka.connect.source.SourceRecord, V&gt; valueConverter, Predicate&lt;Map.Entry&lt;Integer, V&gt;&gt; dataCompleted, Consumer&lt;List&lt;org.apache.kafka.connect.source.SourceRecord&gt;&gt; recordConsumer, String topicName);
protected &lt;V,K&gt; Map&lt;K, V&gt; consumeMixedWithIncrementalSnapshot(int recordCount, Predicate&lt;Map.Entry&lt;K, V&gt;&gt; dataCompleted, Function&lt;org.apache.kafka.connect.data.Struct, K&gt; idCalculator, Function&lt;org.apache.kafka.connect.source.SourceRecord, V&gt; valueConverter, String topicName, Consumer&lt;List&lt;org.apache.kafka.connect.source.SourceRecord&gt;&gt; recordConsumer);
protected Integer extractFieldValue(org.apache.kafka.connect.source.SourceRecord record);
protected String; protected String; protected void;
protected void sendAdHocSnapshotSignal(String... dataCollectionIds);
protected String; private &lt;K&gt; void snapshotOnly(K initialId, Function&lt;K, K&gt; idGenerator);
void; protected void;
protected void startConnector(Function&lt;Configuration.Builder, Configuration.Builder&gt; custConfig, io.debezium.engine.DebeziumEngine.CompletionCallback callback);
protected String; protected String; protected void.
Methods inherited from class io.debezium.connector.mongodb.AbstractShardedMongoConnectorIT
afterAll, afterEach, beforeAll, beforeEach, connect, insertDocuments, insertDocumentsInTx, shardedCollection. Methods inherited from class io.debezium.embedded.AbstractConnectorTest
assertBeginTransaction, assertConfigurationErrors, assertConfigurationErrors, assertConfigurationErrors, assertConnectorIsRunning, assertConnectorNotRunning, assertDelete, assertEndTransaction, assertEngineIsRunning, assertHasNoSourceQuery, assertInsert, assertKey, assertNoConfigurationErrors, assertNoRecordsToConsume, assertOffset, assertOffset, assertOnlyTransactionRecordsToConsume, assertRecordTransactionMetadata, assertSchemaMatchesStruct, assertSchemaMatchesStruct, assertSourceQuery, assertTombstone, assertTombstone, assertUpdate, assertValueField, configValue, consumeAvailableRecords, consumeAvailableRecordsByTopic, consumeDmlRecordsByTopic, consumeDmlRecordsByTopic, consumeDmlRecordsByTopic, consumeRecord, consumeRecords, consumeRecords, consumeRecords, consumeRecordsButSkipUntil, consumeRecordsByTopic, consumeRecordsByTopic, consumeRecordsByTopic, consumeRecordsByTopicUntil, consumeRecordsUntil, debug, getConsumer, getMaximumEnqueuedRecordCount, getSnapshotMetricsObjectName, getSnapshotMetricsObjectName, getSnapshotMetricsObjectName, getStreamingMetricsObjectName, getStreamingMetricsObjectName, getStreamingMetricsObjectName, getStreamingMetricsObjectName, getStreamingNamespace, initializeConnectorTestFramework, isStreamingRunning, isStreamingRunning, isStreamingRunning, isStreamingRunning, isTransactionRecord, loggingCompletion, print, readLastCommittedOffset, readLastCommittedOffsets, setConsumeTimeout, skipAvroValidation, start, start, start, start, start, start, start, startAndConsumeTillEnd, startAndConsumeTillEnd, stopConnector, stopConnector, validate, waitForAvailableRecords, waitForConnectorShutdown, waitForNotInitialState, waitForSnapshotToBeCompleted, waitForSnapshotToBeCompleted, waitForSnapshotWithCustomMetricsToBeCompleted, waitForStreamingRunning, waitForStreamingRunning, waitForStreamingRunning, waitForStreamingWithCustomMetricsToStart, waitTimeForRecords, waitTimeForRecordsAfterNulls
-
Field Details
-
ROW_COUNT
protected static final int ROW_COUNT- See Also:
-
MAXIMUM_NO_RECORDS_CONSUMES
private static final int MAXIMUM_NO_RECORDS_CONSUMES- See Also:
-
DATABASE_NAME
- See Also:
-
COLLECTION_NAME
- See Also:
-
SIGNAL_COLLECTION_NAME
- See Also:
-
FULL_COLLECTION_NAME
- See Also:
-
DOCUMENT_ID
- See Also:
-
CHUNK_SIZE
private static final int CHUNK_SIZE- See Also:
-
-
Constructor Details
-
ShardedIncrementalSnapshotIT
public ShardedIncrementalSnapshotIT()
-
-
Method Details
-
shardedDatabase
- Overrides:
shardedDatabase in class AbstractShardedMongoConnectorIT
-
shardedCollections
- Overrides:
shardedCollections in class AbstractShardedMongoConnectorIT
-
fullDataCollectionName
-
topicName
-
config
-
snapshotOnlyWithInt64
- Throws:
Exception
-
snapshotOnly
- Throws:
Exception
-
valueFieldName
-
pkFieldName
-
connectorClass
-
startConnector
protected void startConnector(Function<Configuration.Builder, Configuration.Builder> custConfig, io.debezium.engine.DebeziumEngine.CompletionCallback callback) -
startConnector
protected void startConnector() -
waitForConnectorToStart
protected void waitForConnectorToStart() -
sendAdHocSnapshotSignal
- Throws:
SQLException
-
sendAdHocSnapshotSignal
- Throws:
SQLException
-
consumeMixedWithIncrementalSnapshot
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount, String topicName) throws InterruptedException - Throws:
InterruptedException
-
extractFieldValue
-
consumeMixedWithIncrementalSnapshot
protected &lt;V&gt; Map&lt;Integer,V&gt; consumeMixedWithIncrementalSnapshot(int recordCount, Function&lt;org.apache.kafka.connect.source.SourceRecord, V&gt; valueConverter, Predicate&lt;Map.Entry&lt;Integer, V&gt;&gt; dataCompleted, Consumer&lt;List&lt;org.apache.kafka.connect.source.SourceRecord&gt;&gt; recordConsumer, String topicName) throws InterruptedException - Throws:
InterruptedException
-
consumeMixedWithIncrementalSnapshot
protected &lt;V,K&gt; Map&lt;K,V&gt; consumeMixedWithIncrementalSnapshot(int recordCount, Predicate&lt;Map.Entry&lt;K, V&gt;&gt; dataCompleted, Function&lt;org.apache.kafka.connect.data.Struct, K&gt; idCalculator, Function&lt;org.apache.kafka.connect.source.SourceRecord, V&gt; valueConverter, String topicName, Consumer&lt;List&lt;org.apache.kafka.connect.source.SourceRecord&gt;&gt; recordConsumer) throws InterruptedException - Throws:
InterruptedException
-