Package io.debezium.connector.mongodb
Class IncrementalSnapshotIT
java.lang.Object
io.debezium.embedded.AbstractConnectorTest
io.debezium.connector.mongodb.AbstractMongoConnectorIT
io.debezium.connector.mongodb.IncrementalSnapshotIT
- All Implemented Interfaces:
Testing
Test to verify incremental snapshotting for MongoDB.
- Author:
- Jiri Pechanec
-
Nested Class Summary
Nested classes/interfaces inherited from class io.debezium.embedded.AbstractConnectorTest
AbstractConnectorTest.SourceRecords
Nested classes/interfaces inherited from interface io.debezium.util.Testing
Testing.Debug, Testing.Files, Testing.InterruptableFunction, Testing.Network, Testing.Print, Testing.Timer -
Field Summary
Fields
Modifier and Type — Field
private static final String COLLECTION_NAME
private static final String DATABASE_NAME
protected static final Path DB_HISTORY_PATH
private static final String DOCUMENT_ID
private static final String FULL_COLLECTION_NAME
private static final int MAXIMUM_NO_RECORDS_CONSUMES
protected static final int ROW_COUNT
org.junit.rules.TestRule skipForOplog
Fields inherited from class io.debezium.connector.mongodb.AbstractMongoConnectorIT
config, context, logInterceptorFields inherited from class io.debezium.embedded.AbstractConnectorTest
engine, logger, logTestName, OFFSET_STORE_PATH, pollTimeoutInMs, skipTestRule -
Constructor Summary
Constructors -
Method Summary
Modifier and Type — Method
void after()
void before()
protected Configuration.Builder config()
protected Class<io.debezium.connector.mongodb.MongoDbConnector> connectorClass()
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount)
protected <V> Map<Integer,V> consumeMixedWithIncrementalSnapshot(int recordCount, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, Predicate<Map.Entry<Integer, V>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer)
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, Integer>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer)
protected <V> Map<Integer,V> consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, V>> dataCompleted, Function<org.apache.kafka.connect.data.Struct, Integer> idCalculator, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, String topicName, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer)
protected Map<Integer,org.apache.kafka.connect.source.SourceRecord> consumeRecordsMixedWithIncrementalSnapshot(int recordCount)
protected Map<Integer,org.apache.kafka.connect.source.SourceRecord> consumeRecordsMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, org.apache.kafka.connect.source.SourceRecord>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer)
protected String dataCollectionName()
protected Integer extractFieldValue(org.apache.kafka.connect.source.SourceRecord record)
protected String fullDataCollectionName()
protected int getMaximumEnqueuedRecordCount()
protected void insertAdditionalData()
void inserts()
void invalidTablesInTheList()
protected String pkFieldName()
protected void populateDataCollection()
protected void populateDataCollection(io.debezium.connector.mongodb.ConnectionContext.MongoPrimary connection)
protected void populateDataCollection(io.debezium.connector.mongodb.ConnectionContext.MongoPrimary connection, String dataCollectionName)
protected void sendAdHocSnapshotSignal()
protected void sendAdHocSnapshotSignal(String... dataCollectionIds)
void snapshotOnly()
void snapshotOnlyWithRestart()
protected void startConnector()
protected void startConnector(io.debezium.engine.DebeziumEngine.CompletionCallback callback)
protected void startConnector(Function<Configuration.Builder, Configuration.Builder> custConfig)
protected void startConnector(Function<Configuration.Builder, Configuration.Builder> custConfig, io.debezium.engine.DebeziumEngine.CompletionCallback callback)
protected String topicName()
protected void updateData(int batchSize)
void updates()
void updatesLargeChunk()
void updatesWithRestart()
protected String valueFieldName()
protected void waitForConnectorToStart()
Methods inherited from class io.debezium.connector.mongodb.AbstractMongoConnectorIT
afterEach, beforEach, connectionErrorHandler, deleteDocuments, dropAndInsertDocuments, insertDocuments, insertDocumentsInTx, loadTestDocuments, primary, storeDocuments, storeDocuments, updateDocument, updateDocumentsInTxMethods inherited from class io.debezium.embedded.AbstractConnectorTest
assertBeginTransaction, assertConfigurationErrors, assertConfigurationErrors, assertConfigurationErrors, assertConnectorIsRunning, assertConnectorNotRunning, assertDelete, assertEndTransaction, assertEngineIsRunning, assertHasNoSourceQuery, assertInsert, assertKey, assertNoConfigurationErrors, assertNoRecordsToConsume, assertOffset, assertOffset, assertOnlyTransactionRecordsToConsume, assertRecordTransactionMetadata, assertSchemaMatchesStruct, assertSchemaMatchesStruct, assertSourceQuery, assertTombstone, assertTombstone, assertUpdate, assertValueField, configValue, consumeAvailableRecords, consumeDmlRecordsByTopic, consumeDmlRecordsByTopic, consumeDmlRecordsByTopic, consumeRecord, consumeRecords, consumeRecords, consumeRecords, consumeRecordsByTopic, consumeRecordsByTopic, consumeRecordsByTopic, consumeRecordsByTopicUntil, consumeRecordsUntil, debug, getSnapshotMetricsObjectName, getStreamingMetricsObjectName, getStreamingMetricsObjectName, getStreamingNamespace, initializeConnectorTestFramework, isStreamingRunning, isStreamingRunning, isTransactionRecord, loggingCompletion, print, readLastCommittedOffset, readLastCommittedOffsets, setConsumeTimeout, skipAvroValidation, start, start, start, start, start, startAndConsumeTillEnd, startAndConsumeTillEnd, stopConnector, stopConnector, validate, waitForAvailableRecords, waitForConnectorShutdown, waitForSnapshotToBeCompleted, waitForStreamingRunning, waitForStreamingRunning, waitTimeForRecords, waitTimeForRecordsAfterNulls
-
Field Details
-
ROW_COUNT
protected static final int ROW_COUNT - See Also:
-
MAXIMUM_NO_RECORDS_CONSUMES
private static final int MAXIMUM_NO_RECORDS_CONSUMES - See Also:
-
DATABASE_NAME
- See Also:
-
COLLECTION_NAME
- See Also:
-
FULL_COLLECTION_NAME
- See Also:
-
DOCUMENT_ID
- See Also:
-
DB_HISTORY_PATH
-
skipForOplog
public org.junit.rules.TestRule skipForOplog
-
-
Constructor Details
-
IncrementalSnapshotIT
public IncrementalSnapshotIT()
-
-
Method Details
-
before
public void before() -
after
public void after() -
connectorClass
-
config
-
dataCollectionName
-
fullDataCollectionName
-
topicName
-
populateDataCollection
protected void populateDataCollection(io.debezium.connector.mongodb.ConnectionContext.MongoPrimary connection, String dataCollectionName) -
populateDataCollection
protected void populateDataCollection(io.debezium.connector.mongodb.ConnectionContext.MongoPrimary connection) -
populateDataCollection
protected void populateDataCollection() -
insertAdditionalData
protected void insertAdditionalData() -
updateData
protected void updateData(int batchSize) -
startConnector
protected void startConnector(io.debezium.engine.DebeziumEngine.CompletionCallback callback) -
startConnector
-
startConnector
protected void startConnector(Function<Configuration.Builder, Configuration.Builder> custConfig, io.debezium.engine.DebeziumEngine.CompletionCallback callback) -
startConnector
protected void startConnector() -
waitForConnectorToStart
protected void waitForConnectorToStart() -
sendAdHocSnapshotSignal
- Throws:
SQLException
-
sendAdHocSnapshotSignal
- Throws:
SQLException
-
consumeMixedWithIncrementalSnapshot
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount) throws InterruptedException - Throws:
InterruptedException
-
extractFieldValue
-
consumeMixedWithIncrementalSnapshot
protected <V> Map<Integer,V> consumeMixedWithIncrementalSnapshot(int recordCount, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, Predicate<Map.Entry<Integer, V>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer) throws InterruptedException - Throws:
InterruptedException
-
consumeMixedWithIncrementalSnapshot
protected <V> Map<Integer,V> consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, V>> dataCompleted, Function<org.apache.kafka.connect.data.Struct, Integer> idCalculator, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, String topicName, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer) throws InterruptedException - Throws:
InterruptedException
-
consumeRecordsMixedWithIncrementalSnapshot
protected Map<Integer,org.apache.kafka.connect.source.SourceRecord> consumeRecordsMixedWithIncrementalSnapshot(int recordCount) throws InterruptedException - Throws:
InterruptedException
-
consumeMixedWithIncrementalSnapshot
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, Integer>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer) throws InterruptedException - Throws:
InterruptedException
-
consumeRecordsMixedWithIncrementalSnapshot
protected Map<Integer,org.apache.kafka.connect.source.SourceRecord> consumeRecordsMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, org.apache.kafka.connect.source.SourceRecord>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer) throws InterruptedException - Throws:
InterruptedException
-
valueFieldName
-
pkFieldName
-
snapshotOnly
- Throws:
Exception
-
invalidTablesInTheList
- Throws:
Exception
-
snapshotOnlyWithRestart
- Throws:
Exception
-
inserts
- Throws:
Exception
-
updates
- Throws:
Exception
-
updatesWithRestart
- Throws:
Exception
-
updatesLargeChunk
- Throws:
Exception
-
getMaximumEnqueuedRecordCount
protected int getMaximumEnqueuedRecordCount()- Overrides:
getMaximumEnqueuedRecordCount in class AbstractConnectorTest
-