Class AbstractIncrementalSnapshotTest<T extends org.apache.kafka.connect.source.SourceConnector>
java.lang.Object
io.debezium.embedded.AbstractConnectorTest
io.debezium.pipeline.source.snapshot.incremental.AbstractIncrementalSnapshotTest<T>
- All Implemented Interfaces:
Testing
- Direct Known Subclasses:
AbstractIncrementalSnapshotWithSchemaChangesSupportTest
public abstract class AbstractIncrementalSnapshotTest<T extends org.apache.kafka.connect.source.SourceConnector>
extends AbstractConnectorTest
-
Nested Class Summary
Nested classes/interfaces inherited from class io.debezium.embedded.AbstractConnectorTest
AbstractConnectorTest.SourceRecords
Nested classes/interfaces inherited from interface io.debezium.util.Testing
Testing.Debug, Testing.Files, Testing.InterruptableFunction, Testing.Network, Testing.Print, Testing.Timer -
Field Summary
Fields
Modifier and Type — Field — Description
private static final int MAXIMUM_NO_RECORDS_CONSUMES
protected static final int ROW_COUNT
protected static final Path SCHEMA_HISTORY_PATH
Fields inherited from class io.debezium.embedded.AbstractConnectorTest
consumedLines, engine, logger, logTestName, OFFSET_STORE_PATH, pollTimeoutInMs, skipTestRule -
Constructor Summary
Constructors -
Method Summary
Modifier and Type — Method — Description
(NOTE: entries whose method names were lost during text extraction — they were hyperlinks in the original HTML — are shown with modifier/return type only; see the Method Details section below for the full list of method names.)
protected String alterTableAddColumnStatement(String tableName)
protected String alterTableDropColumnStatement(String tableName)
protected abstract Configuration.Builder config()
protected boolean consumeAnyRemainingIncrementalSnapshotEventsAndCheckForStopMessage(LogInterceptor interceptor, String stopMessage)
consumeMixedWithIncrementalSnapshot(int recordCount)
consumeMixedWithIncrementalSnapshot(int recordCount, String topicName)
consumeMixedWithIncrementalSnapshot(int recordCount, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, Predicate<Map.Entry<Integer, V>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer, String topicName)
consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, Integer>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer)
consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, V>> dataCompleted, Function<org.apache.kafka.connect.data.Struct, Integer> idCalculator, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, String topicName, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer)
consumeRecordsMixedWithIncrementalSnapshot(int recordCount)
consumeRecordsMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, org.apache.kafka.connect.source.SourceRecord>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer)
protected abstract JdbcConnection (name lost — presumably databaseConnection(), per Method Details)
protected int — Get the maximum number of messages that can be obtained from the connector and held in-memory before they are consumed by test methods using AbstractConnectorTest.consumeRecord(), AbstractConnectorTest.consumeRecords(int), or AbstractConnectorTest.consumeRecords(int, Consumer). (presumably getMaximumEnqueuedRecordCount(), per Method Details)
protected String
void inserts()
void
protected abstract Configuration.Builder mutableConfig(boolean signalTableOnly, boolean storeOnlyCapturedDdl)
protected String
protected void populate4PkTable(JdbcConnection connection, String tableName)
protected void
protected void populateTable(JdbcConnection connection)
protected void populateTable(JdbcConnection connection, String tableName)
protected void
protected void populateTables(JdbcConnection connection)
protected void populateTableWithSpecificValue(int startRow, int count, int value)
private void populateTableWithSpecificValue(JdbcConnection connection, String tableName, int startRow, int count, int value)
void
void
protected void
protected void sendAdHocSnapshotSignal(String... dataCollectionIds)
protected void sendAdHocSnapshotSignalAndWait(String... collectionIds)
protected void sendAdHocSnapshotSignalWithAdditionalConditionWithSurrogateKey(Optional<String> additionalCondition, Optional<String> surrogateKey, String... dataCollectionIds)
protected void sendAdHocSnapshotStopSignal(String... dataCollectionIds)
protected void sendAdHocSnapshotStopSignalAndWait(String... collectionIds)
protected void
protected void
void
void
protected abstract String
protected String
void
void
void
void
void
void
void
void
protected void
protected void startConnector(io.debezium.engine.DebeziumEngine.CompletionCallback callback)
protected void startConnector(Function<Configuration.Builder, Configuration.Builder> custConfig)
protected void startConnector(Function<Configuration.Builder, Configuration.Builder> custConfig, io.debezium.engine.DebeziumEngine.CompletionCallback callback, boolean expectNoRecords)
protected void
void
void
protected String
protected abstract String
void
protected abstract String
void updates()
void
void
protected String
protected void waitForCdcTransactionPropagation(int expectedTransactions)
protected void
Methods inherited from class io.debezium.embedded.AbstractConnectorTest
assertBeginTransaction, assertConfigurationErrors, assertConfigurationErrors, assertConfigurationErrors, assertConnectorIsRunning, assertConnectorNotRunning, assertDelete, assertEndTransaction, assertEngineIsRunning, assertHasNoSourceQuery, assertInsert, assertKey, assertNoConfigurationErrors, assertNoRecordsToConsume, assertOffset, assertOffset, assertOnlyTransactionRecordsToConsume, assertRecordTransactionMetadata, assertSchemaMatchesStruct, assertSchemaMatchesStruct, assertSourceQuery, assertTombstone, assertTombstone, assertUpdate, assertValueField, configValue, consumeAvailableRecords, consumeDmlRecordsByTopic, consumeDmlRecordsByTopic, consumeDmlRecordsByTopic, consumeRecord, consumeRecords, consumeRecords, consumeRecords, consumeRecordsByTopic, consumeRecordsByTopic, consumeRecordsByTopic, consumeRecordsByTopicUntil, consumeRecordsUntil, debug, getConsumer, getSnapshotMetricsObjectName, getStreamingMetricsObjectName, getStreamingMetricsObjectName, getStreamingNamespace, initializeConnectorTestFramework, isStreamingRunning, isStreamingRunning, isTransactionRecord, loggingCompletion, print, readLastCommittedOffset, readLastCommittedOffsets, setConsumeTimeout, skipAvroValidation, start, start, start, start, start, start, start, startAndConsumeTillEnd, startAndConsumeTillEnd, stopConnector, stopConnector, validate, waitForAvailableRecords, waitForConnectorShutdown, waitForNotInitialState, waitForSnapshotToBeCompleted, waitForStreamingRunning, waitForStreamingRunning, waitTimeForRecords, waitTimeForRecordsAfterNulls
-
Field Details
-
ROW_COUNT
protected static final int ROW_COUNT
- See Also:
-
MAXIMUM_NO_RECORDS_CONSUMES
private static final int MAXIMUM_NO_RECORDS_CONSUMES
- See Also:
-
SCHEMA_HISTORY_PATH
-
-
Constructor Details
-
AbstractIncrementalSnapshotTest
public AbstractIncrementalSnapshotTest()
-
-
Method Details
-
connectorClass
-
databaseConnection
-
topicName
-
tableName
-
topicNames
-
tableNames
-
signalTableName
-
signalTableNameSanitized
-
config
-
mutableConfig
protected abstract Configuration.Builder mutableConfig(boolean signalTableOnly, boolean storeOnlyCapturedDdl) -
waitForCdcTransactionPropagation
- Throws:
Exception
-
alterTableAddColumnStatement
-
alterTableDropColumnStatement
-
tableDataCollectionId
-
tableDataCollectionIds
-
populateTable
- Throws:
SQLException
-
populateTable
- Throws:
SQLException
-
populateTables
- Throws:
SQLException
-
populateTable
- Throws:
SQLException
-
populateTableWithSpecificValue
protected void populateTableWithSpecificValue(int startRow, int count, int value) throws SQLException - Throws:
SQLException
-
populateTableWithSpecificValue
private void populateTableWithSpecificValue(JdbcConnection connection, String tableName, int startRow, int count, int value) throws SQLException - Throws:
SQLException
-
populateTables
- Throws:
SQLException
-
populate4PkTable
- Throws:
SQLException
-
consumeMixedWithIncrementalSnapshot
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount) throws InterruptedException - Throws:
InterruptedException
-
consumeMixedWithIncrementalSnapshot
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount, String topicName) throws InterruptedException - Throws:
InterruptedException
-
consumeMixedWithIncrementalSnapshot
protected <V> Map<Integer,V> consumeMixedWithIncrementalSnapshot(int recordCount, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, Predicate<Map.Entry<Integer, V>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer, String topicName) throws InterruptedException - Throws:
InterruptedException
-
consumeMixedWithIncrementalSnapshot
protected <V> Map<Integer,V> consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, V>> dataCompleted, Function<org.apache.kafka.connect.data.Struct, Integer> idCalculator, Function<org.apache.kafka.connect.source.SourceRecord, V> valueConverter, String topicName, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer) throws InterruptedException - Throws:
InterruptedException
-
consumeRecordsMixedWithIncrementalSnapshot
protected Map<Integer,org.apache.kafka.connect.source.SourceRecord> consumeRecordsMixedWithIncrementalSnapshot(int recordCount) throws InterruptedException - Throws:
InterruptedException
-
consumeMixedWithIncrementalSnapshot
protected Map<Integer,Integer> consumeMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, Integer>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer) throws InterruptedException- Throws:
InterruptedException
-
consumeRecordsMixedWithIncrementalSnapshot
protected Map<Integer,org.apache.kafka.connect.source.SourceRecord> consumeRecordsMixedWithIncrementalSnapshot(int recordCount, Predicate<Map.Entry<Integer, org.apache.kafka.connect.source.SourceRecord>> dataCompleted, Consumer<List<org.apache.kafka.connect.source.SourceRecord>> recordConsumer) throws InterruptedException- Throws:
InterruptedException
-
valueFieldName
-
pkFieldName
-
getSignalTypeFieldName
-
sendAdHocSnapshotSignal
- Throws:
SQLException
-
sendAdHocSnapshotSignalWithAdditionalConditionWithSurrogateKey
-
sendAdHocSnapshotStopSignal
- Throws:
SQLException
-
sendAdHocSnapshotSignal
- Throws:
SQLException
-
sendPauseSignal
protected void sendPauseSignal() -
sendResumeSignal
protected void sendResumeSignal() -
startConnector
protected void startConnector(io.debezium.engine.DebeziumEngine.CompletionCallback callback) -
startConnector
-
startConnector
protected void startConnector(Function<Configuration.Builder, Configuration.Builder> custConfig, io.debezium.engine.DebeziumEngine.CompletionCallback callback, boolean expectNoRecords) -
startConnectorWithSnapshot
protected void startConnectorWithSnapshot(Function<Configuration.Builder, Configuration.Builder> custConfig) -
startConnector
protected void startConnector() -
waitForConnectorToStart
protected void waitForConnectorToStart() -
snapshotOnly
- Throws:
Exception
-
invalidTablesInTheList
- Throws:
Exception
-
inserts
- Throws:
Exception
-
updates
- Throws:
Exception
-
updatesWithRestart
- Throws:
Exception
-
updatesLargeChunk
- Throws:
Exception
-
snapshotOnlyWithRestart
- Throws:
Exception
-
snapshotPreceededBySchemaChange
- Throws:
Exception
-
snapshotWithRegexDataCollections
- Throws:
Exception
-
stopCurrentIncrementalSnapshotWithoutCollectionsAndTakeNewNewIncrementalSnapshotAfterRestart
@FixFor("DBZ-4271") public void stopCurrentIncrementalSnapshotWithoutCollectionsAndTakeNewNewIncrementalSnapshotAfterRestart() throws Exception- Throws:
Exception
-
stopCurrentIncrementalSnapshotWithAllCollectionsAndTakeNewNewIncrementalSnapshotAfterRestart
@FixFor("DBZ-4271") public void stopCurrentIncrementalSnapshotWithAllCollectionsAndTakeNewNewIncrementalSnapshotAfterRestart() throws Exception- Throws:
Exception
-
removeNotYetCapturedCollectionFromInProgressIncrementalSnapshot
@FixFor("DBZ-4271") public void removeNotYetCapturedCollectionFromInProgressIncrementalSnapshot() throws Exception- Throws:
Exception
-
removeStartedCapturedCollectionFromInProgressIncrementalSnapshot
@FixFor("DBZ-4271") public void removeStartedCapturedCollectionFromInProgressIncrementalSnapshot() throws Exception- Throws:
Exception
-
shouldSnapshotNewlyAddedTableToIncludeListAfterRestart
@FixFor("DBZ-4834") public void shouldSnapshotNewlyAddedTableToIncludeListAfterRestart() throws Exception- Throws:
Exception
-
testPauseDuringSnapshot
- Throws:
Exception
-
snapshotWithAdditionalCondition
- Throws:
Exception
-
shouldExecuteRegularSnapshotWhenAdditionalConditionEmpty
- Throws:
Exception
-
snapshotWithAdditionalConditionWithRestart
- Throws:
Exception
-
snapshotWithSurrogateKey
- Throws:
Exception
-
snapshotWithAdditionalConditionWithSurrogateKey
- Throws:
Exception
-
sendAdHocSnapshotSignalAndWait
- Throws:
Exception
-
sendAdHocSnapshotStopSignalAndWait
- Throws:
Exception
-
consumeAnyRemainingIncrementalSnapshotEventsAndCheckForStopMessage
protected boolean consumeAnyRemainingIncrementalSnapshotEventsAndCheckForStopMessage(LogInterceptor interceptor, String stopMessage) throws Exception - Throws:
Exception
-
getMaximumEnqueuedRecordCount
protected int getMaximumEnqueuedRecordCount()
Description copied from class: AbstractConnectorTest
Get the maximum number of messages that can be obtained from the connector and held in-memory before they are consumed by test methods using AbstractConnectorTest.consumeRecord(), AbstractConnectorTest.consumeRecords(int), or AbstractConnectorTest.consumeRecords(int, Consumer). By default this method returns 100.
- Overrides:
getMaximumEnqueuedRecordCount in class AbstractConnectorTest
- Returns:
- the maximum number of records that can be enqueued
-