"tasks" is empty for debezium sql source connector

Hey guys,

Any help would be really appreciated!
I have deployed 4 Debezium SQL Server connectors (one per database, across 2 different servers).
All connectors are in RUNNING status, but for 2 of them the database history topic is not being created.
When I analysed the connector status I found that "tasks" is empty — no task is being created.
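For reference, this is how I'm checking (the Connect host and connector name below are placeholders, not my real ones):

```bash
# Connector itself reports RUNNING, but the "tasks" array comes back empty
curl -s http://my-connect-host:8083/connectors/my-connector/status

# Same result here: an empty list instead of task definitions
curl -s http://my-connect-host:8083/connectors/my-connector/tasks
```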
I searched various forums/groups, and most of them suggested creating a new connector.
I did try this, but it didn't help.

How can I get tasks running for my connectors?
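Would restarting the connector together with its tasks make any difference? I was thinking of something like this (assuming Connect 3.0+, where the restart endpoint accepts the includeTasks flag):

```bash
# Restart the connector and all of its tasks in one call (Kafka Connect 3.0+)
curl -s -X POST \
  "http://my-connect-host:8083/connectors/my-connector/restart?includeTasks=true&onlyFailed=false"
```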

My connector config is given below (sensitive values redacted). Everything else is working; I have other connectors running in the same cluster.

```json
{
  "connector.class": "io.debezium.connector.sqlserver.SqlServerConnector",
  "errors.log.include.messages": "true",
  "topic.creation.default.partitions": "1",
  "database.history.consumer.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username = \"****\" password = \"****\";",
  "transforms.replaceField.blacklist": "Id,CreatedDate",
  "transforms": "unwrap,insertSpaceId,castToInt,ValueToKey,recordTimestamp,replaceField,changeTopicVersion",
  "transforms.changeTopicVersion.type": "org.apache.kafka.connect.transforms.RegexRouter",
  "transforms.insertSpaceId.static.field": "SpaceId",
  "include.schema.changes": "false",
  "value.converter.basic.auth.credentials.source": "USER_INFO",
  "transforms.insertSpaceId.static.value": "989",
  "database.history.kafka.recovery.poll.interval.ms": "500",
  "transforms.unwrap.drop.tombstones": "false",
  "topic.creation.default.replication.factor": "3",
  "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
  "errors.log.enable": "true",
  "key.converter": "io.confluent.connect.avro.AvroConverter",
  "transforms.changeTopicVersion.replacement": "$1v2",
  "transforms.ValueToKey.type": "org.apache.kafka.connect.transforms.ValueToKey",
  "database.history.producer.sasl.mechanism": "PLAIN",
  "database.user": "a****r",
  "database.dbname": "****",
  "topic.creation.default.compression.type": "lz4",
  "transforms.recordTimestamp.timestamp.column.name": "CreatedDate",
  "transforms.replaceField.type": "org.apache.kafka.connect.transforms.ReplaceField$Value",
  "database.history.kafka.bootstrap.servers": "****",
  "transforms.recordTimestamp.type": "****onvertor$Value",
  "transforms.ValueToKey.fields": "SpaceId,ArticleReviewInvitationId,ArticleId",
  "key.converter.basic.auth.user.info": "****",
  "topic.creation.enable": "true",
  "value.converter.schema.registry.url": "****",
  "database.password": "j****",
  "database.history.consumer.sasl.mechanism": "PLAIN",
  "pk.mode": "record_key",
  "key.converter.basic.auth.credentials.source": "USER_INFO",
  "transforms.unwrap.delete.handling.mode": "drop",
  "transforms.castToInt.type": "org.apache.kafka.connect.transforms.Cast$Value",
  "transforms.changeTopicVersion.regex": "f****e.(.*)v1",
  "database.history.kafka.topic": "****_new",
  "database.history.consumer.security.protocol": "SASL_SSL",
  "transforms.castToInt.spec": "SpaceId:int32",
  "value.converter": "io.confluent.connect.avro.AvroConverter",
  "database.history.producer.sasl.jaas.config": "org.apache.kafka.common.security.plain.PlainLoginModule required username = \"****\" password = \"****e3\";",
  "topic.creation.default.cleanup.policy": "compact",
  "database.history.producer.security.protocol": "SASL_SSL",
  "database.server.name": "****",
  "value.converter.basic.auth.user.info": "****",
  "transforms.insertSpaceId.type": "org.apache.kafka.connect.transforms.InsertField$Value",
  "database.hostname": "****",
  "connection.url": "jdbc:sqlserver://****;databaseName=****",
  "table.include.list": "*********************",
  "key.converter.schema.registry.url": "****"
}
```
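For completeness, this is roughly how I'm (re)creating the connector, with the JSON above saved as config.json (host and connector name are placeholders):

```bash
# Create or update the connector with the config shown above
curl -s -X PUT \
  -H "Content-Type: application/json" \
  --data @config.json \
  http://my-connect-host:8083/connectors/my-connector/config
```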
