{
"name": "mysql-user-tables",
"config": {
"connector.class": "io.debezium.connector.mysql.MySqlConnector",
"tasks.max": "1",
"database.allowPublicKeyRetrieval":"true",
"database.hostname": "mysql",
"database.port": "3306",
"database.user": "root",
"database.password": "mehrdad517.%mmMM",
"database.server.id": "184054",
"database.server.name": "database",
"database.include.list": "mlm",
"table.include.list": "mlm.user,mlm.marketer,mlm.marketer_info",
"database.history.kafka.bootstrap.servers": "kafka:9092",
"database.history.kafka.topic": "schema-changes.database",
"include.schema.changes" : "false",
"transforms": "unwrap",
"transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
"key.converter": "org.apache.kafka.connect.storage.StringConverter",
"value.converter": "io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url": "http://schema-registry:8081"
}
}
And this is my Elasticsearch sink connector configuration:
{
"name": "elastic",
"config": {
"connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
"tasks.max": "1",
"topics": "database.mlm.user,database.mlm.marketer,database.mlm.marketer_info",
"connection.url": "http://elasticsearch:9200",
"key.ignore": "true",
"type.name": "kafka-connect",
"errors.tolerance": "all",
"errors.log.enable": "true",
"errors.log.include.messages": "true",
"schema.ignore": "true",
"key.converter": "org.apache.kafka.connect.storage.StringConverter",
"value.converter": "io.confluent.connect.avro.AvroConverter",
"value.converter.schema.registry.url": "http://schema-registry:8081"
}
}
But no index is created in Elasticsearch even though both connectors report a RUNNING state.