I have created a connector for handling Avro data. I am able to publish data into the input topic, but the data is not arriving in the output topic. I have checked the logs of the connector and the REST proxy, and no errors are shown. Below is the Connect API request:
{
  "name": "sink-elastic_avro_2_topic",
  "config": {
    "connector.class": "io.confluent.connect.http.HttpSinkConnector",
    "headers": "Content-Type:application/vnd.kafka.json.v2+json|Accept:application/vnd.kafka.v2+json",
    "batch.max.size": "3000",
    "confluent.topic.bootstrap.servers": "broker:9092",
    "tasks.max": "3",
    "http.api.url": "http://xxx.xx.xxx/topics/avro_output_topic",
    "topics": "avro_input_topic",
    "request.method": "POST",
    "reporter.bootstrap.servers": "broker:9092",
    "regex.patterns": "^~$",
    "regex.separator": "~",
    "reporter.error.topic.name": "error-responses",
    "regex.replacements": "{\"key\" : \"${key}\" ,\"value\":~}",
    "reporter.result.topic.name": "success-responses",
    "batch.prefix": "{\"records\":[",
    "reporter.error.topic.replication.factor": "1",
    "consumer.override.auto.offset.reset": "latest",
    "confluent.topic.replication.factor": "1",
    "value.converter.schemas.enable": "false",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter.schema.registry.url": "http://schema-registry:8081",
    "batch.suffix": "]}",
    "key.converter": "org.apache.kafka.connect.storage.StringConverter",
    "reporter.result.topic.replication.factor": "1"
  }
}
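For reference, my understanding of the regex.patterns / regex.replacements and batch.prefix / batch.suffix settings above is that the connector should wrap each consumed record and POST a REST Proxy produce request to the http.api.url with a body shaped roughly like this (the keys and values here are just placeholders I made up, not actual data from my topic):

{
  "records": [
    { "key": "key-1", "value": { "field1": "a", "field2": 1 } },
    { "key": "key-2", "value": { "field1": "b", "field2": 2 } }
  ]
}

That is what I expect the REST proxy to receive and write into avro_output_topic, but nothing shows up there and nothing is reported in success-responses or error-responses either.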