Hi,
I’m running a self-managed debezium-sqlserver connector in a private VPC and streaming the CDC data to my topics in Confluent Cloud. I can successfully deploy and manage my Kafka Connect nodes with docker-compose, but for our DevOps processes I now have to move to a Dockerfile.
Everything looks good until I send the curl PUT command; after that I get this error message:
> [2021-06-04 07:21:20,672] ERROR Uncaught exception in REST call to /connectors/sqlserver-cdc-dockerfile/config (org.apache.kafka.connect.runtime.rest.errors.ConnectExceptionMapper)
>
> com.fasterxml.jackson.core.io.JsonEOFException: **Unexpected end-of-input: expected close marker for Object** (start marker at [Source: (org.glassfish.jersey.message.internal.ReaderInterceptorExecutor$UnCloseableInputStream); line: 1, column: 1])
>
> at [Source: (org.glassfish.jersey.message.internal.ReaderInterceptorExecutor$UnCloseableInputStream); line: 1, column: 2]
It looks like a JSON validation error, but I’m quite sure my JSON is fine, and all of my configs and versions work fine with docker-compose.
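For what it’s worth, the body of the PUT on its own parses as valid JSON; this is the quick sanity check I ran, saving the -d payload from the curl command further down as payload.json (a file name used only for this check):
# Confirm the request body alone is syntactically valid JSON
jq . payload.json && echo "payload parses cleanly"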
Here is my Dockerfile:
FROM confluentinc/cp-kafka-connect-base:5.5.0
ENV CONNECT_BOOTSTRAP_SERVERS="europe-west3.gcp.confluent.cloud:9092"\
CONNECT_REST_PORT=8083 \
CONNECT_REST_ADVERTISED_HOST_NAME="kafka-connect-cdc-00" \
CONNECT_GROUP_ID="kafka-connect-sqlserver" \
CONNECT_CONFIG_STORAGE_TOPIC="_kafka-connect-cdc-configs" \
CONNECT_OFFSET_STORAGE_TOPIC="_kafka-connect-cdc-offsets" \
CONNECT_STATUS_STORAGE_TOPIC="_kafka-connect-cdc-status" \
CONNECT_KEY_CONVERTER="io.confluent.connect.avro.AvroConverter" \
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL="https://psrc-yoz1k.eu-central-1.aws.confluent.cloud" \
CONNECT_KEY_CONVERTER_BASIC_AUTH_CREDENTIALS_SOURCE="USER_INFO" \
CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO="" \
CONNECT_VALUE_CONVERTER="io.confluent.connect.avro.AvroConverter" \
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL="https://psrc-yoz1k.eu-central-1.aws.confluent.cloud" \
CONNECT_VALUE_CONVERTER_BASIC_AUTH_CREDENTIALS_SOURCE="USER_INFO" \
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_BASIC_AUTH_USER_INFO="" \
CONNECT_INTERNAL_KEY_CONVERTER="org.apache.kafka.connect.json.JsonConverter" \
CONNECT_INTERNAL_VALUE_CONVERTER="org.apache.kafka.connect.json.JsonConverter" \
CONNECT_LOG4J_ROOT_LOGLEVEL="INFO" \
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR="3" \
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR="3" \
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR="3" \
CONNECT_PLUGIN_PATH=/usr/share/java,/usr/share/confluent-hub-components,/data/connect-jars \
CONNECT_REQUEST_TIMEOUT_MS="20000" \
CONNECT_SASL_MECHANISM="PLAIN" \
CONNECT_RETRY_BACKOFF_MS="500" \
CONNECT_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM="https" \
CONNECT_SECURITY_PROTOCOL="SASL_SSL" \
CONNECT_SASL_JAAS_CONFIG="org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"\";" \
CONNECT_CONSUMER_SASL_JAAS_CONFIG="org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"\";" \
CONNECT_PRODUCER_SASL_JAAS_CONFIG="org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"\";" \
CONNECT_CONSUMER_SECURITY_PROTOCOL="SASL_SSL" \
CONNECT_CONSUMER_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM="https" \
CONNECT_CONSUMER_SASL_MECHANISM="PLAIN" \
CONNECT_CONSUMER_REQUEST_TIMEOUT_MS="20000" \
CONNECT_CONSUMER_RETRY_BACKOFF_MS="500" \
CONNECT_PRODUCER_SECURITY_PROTOCOL="SASL_SSL" \
CONNECT_PRODUCER_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM="https" \
CONNECT_PRODUCER_SASL_MECHANISM="PLAIN" \
CONNECT_PRODUCER_REQUEST_TIMEOUT_MS="20000" \
CONNECT_PRODUCER_RETRY_BACKOFF_MS="500"
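# Install the Debezium SQL Server connector plugin from Confluent Hub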
RUN confluent-hub install --no-prompt debezium/debezium-connector-sqlserver:latest
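# Copy in the bootstrap script that waits for the worker and then registers the connector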
RUN mkdir /config
COPY ./config /config
RUN chmod +x /config/sqlserver-connector.sh
ENV EXPOSED_PORT=8083
CMD /config/sqlserver-connector.sh
This part works well; the error occurs when my .sh file runs.
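For reference, I build and run the image like this (the image tag and container name are just my choices):
# Build the image and start a container, publishing the Connect REST port
docker build -t kafka-connect-cdc-00 .
docker run -d --name kafka-connect-cdc-00 -p 8083:8083 kafka-connect-cdc-00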
Here is the sqlserver-connector.sh file:
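# Start the Connect worker in the background, then poll its REST endpoint until it is ready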
/etc/confluent/docker/run &
bash -c ' \
echo -e "\n\n=============\nWaiting for Kafka Connect to start listening on localhost ⏳\n=============\n"
while [ $(curl -s -o /dev/null -w %{http_code} http://localhost:8083/connectors) -ne 200 ] ; do
echo -e "\t" $(date) " Kafka Connect listener HTTP state: " $(curl -s -o /dev/null -w %{http_code} http://localhost:8083/connectors) " (waiting for 200)"
sleep 5
done
echo -e $(date) "\n\n--------------\n\o/ Kafka Connect is ready! Listener HTTP state: " $(curl -s -o /dev/null -w %{http_code} http://localhost:8083/connectors) "\n--------------\n"
sleep 5
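# Register (or update) the connector config with an idempotent PUT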
curl -i -X PUT -H "Content-Type:application/json" http://localhost:8083/connectors/sqlserver-cdc-dockerfile/config \
-d '{
"connector.class" : "io.debezium.connector.sqlserver.SqlServerConnector",
"database.hostname" : "localhost",
"database.port" : "1433",
"database.user" : "sa",
"database.password" : "Password!",
"database.dbname" : "testDB",
"database.server.name" : "dockerfiletest",
"table.include.list" : "dbo.customers",
"topic.creation.enable" : "true",
"tasks.max" : 1,
"topic.creation.default.replication.factor" : 3,
"topic.creation.default.partitions" : 3,
"message.key.columns" : "testDB.dbo.customers:id",
"snapshot.mode" : "schema_only",
"snapshot.isolation.mode" : "read_committed",
"database.history.kafka.bootstrap.servers" : "europe-west3.gcp.confluent.cloud:9092",
"database.history.kafka.topic" : "dbhistory.dockerfiletest",
"database.history.consumer.security.protocol" : "SASL_SSL",
"database.history.consumer.ssl.endpoint.identification.algorithm": "https",
"database.history.consumer.sasl.mechanism" : "PLAIN",
"database.history.consumer.sasl.jaas.config" : "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"\";",
"database.history.producer.security.protocol" : "SASL_SSL",
"database.history.producer.ssl.endpoint.identification.algorithm": "https",
"database.history.producer.sasl.mechanism" : "PLAIN",
"database.history.producer.sasl.jaas.config" : "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"\" password=\"\";"
}'
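Once the PUT goes through, I plan to verify the connector with the standard status endpoint (right now it is the PUT itself that triggers the error above):
# Inspect connector and task state after registration
curl -s http://localhost:8083/connectors/sqlserver-cdc-dockerfile/status | jq .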
I would appreciate it if you could point out the part I missed.
Thank you in advance for your time.