Hello All,
I'm trying to learn Confluent Kafka using Docker. I have set up all the containers, but the PostgreSQL connector is missing, and I'm not sure how to add the PostgreSQL connector to my local setup on a Mac.
Error:
{"error_code":500,"message":"Failed to find any class that implements Connector and which name matches io.debezium.connector.postgresql.PostgresConnector, available connectors are: PluginDesc{klass=class org.apache.kafka.connect.file.FileStreamSinkConnector, name='org.apache.kafka.connect.file.FileStreamSinkConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=sink, typeName='sink', location='file:/usr/share/java/kafka/'}, PluginDesc{klass=class org.apache.kafka.connect.file.FileStreamSourceConnector, name='org.apache.kafka.connect.file.FileStreamSourceConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=source, typeName='source', location='file:/usr/share/java/kafka/'}, PluginDesc{klass=class org.apache.kafka.connect.mirror.MirrorCheckpointConnector, name='org.apache.kafka.connect.mirror.MirrorCheckpointConnector', version='1', encodedVersion=1, type=source, typeName='source', location='file:/usr/share/java/kafka/'}, PluginDesc{klass=class org.apache.kafka.connect.mirror.MirrorHeartbeatConnector, name='org.apache.kafka.connect.mirror.MirrorHeartbeatConnector', version='1', encodedVersion=1, type=source, typeName='source', location='file:/usr/share/java/kafka/'}, PluginDesc{klass=class org.apache.kafka.connect.mirror.MirrorSourceConnector, name='org.apache.kafka.connect.mirror.MirrorSourceConnector', version='1', encodedVersion=1, type=source, typeName='source', location='file:/usr/share/java/kafka/'}, PluginDesc{klass=class org.apache.kafka.connect.tools.MockConnector, name='org.apache.kafka.connect.tools.MockConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=connector, typeName='connector', location='file:/usr/share/java/acl/'}, PluginDesc{klass=class org.apache.kafka.connect.tools.MockSinkConnector, name='org.apache.kafka.connect.tools.MockSinkConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=sink, typeName='sink', location='file:/usr/share/java/acl/'}, PluginDesc{klass=class org.apache.kafka.connect.tools.MockSourceConnector, name='org.apache.kafka.connect.tools.MockSourceConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=source, typeName='source', location='file:/usr/share/java/acl/'}, PluginDesc{klass=class org.apache.kafka.connect.tools.SchemaSourceConnector, name='org.apache.kafka.connect.tools.SchemaSourceConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=source, typeName='source', location='file:/usr/share/java/acl/'}, PluginDesc{klass=class org.apache.kafka.connect.tools.VerifiableSinkConnector, name='org.apache.kafka.connect.tools.VerifiableSinkConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=source, typeName='source', location='file:/usr/share/java/acl/'}, PluginDesc{klass=class org.apache.kafka.connect.tools.VerifiableSourceConnector, name='org.apache.kafka.connect.tools.VerifiableSourceConnector', version='6.2.0-ccs', encodedVersion=6.2.0-ccs, type=source, typeName='source', location='file:/usr/share/java/acl/'}"}
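As far as I can tell, the 500 response means the Connect worker cannot find the Debezium plugin on its plugin path, since the cp-kafka-connect-base image only ships with the FileStream, MirrorMaker, and test connectors listed above. I believe the plugins the worker has actually loaded can be listed with:

curl -s http://localhost:8083/connector-plugins

For reference, here is my docker-compose.yml: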
version: '3.9'
services:
postgres_db_1:
image: postgres
container_name: postgres_db_1
environment:
POSTGRES_PASSWORD: password1
POSTGRES_USER: postgres
POSTGRES_DB: db_1
ports:
- 5432:5432
postgres_db_2:
image: postgres
container_name: postgres_db_2
environment:
POSTGRES_PASSWORD: password1
POSTGRES_USER: postgres
POSTGRES_DB: db_2
ports:
- 5431:5432
zookeeper:
image: confluentinc/cp-zookeeper:7.3.0
container_name: zookeeper
environment:
ZOOKEEPER_CLIENT_PORT: 32181
ZOOKEEPER_TICK_TIME: 2000
broker:
image: confluentinc/cp-kafka:7.3.0
container_name: broker
ports:
# To learn about configuring Kafka for access across networks see
# https://www.confluent.io/blog/kafka-client-cannot-connect-to-broker-on-aws-on-docker-etc/
- "9092:9092"
depends_on:
- zookeeper
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: zookeeper:32181
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 100
schema-registry:
image: confluentinc/cp-schema-registry:6.2.0
container_name: schema-registry
ports:
- "8081:8081"
depends_on:
- broker
environment:
SCHEMA_REGISTRY_HOST_NAME: schema-registry
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: broker:29092
kafka-connect:
image: confluentinc/cp-kafka-connect-base:6.2.0
container_name: kafka-connect
depends_on:
- broker
- schema-registry
ports:
- 8083:8083
environment:
CONNECT_BOOTSTRAP_SERVERS: "broker:29092"
CONNECT_REST_PORT: 8083
CONNECT_GROUP_ID: kafka-connect
CONNECT_CONFIG_STORAGE_TOPIC: _connect-configs
CONNECT_OFFSET_STORAGE_TOPIC: _connect-offsets
CONNECT_STATUS_STORAGE_TOPIC: _connect-status
CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
CONNECT_REST_ADVERTISED_HOST_NAME: "kafka-connect"
CONNECT_LOG4J_APPENDER_STDOUT_LAYOUT_CONVERSIONPATTERN: "[%d] %p %X{connector.context}%m (%c:%L)%n"
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: "1"
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: "1"
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: "1"
control-center:
image: confluentinc/cp-enterprise-control-center:6.2.0
container_name: control-center
depends_on:
- broker
- schema-registry
ports:
- "9021:9021"
environment:
CONTROL_CENTER_BOOTSTRAP_SERVERS: 'broker:29092'
CONTROL_CENTER_CONNECT_CONNECT_CLUSTER: 'kafka-connect:8083'
CONTROL_CENTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
CONTROL_CENTER_KSQL_KSQLDB_URL: "http://ksqldb:8088"
# The advertised URL needs to be the URL on which the browser
# can access the KSQL server (e.g. http://localhost:8088/info)
CONTROL_CENTER_KSQL_KSQLDB_ADVERTISED_URL: "http://localhost:8088"
# -v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v-v
# Useful settings for development/laptop use - modify as needed for Prod
CONFLUENT_METRICS_TOPIC_REPLICATION: 1
CONTROL_CENTER_REPLICATION_FACTOR: 1
CONTROL_CENTER_COMMAND_TOPIC_REPLICATION: 1
CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_REPLICATION: 1
CONTROL_CENTER_INTERNAL_TOPICS_PARTITIONS: 1
CONTROL_CENTER_INTERNAL_TOPICS_REPLICATION: 1
CONTROL_CENTER_MONITORING_INTERCEPTOR_TOPIC_PARTITIONS: 1
CONTROL_CENTER_STREAMS_NUM_STREAM_THREADS: 1
CONTROL_CENTER_STREAMS_CACHE_MAX_BYTES_BUFFERING: 104857600
command:
- bash
- -c
- |
echo "Waiting two minutes for Kafka brokers to start and
necessary topics to be available"
sleep 120
/etc/confluent/docker/run
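From what I have read, I think the connector has to be installed into the Connect worker itself and its install directory added to the plugin path, so I was planning to change the kafka-connect service roughly like the sketch below (the Debezium version 1.7.0 is just a guess on my part; I would pick whichever debezium/debezium-connector-postgresql version is listed on Confluent Hub). Is this the right approach, or is there a better way to add the PostgreSQL connector locally?

  kafka-connect:
    image: confluentinc/cp-kafka-connect-base:6.2.0
    # depends_on, ports, and the existing CONNECT_* environment stay the same as above
    environment:
      # add the confluent-hub install directory so the worker can find the new connector
      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
    command:
      - bash
      - -c
      - |
        # install the Debezium PostgreSQL source connector from Confluent Hub (version assumed)
        confluent-hub install --no-prompt debezium/debezium-connector-postgresql:1.7.0
        # then start the Connect worker the way the image normally does
        /etc/confluent/docker/run

After restarting with that change I would expect io.debezium.connector.postgresql.PostgresConnector to show up in the /connector-plugins output.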