Spring Boot with docker-compose: database connection issue through Apache Kafka for MongoDB and MySQL

I have a problem in my Spring Boot CQRS example running on Docker.
When I send a request from my Postman collection, I get a success response and I can see the data stored in MongoDB for account-cmd. However, the account-query part returns 204 No Content, and I cannot see any data stored in MySQL.

I think there may be a Kafka problem.
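
To check whether the command side actually publishes events to the broker, I can list the topics and tail one from inside the Kafka container (a rough diagnostic sketch: <events-topic> is a placeholder, since the real topic name is set in the application code, and I am assuming the Bitnami image keeps the Kafka scripts under /opt/bitnami/kafka/bin):

-> docker compose exec kafka bash
-> /opt/bitnami/kafka/bin/kafka-topics.sh --bootstrap-server kafka:9092 --list
-> /opt/bitnami/kafka/bin/kafka-console-consumer.sh --bootstrap-server kafka:9092 --topic <events-topic> --from-beginning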

Here is my docker-compose.yml:

version: '3.9'

services:
  account-cmd:
    image: 'account-cmd:latest'
    build:
      context: ./account.cmd
    ports:
      - "5000:5000"
    depends_on:
      - zookeeper
      - kafka
      - mongodb
    environment:
      - kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
      - kafka.producer.value-serializer=org.springframework.kafka.support.serializer.JsonSerializer
      - spring.data.mongodb.host=mongodb
      - spring.data.mongodb.port=27017
      - spring.data.mongodb.database=bankAccount
      - server.port=5000
      - spring.kafka.producer.bootstrap-servers=kafka:9092
    networks:
      - bankAccount

  account-query:
    image: 'account-query:latest'
    build:
      context: ./account.query
    ports:
      - "5001:5001"
    depends_on:
      - zookeeper
      - kafka
      - database
    environment:
      - spring.datasource.url=jdbc:mysql://database:3306/bankAccount?createDatabaseIfNotExist=true&useSSL=false&allowPublicKeyRetrieval=true
      - server.port=5001
      - spring.datasource.username=root
      - spring.datasource.password=ippavlova_1990
      - spring.kafka.consumer.group-id=bankaccConsumer
      - spring.kafka.consumer.auto-offset-reset=earliest
      - spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
      - spring.kafka.consumer.value-deserializer=org.springframework.kafka.support.serializer.JsonDeserializer
      - spring.kafka.consumer.properties.spring.json.trusted.packages=*
      - spring.kafka.consumer.bootstrap-servers=kafka:9092
    networks:
      - bankAccount

  database:
    container_name: mysql-database
    image: 'mysql:latest'
    ports:
      - "3306:3306"
    restart: always
    environment:
      MYSQL_PASSWORD: ippavlova_1990
      MYSQL_ROOT_PASSWORD: ippavlova_1990
    volumes:
      - db-data:/var/lib/mysql
    networks:
      - bankAccount
    healthcheck:
      test: [ "CMD", "mysqladmin" ,"ping", "-h", "localhost" ]
      timeout: 20s
      retries: 10


  mongodb:
    image: "mongo:latest"
    container_name: mongo-container
    restart: always
    ports:
      - "27017:27017"
    volumes:
      - mongodb_data_container:/data/db
    networks:
      - bankAccount

  zookeeper:
    image: 'bitnami/zookeeper:latest'
    restart: always
    ports:
      - "2181:2181"
    volumes:
      - "zookeeper_data:/bitnami"
    environment:
      - ALLOW_ANONYMOUS_LOGIN=yes
    networks:
      - bankAccount

  kafka:
    image: 'bitnami/kafka:latest'
    ports:
      - '9092:9092'
      - '9093:9093'
    environment:
      - KAFKA_CFG_ZOOKEEPER_CONNECT=zookeeper:2181
      - ALLOW_PLAINTEXT_LISTENER=yes
      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,EXTERNAL:PLAINTEXT
      - KAFKA_CFG_LISTENERS=CLIENT://:9092,EXTERNAL://:9093
      - KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:9092,EXTERNAL://localhost:9093
      - KAFKA_CFG_INTER_BROKER_LISTENER_NAME=CLIENT
    depends_on:
      - zookeeper
    networks:
      - bankAccount


volumes:
  mongodb_data_container:
  db-data:
  zookeeper_data:
    driver: local
  kafka_data:
    driver: local

networks:
  bankAccount:

How can I fix it?
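
In case it helps, here are the commands I can use to check the container logs of the query side for Kafka or MySQL connection errors (assuming the Compose service names shown above):

-> docker compose logs account-query
-> docker compose logs kafka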

Here are the MongoDB commands I use to check the result:

-> docker exec -it mongo-container bash
-> mongosh
-> show dbs
-> use eventStore
-> db.eventStore.find({})

Here are the MySQL commands I use to check the result:

-> docker exec -it mysql-database mysql -uroot -p
-> Enter password: ippavlova_1990
-> USE bankAccount
-> show tables;
-> SELECT * FROM bank_account;
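
Here are the Kafka commands to check the consumer group (a diagnostic sketch based on the bankaccConsumer group id from docker-compose.yml; the application may override the group id in code):

-> docker compose exec kafka bash
-> /opt/bitnami/kafka/bin/kafka-consumer-groups.sh --bootstrap-server kafka:9092 --list
-> /opt/bitnami/kafka/bin/kafka-consumer-groups.sh --bootstrap-server kafka:9092 --describe --group bankaccConsumer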

Here is my repo: Link
