My data does not get written by the JDBC Sink Connector

Hi, I'm new to the Confluent world. I'm trying to get data from a MySQL DB using the Debezium MySQL connector and write that data to an Oracle DB using the JDBC Sink Connector.
I'm able to get the message into the topic. The schema looks like this:
{
"connect.name": "silkroaddata.silkroad.outpost.Envelope",
"fields": [
{
"default": null,
"name": "before",
"type": [
"null",
{
"connect.name": "silkroaddata.silkroad.outpost.Value",
"fields": [
{
"name": "mykey",
"type": "int"
},
{
"default": null,
"name": "locationname",
"type": [
"null",
"string"
]
}
],
"name": "Value",
"type": "record"
}
]
},
{
"default": null,
"name": "after",
"type": [
"null",
"Value"
]
},
{
"name": "source",
"type": {
"connect.name": "io.debezium.connector.mysql.Source",
"fields": [
{
"name": "version",
"type": "string"
},
{
"name": "connector",
"type": "string"
},
{
"name": "name",
"type": "string"
},
{
"name": "ts_ms",
"type": "long"
},
{
"default": "false",
"name": "snapshot",
"type": [
{
"connect.default": "false",
"connect.name": "io.debezium.data.Enum",
"connect.parameters": {
"allowed": "true,last,false,incremental"
},
"connect.version": 1,
"type": "string"
},
"null"
]
},
{
"name": "db",
"type": "string"
},
{
"default": null,
"name": "sequence",
"type": [
"null",
"string"
]
},
{
"default": null,
"name": "table",
"type": [
"null",
"string"
]
},
{
"name": "server_id",
"type": "long"
},
{
"default": null,
"name": "gtid",
"type": [
"null",
"string"
]
},
{
"name": "file",
"type": "string"
},
{
"name": "pos",
"type": "long"
},
{
"name": "row",
"type": "int"
},
{
"default": null,
"name": "thread",
"type": [
"null",
"long"
]
},
{
"default": null,
"name": "query",
"type": [
"null",
"string"
]
}
],
"name": "Source",
"namespace": "io.debezium.connector.mysql",
"type": "record"
}
},
{
"name": "op",
"type": "string"
},
{
"default": null,
"name": "ts_ms",
"type": [
"null",
"long"
]
},
{
"default": null,
"name": "transaction",
"type": [
"null",
{
"fields": [
{
"name": "id",
"type": "string"
},
{
"name": "total_order",
"type": "long"
},
{
"name": "data_collection_order",
"type": "long"
}
],
"name": "ConnectDefault",
"namespace": "io.confluent.connect.avro",
"type": "record"
}
]
}
],
"name": "Envelope",
"namespace": "silkroaddata.silkroad.outpost",
"type": "record"
}

The sink config looks like this:
{
"name": "OracleSinc",
"config": {
"value.converter.schema.registry.url": "http://172.16.1.75:8081",
"key.converter.schema.registry.url": "http://172.16.1.75:8081",
"name": "OracleSinc",
"connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
"key.converter": "io.confluent.connect.avro.AvroConverter",
"value.converter": "io.confluent.connect.avro.AvroConverter",
"transforms": "",
"errors.log.enable": "true",
"errors.log.include.messages": "true",
"topics": "silkroaddata.silkroad.outpost",
"connection.url": "jdbc:oracle:thin:@172.16.1.75:1521/xe",
"connection.user": "DEMO",
"connection.password": "****",
"dialect.name": "OracleDatabaseDialect",
"insert.mode": "UPSERT",
"delete.enabled": "false",
"table.name.format": "DEMO.OUTPOST",
"pk.mode": "record_value",
"pk.fields": "mykey",
"fields.whitelist": "",
"auto.create": "true",
"auto.evolve": "true"
}
}

The error I get is:
Caused by: org.apache.kafka.connect.errors.ConnectException: PK mode for table 'OUTPOST' is RECORD_VALUE with configured PK fields [mykey], but record value schema does not contain field: mykey

When I switch to record_key, it creates the table, but only with the field mykey.
I guess it's a problem with the primary key field config, but I do not have a clue how that should look. As said, I'm completely new in this area — can anyone help me with a head start?
Regards,
Eric