MicrosoftSqlServerSink and SqlServerCdcSource connectors

I have a sink connector and a source connector running in my local environment; their configurations are attached below:

Sink:

{
  "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
  "tasks.max": "1",
  "topics": "etl-sink-accountposting-failed.dbo.NPSS_Notification_Detail",
  "connection.url": "jdbc:sqlserver://xx.xx.xx.xx:1433;databaseName=xxxxx;user=xxxxx;password=xxxxxx",
  "auto.create": "false",
  "auto.evolve": "false",
  "insert.mode": "insert",
  "key.converter": "io.confluent.connect.avro.AvroConverter",
  "key.converter.schemas.enable": "true",
  "key.converter.schema.registry.url": "http://xxxx:8081",
  "key.converter.enhanced.avro.schema.support": "true",
  "value.converter": "io.confluent.connect.avro.AvroConverter",
  "value.converter.schemas.enable": "true",
  "value.converter.schema.registry.url": "http://xxxx:8081",
  "value.converter.enhanced.avro.schema.support": "true",
  "errors.log.enable": "true",
  "errors.log.include.messages": "true",
  "transforms": "route",
  "transforms.route.type": "org.apache.kafka.connect.transforms.RegexRouter",
  "transforms.route.regex": "([^.]+)\\.([^.]+)\\.([^.]+)",
  "transforms.route.replacement": "$3",
  "time.precision.mode": "connect"
}

Source:

{
  "connector.class": "io.debezium.connector.sqlserver.SqlServerConnector",
  "tasks.max": "1",
  "database.server.name": "etl-source-financialaccounting",
  "database.hostname": "xxxxx",
  "database.port": "1433",
  "database.user": "xxxxx",
  "database.password": "xxxxx",
  "database.dbname": "xxxxx",
  "database.history.kafka.bootstrap.servers": "xxxxx:3300",
  "database.history.kafka.topic": "dbhistory.financialaccounting",
  "table.include.list": "CFM_AP.PostingRepairInstance",
  "transforms": "unwrap",
  "transforms.unwrap.type": "io.debezium.transforms.ExtractNewRecordState",
  "transforms.unwrap.drop.tombstones": "true",
  "transforms.unwrap.delete.handling.mode": "drop",
  "include.schema.changes": "false",
  "key.converter": "io.confluent.connect.avro.AvroConverter",
  "key.converter.schemas.enable": "true",
  "key.converter.schema.registry.url": "http://xxxxx:8081",
  "key.converter.enhanced.avro.schema.support": "true",
  "value.converter": "io.confluent.connect.avro.AvroConverter",
  "value.converter.schemas.enable": "true",
  "value.converter.schema.registry.url": "http://xxxxx:8081",
  "value.converter.enhanced.avro.schema.support": "true",
  "errors.log.enable": "true",
  "errors.log.include.messages": "true",
  "time.precision.mode": "connect"
}

We are now migrating our Kafka cluster and connectors to Confluent Cloud, where the equivalent connectors are MicrosoftSqlServerSink and SqlServerCdcSource. Is there a converter or any tooling that can translate the configurations above into the format these Confluent connectors expect? We also need the connectors to be fully managed.
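
For reference, this is roughly what we expect the fully managed equivalents to look like. These are only sketches based on our reading of the Confluent Cloud connector documentation: the property names (for example kafka.api.key, input.data.format, output.data.format, after.state.only) and the connector names in the "name" fields are assumptions on our part and would need to be verified against the current docs, not a confirmed mapping.

Sink (fully managed MicrosoftSqlServerSink, sketch):

{
  "connector.class": "MicrosoftSqlServerSink",
  "name": "npss-notification-detail-sink",
  "kafka.auth.mode": "KAFKA_API_KEY",
  "kafka.api.key": "xxxxx",
  "kafka.api.secret": "xxxxx",
  "topics": "etl-sink-accountposting-failed.dbo.NPSS_Notification_Detail",
  "input.data.format": "AVRO",
  "connection.host": "xx.xx.xx.xx",
  "connection.port": "1433",
  "connection.user": "xxxxx",
  "connection.password": "xxxxx",
  "db.name": "xxxxx",
  "insert.mode": "INSERT",
  "auto.create": "false",
  "auto.evolve": "false",
  "tasks.max": "1"
}

Source (fully managed SqlServerCdcSource, sketch):

{
  "connector.class": "SqlServerCdcSource",
  "name": "financialaccounting-cdc-source",
  "kafka.auth.mode": "KAFKA_API_KEY",
  "kafka.api.key": "xxxxx",
  "kafka.api.secret": "xxxxx",
  "database.hostname": "xxxxx",
  "database.port": "1433",
  "database.user": "xxxxx",
  "database.password": "xxxxx",
  "database.dbname": "xxxxx",
  "database.server.name": "etl-source-financialaccounting",
  "table.include.list": "CFM_AP.PostingRepairInstance",
  "snapshot.mode": "initial",
  "output.data.format": "AVRO",
  "after.state.only": "true",
  "tasks.max": "1"
}

We are also unsure how the RegexRouter and ExtractNewRecordState transforms carry over, since fully managed connectors only support a subset of SMTs and the Schema Registry converter settings are handled by the managed platform rather than per-connector properties. Any guidance on that part of the mapping would help as well.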