I'm trying to do something similar following the steps in these posts, but it doesn't work for me. I'm using Debezium 2.5.0.Final.
My Dockerfile looks like this:
# # https://packages.confluent.io/maven/io/confluent/
ARG DEBEZIUM_VERSION
FROM quay.io/debezium/connect:$DEBEZIUM_VERSION
#FROM quay.io/debezium/connect-base:$DEBEZIUM_VERSION
ARG KAFKA_VERSION
RUN echo ${KAFKA_VERSION}
ADD --chown=kafka:kafka --chmod=775 https://repo1.maven.org/maven2/com/ibm/informix/jdbc/4.50.10/jdbc-4.50.10.jar /kafka/connect/debezium-connector-informix/
ADD --chown=kafka:kafka --chmod=775 https://repo1.maven.org/maven2/com/ibm/informix/ifx-changestream-client/1.1.3/ifx-changestream-client-1.1.3.jar /kafka/connect/debezium-connector-informix/
ADD --chown=kafka:kafka --chmod=775 https://packages.confluent.io/maven/io/confluent/kafka-connect-avro-converter/${KAFKA_VERSION}/kafka-connect-avro-converter-${KAFKA_VERSION}.jar /kafka/connect/kafka-connect-avro-converter/
ADD --chown=kafka:kafka --chmod=775 https://packages.confluent.io/maven/io/confluent/kafka-connect-avro-data/${KAFKA_VERSION}/kafka-connect-avro-data-${KAFKA_VERSION}.jar /kafka/connect/kafka-connect-avro-data/
ADD --chown=kafka:kafka --chmod=775 https://packages.confluent.io/maven/io/confluent/kafka-avro-serializer/${KAFKA_VERSION}/kafka-avro-serializer-${KAFKA_VERSION}.jar /kafka/connect/kafka-avro-serializer/
ADD --chown=kafka:kafka --chmod=775 https://packages.confluent.io/maven/io/confluent/kafka-schema-serializer/${KAFKA_VERSION}/kafka-schema-serializer-${KAFKA_VERSION}.jar /kafka/connect/kafka-schema-serializer/
ADD --chown=kafka:kafka --chmod=775 https://packages.confluent.io/maven/io/confluent/kafka-schema-registry-client/${KAFKA_VERSION}/kafka-schema-registry-client-${KAFKA_VERSION}.jar /kafka/connect/kafka-schema-registry-client/
ADD --chown=kafka:kafka --chmod=775 https://packages.confluent.io/maven/io/confluent/common-config/${KAFKA_VERSION}/common-config-${KAFKA_VERSION}.jar /kafka/connect/common-config/
ADD --chown=kafka:kafka --chmod=775 https://packages.confluent.io/maven/io/confluent/common-utils/${KAFKA_VERSION}/common-utils-${KAFKA_VERSION}.jar /kafka/connect/common-utils/
ADD --chown=kafka:kafka --chmod=775 https://repo1.maven.org/maven2/org/apache/avro/avro/1.11.3/avro-1.11.3.jar /kafka/connect/avro/
ADD --chown=kafka:kafka --chmod=775 https://repo1.maven.org/maven2/com/google/guava/guava/33.0.0-jre/guava-33.0.0-jre.jar /kafka/connect/guava/
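Once the image is built (the compose service below builds it), I can do a quick sanity check that every JAR landed in its own directory under the plugin path. The tag 2.5 here is just an example of what DEBEZIUM_VERSION resolves to:

docker run --rm --entrypoint ls debezium/connect-ifx:2.5 -lR /kafka/connect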
The connect service in my docker-compose.yml looks like this:
connect:
  image: debezium/connect-ifx:${DEBEZIUM_VERSION}
  container_name: connect
  build:
    context: ./debezium-ifx-init/ifxconnect
    args:
      DEBEZIUM_VERSION: ${DEBEZIUM_VERSION}
      KAFKA_VERSION: ${KAFKA_VERSION}
  ports:
    - 8083:8083
  depends_on:
    - kafka
    - informix
    - schema-registry
  environment:
    GROUP_ID: 1
    # Kafka config
    CONFIG_STORAGE_TOPIC: my_connect_configs
    OFFSET_STORAGE_TOPIC: my_connect_offsets
    STATUS_STORAGE_TOPIC: my_connect_statuses
    BOOTSTRAP_SERVERS: kafka:9092
    # Avro config
    KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
    VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
    CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
    CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
    KAFKA_CONNECT_PLUGINS_DIR: /kafka/connect
    CLASSPATH: /kafka/connect/kafka-connect-avro-converter/*:/kafka/connect/kafka-connect-avro-data/*:/kafka/connect/kafka-avro-serializer/*:/kafka/connect/kafka-schema-serializer/*:/kafka/connect/kafka-schema-registry-client/*:/kafka/connect/common-config/*:/kafka/connect/common-utils/*:/kafka/connect/avro/*:/kafka/connect/guava/*
  volumes:
    - ./data/connect/data:/var/lib/kafka/data
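I build and start it with compose, assuming DEBEZIUM_VERSION and KAFKA_VERSION are defined in the .env file next to docker-compose.yml (KAFKA_VERSION is really the Confluent artifact version used in the Maven URLs above, e.g. 7.5.3, just as an example):

docker compose build connect
docker compose up -d connect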
But when I register the source connector with this configuration:
{
  "name": "source-testdb-ifx",
  "config": {
    "connector.class": "io.debezium.connector.informix.InformixConnector",
    "tasks.max": "1",
    "topic.prefix": "ifxserver",
    "database.hostname": "informix",
    "database.port": "9088",
    "database.user": "informix",
    "database.password": "in4mix",
    "database.dbname": "testdb",
    "schema.history.internal.kafka.bootstrap.servers": "kafka:9092",
    "schema.history.internal.kafka.topic": "schema-changes.testdb",
    "key.converter": "io.confluent.connect.avro.AvroConverter",
    "key.converter.schema.registry.url": "http://schema-registry:8081",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "value.converter.schema.registry.url": "http://schema-registry:8081"
  }
}
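I register it with the usual Connect REST call (assuming the JSON above is saved as source-testdb-ifx.json and the Connect REST port 8083 is published on localhost):

curl -i -X POST -H "Content-Type: application/json" \
  --data @source-testdb-ifx.json \
  http://localhost:8083/connectors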
the task fails to start and the Connect log shows this error:

ERROR || Failed to start task source-testdb-ifx-0 [org.apache.kafka.connect.runtime.Worker] java.lang.NoClassDefFoundError: io/confluent/connect/schema/AbstractDataConfig
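To rule out a missing JAR, this is the kind of check I can run inside the container to see whether the class is present at all under the plugin path (assuming the container is named connect and unzip is available in the image; otherwise the same check works on the host against the downloaded files):

docker exec connect bash -c \
  'for j in /kafka/connect/*/*.jar; do
     unzip -l "$j" 2>/dev/null | grep -q "io/confluent/connect/schema/AbstractDataConfig" \
       && echo "found in $j";
   done'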
Any idea what it could be?
I finally solved it by using the Confluent image instead of the Debezium one, copying the required JARs into the new image.
connect:
  image: confluentinc/cp-server-connect:${KAFKA_VERSION}
  container_name: connect
  ports:
    - 8083:8083
  depends_on:
    - kafka
    - informix
    - schema-registry
  environment:
    CONNECT_BOOTSTRAP_SERVERS: 'kafka:9092'
    CONNECT_REST_ADVERTISED_HOST_NAME: connect
    CONNECT_GROUP_ID: compose-connect-group
    CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
    CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
    CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
    CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
    CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
    CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
    CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
    CLASSPATH: /usr/share/java/monitoring-interceptors/monitoring-interceptors-7.2.1.jar
    CONNECT_PRODUCER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringProducerInterceptor"
    CONNECT_CONSUMER_INTERCEPTOR_CLASSES: "io.confluent.monitoring.clients.interceptor.MonitoringConsumerInterceptor"
    CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
    CONNECT_LOG4J_LOGGERS: org.apache.kafka.connect=DEBUG,org.apache.kafka.connect.runtime.rest=DEBUG,org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR
    CONNECT_REST_PORT: 8083
    CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
    CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
    CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
    CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
    #CONNECT_LOG4J_ROOT_LOGLEVEL: "INFO"
    #CONNECT_LOG4J_LOGGERS: "org.apache.kafka.connect.runtime.rest=WARN,org.reflections=ERROR"
  volumes:
    - ./data/connect/data:/var/lib/kafka/data
  command:
    - /bin/bash
    - -c
    - |
      echo "Installing Connectors"
      confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.7.4
      # JDBC Drivers
      # ------------
      # Informix
      mkdir -p /usr/share/confluent-hub-components/debezium-connector-informix
      cd /usr/share/confluent-hub-components/debezium-connector-informix/
      curl https://repo1.maven.org/maven2/com/ibm/informix/jdbc/4.50.8/jdbc-4.50.8.jar --compressed --output informix-jdbc-4.50.8.jar
      # changestream
      curl https://repo1.maven.org/maven2/com/ibm/informix/ifx-changestream-client/1.1.3/ifx-changestream-client-1.1.3.jar --compressed --output ifx-changestream-client-1.1.3.jar
      # Debezium 2.5 Informix connector
      #mkdir -p /usr/share/confluent-hub-components/debezium-connector-informix
      curl https://repo1.maven.org/maven2/io/debezium/debezium-connector-informix/2.5.0.Final/debezium-connector-informix-2.5.0.Final-plugin.tar.gz | \
        tar xvfz - --strip-components=1 --directory /usr/share/confluent-hub-components/debezium-connector-informix
      # Now launch Kafka Connect
      sleep infinity &
      /etc/confluent/docker/run
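Once the container is up, I can confirm that both the Informix connector and the Avro converter are picked up by querying the Connect REST API (the jq filter is optional, assuming jq is installed on the host):

curl -s http://localhost:8083/connector-plugins | jq '.[].class'

After that, the same connector registration call as before works with the Avro converters.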