Merge pull request #24 from Altinity/confluent_schema_registry

Confluent schema registry
Kanthi 2022-07-31 10:47:28 -04:00 committed by GitHub
commit 4436bd2970
6 changed files with 538 additions and 78 deletions

View File

@@ -62,48 +62,91 @@ fi
# "value.converter":"io.confluent.connect.avro.AvroConverter",
# "value.converter.schema.registry.url":"http://schemaregistry:8081"
#https://debezium.io/documentation/reference/stable/configuration/avro.html
cat <<EOF | curl --request POST --url "${CONNECTORS_MANAGEMENT_URL}" --header 'Content-Type: application/json' --data @-
{
"name": "${CONNECTOR_NAME}",
"config": {
"connector.class": "${CONNECTOR_CLASS}",
"tasks.max": "1",
"snapshot.mode": "${SNAPSHOT_MODE}",
"snapshot.locking.mode": "minimal",
"snapshot.delay.ms": 10000,
"include.schema.changes":"true",
"include.schema.comments": "true",
"database.hostname": "${HOST}",
"database.port": "${PORT}",
"database.user": "${USER}",
"database.password": "${PASSWORD}",
"database.server.id": "${DATABASE_SERVER_ID}",
"database.server.name": "${DATABASE_SERVER_NAME}",
"database.whitelist": "${DBS}",
"database.allowPublicKeyRetrieval":"true",
if [[ $2 == "apicurio" ]]; then
echo "APICURIO SCHEMA REGISTRY"
#https://debezium.io/documentation/reference/stable/configuration/avro.html
cat <<EOF | curl --request POST --url "${CONNECTORS_MANAGEMENT_URL}" --header 'Content-Type: application/json' --data @-
{
"name": "${CONNECTOR_NAME}",
"config": {
"connector.class": "${CONNECTOR_CLASS}",
"tasks.max": "1",
"snapshot.mode": "${SNAPSHOT_MODE}",
"snapshot.locking.mode": "minimal",
"snapshot.delay.ms": 10000,
"include.schema.changes":"true",
"include.schema.comments": "true",
"database.hostname": "${HOST}",
"database.port": "${PORT}",
"database.user": "${USER}",
"database.password": "${PASSWORD}",
"database.server.id": "${DATABASE_SERVER_ID}",
"database.server.name": "${DATABASE_SERVER_NAME}",
"database.whitelist": "${DBS}",
"database.allowPublicKeyRetrieval":"true",
"database.history.kafka.bootstrap.servers": "${KAFKA_BOOTSTRAP_SERVERS}",
"database.history.kafka.topic": "${KAFKA_TOPIC}",
"database.history.kafka.bootstrap.servers": "${KAFKA_BOOTSTRAP_SERVERS}",
"database.history.kafka.topic": "${KAFKA_TOPIC}",
"key.converter": "io.apicurio.registry.utils.converter.AvroConverter",
"value.converter": "io.apicurio.registry.utils.converter.AvroConverter",
"key.converter": "io.apicurio.registry.utils.converter.AvroConverter",
"value.converter": "io.apicurio.registry.utils.converter.AvroConverter",
"key.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"key.converter.apicurio.registry.auto-register": "true",
"key.converter.apicurio.registry.find-latest": "true",
"key.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"key.converter.apicurio.registry.auto-register": "true",
"key.converter.apicurio.registry.find-latest": "true",
"value.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"value.converter.apicurio.registry.auto-register": "true",
"value.converter.apicurio.registry.find-latest": "true",
"value.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"value.converter.apicurio.registry.auto-register": "true",
"value.converter.apicurio.registry.find-latest": "true",
"topic.creation.$alias.partitions": 1,
"topic.creation.default.replication.factor": 1,
"topic.creation.default.partitions": 1,
"topic.creation.$alias.partitions": 6,
"topic.creation.default.replication.factor": 1,
"topic.creation.default.partitions": 6,
"provide.transaction.metadata": "true"
}
}
"provide.transaction.metadata": "true"
}
}
EOF
else
echo "Using confluent schema registry"
#https://debezium.io/documentation/reference/stable/configuration/avro.html
cat <<EOF | curl --request POST --url "${CONNECTORS_MANAGEMENT_URL}" --header 'Content-Type: application/json' --data @-
{
"name": "${CONNECTOR_NAME}",
"config": {
"connector.class": "${CONNECTOR_CLASS}",
"tasks.max": "1",
"snapshot.mode": "${SNAPSHOT_MODE}",
"snapshot.locking.mode": "minimal",
"snapshot.delay.ms": 10000,
"include.schema.changes":"true",
"include.schema.comments": "true",
"database.hostname": "${HOST}",
"database.port": "${PORT}",
"database.user": "${USER}",
"database.password": "${PASSWORD}",
"database.server.id": "${DATABASE_SERVER_ID}",
"database.server.name": "${DATABASE_SERVER_NAME}",
"database.whitelist": "${DBS}",
"database.allowPublicKeyRetrieval":"true",
"database.history.kafka.bootstrap.servers": "${KAFKA_BOOTSTRAP_SERVERS}",
"database.history.kafka.topic": "${KAFKA_TOPIC}",
"key.converter": "io.confluent.connect.avro.AvroConverter",
"value.converter": "io.confluent.connect.avro.AvroConverter",
"key.converter.schema.registry.url": "http://schemaregistry:8081",
"value.converter.schema.registry.url":"http://schemaregistry:8081",
"topic.creation.$alias.partitions": 6,
"topic.creation.default.replication.factor": 1,
"topic.creation.default.partitions": 6,
"provide.transaction.metadata": "true"
}
}
EOF
fi
#"topic.creation.$alias.partitions": 3,
echo
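A quick smoke test for either branch (a sketch: the script filename and first argument below are hypothetical, inferred from the $2 check above; the status call is the standard Kafka Connect REST API, assuming CONNECTORS_MANAGEMENT_URL points at its /connectors endpoint):

# The second positional argument selects the Apicurio branch; anything else falls through to Confluent:
#   ./debezium-connector-setup.sh mysql apicurio
#   ./debezium-connector-setup.sh mysql
# Confirm the connector registered and its task is RUNNING:
curl -s "${CONNECTORS_MANAGEMENT_URL}/${CONNECTOR_NAME}/status"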

View File

@@ -0,0 +1,247 @@
version: "2.3"
# Ubuntu: set this for Redpanda to start
# https://sort.veritas.com/public/documents/HSO/2.0/linux/productguides/html/hfo_admin_ubuntu/ch04s03.htm
# Clickhouse Table Schema
# create table test(id int, message String) ENGINE=MergeTree() PRIMARY KEY id;
services:
mysql-master:
# mysql --host=127.0.0.1 --port=3306 --user=root --password=root --database=test
# SHOW VARIABLES LIKE 'server_id';
# SHOW VARIABLES LIKE 'log_bin';
# SHOW MASTER STATUS;
container_name: mysql-master
image: docker.io/bitnami/mysql:latest
# command:
# --default-authentication-plugin=mysql_native_password
# --server-id=1
# --binlog_do_db=test
restart: "no"
ports:
- "3306:3306"
environment:
- MYSQL_ROOT_PASSWORD=root
- MYSQL_DATABASE=test
- MYSQL_REPLICATION_MODE=master
- MYSQL_REPLICATION_USER=repl_user
- ALLOW_EMPTY_PASSWORD=yes
volumes:
- ./mysqld.cnf:/opt/bitnami/mysql/conf/my_custom.cnf
- ../sql/init_mysql.sql:/docker-entrypoint-initdb.d/init_mysql.sql
healthcheck:
test: [ 'CMD', '/opt/bitnami/scripts/mysql/healthcheck.sh' ]
interval: 15s
timeout: 5s
retries: 6
#security_opt:
# - seccomp:unconfined
# mysql-slave:
# # mysql --host=127.0.0.1 --port=3306 --user=root --password=root --database=test
# # SHOW VARIABLES LIKE 'server_id';
# # SHOW VARIABLES LIKE 'log_bin';
# # SHOW MASTER STATUS;
# container_name: mysql-slave
# image: docker.io/bitnami/mysql:8.0
## command:
## --default-authentication-plugin=mysql_native_password
## --server-id=1
## --binlog_do_db=test
# restart: "no"
# ports:
# - "3306"
# environment:
# - MYSQL_ROOT_PASSWORD=root
# - MYSQL_DATABASE=test
# - MYSQL_REPLICATION_MODE=slave
# - MYSQL_REPLICATION_USER=repl_user
# - MYSQL_MASTER_HOST=mysql-master
# - MYSQL_MASTER_PORT_NUMBER=3306
# - MYSQL_MASTER_ROOT_PASSWORD=root
# - ALLOW_EMPTY_PASSWORD=yes
# volumes:
# - ./mysqld.cnf:/opt/bitnami/mysql/conf/my_custom.cnf
# # volumes:
# # - ../sql/init_mysql.sql:/docker-entrypoint-initdb.d/init_mysql.sql
# healthcheck:
# test: [ 'CMD', '/opt/bitnami/scripts/mysql/healthcheck.sh' ]
# interval: 15s
# timeout: 5s
# retries: 6
# #security_opt:
# # - seccomp:unconfined
# depends_on:
# - mysql-master
schemaregistry:
container_name: schemaregistry
#image: apicurio/apicurio-registry-mem:latest-release
image: confluentinc/cp-schema-registry:latest
restart: "no"
ports:
- "8081:8081"
#environment:
# - KAFKA_BOOTSTRAP_SERVERS=kafka:9092
environment:
- SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS=kafka:9092
- SCHEMA_REGISTRY_HOST_NAME=schemaregistry
- SCHEMA_REGISTRY_LISTENERS=http://0.0.0.0:8081
- SCHEMA_REGISTRY_DEBUG=true
depends_on:
- kafka
debezium:
container_name: debezium
image: altinity/source-connector-on-debezium-base:${SINK_VERSION}
restart: "no"
ports:
- "8083:8083"
environment:
# Where to find Kafka broker
- BOOTSTRAP_SERVERS=kafka:9092
# Maps to rest.advertised.host.name
#- ADVERTISED_HOST_NAME=kafka
# Maps to rest.advertised.port
#- ADVERTISED_PORT=9092
- GROUP_ID=1
- CONFIG_STORAGE_TOPIC=config-storage-topic-debezium
- OFFSET_STORAGE_TOPIC=offset-storage-topic-debezium
- STATUS_STORAGE_TOPIC=status-storage-topic-debezium
- LOG_LEVEL=INFO
- KEY_CONVERTER=io.confluent.connect.avro.AvroConverter
- VALUE_CONVERTER=io.confluent.connect.avro.AvroConverter
#- LOG_LEVEL=DEBUG
depends_on:
- kafka
kafka:
container_name: kafka
image: vectorized/redpanda
restart: "no"
# ports:
# - "8081:8081"
# - "8082:8082"
# - "9091:9091"
# - "9092:9092"
ports:
# Expose LOCALHOST_NETWORK listener on localhost
- "19092:19092"
command:
- redpanda
- start
- --overprovisioned
# Setup listeners for docker network and localhost network
- --kafka-addr
- DOCKER_NETWORK://0.0.0.0:9092,LOCALHOST_NETWORK://0.0.0.0:19092
- --advertise-kafka-addr
- DOCKER_NETWORK://kafka:9092,LOCALHOST_NETWORK://127.0.0.1:19092
# volumes:
# - test:/
# kafka2:
# container_name: kafka2
# image: vectorized/redpanda
# restart: always
sink:
container_name: sink
image: altinity/clickhouse-sink-connector:${SINK_VERSION}
restart: "no"
ports:
- "18083:8083"
- "5005:5005"
- "39999:39999"
environment:
# Where to find Kafka broker
- BOOTSTRAP_SERVERS=kafka:9092
# Maps to rest.advertised.host.name
#- ADVERTISED_HOST_NAME=kafka
# Maps to rest.advertised.port
#- ADVERTISED_PORT=9092
- GROUP_ID=2
- CONFIG_STORAGE_TOPIC=config-storage-topic-sink
- OFFSET_STORAGE_TOPIC=offset-storage-topic-sink
- STATUS_STORAGE_TOPIC=status-storage-topic-sink
- LOG_LEVEL=INFO
- JAVA_DEBUG_PORT=*:5005
- DEFAULT_JAVA_DEBUG_PORT=*:5005
- KAFKA_DEBUG=true
- JMX_PORT=39999
#- LOG_LEVEL=DEBUG
depends_on:
- kafka
clickhouse:
# clickhouse-client --host=127.0.0.1 --port=9000 --user=root --password=root --database=test
container_name: clickhouse
image: clickhouse/clickhouse-server:latest
restart: "no"
ports:
- "8123:8123"
- "9000:9000"
environment:
- CLICKHOUSE_USER=root
- CLICKHOUSE_PASSWORD=root
- CLICKHOUSE_DB=test
ulimits:
nofile:
soft: "262144"
hard: "262144"
volumes:
- ../sql/init_clickhouse.sql:/docker-entrypoint-initdb.d/init_clickhouse.sql
##### MONITORING #####
#https://stackoverflow.com/questions/55473562/configuring-a-jmx-prometheus-exporter-docker-container-to-read-jmx-local-applica
# Bitnami bitnami/jmx-exporter vs sscaling images
jmx_exporter:
container_name: jmx_exporter
image: sscaling/jmx-prometheus-exporter
restart: "no"
ports:
- "9072:9072"
environment:
SERVICE_PORT: 9072
volumes:
- ../config/jmx-config.yml:/opt/jmx_exporter/config.yml
blackbox_exporter:
container_name: blackbox_exporter
image: prom/blackbox-exporter:master
restart: "no"
ports:
- "9115:9115"
prometheus:
container_name: prometheus
image: bitnami/prometheus
restart: "no"
ports:
- "9090:9090"
volumes:
- ../config/prometheus.yml:/opt/bitnami/prometheus/conf/prometheus.yml
grafana:
container_name: grafana
image: grafana/grafana
restart: "no"
volumes:
- ../config/grafana/dashboards:/etc/grafana/provisioning/dashboards
ports:
- "3000:3000"
#### END OF MONITORING ###
bash:
container_name: bash
image: ubuntu
restart: "no"
entrypoint:
- /bin/sleep
command:
- infinity
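With the stack up, the Confluent registry's REST API gives a quick check that Avro schemas are actually being registered (a minimal sketch; host ports follow the compose mappings above, and subject names depend on the Debezium topic names):

docker-compose up -d
# Subjects appear once Debezium emits its first Avro-encoded records:
curl -s http://localhost:8081/subjects
# Redpanda's Kafka API is reachable from the host via the LOCALHOST_NETWORK listener on 127.0.0.1:19092.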

View File

@@ -24,54 +24,99 @@ TOPICS_TABLE_MAP="SERVER5432.test.employees_predated:employees, SERVER5432.test.
#"topics": "${TOPICS}",
if [[ $1 == "apicurio" ]]; then
echo "APICURIO SCHEMA REGISTRY"
cat <<EOF | curl --request POST --url "${CONNECTORS_MANAGEMENT_URL}" --header 'Content-Type: application/json' --data @-
{
"name": "${CONNECTOR_NAME}",
"config": {
"connector.class": "com.altinity.clickhouse.sink.connector.ClickHouseSinkConnector",
"tasks.max": "10",
"topics": "${TOPICS}",
"clickhouse.topic2table.map": "${TOPICS_TABLE_MAP}",
"clickhouse.server.url": "${CLICKHOUSE_HOST}",
"clickhouse.server.user": "${CLICKHOUSE_USER}",
"clickhouse.server.pass": "${CLICKHOUSE_PASSWORD}",
"clickhouse.server.database": "${CLICKHOUSE_DATABASE}",
"clickhouse.server.port": ${CLICKHOUSE_PORT},
"clickhouse.table.name": "${CLICKHOUSE_TABLE}",
"key.converter": "io.apicurio.registry.utils.converter.AvroConverter",
"value.converter": "io.apicurio.registry.utils.converter.AvroConverter",
"key.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"key.converter.apicurio.registry.auto-register": "true",
"key.converter.apicurio.registry.find-latest": "true",
"key.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"key.converter.apicurio.registry.auto-register": "true",
"key.converter.apicurio.registry.find-latest": "true",
"value.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"value.converter.apicurio.registry.auto-register": "true",
"value.converter.apicurio.registry.find-latest": "true",
"store.kafka.metadata": true,
"topic.creation.default.partitions": 6,
"value.converter.apicurio.registry.url": "http://schemaregistry:8080/apis/registry/v2",
"value.converter.apicurio.registry.auto-register": "true",
"value.converter.apicurio.registry.find-latest": "true",
"store.kafka.metadata": true,
"topic.creation.default.partitions": 6,
"store.raw.data": false,
"store.raw.data.column": "raw_data",
"store.raw.data": false,
"store.raw.data.column": "raw_data",
"metrics.enable": true,
"metrics.port": 8084,
"buffer.flush.time.ms": 500,
"thread.pool.size": 1,
"fetch.min.bytes": 52428800,
"metrics.enable": true,
"metrics.port": 8084,
"buffer.flush.time.ms": 500,
"thread.pool.size": 1,
"fetch.min.bytes": 52428800,
"enable.kafka.offset": false,
"enable.kafka.offset": false,
"replacingmergetree.delete.column": "sign",
"replacingmergetree.delete.column": "sign",
"auto.create.tables": true,
"schema.evolution": false,
"auto.create.tables": true,
"schema.evolution": false,
"deduplication.policy": "off"
}
}
"deduplication.policy": "off"
}
}
EOF
# "replacingmergetree.delete.column": "sign_delete"
else
echo "Using confluent schema registry"
cat <<EOF | curl --request POST --url "${CONNECTORS_MANAGEMENT_URL}" --header 'Content-Type: application/json' --data @-
{
"name": "${CONNECTOR_NAME}",
"config": {
"connector.class": "com.altinity.clickhouse.sink.connector.ClickHouseSinkConnector",
"tasks.max": "10",
"topics": "${TOPICS}",
"clickhouse.topic2table.map": "${TOPICS_TABLE_MAP}",
"clickhouse.server.url": "${CLICKHOUSE_HOST}",
"clickhouse.server.user": "${CLICKHOUSE_USER}",
"clickhouse.server.pass": "${CLICKHOUSE_PASSWORD}",
"clickhouse.server.database": "${CLICKHOUSE_DATABASE}",
"clickhouse.server.port": ${CLICKHOUSE_PORT},
"clickhouse.table.name": "${CLICKHOUSE_TABLE}",
"key.converter": "io.confluent.connect.avro.AvroConverter",
"value.converter": "io.confluent.connect.avro.AvroConverter",
"key.converter.schema.registry.url": "http://schemaregistry:8081",
"value.converter.schema.registry.url":"http://schemaregistry:8081",
"store.kafka.metadata": true,
"topic.creation.default.partitions": 6,
"store.raw.data": false,
"store.raw.data.column": "raw_data",
"metrics.enable": true,
"metrics.port": 8084,
"buffer.flush.time.ms": 500,
"thread.pool.size": 1,
"fetch.min.bytes": 52428800,
"enable.kafka.offset": false,
"replacingmergetree.delete.column": "sign",
"auto.create.tables": true,
"schema.evolution": false,
"deduplication.policy": "off"
}
}
EOF
fi
# "replacingmergetree.delete.column": "sign_delete"

View File

@@ -1,4 +1,4 @@
FROM debezium/connect-base:1.9.5.Final
LABEL maintainer="Altinity"
@@ -9,12 +9,33 @@ COPY target/*.jar /kafka/connect/clickhouse-kafka-sink-connector/
# Copy libs
COPY deploy/libs/* /kafka/connect/clickhouse-kafka-sink-connector/
#
#docker-maven-download confluent kafka-connect-avro-converter "$CONFLUENT_VERSION" 16c38a7378032f850f0293b7654aa6bf && \
# docker-maven-download confluent kafka-connect-avro-data "$CONFLUENT_VERSION" 63022db9533689968540f45be705786d && \
# docker-maven-download confluent kafka-avro-serializer "$CONFLUENT_VERSION" b1379606e1dcc5d7b809c82abe294cc7 && \
# docker-maven-download confluent kafka-schema-serializer "$CONFLUENT_VERSION" b68a7eebf7ce6a1b826bd5bbb443b176 && \
# docker-maven-download confluent kafka-schema-registry-client "$CONFLUENT_VERSION" e3631a8a3fe10312a727e9d50fcd5527 && \
# docker-maven-download confluent common-config "$CONFLUENT_VERSION" e1a4dc2b6d1d8d8c2df47db580276f38 && \
# docker-maven-download confluent common-utils "$CONFLUENT_VERSION" ad9e39d87c6a9fa1a9b19e6ce80392fa && \
# Copy apicurio
RUN 'curl' '-L' '--output' '/kafka/connect/clickhouse-kafka-sink-connector/apicurio.tgz' 'https://repo1.maven.org/maven2/io/apicurio/apicurio-registry-distro-connect-converter/2.1.5.Final/apicurio-registry-distro-connect-converter-2.1.5.Final.tar.gz' \
&& 'tar' 'xvfz' '/kafka/connect/clickhouse-kafka-sink-connector/apicurio.tgz' '-C' '/kafka/connect/clickhouse-kafka-sink-connector' \
&& 'rm' '-vf' '/kafka/connect/clickhouse-kafka-sink-connector/apicurio.tgz'
# Copy confluent
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-connect-avro-converter-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-connect-avro-converter/7.2.1/kafka-connect-avro-converter-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-avro-serializer-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-avro-serializer/7.2.1/kafka-avro-serializer-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-schema-registry-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-schema-registry/7.2.1/kafka-schema-registry-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-schema-registry-client-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-schema-registry-client/7.2.1/kafka-schema-registry-client-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/common-config-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/common-config/7.2.1/common-config-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/common-utils-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/common-utils/7.2.1/common-utils-7.2.1.jar'
RUN chown -R kafka:kafka /kafka/connect/
USER kafka
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["start"]

View File

@@ -0,0 +1,75 @@
FROM debezium/connect-base:1.9.5.Final
#https://github.com/debezium/container-images/blob/main/connect-base/1.2/Dockerfile
LABEL maintainer="Altinity"
USER root:root
RUN mkdir /kafka/connect/mysql-connector
# Copy debezium jar files
RUN 'curl' '-L' '--output' '/kafka/libs/debezium-core-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-core/1.9.5.Final/debezium-core-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/connect/mysql-connector/debezium-core-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-core/1.9.5.Final/debezium-core-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/debezium-connector-mysql-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-connector-mysql/1.9.5.Final/debezium-connector-mysql-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/connect/mysql-connector/debezium-connector-mysql-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-connector-mysql/1.9.5.Final/debezium-connector-mysql-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/debezium-api-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-api/1.9.5.Final/debezium-api-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/connect/mysql-connector/debezium-api-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-api/1.9.5.Final/debezium-api-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/mysql-connector-java-8.0.30.jar' 'https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.30/mysql-connector-java-8.0.30.jar'
RUN 'curl' '-L' '--output' '/kafka/connect/mysql-connector/mysql-connector-java-8.0.30.jar' 'https://repo1.maven.org/maven2/mysql/mysql-connector-java/8.0.30/mysql-connector-java-8.0.30.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/debezium-ddl-parser-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-ddl-parser/1.9.5.Final/debezium-ddl-parser-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/connect/mysql-connector/debezium-ddl-parser-1.9.5.Final.jar' 'https://repo1.maven.org/maven2/io/debezium/debezium-ddl-parser/1.9.5.Final/debezium-ddl-parser-1.9.5.Final.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/antlr4-runtime-4.8.jar' 'https://repo1.maven.org/maven2/org/antlr/antlr4-runtime/4.8/antlr4-runtime-4.8.jar'
RUN 'curl' '-L' '--output' '/kafka/connect/mysql-connector/antlr4-runtime-4.8.jar' 'https://repo1.maven.org/maven2/org/antlr/antlr4-runtime/4.8/antlr4-runtime-4.8.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/mysql-binlog-connector-java-0.26.1.jar' 'https://repo1.maven.org/maven2/com/zendesk/mysql-binlog-connector-java/0.26.1/mysql-binlog-connector-java-0.26.1.jar'
RUN 'curl' '-L' '--output' '/kafka/connect/mysql-connector/mysql-binlog-connector-java-0.26.1.jar' 'https://repo1.maven.org/maven2/com/zendesk/mysql-binlog-connector-java/0.26.1/mysql-binlog-connector-java-0.26.1.jar'
#https://repo1.maven.org/maven2/com/github/shyiko/mysql-binlog-connector-java/0.21.0/mysql-binlog-connector-java-0.21.0.jar
#https://repo1.maven.org/maven2/io/debezium/debezium-core/1.9.5.Final/debezium-core-1.9.5.Final.jar
# Copy sink-connector
#COPY target/*.jar /kafka/connect/clickhouse-kafka-sink-connector/
# Copy libs
#COPY deploy/libs/* /kafka/connect/clickhouse-kafka-sink-connector/
#
#docker-maven-download confluent kafka-connect-avro-converter "$CONFLUENT_VERSION" 16c38a7378032f850f0293b7654aa6bf && \
# docker-maven-download confluent kafka-connect-avro-data "$CONFLUENT_VERSION" 63022db9533689968540f45be705786d && \
# docker-maven-download confluent kafka-avro-serializer "$CONFLUENT_VERSION" b1379606e1dcc5d7b809c82abe294cc7 && \
# docker-maven-download confluent kafka-schema-serializer "$CONFLUENT_VERSION" b68a7eebf7ce6a1b826bd5bbb443b176 && \
# docker-maven-download confluent kafka-schema-registry-client "$CONFLUENT_VERSION" e3631a8a3fe10312a727e9d50fcd5527 && \
# docker-maven-download confluent common-config "$CONFLUENT_VERSION" e1a4dc2b6d1d8d8c2df47db580276f38 && \
# docker-maven-download confluent common-utils "$CONFLUENT_VERSION" ad9e39d87c6a9fa1a9b19e6ce80392fa && \
# Copy apicurio
#RUN 'curl' '-L' '--output' '/kafka/connect/clickhouse-kafka-sink-connector/apicurio.tgz' 'https://repo1.maven.org/maven2/io/apicurio/apicurio-registry-distro-connect-converter/2.1.5.Final/apicurio-registry-distro-connect-converter-2.1.5.Final.tar.gz' \
# && 'tar' 'xvfz' '/kafka/connect/clickhouse-kafka-sink-connector/apicurio.tgz' '-C' '/kafka/connect/clickhouse-kafka-sink-connector' \
# && 'rm' '-vf' '/kafka/connect/clickhouse-kafka-sink-connector/apicurio.tgz'
# Copy confluent
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-connect-avro-converter-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-connect-avro-converter/7.2.1/kafka-connect-avro-converter-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-avro-serializer-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-avro-serializer/7.2.1/kafka-avro-serializer-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-schema-registry-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-schema-registry/7.2.1/kafka-schema-registry-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/kafka-schema-registry-client-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/kafka-schema-registry-client/7.2.1/kafka-schema-registry-client-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/common-config-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/common-config/7.2.1/common-config-7.2.1.jar'
RUN 'curl' '-L' '--output' '/kafka/libs/common-utils-7.2.1.jar' 'https://packages.confluent.io/maven/io/confluent/common-utils/7.2.1/common-utils-7.2.1.jar'
RUN chown -R kafka:kafka /kafka/connect/
USER kafka
ENTRYPOINT ["/docker-entrypoint.sh"]
CMD ["start"]
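To sanity-check that the Debezium jars landed where Connect's plugin loader expects them, the entrypoint can be overridden for a one-off listing (inspection only; the date tag assumes the image was built by the script below):

docker run --rm --entrypoint ls \
    altinity/source-connector-on-debezium-base:$(date +%F) /kafka/connect/mysql-connector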

View File

@@ -0,0 +1,29 @@
#!/bin/bash
# Production docker image builder
set -e
# Source configuration
CUR_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
SRC_ROOT="$(realpath "${CUR_DIR}/..")"
# Externally configurable build-dependent options
TAG=$(date +%F)
#TAG="${TAG:-latest}"
DOCKER_IMAGE="altinity/source-connector-on-debezium-base:${TAG}"
# Externally configurable build-dependent options
DOCKERFILE_DIR="${SRC_ROOT}/docker"
DOCKERFILE="${DOCKERFILE_DIR}/Dockerfile-source-debezium-base-image"
echo "***************"
echo "* Build image *"
echo "***************"
DOCKER_CMD="docker build -t ${DOCKER_IMAGE} -f ${DOCKERFILE} ${SRC_ROOT}"
if ${DOCKER_CMD}; then
echo "ALL DONE"
else
echo "FAILED"
exit 1
fi
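Because the script tags the image with the build date, ${SINK_VERSION} in the compose file has to carry the same value when the stack is started (a usage sketch; the relative paths are assumptions):

./docker/build-source-debezium-base-image.sh
export SINK_VERSION=$(date +%F)
(cd docker && docker-compose up -d)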