Failed to find any class that implements Connector and which name matches with MySQL

Posted: 2021-08-08 06:58:25

After configuring Kafka Connect following the official documentation, I get an error saying that the driver does not exist in Kafka Connect. I tried copying the .jar into the mentioned directory, but nothing happened. Any suggestions for a solution?
Docker Compose
---
version: '2'
services:
  zookeeper:
    image: confluentinc/cp-zookeeper:latest
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - 2181:2181
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000
  broker-1:
    image: confluentinc/cp-enterprise-kafka:latest
    hostname: broker-1
    container_name: broker-1
    depends_on:
      - zookeeper
    ports:
      - 9092:9092
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_BROKER_RACK: rack-a
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_ADVERTISED_HOST_NAME: <netaddr>
      KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://<netaddr>:9092'
      KAFKA_METRIC_REPORTERS: io.confluent.metrics.reporter.ConfluentMetricsReporter
      KAFKA_DELETE_TOPIC_ENABLE: 'true'
      KAFKA_JMX_PORT: 9999
      KAFKA_JMX_HOSTNAME: 'broker-1'
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      CONFLUENT_METRICS_REPORTER_BOOTSTRAP_SERVERS: broker-1:9092
      CONFLUENT_METRICS_REPORTER_ZOOKEEPER_CONNECT: zookeeper:2181
      CONFLUENT_METRICS_REPORTER_TOPIC_REPLICAS: 1
      CONFLUENT_METRICS_ENABLE: 'true'
      CONFLUENT_SUPPORT_CUSTOMER_ID: 'anonymous'
  schema_registry:
    image: confluentinc/cp-schema-registry:latest
    hostname: schema_registry
    container_name: schema_registry
    depends_on:
      - zookeeper
      - broker-1
    ports:
      - 8081:8081
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema_registry
      SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181'
      SCHEMA_REGISTRY_ACCESS_CONTROL_ALLOW_ORIGIN: '*'
      SCHEMA_REGISTRY_ACCESS_CONTROL_ALLOW_METHODS: 'GET,POST,PUT,OPTIONS'
  connect:
    image: confluentinc/cp-kafka-connect:latest
    hostname: connect
    container_name: connect
    depends_on:
      - zookeeper
      - broker-1
      - schema_registry
    ports:
      - 8083:8083
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'broker-1:9092'
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema_registry:8081'
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schema_registry:8081'
      CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
      CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      CONNECT_PLUGIN_PATH: /usr/share/java,/etc/kafka-connect/jars
    volumes:
      - /etc/kafka/jars:/etc/kafka-connect/jars
Driver
curl -k -SL "http://dev.mysql.com/get/Downloads/Connector-J/mysql-connector-java-5.1.37.tar.gz" | tar -xzf - -C /etc/kafka/jars --strip-components=1 mysql-connector-java-5.1.37/mysql-connector-java-5.1.37-bin.jar
Curl
curl -X POST \
  -H "Content-Type: application/json" \
  --data '{ "name": "quickstart-jdbc-source", "config": { "connector.class": "io.confluent.connect.jdbc.JdbcSourceConnector", "tasks.max": 1, "connection.url": "jdbc:mysql://127.0.0.1:3306/connect_test?user=root&password=confluent", "mode": "incrementing", "incrementing.column.name": "id", "timestamp.column.name": "modified", "topic.prefix": "quickstart-jdbc-", "poll.interval.ms": 1000 } }' \
  http://$CONNECT_HOST:8083/connectors
Output
"error_code":500,"message":"Failed to find any class that implements Connector and which name matches io.confluent.connect.jdbc.JdbcSourceConnector, available connectors are: PluginDescklass=class org.apache.kafka.connect.file.FileStreamSinkConnector, name='org.apache.kafka.connect.file.FileStreamSinkConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=sink, typeName='sink', location='file:/usr/share/java/kafka/', PluginDescklass=class org.apache.kafka.connect.file.FileStreamSourceConnector, name='org.apache.kafka.connect.file.FileStreamSourceConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=source, typeName='source', location='file:/usr/share/java/kafka/', PluginDescklass=class org.apache.kafka.connect.mirror.MirrorCheckpointConnector, name='org.apache.kafka.connect.mirror.MirrorCheckpointConnector', version='1', encodedVersion=1, type=source, typeName='source', location='file:/usr/share/java/kafka/', PluginDescklass=class org.apache.kafka.connect.mirror.MirrorHeartbeatConnector, name='org.apache.kafka.connect.mirror.MirrorHeartbeatConnector', version='1', encodedVersion=1, type=source, typeName='source', location='file:/usr/share/java/kafka/', PluginDescklass=class org.apache.kafka.connect.mirror.MirrorSourceConnector, name='org.apache.kafka.connect.mirror.MirrorSourceConnector', version='1', encodedVersion=1, type=source, typeName='source', location='file:/usr/share/java/kafka/', PluginDescklass=class org.apache.kafka.connect.tools.MockConnector, name='org.apache.kafka.connect.tools.MockConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=connector, typeName='connector', location='file:/usr/share/java/confluent-control-center/', PluginDescklass=class org.apache.kafka.connect.tools.MockSinkConnector, name='org.apache.kafka.connect.tools.MockSinkConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=sink, typeName='sink', location='file:/usr/share/java/confluent-control-center/', PluginDescklass=class org.apache.kafka.connect.tools.MockSourceConnector, name='org.apache.kafka.connect.tools.MockSourceConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=source, typeName='source', location='file:/usr/share/java/confluent-control-center/', PluginDescklass=class org.apache.kafka.connect.tools.SchemaSourceConnector, name='org.apache.kafka.connect.tools.SchemaSourceConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=source, typeName='source', location='file:/usr/share/java/confluent-control-center/', PluginDescklass=class org.apache.kafka.connect.tools.VerifiableSinkConnector, name='org.apache.kafka.connect.tools.VerifiableSinkConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=source, typeName='source', location='file:/usr/share/java/confluent-control-center/', PluginDescklass=class org.apache.kafka.connect.tools.VerifiableSourceConnector, name='org.apache.kafka.connect.tools.VerifiableSourceConnector', version='6.1.1-ccs', encodedVersion=6.1.1-ccs, type=source, typeName='source', location='file:/usr/share/java/confluent-control-center/'"
ls inside the container
[root@connect appuser]# ls /usr/share/java/
acl confluent-control-center confluent-telemetry kafka monitoring-interceptors rest-utils
confluent-common confluent-hub-client cp-base-new kafka-serde-tools mysql-connector-java-5.1.37-bin.jar schema-registry
[root@connect appuser]# ls /etc/kafka-connect/jars/
mysql-connector-java-5.1.37-bin.jar
Documentation: https://docs.confluent.io/5.0.0/installation/docker/docs/installation/connect-avro-jdbc.html
Answer 1:

The error is not saying that your driver doesn't exist; it is saying that the connector doesn't exist. Scan the error for each PluginDesc{klass=class ...} entry and you will notice that the connector.class you are trying to use is not in the list.
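As a quick check (a minimal sketch, assuming the worker is reachable on localhost:8083 as in the compose file above), you can ask the Connect REST API which connector plugins it has actually loaded; the JDBC connector will be missing from this list until it is installed:

# List the connector classes this worker found on its plugin path
curl -s http://localhost:8083/connector-plugins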
The latest Kafka Connect images from Confluent do not include any connectors beyond the ones pre-bundled with Kafka itself (plus a few from Control Center, which are not really useful), so you have to install additional connectors yourself - described here
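For example, a minimal sketch of that installation (the connector version 10.2.0 is an assumption, not something from the original answer): the cp-kafka-connect image ships the confluent-hub CLI, so the JDBC connector can be installed into the running worker container and the worker restarted:

# On the host: install the JDBC connector inside the running worker container
# (confluent-hub installs to /usr/share/confluent-hub-components by default,
#  so that directory also has to be on CONNECT_PLUGIN_PATH)
docker exec connect confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.2.0
# Restart the worker so it rescans the plugin path
docker restart connect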
If you want to follow the 5.0 documentation, use the appropriately tagged docker image instead of latest (the older images did ship with the connectors installed).
Also, you need to put the JDBC driver directly inside the JDBC connector's folder so that it is picked up correctly on the classpath; the driver is not a "plugin" in Connect terms. The link above shows an example of this as well.
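A hedged sketch of that last step (the target directory is an assumption based on where confluent-hub places the JDBC connector; use whatever directory actually contains the kafka-connect-jdbc jars in your image):

# Copy the MySQL driver next to the JDBC connector's own jars, then restart the worker
docker cp mysql-connector-java-5.1.37-bin.jar \
  connect:/usr/share/confluent-hub-components/confluentinc-kafka-connect-jdbc/lib/
docker restart connect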
Comments:
Thank you very much, your recommendation solved it.