2018-10-17 12:50:14,337 ERROR || Failed to send HTTP request to endpoint: http://schema-registry:8081/subjects/dq.public.bug_135605_2-key/versions [io.confluent.kafka.schemaregistry.client.rest.RestService]
java.net.UnknownHostException: schema-registry
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:184)
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    at java.net.Socket.connect(Socket.java:589)
    at java.net.Socket.connect(Socket.java:538)
    at sun.net.NetworkClient.doConnect(NetworkClient.java:180)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:463)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:558)
    at sun.net.www.http.HttpClient.<init>(HttpClient.java:242)
    at sun.net.www.http.HttpClient.New(HttpClient.java:339)
    at sun.net.www.http.HttpClient.New(HttpClient.java:357)
    at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1220)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1156)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1050)
    at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:984)
    at sun.net.www.protocol.http.HttpURLConnection.getOutputStream0(HttpURLConnection.java:1334)
    at sun.net.www.protocol.http.HttpURLConnection.getOutputStream(HttpURLConnection.java:1309)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.sendHttpRequest(RestService.java:172)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.httpRequest(RestService.java:229)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.registerSchema(RestService.java:320)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.registerSchema(RestService.java:312)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.registerSchema(RestService.java:307)
    at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.registerAndGetId(CachedSchemaRegistryClient.java:115)
    at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.register(CachedSchemaRegistryClient.java:154)
    at io.confluent.kafka.serializers.AbstractKafkaAvroSerializer.serializeImpl(AbstractKafkaAvroSerializer.java:79)
    at io.confluent.connect.avro.AvroConverter$Serializer.serialize(AvroConverter.java:109)
    at io.confluent.connect.avro.AvroConverter.fromConnectData(AvroConverter.java:74)
    at org.apache.kafka.connect.runtime.WorkerSourceTask.sendRecords(WorkerSourceTask.java:227)
    at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:194)
    at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:170)
    at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:214)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
2018-10-17 12:50:14,339 INFO || WorkerSourceTask{id=dq-connector-0} Committing offsets [org.apache.kafka.connect.runtime.WorkerSourceTask]
2018-10-17 12:50:14,339 INFO || WorkerSourceTask{id=dq-connector-0} flushing 0 outstanding messages for offset commit [org.apache.kafka.connect.runtime.WorkerSourceTask]
2018-10-17 12:50:14,344 INFO || Cluster ID: DaOpMGMASwmfpjyTvyyyVQ [org.apache.kafka.clients.Metadata]
2018-10-17 12:50:14,351 INFO || WorkerSourceTask{id=dq-connector-0} Finished commitOffsets successfully in 12 ms [org.apache.kafka.connect.runtime.WorkerSourceTask]
2018-10-17 12:52:52,439 INFO || Cluster ID: DaOpMGMASwmfpjyTvyyyVQ [org.apache.kafka.clients.Metadata]
2018-10-17 12:52:54,235 INFO || Cluster ID: DaOpMGMASwmfpjyTvyyyVQ [org.apache.kafka.clients.Metadata]
2018-10-17 12:52:54,310 INFO || Cluster ID: DaOpMGMASwmfpjyTvyyyVQ [org.apache.kafka.clients.Metadata]
2018-10-17 12:52:54,312 INFO || Cluster ID: DaOpMGMASwmfpjyTvyyyVQ [org.apache.kafka.clients.Metadata]
2018-10-17 12:52:54,313 INFO || Cluster ID: DaOpMGMASwmfpjyTvyyyVQ [org.apache.kafka.clients.Metadata]
2018-10-17 13:20:01,499 ERROR || WorkerSourceTask{id=dq-connector-0} Task threw an uncaught and unrecoverable exception [org.apache.kafka.connect.runtime.WorkerTask]
org.apache.kafka.connect.errors.DataException: dq.public.bug_135605_2
    at io.confluent.connect.avro.AvroConverter.fromConnectData(AvroConverter.java:76)
    at org.apache.kafka.connect.runtime.WorkerSourceTask.sendRecords(WorkerSourceTask.java:227)
    at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:194)
    at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:170)
    at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:214)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.kafka.common.errors.SerializationException: Error serializing Avro message
Caused by: java.net.UnknownHostException: schema-registry
    at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:184)
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    at java.net.Socket.connect(Socket.java:589)
    at java.net.Socket.connect(Socket.java:538)
    at sun.net.NetworkClient.doConnect(NetworkClient.java:180)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:463)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:558)
    at sun.net.www.http.HttpClient.<init>(HttpClient.java:242)
    at sun.net.www.http.HttpClient.New(HttpClient.java:339)
    at sun.net.www.http.HttpClient.New(HttpClient.java:357)
    at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:1220)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1156)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:1050)
    at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:984)
    at sun.net.www.protocol.http.HttpURLConnection.getOutputStream0(HttpURLConnection.java:1334)
    at sun.net.www.protocol.http.HttpURLConnection.getOutputStream(HttpURLConnection.java:1309)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.sendHttpRequest(RestService.java:172)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.httpRequest(RestService.java:229)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.registerSchema(RestService.java:320)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.registerSchema(RestService.java:312)
    at io.confluent.kafka.schemaregistry.client.rest.RestService.registerSchema(RestService.java:307)
    at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.registerAndGetId(CachedSchemaRegistryClient.java:115)
    at io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient.register(CachedSchemaRegistryClient.java:154)
    at io.confluent.kafka.serializers.AbstractKafkaAvroSerializer.serializeImpl(AbstractKafkaAvroSerializer.java:79)
    at io.confluent.connect.avro.AvroConverter$Serializer.serialize(AvroConverter.java:109)
    at io.confluent.connect.avro.AvroConverter.fromConnectData(AvroConverter.java:74)
    at org.apache.kafka.connect.runtime.WorkerSourceTask.sendRecords(WorkerSourceTask.java:227)
    at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:194)
    at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:170)
    at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:214)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
    at java.lang.Thread.run(Thread.java:748)
2018-10-17 13:20:01,503 ERROR || WorkerSourceTask{id=dq-connector-0} Task is being killed and will not recover until manually restarted [org.apache.kafka.connect.runtime.WorkerTask]
2018-10-17 13:20:01,503 INFO || [Producer clientId=producer-5] Closing the Kafka producer with timeoutMillis = 30000 ms. [org.apache.kafka.clients.producer.KafkaProducer]
2018-10-17 13:20:05,705 INFO || Stopping task dq-connector-0 [org.apache.kafka.connect.runtime.Worker]
2018-10-17 13:20:05,705 INFO || Starting task dq-connector-0 [org.apache.kafka.connect.runtime.distributed.DistributedHerder]
2018-10-17 13:20:05,705 INFO || Creating task dq-connector-0 [org.apache.kafka.connect.runtime.Worker]
2018-10-17 13:20:05,705 INFO || ConnectorConfig values:
    connector.class = io.debezium.connector.postgresql.PostgresConnector
    header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter
    key.converter = null
    name = dq-connector
    tasks.max = 1
    transforms = []
    value.converter = null
 [org.apache.kafka.connect.runtime.ConnectorConfig]
2018-10-17 13:20:05,705 INFO || EnrichedConnectorConfig values:
    connector.class = io.debezium.connector.postgresql.PostgresConnector
    header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter
    key.converter = null
    name = dq-connector
    tasks.max = 1
    transforms = []
    value.converter = null
 [org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig]
2018-10-17 13:20:05,705 INFO || TaskConfig values:
    task.class = class io.debezium.connector.postgresql.PostgresConnectorTask
 [org.apache.kafka.connect.runtime.TaskConfig]
2018-10-17 13:20:05,705 INFO || Instantiated task dq-connector-0 with version 0.8.3.Final of type io.debezium.connector.postgresql.PostgresConnectorTask [org.apache.kafka.connect.runtime.Worker]
2018-10-17 13:20:05,706 INFO || AvroConverterConfig values:
    schema.registry.url = [http://schema-registry:8081]
    auto.register.schemas = true
    max.schemas.per.subject = 1000
    basic.auth.credentials.source = URL
    schema.registry.basic.auth.user.info = [hidden]
    value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy
    key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy
 [io.confluent.connect.avro.AvroConverterConfig]
2018-10-17 13:20:05,706 INFO || AvroDataConfig values:
    schemas.cache.config = 1000
    enhanced.avro.schema.support = false
    connect.meta.data = true
 [io.confluent.connect.avro.AvroDataConfig]
2018-10-17 13:20:05,706 INFO || AvroConverterConfig values:
    schema.registry.url = [http://schema-registry:8081]
    auto.register.schemas = true
    max.schemas.per.subject = 1000
    basic.auth.credentials.source = URL
    schema.registry.basic.auth.user.info = [hidden]
    value.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy
    key.subject.name.strategy = class io.confluent.kafka.serializers.subject.TopicNameStrategy
 [io.confluent.connect.avro.AvroConverterConfig]
2018-10-17 13:20:05,706 INFO || AvroDataConfig values:
    schemas.cache.config = 1000
    enhanced.avro.schema.support = false
    connect.meta.data = true
 [io.confluent.connect.avro.AvroDataConfig]
2018-10-17 13:20:05,706 INFO || ProducerConfig values:
    acks = all
    batch.size = 16384
    bootstrap.servers = [kafka:9092]
    buffer.memory = 33554432
    client.id =
    compression.type = none
    connections.max.idle.ms = 540000
    enable.idempotence = false
    interceptor.classes = []
    key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
    linger.ms = 0
    max.block.ms = 9223372036854775807
    max.in.flight.requests.per.connection = 1
    max.request.size = 1048576
    metadata.max.age.ms = 300000
    metric.reporters = []
    metrics.num.samples = 2
    metrics.recording.level = INFO
    metrics.sample.window.ms = 30000
    partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
    receive.buffer.bytes = 32768
    reconnect.backoff.max.ms = 1000
    reconnect.backoff.ms = 50
    request.timeout.ms = 2147483647
    retries = 2147483647
    retry.backoff.ms = 100
    sasl.jaas.config = null
    sasl.kerberos.kinit.cmd = /usr/bin/kinit
    sasl.kerberos.min.time.before.relogin = 60000
    sasl.kerberos.service.name = null
    sasl.kerberos.ticket.renew.jitter = 0.05
    sasl.kerberos.ticket.renew.window.factor = 0.8
    sasl.mechanism = GSSAPI
    security.protocol = PLAINTEXT
    send.buffer.bytes = 131072
    ssl.cipher.suites = null
    ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
    ssl.endpoint.identification.algorithm = null
    ssl.key.password = null
    ssl.keymanager.algorithm = SunX509
    ssl.keystore.location = null
    ssl.keystore.password = null
    ssl.keystore.type = JKS
    ssl.protocol = TLS
    ssl.provider = null
    ssl.secure.random.implementation = null
    ssl.trustmanager.algorithm = PKIX
    ssl.truststore.location = null
    ssl.truststore.password = null
    ssl.truststore.type = JKS
    transaction.timeout.ms = 60000
    transactional.id = null
    value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
 [org.apache.kafka.clients.producer.ProducerConfig]
2018-10-17 13:20:05,708 WARN Postgres|dq|records-stream-producer Closing replication stream due to db connection IO exception... [io.debezium.connector.postgresql.RecordsStreamProducer]
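
The failing call in both traces is the Avro converter registering the record schema against the schema.registry.url shown in the AvroConverterConfig dump (http://schema-registry:8081); the UnknownHostException means the Connect worker's JVM cannot resolve the hostname schema-registry at all, which points at DNS/networking (for example a missing Docker network alias) rather than the connector itself. A minimal Java sketch of the same call path, using the CachedSchemaRegistryClient API named in the trace, can reproduce the failure in isolation when run from inside the Connect container. This assumes the pre-5.5 (Avro-only) client API; the class name, subject "probe-value", and the Probe schema are illustrative placeholders, not values taken from the failing connector.

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import io.confluent.kafka.schemaregistry.client.CachedSchemaRegistryClient;

// Connectivity probe mirroring the call path in the stack trace:
// CachedSchemaRegistryClient.register -> RestService.registerSchema.
public class RegistryProbe {
    public static void main(String[] args) throws Exception {
        // Same URL as schema.registry.url in the AvroConverterConfig dump above.
        CachedSchemaRegistryClient client =
                new CachedSchemaRegistryClient("http://schema-registry:8081", 1000);

        // Throwaway Avro record schema; any valid schema exercises the call.
        Schema schema = SchemaBuilder.record("Probe").fields()
                .requiredString("id")
                .endRecord();

        // Throws the same java.net.UnknownHostException if this JVM cannot
        // resolve the 'schema-registry' hostname.
        int id = client.register("probe-value", schema);
        System.out.println("registered schema id = " + id);
    }
}

If the probe fails the same way, the fix belongs on the deployment side (making schema-registry resolvable from the worker, or pointing schema.registry.url at a resolvable address); note the task will still need a manual restart afterwards, per the "will not recover until manually restarted" error above.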