C:\kafka\bin\windows>c:\kafka\bin\windows\connect-standalone.bat C:/kafka/config/connect-standalone.properties c:\kafka\plugins\confluentinc-kafka-connect-elasticsearch-11.0.4\etc\absa-elasticsearch.properties
[2021-06-01 10:58:49,201] INFO Kafka Connect standalone worker initializing ... (org.apache.kafka.connect.cli.ConnectStandalone:68)
[2021-06-01 10:58:49,207] INFO WorkerInfo values:
        jvm.args = -Xmx256M, -XX:+UseG1GC, -XX:MaxGCPauseMillis=20, -XX:InitiatingHeapOccupancyPercent=35, -XX:+ExplicitGCInvokesConcurrent, -Djava.awt.headless=true, -Dcom.sun.management.jmxremote, -Dcom.sun.management.jmxremote.authenticate=false, -Dcom.sun.management.jmxremote.ssl=false, -Dkafka.logs.dir=c:\kafka/logs, -Dlog4j.configuration=file:c:\kafka/config/connect-log4j.properties
        jvm.spec = Oracle Corporation, Java HotSpot(TM) 64-Bit Server VM, 16.0.1, 16.0.1+9-24
        jvm.classpath = C:\Program Files\IBM\MQ\java\lib\com.ibm.mqjms.jar;C:\Program Files\IBM\MQ\java\lib\com.ibm.mq.jar;c:\kafka\libs\activation-1.1.1.jar;c:\kafka\libs\aopalliance-repackaged-2.6.1.jar;c:\kafka\libs\argparse4j-0.7.0.jar;c:\kafka\libs\audience-annotations-0.5.0.jar;c:\kafka\libs\commons-cli-1.4.jar;c:\kafka\libs\commons-lang3-3.8.1.jar;c:\kafka\libs\connect-api-2.8.0.jar;c:\kafka\libs\connect-basic-auth-extension-2.8.0.jar;c:\kafka\libs\connect-file-2.8.0.jar;c:\kafka\libs\connect-json-2.8.0.jar;c:\kafka\libs\connect-mirror-2.8.0.jar;c:\kafka\libs\connect-mirror-client-2.8.0.jar;c:\kafka\libs\connect-runtime-2.8.0.jar;c:\kafka\libs\connect-transforms-2.8.0.jar;c:\kafka\libs\hk2-api-2.6.1.jar;c:\kafka\libs\hk2-locator-2.6.1.jar;c:\kafka\libs\hk2-utils-2.6.1.jar;c:\kafka\libs\jackson-annotations-2.10.5.jar;c:\kafka\libs\jackson-core-2.10.5.jar;c:\kafka\libs\jackson-databind-2.10.5.1.jar;c:\kafka\libs\jackson-dataformat-csv-2.10.5.jar;c:\kafka\libs\jackson-datatype-jdk8-2.10.5.jar;c:\kafka\libs\jackson-jaxrs-base-2.10.5.jar;c:\kafka\libs\jackson-jaxrs-json-provider-2.10.5.jar;c:\kafka\libs\jackson-module-jaxb-annotations-2.10.5.jar;c:\kafka\libs\jackson-module-paranamer-2.10.5.jar;c:\kafka\libs\jackson-module-scala_2.13-2.10.5.jar;c:\kafka\libs\jakarta.activation-api-1.2.1.jar;c:\kafka\libs\jakarta.annotation-api-1.3.5.jar;c:\kafka\libs\jakarta.inject-2.6.1.jar;c:\kafka\libs\jakarta.validation-api-2.0.2.jar;c:\kafka\libs\jakarta.ws.rs-api-2.1.6.jar;c:\kafka\libs\jakarta.xml.bind-api-2.3.2.jar;c:\kafka\libs\javassist-3.27.0-GA.jar;c:\kafka\libs\javax.servlet-api-3.1.0.jar;c:\kafka\libs\javax.ws.rs-api-2.1.1.jar;c:\kafka\libs\jaxb-api-2.3.0.jar;c:\kafka\libs\jersey-client-2.31.jar;c:\kafka\libs\jersey-common-2.31.jar;c:\kafka\libs\jersey-container-servlet-2.31.jar;c:\kafka\libs\jersey-container-servlet-core-2.31.jar;c:\kafka\libs\jersey-hk2-2.31.jar;c:\kafka\libs\jersey-media-jaxb-2.31.jar;c:\kafka\libs\jersey-server-2.31.jar;c:\kafka\libs\jetty-client-9.4.39.v20210325.jar;c:\kafka\libs\jetty-continuation-9.4.39.v20210325.jar;c:\kafka\libs\jetty-http-9.4.39.v20210325.jar;c:\kafka\libs\jetty-io-9.4.39.v20210325.jar;c:\kafka\libs\jetty-security-9.4.39.v20210325.jar;c:\kafka\libs\jetty-server-9.4.39.v20210325.jar;c:\kafka\libs\jetty-servlet-9.4.39.v20210325.jar;c:\kafka\libs\jetty-servlets-9.4.39.v20210325.jar;c:\kafka\libs\jetty-util-9.4.39.v20210325.jar;c:\kafka\libs\jetty-util-ajax-9.4.39.v20210325.jar;c:\kafka\libs\jline-3.12.1.jar;c:\kafka\libs\jopt-simple-5.0.4.jar;c:\kafka\libs\kafka-clients-2.8.0.jar;c:\kafka\libs\kafka-log4j-appender-2.8.0.jar;c:\kafka\libs\kafka-metadata-2.8.0.jar;c:\kafka\libs\kafka-raft-2.8.0.jar;c:\kafka\libs\kafka-shell-2.8.0.jar;c:\kafka\libs\kafka-streams-2.8.0.jar;c:\kafka\libs\kafka-streams-examples-2.8.0.jar;c:\kafka\libs\kafka-streams-scala_2.13-2.8.0.jar;c:\kafka\libs\kafka-streams-test-utils-2.8.0.jar;c:\kafka\libs\kafka-tools-2.8.0.jar;c:\kafka\libs\kafka_2.13-2.8.0-javadoc.jar;c:\kafka\libs\kafka_2.13-2.8.0-javadoc.jar.asc;c:\kafka\libs\kafka_2.13-2.8.0-sources.jar;c:\kafka\libs\kafka_2.13-2.8.0-sources.jar.asc;c:\kafka\libs\kafka_2.13-2.8.0-test-sources.jar;c:\kafka\libs\kafka_2.13-2.8.0-test-sources.jar.asc;c:\kafka\libs\kafka_2.13-2.8.0-test.jar;c:\kafka\libs\kafka_2.13-2.8.0-test.jar.asc;c:\kafka\libs\kafka_2.13-2.8.0.jar;c:\kafka\libs\kafka_2.13-2.8.0.jar.asc;c:\kafka\libs\log4j-1.2.17.jar;c:\kafka\libs\lz4-java-1.7.1.jar;c:\kafka\libs\maven-artifact-3.6.3.jar;c:\kafka\libs\metrics-core-2.2.0.jar;c:\kafka\libs\netty-buffer-4.1.62.Fina
l.jar;c:\kafka\libs\netty-codec-4.1.62.Final.jar;c:\kafka\libs\netty-common-4.1.62.Final.jar;c:\kafka\libs\netty-handler-4.1.62.Final.jar;c:\kafka\libs\netty-resolver-4.1.62.Final.jar;c:\kafka\libs\netty-transport-4.1.62.Final.jar;c:\kafka\libs\netty-transport-native-epoll-4.1.62.Final.jar;c:\kafka\libs\netty-transport-native-unix-common-4.1.62.Final.jar;c:\kafka\libs\osgi-resource-locator-1.0.3.jar;c:\kafka\libs\paranamer-2.8.jar;c:\kafka\libs\plexus-utils-3.2.1.jar;c:\kafka\libs\reflections-0.9.12.jar;c:\kafka\libs\rocksdbjni-5.18.4.jar;c:\kafka\libs\scala-collection-compat_2.13-2.3.0.jar;c:\kafka\libs\scala-java8-compat_2.13-0.9.1.jar;c:\kafka\libs\scala-library-2.13.5.jar;c:\kafka\libs\scala-logging_2.13-3.9.2.jar;c:\kafka\libs\scala-reflect-2.13.5.jar;c:\kafka\libs\slf4j-api-1.7.30.jar;c:\kafka\libs\slf4j-log4j12-1.7.30.jar;c:\kafka\libs\snappy-java-1.1.8.1.jar;c:\kafka\libs\zookeeper-3.5.9.jar;c:\kafka\libs\zookeeper-jute-3.5.9.jar;c:\kafka\libs\zstd-jni-1.4.9-1.jar
        os.spec = Windows 10, amd64, 10.0
        os.vcpus = 12
 (org.apache.kafka.connect.runtime.WorkerInfo:71)
[2021-06-01 10:58:49,213] INFO Scanning for plugin classes. This might take a moment ... (org.apache.kafka.connect.cli.ConnectStandalone:77)
[2021-06-01 10:58:49,226] INFO Loading plugin from: C:\kafka\plugins\confluentinc-kafka-connect-elasticsearch-11.0.4 (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:246)
[2021-06-01 10:58:50,035] INFO Registered loader: PluginClassLoader{pluginLocation=file:/C:/kafka/plugins/confluentinc-kafka-connect-elasticsearch-11.0.4/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:269)
[2021-06-01 10:58:50,036] INFO Added plugin 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:50,038] INFO Added plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:50,038] INFO Added plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:50,038] INFO Added plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:50,040] INFO Loading plugin from: C:\kafka\plugins\confluentinc-kafka-connect-elasticsearch-11.0.4\assets (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:246)
[2021-06-01 10:58:50,043] INFO Registered loader: PluginClassLoader{pluginLocation=file:/C:/kafka/plugins/confluentinc-kafka-connect-elasticsearch-11.0.4/assets/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:269)
[2021-06-01 10:58:50,043] INFO Loading plugin from: C:\kafka\plugins\confluentinc-kafka-connect-elasticsearch-11.0.4\doc (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:246)
[2021-06-01 10:58:50,047] INFO Registered loader: PluginClassLoader{pluginLocation=file:/C:/kafka/plugins/confluentinc-kafka-connect-elasticsearch-11.0.4/doc/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:269)
[2021-06-01 10:58:50,048] INFO Loading plugin from: C:\kafka\plugins\confluentinc-kafka-connect-elasticsearch-11.0.4\etc (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:246)
[2021-06-01 10:58:50,049] INFO Registered loader: PluginClassLoader{pluginLocation=file:/C:/kafka/plugins/confluentinc-kafka-connect-elasticsearch-11.0.4/etc/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:269)
[2021-06-01 10:58:50,050] INFO Loading plugin from: C:\kafka\plugins\confluentinc-kafka-connect-elasticsearch-11.0.4\lib (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:246)
[2021-06-01 10:58:50,565] INFO Registered loader: PluginClassLoader{pluginLocation=file:/C:/kafka/plugins/confluentinc-kafka-connect-elasticsearch-11.0.4/lib/} (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:269)
[2021-06-01 10:58:51,146] INFO Registered loader: jdk.internal.loader.ClassLoaders$AppClassLoader@1d44bcfa (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:269)
[2021-06-01 10:58:51,146] INFO Added plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,148] INFO Added plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,148] INFO Added plugin 'org.apache.kafka.connect.tools.MockSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,148] INFO Added plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,149] INFO Added plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,149] INFO Added plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,149] INFO Added plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,149] INFO Added plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,150] INFO Added plugin 'org.apache.kafka.connect.tools.MockSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,150] INFO Added plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,150] INFO Added plugin 'org.apache.kafka.connect.tools.MockConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,150] INFO Added plugin 'org.apache.kafka.connect.converters.FloatConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,151] INFO Added plugin 'org.apache.kafka.connect.converters.DoubleConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,151] INFO Added plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,153] INFO Added plugin 'org.apache.kafka.connect.converters.IntegerConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,153] INFO Added plugin 'org.apache.kafka.connect.converters.LongConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,154] INFO Added plugin 'org.apache.kafka.connect.json.JsonConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,154] INFO Added plugin 'org.apache.kafka.connect.storage.StringConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,154] INFO Added plugin 'org.apache.kafka.connect.converters.ShortConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,154] INFO Added plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,155] INFO Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,155] INFO Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,155] INFO Added plugin 'org.apache.kafka.connect.transforms.ReplaceField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,155] INFO Added plugin 'org.apache.kafka.connect.transforms.Filter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,156] INFO Added plugin 'org.apache.kafka.connect.transforms.InsertField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,159] INFO Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,161] INFO Added plugin 'org.apache.kafka.connect.transforms.MaskField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,162] INFO Added plugin 'org.apache.kafka.connect.transforms.TimestampRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,162] INFO Added plugin 'org.apache.kafka.connect.transforms.RegexRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,162] INFO Added plugin 'org.apache.kafka.connect.transforms.HoistField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,163] INFO Added plugin 'org.apache.kafka.connect.transforms.ValueToKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,163] INFO Added plugin 'org.apache.kafka.connect.transforms.MaskField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,163] INFO Added plugin 'org.apache.kafka.connect.transforms.Cast$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,163] INFO Added plugin 'org.apache.kafka.connect.transforms.Cast$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,164] INFO Added plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,164] INFO Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,164] INFO Added plugin 'org.apache.kafka.connect.transforms.Flatten$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,164] INFO Added plugin 'org.apache.kafka.connect.transforms.InsertField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,164] INFO Added plugin 'org.apache.kafka.connect.transforms.Flatten$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,165] INFO Added plugin 'org.apache.kafka.connect.transforms.SetSchemaMetadata$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,165] INFO Added plugin 'org.apache.kafka.connect.transforms.ExtractField$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,165] INFO Added plugin 'org.apache.kafka.connect.transforms.TimestampConverter$Value' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,165] INFO Added plugin 'org.apache.kafka.connect.transforms.HoistField$Key' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,166] INFO Added plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,166] INFO Added plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,166] INFO Added plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,166] INFO Added plugin 'org.apache.kafka.common.config.provider.FileConfigProvider' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,170] INFO Added plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:198)
[2021-06-01 10:58:51,170] INFO Added aliases 'ElasticsearchSinkConnector' and 'ElasticsearchSink' to plugin 'io.confluent.connect.elasticsearch.ElasticsearchSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,171] INFO Added aliases 'FileStreamSinkConnector' and 'FileStreamSink' to plugin 'org.apache.kafka.connect.file.FileStreamSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,171] INFO Added aliases 'FileStreamSourceConnector' and 'FileStreamSource' to plugin 'org.apache.kafka.connect.file.FileStreamSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,171] INFO Added aliases 'MirrorCheckpointConnector' and 'MirrorCheckpoint' to plugin 'org.apache.kafka.connect.mirror.MirrorCheckpointConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,172] INFO Added aliases 'MirrorHeartbeatConnector' and 'MirrorHeartbeat' to plugin 'org.apache.kafka.connect.mirror.MirrorHeartbeatConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,172] INFO Added aliases 'MirrorSourceConnector' and 'MirrorSource' to plugin 'org.apache.kafka.connect.mirror.MirrorSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,172] INFO Added aliases 'MockConnector' and 'Mock' to plugin 'org.apache.kafka.connect.tools.MockConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,173] INFO Added aliases 'MockSinkConnector' and 'MockSink' to plugin 'org.apache.kafka.connect.tools.MockSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,173] INFO Added aliases 'MockSourceConnector' and 'MockSource' to plugin 'org.apache.kafka.connect.tools.MockSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,173] INFO Added aliases 'SchemaSourceConnector' and 'SchemaSource' to plugin 'org.apache.kafka.connect.tools.SchemaSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,173] INFO Added aliases 'VerifiableSinkConnector' and 'VerifiableSink' to plugin 'org.apache.kafka.connect.tools.VerifiableSinkConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,174] INFO Added aliases 'VerifiableSourceConnector' and 'VerifiableSource' to plugin 'org.apache.kafka.connect.tools.VerifiableSourceConnector' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,174] INFO Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,174] INFO Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,175] INFO Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,176] INFO Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,177] INFO Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,177] INFO Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,177] INFO Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,181] INFO Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,182] INFO Added aliases 'ByteArrayConverter' and 'ByteArray' to plugin 'org.apache.kafka.connect.converters.ByteArrayConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,182] INFO Added aliases 'DoubleConverter' and 'Double' to plugin 'org.apache.kafka.connect.converters.DoubleConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,182] INFO Added aliases 'FloatConverter' and 'Float' to plugin 'org.apache.kafka.connect.converters.FloatConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,182] INFO Added aliases 'IntegerConverter' and 'Integer' to plugin 'org.apache.kafka.connect.converters.IntegerConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,183] INFO Added aliases 'LongConverter' and 'Long' to plugin 'org.apache.kafka.connect.converters.LongConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,183] INFO Added aliases 'ShortConverter' and 'Short' to plugin 'org.apache.kafka.connect.converters.ShortConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,183] INFO Added aliases 'JsonConverter' and 'Json' to plugin 'org.apache.kafka.connect.json.JsonConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,184] INFO Added alias 'SimpleHeaderConverter' to plugin 'org.apache.kafka.connect.storage.SimpleHeaderConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,184] INFO Added aliases 'StringConverter' and 'String' to plugin 'org.apache.kafka.connect.storage.StringConverter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,184] INFO Added aliases 'PredicatedTransformation' and 'Predicated' to plugin 'org.apache.kafka.connect.runtime.PredicatedTransformation' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,184] INFO Added alias 'Filter' to plugin 'org.apache.kafka.connect.transforms.Filter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,185] INFO Added alias 'RegexRouter' to plugin 'org.apache.kafka.connect.transforms.RegexRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,185] INFO Added alias 'TimestampRouter' to plugin 'org.apache.kafka.connect.transforms.TimestampRouter' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,185] INFO Added alias 'ValueToKey' to plugin 'org.apache.kafka.connect.transforms.ValueToKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,185] INFO Added alias 'HasHeaderKey' to plugin 'org.apache.kafka.connect.transforms.predicates.HasHeaderKey' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,186] INFO Added alias 'RecordIsTombstone' to plugin 'org.apache.kafka.connect.transforms.predicates.RecordIsTombstone' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,186] INFO Added alias 'TopicNameMatches' to plugin 'org.apache.kafka.connect.transforms.predicates.TopicNameMatches' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,186] INFO Added alias 'BasicAuthSecurityRestExtension' to plugin 'org.apache.kafka.connect.rest.basic.auth.extension.BasicAuthSecurityRestExtension' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:424)
[2021-06-01 10:58:51,186] INFO Added aliases 'AllConnectorClientConfigOverridePolicy' and 'All' to plugin 'org.apache.kafka.connect.connector.policy.AllConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,187] INFO Added aliases 'NoneConnectorClientConfigOverridePolicy' and 'None' to plugin 'org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,187] INFO Added aliases 'PrincipalConnectorClientConfigOverridePolicy' and 'Principal' to plugin 'org.apache.kafka.connect.connector.policy.PrincipalConnectorClientConfigOverridePolicy' (org.apache.kafka.connect.runtime.isolation.DelegatingClassLoader:427)
[2021-06-01 10:58:51,201] INFO StandaloneConfig values:
        access.control.allow.methods =
        access.control.allow.origin =
        admin.listeners = null
        bootstrap.servers = [localhost:9092]
        client.dns.lookup = use_all_dns_ips
        config.providers = []
        connector.client.config.override.policy = None
        header.converter = class org.apache.kafka.connect.storage.SimpleHeaderConverter
        internal.key.converter = class org.apache.kafka.connect.json.JsonConverter
        internal.value.converter = class org.apache.kafka.connect.json.JsonConverter
        key.converter = class org.apache.kafka.connect.json.JsonConverter
        listeners = null
        metric.reporters = []
        metrics.num.samples = 2
        metrics.recording.level = INFO
        metrics.sample.window.ms = 30000
        offset.flush.interval.ms = 10000
        offset.flush.timeout.ms = 5000
        offset.storage.file.filename = /tmp/connect.offsets
        plugin.path = [C:/kafka/plugins, C:/kafka/plugins/confluentinc-kafka-connect-elasticsearch-11.0.4]
        response.http.headers.config =
        rest.advertised.host.name = null
        rest.advertised.listener = null
        rest.advertised.port = null
        rest.extension.classes = []
        rest.host.name = null
        rest.port = 8083
        ssl.cipher.suites = null
        ssl.client.auth = none
        ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        ssl.endpoint.identification.algorithm = https
        ssl.engine.factory.class = null
        ssl.key.password = null
        ssl.keymanager.algorithm = SunX509
        ssl.keystore.certificate.chain = null
        ssl.keystore.key = null
        ssl.keystore.location = null
        ssl.keystore.password = null
        ssl.keystore.type = JKS
        ssl.protocol = TLSv1.3
        ssl.provider = null
        ssl.secure.random.implementation = null
        ssl.trustmanager.algorithm = PKIX
        ssl.truststore.certificates = null
        ssl.truststore.location = null
        ssl.truststore.password = null
        ssl.truststore.type = JKS
        task.shutdown.graceful.timeout.ms = 5000
        topic.creation.enable = true
        topic.tracking.allow.reset = true
        topic.tracking.enable = true
        value.converter = class org.apache.kafka.connect.json.JsonConverter
 (org.apache.kafka.connect.runtime.standalone.StandaloneConfig:372)
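
The connect-standalone.properties file referenced on the command line is not included in this paste. A minimal sketch consistent with the StandaloneConfig values above and with the "was supplied but isn't a known config" warnings further down (reconstructed from the log, not the actual file) would be roughly:

bootstrap.servers=localhost:9092
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
key.converter.schemas.enable=true
value.converter.schemas.enable=true
offset.storage.file.filename=/tmp/connect.offsets
offset.flush.interval.ms=10000
plugin.path=C:/kafka/plugins,C:/kafka/plugins/confluentinc-kafka-connect-elasticsearch-11.0.4
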
[2021-06-01 10:58:51,202] INFO Creating Kafka admin client (org.apache.kafka.connect.util.ConnectUtils:49)
[2021-06-01 10:58:51,208] INFO AdminClientConfig values:
        bootstrap.servers = [localhost:9092]
        client.dns.lookup = use_all_dns_ips
        client.id =
        connections.max.idle.ms = 300000
        default.api.timeout.ms = 60000
        metadata.max.age.ms = 300000
        metric.reporters = []
        metrics.num.samples = 2
        metrics.recording.level = INFO
        metrics.sample.window.ms = 30000
        receive.buffer.bytes = 65536
        reconnect.backoff.max.ms = 1000
        reconnect.backoff.ms = 50
        request.timeout.ms = 30000
        retries = 2147483647
        retry.backoff.ms = 100
        sasl.client.callback.handler.class = null
        sasl.jaas.config = null
        sasl.kerberos.kinit.cmd = /usr/bin/kinit
        sasl.kerberos.min.time.before.relogin = 60000
        sasl.kerberos.service.name = null
        sasl.kerberos.ticket.renew.jitter = 0.05
        sasl.kerberos.ticket.renew.window.factor = 0.8
        sasl.login.callback.handler.class = null
        sasl.login.class = null
        sasl.login.refresh.buffer.seconds = 300
        sasl.login.refresh.min.period.seconds = 60
        sasl.login.refresh.window.factor = 0.8
        sasl.login.refresh.window.jitter = 0.05
        sasl.mechanism = GSSAPI
        security.protocol = PLAINTEXT
        security.providers = null
        send.buffer.bytes = 131072
        socket.connection.setup.timeout.max.ms = 30000
        socket.connection.setup.timeout.ms = 10000
        ssl.cipher.suites = null
        ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        ssl.endpoint.identification.algorithm = https
        ssl.engine.factory.class = null
        ssl.key.password = null
        ssl.keymanager.algorithm = SunX509
        ssl.keystore.certificate.chain = null
        ssl.keystore.key = null
        ssl.keystore.location = null
        ssl.keystore.password = null
        ssl.keystore.type = JKS
        ssl.protocol = TLSv1.3
        ssl.provider = null
        ssl.secure.random.implementation = null
        ssl.trustmanager.algorithm = PKIX
        ssl.truststore.certificates = null
        ssl.truststore.location = null
        ssl.truststore.password = null
        ssl.truststore.type = JKS
 (org.apache.kafka.clients.admin.AdminClientConfig:372)
[2021-06-01 10:58:51,292] WARN The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,292] WARN The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,294] WARN The configuration 'offset.storage.file.filename' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,294] WARN The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,294] WARN The configuration 'plugin.path' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,294] WARN The configuration 'value.converter' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,295] WARN The configuration 'key.converter' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,295] INFO Kafka version: 2.8.0 (org.apache.kafka.common.utils.AppInfoParser:119)
[2021-06-01 10:58:51,296] INFO Kafka commitId: ebb1d6e21cc92130 (org.apache.kafka.common.utils.AppInfoParser:120)
[2021-06-01 10:58:51,296] INFO Kafka startTimeMs: 1622537931295 (org.apache.kafka.common.utils.AppInfoParser:121)
[2021-06-01 10:58:51,514] INFO Kafka cluster ID: XO_bTgfYRZeaBdWZdTGd-Q (org.apache.kafka.connect.util.ConnectUtils:65)
[2021-06-01 10:58:51,515] INFO App info kafka.admin.client for adminclient-1 unregistered (org.apache.kafka.common.utils.AppInfoParser:83)
[2021-06-01 10:58:51,521] INFO Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:659)
[2021-06-01 10:58:51,522] INFO Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:663)
[2021-06-01 10:58:51,524] INFO Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:669)
[2021-06-01 10:58:51,532] INFO Logging initialized @2817ms to org.eclipse.jetty.util.log.Slf4jLog (org.eclipse.jetty.util.log:169)
[2021-06-01 10:58:51,559] INFO Added connector for http://:8083 (org.apache.kafka.connect.runtime.rest.RestServer:132)
[2021-06-01 10:58:51,559] INFO Initializing REST server (org.apache.kafka.connect.runtime.rest.RestServer:204)
[2021-06-01 10:58:51,565] INFO jetty-9.4.39.v20210325; built: 2021-03-25T14:42:11.471Z; git: 9fc7ca5a922f2a37b84ec9dbc26a5168cee7e667; jvm 16.0.1+9-24 (org.eclipse.jetty.server.Server:375)
[2021-06-01 10:58:51,606] INFO Started http_8083@30c1da48{HTTP/1.1, (http/1.1)}{0.0.0.0:8083} (org.eclipse.jetty.server.AbstractConnector:331)
[2021-06-01 10:58:51,606] INFO Started @2890ms (org.eclipse.jetty.server.Server:415)
[2021-06-01 10:58:51,622] INFO Advertised URI: http://192.168.88.202:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:371)
[2021-06-01 10:58:51,622] INFO REST server listening at http://192.168.88.202:8083/, advertising URL http://192.168.88.202:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:219)
[2021-06-01 10:58:51,623] INFO Advertised URI: http://192.168.88.202:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:371)
[2021-06-01 10:58:51,623] INFO REST admin endpoints at http://192.168.88.202:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:220)
[2021-06-01 10:58:51,623] INFO Advertised URI: http://192.168.88.202:8083/ (org.apache.kafka.connect.runtime.rest.RestServer:371)
[2021-06-01 10:58:51,624] INFO Setting up None Policy for ConnectorClientConfigOverride. This will disallow any client configuration to be overridden (org.apache.kafka.connect.connector.policy.NoneConnectorClientConfigOverridePolicy:45)
[2021-06-01 10:58:51,628] INFO Creating Kafka admin client (org.apache.kafka.connect.util.ConnectUtils:49)
[2021-06-01 10:58:51,629] INFO AdminClientConfig values:
        bootstrap.servers = [localhost:9092]
        client.dns.lookup = use_all_dns_ips
        client.id =
        connections.max.idle.ms = 300000
        default.api.timeout.ms = 60000
        metadata.max.age.ms = 300000
        metric.reporters = []
        metrics.num.samples = 2
        metrics.recording.level = INFO
        metrics.sample.window.ms = 30000
        receive.buffer.bytes = 65536
        reconnect.backoff.max.ms = 1000
        reconnect.backoff.ms = 50
        request.timeout.ms = 30000
        retries = 2147483647
        retry.backoff.ms = 100
        sasl.client.callback.handler.class = null
        sasl.jaas.config = null
        sasl.kerberos.kinit.cmd = /usr/bin/kinit
        sasl.kerberos.min.time.before.relogin = 60000
        sasl.kerberos.service.name = null
        sasl.kerberos.ticket.renew.jitter = 0.05
        sasl.kerberos.ticket.renew.window.factor = 0.8
        sasl.login.callback.handler.class = null
        sasl.login.class = null
        sasl.login.refresh.buffer.seconds = 300
        sasl.login.refresh.min.period.seconds = 60
        sasl.login.refresh.window.factor = 0.8
        sasl.login.refresh.window.jitter = 0.05
        sasl.mechanism = GSSAPI
        security.protocol = PLAINTEXT
        security.providers = null
        send.buffer.bytes = 131072
        socket.connection.setup.timeout.max.ms = 30000
        socket.connection.setup.timeout.ms = 10000
        ssl.cipher.suites = null
        ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        ssl.endpoint.identification.algorithm = https
        ssl.engine.factory.class = null
        ssl.key.password = null
        ssl.keymanager.algorithm = SunX509
        ssl.keystore.certificate.chain = null
        ssl.keystore.key = null
        ssl.keystore.location = null
        ssl.keystore.password = null
        ssl.keystore.type = JKS
        ssl.protocol = TLSv1.3
        ssl.provider = null
        ssl.secure.random.implementation = null
        ssl.trustmanager.algorithm = PKIX
        ssl.truststore.certificates = null
        ssl.truststore.location = null
        ssl.truststore.password = null
        ssl.truststore.type = JKS
 (org.apache.kafka.clients.admin.AdminClientConfig:372)
[2021-06-01 10:58:51,639] WARN The configuration 'offset.flush.interval.ms' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,639] WARN The configuration 'key.converter.schemas.enable' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,640] WARN The configuration 'offset.storage.file.filename' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,640] WARN The configuration 'value.converter.schemas.enable' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,641] WARN The configuration 'plugin.path' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,641] WARN The configuration 'value.converter' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,641] WARN The configuration 'key.converter' was supplied but isn't a known config. (org.apache.kafka.clients.admin.AdminClientConfig:380)
[2021-06-01 10:58:51,641] INFO Kafka version: 2.8.0 (org.apache.kafka.common.utils.AppInfoParser:119)
[2021-06-01 10:58:51,642] INFO Kafka commitId: ebb1d6e21cc92130 (org.apache.kafka.common.utils.AppInfoParser:120)
[2021-06-01 10:58:51,642] INFO Kafka startTimeMs: 1622537931641 (org.apache.kafka.common.utils.AppInfoParser:121)
[2021-06-01 10:58:51,651] INFO Kafka cluster ID: XO_bTgfYRZeaBdWZdTGd-Q (org.apache.kafka.connect.util.ConnectUtils:65)
[2021-06-01 10:58:51,652] INFO App info kafka.admin.client for adminclient-2 unregistered (org.apache.kafka.common.utils.AppInfoParser:83)
[2021-06-01 10:58:51,654] INFO Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:659)
[2021-06-01 10:58:51,654] INFO Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:663)
[2021-06-01 10:58:51,655] INFO Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:669)
[2021-06-01 10:58:51,658] INFO Kafka version: 2.8.0 (org.apache.kafka.common.utils.AppInfoParser:119)
[2021-06-01 10:58:51,659] INFO Kafka commitId: ebb1d6e21cc92130 (org.apache.kafka.common.utils.AppInfoParser:120)
[2021-06-01 10:58:51,660] INFO Kafka startTimeMs: 1622537931658 (org.apache.kafka.common.utils.AppInfoParser:121)
[2021-06-01 10:58:51,736] INFO JsonConverterConfig values:
        converter.type = key
        decimal.format = BASE64
        schemas.cache.size = 1000
        schemas.enable = false
 (org.apache.kafka.connect.json.JsonConverterConfig:372)
[2021-06-01 10:58:51,738] INFO JsonConverterConfig values:
        converter.type = value
        decimal.format = BASE64
        schemas.cache.size = 1000
        schemas.enable = false
 (org.apache.kafka.connect.json.JsonConverterConfig:372)
[2021-06-01 10:58:51,746] INFO Kafka Connect standalone worker initialization took 2544ms (org.apache.kafka.connect.cli.ConnectStandalone:99)
[2021-06-01 10:58:51,746] INFO Kafka Connect starting (org.apache.kafka.connect.runtime.Connect:51)
[2021-06-01 10:58:51,748] INFO Herder starting (org.apache.kafka.connect.runtime.standalone.StandaloneHerder:94)
[2021-06-01 10:58:51,748] INFO Worker starting (org.apache.kafka.connect.runtime.Worker:195)
[2021-06-01 10:58:51,749] INFO Starting FileOffsetBackingStore with file \tmp\connect.offsets (org.apache.kafka.connect.storage.FileOffsetBackingStore:58)
[2021-06-01 10:58:51,752] INFO Worker started (org.apache.kafka.connect.runtime.Worker:202)
[2021-06-01 10:58:51,752] INFO Herder started (org.apache.kafka.connect.runtime.standalone.StandaloneHerder:97)
[2021-06-01 10:58:51,753] INFO Initializing REST resources (org.apache.kafka.connect.runtime.rest.RestServer:224)
[2021-06-01 10:58:51,780] INFO Adding admin resources to main listener (org.apache.kafka.connect.runtime.rest.RestServer:241)
[2021-06-01 10:58:51,824] INFO DefaultSessionIdManager workerName=node0 (org.eclipse.jetty.server.session:334)
[2021-06-01 10:58:51,824] INFO No SessionScavenger set, using defaults (org.eclipse.jetty.server.session:339)
[2021-06-01 10:58:51,827] INFO node0 Scavenging every 660000ms (org.eclipse.jetty.server.session:132)
Jun 01, 2021 10:58:52 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime
WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.LoggingResource will be ignored.
Jun 01, 2021 10:58:52 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime
WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource will be ignored.
Jun 01, 2021 10:58:52 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime
WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource will be ignored.
Jun 01, 2021 10:58:52 AM org.glassfish.jersey.internal.inject.Providers checkProviderRuntime
WARNING: A provider org.apache.kafka.connect.runtime.rest.resources.RootResource registered in SERVER runtime does not implement any provider interfaces applicable in the SERVER runtime. Due to constraint configuration problems the provider org.apache.kafka.connect.runtime.rest.resources.RootResource will be ignored.
Jun 01, 2021 10:58:52 AM org.glassfish.jersey.internal.Errors logErrors
WARNING: The following warnings have been detected: WARNING: The (sub)resource method listLoggers in org.apache.kafka.connect.runtime.rest.resources.LoggingResource contains empty path annotation.
WARNING: The (sub)resource method createConnector in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation.
WARNING: The (sub)resource method listConnectors in org.apache.kafka.connect.runtime.rest.resources.ConnectorsResource contains empty path annotation.
WARNING: The (sub)resource method listConnectorPlugins in org.apache.kafka.connect.runtime.rest.resources.ConnectorPluginsResource contains empty path annotation.
WARNING: The (sub)resource method serverInfo in org.apache.kafka.connect.runtime.rest.resources.RootResource contains empty path annotation.

[2021-06-01 10:58:52,165] INFO Started o.e.j.s.ServletContextHandler@a6c54c3{/,null,AVAILABLE} (org.eclipse.jetty.server.handler.ContextHandler:916)
[2021-06-01 10:58:52,165] INFO REST resources initialized; server is started and ready to handle requests (org.apache.kafka.connect.runtime.rest.RestServer:319)
[2021-06-01 10:58:52,165] INFO Kafka Connect started (org.apache.kafka.connect.runtime.Connect:57)
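
At this point the worker's REST interface is up on port 8083 (advertised as http://192.168.88.202:8083/ above), so the standard Kafka Connect REST endpoints can be used to check the connector and its task once they are created below, for example:

curl http://192.168.88.202:8083/connectors
curl http://192.168.88.202:8083/connectors/elasticsearch-sink/status
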
[2021-06-01 10:58:52,180] INFO ElasticsearchSinkConnectorConfig values:
        batch.size = 2000
        behavior.on.malformed.documents = WARN
        behavior.on.null.values = FAIL
        compact.map.entries = true
        connection.compression = false
        connection.password = [hidden]
        connection.timeout.ms = 1000
        connection.url = [https://abcd.eu-west-1.es.amazonaws.com:443]
        connection.username = username_xxx
        drop.invalid.message = true
        elastic.https.ssl.cipher.suites = null
        elastic.https.ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        elastic.https.ssl.endpoint.identification.algorithm = https
        elastic.https.ssl.engine.factory.class = null
        elastic.https.ssl.key.password = null
        elastic.https.ssl.keymanager.algorithm = SunX509
        elastic.https.ssl.keystore.certificate.chain = null
        elastic.https.ssl.keystore.key = null
        elastic.https.ssl.keystore.location = null
        elastic.https.ssl.keystore.password = null
        elastic.https.ssl.keystore.type = JKS
        elastic.https.ssl.protocol = TLSv1.3
        elastic.https.ssl.provider = null
        elastic.https.ssl.secure.random.implementation = null
        elastic.https.ssl.trustmanager.algorithm = PKIX
        elastic.https.ssl.truststore.certificates = null
        elastic.https.ssl.truststore.location = null
        elastic.https.ssl.truststore.password = null
        elastic.https.ssl.truststore.type = JKS
        elastic.security.protocol = PLAINTEXT
        flush.timeout.ms = 180000
        kerberos.keytab.path = null
        kerberos.user.principal = null
        key.ignore = true
        linger.ms = 1
        max.buffered.records = 20000
        max.connection.idle.time.ms = 60000
        max.in.flight.requests = 5
        max.retries = 1000
        proxy.host =
        proxy.password = null
        proxy.port = 8080
        proxy.username =
        read.timeout.ms = 15000
        retry.backoff.ms = 100
        schema.ignore = false
        topic.key.ignore = []
        topic.schema.ignore = []
        write.method = INSERT
 (io.confluent.connect.elasticsearch.ElasticsearchSinkConnectorConfig:372)
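
The absa-elasticsearch.properties connector file passed on the command line is likewise not reproduced in this paste; a sketch consistent with the ElasticsearchSinkConnectorConfig values above and the SinkConnectorConfig values below (reconstructed from the logged non-default settings, with the URL and credentials kept as the placeholders shown in the log) would be roughly:

name=elasticsearch-sink
connector.class=io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
tasks.max=1
topics=quickstart-events
connection.url=https://abcd.eu-west-1.es.amazonaws.com:443
connection.username=username_xxx
connection.password=<password>
key.ignore=true
drop.invalid.message=true
behavior.on.malformed.documents=WARN
transforms=TimestampRouter
transforms.TimestampRouter.type=org.apache.kafka.connect.transforms.TimestampRouter
transforms.TimestampRouter.timestamp.format=YYYY.MM.dd
transforms.TimestampRouter.topic.format=logstash-test-${timestamp}
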
[2021-06-01 10:58:52,316] INFO Using unsecured connection to [https://abcd.eu-west-1.es.amazonaws.com:443]. (io.confluent.connect.elasticsearch.ConfigCallbackHandler:112)
[2021-06-01 10:58:54,247] INFO AbstractConfig values:
 (org.apache.kafka.common.config.AbstractConfig:372)
[2021-06-01 10:58:54,255] INFO Creating connector elasticsearch-sink of type io.confluent.connect.elasticsearch.ElasticsearchSinkConnector (org.apache.kafka.connect.runtime.Worker:274)
[2021-06-01 10:58:54,255] INFO SinkConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.deadletterqueue.context.headers.enable = false
        errors.deadletterqueue.topic.name =
        errors.deadletterqueue.topic.replication.factor = 3
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        topics = [quickstart-events]
        topics.regex =
        transforms = [TimestampRouter]
        value.converter = null
 (org.apache.kafka.connect.runtime.SinkConnectorConfig:372)
[2021-06-01 10:58:54,258] INFO EnrichedConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.deadletterqueue.context.headers.enable = false
        errors.deadletterqueue.topic.name =
        errors.deadletterqueue.topic.replication.factor = 3
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        topics = [quickstart-events]
        topics.regex =
        transforms = [TimestampRouter]
        transforms.TimestampRouter.negate = false
        transforms.TimestampRouter.predicate =
        transforms.TimestampRouter.timestamp.format = YYYY.MM.dd
        transforms.TimestampRouter.topic.format = logstash-test-${timestamp}
        transforms.TimestampRouter.type = class org.apache.kafka.connect.transforms.TimestampRouter
        value.converter = null
 (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:372)
[2021-06-01 10:58:54,265] INFO Instantiated connector elasticsearch-sink with version 11.0.4 of type class io.confluent.connect.elasticsearch.ElasticsearchSinkConnector (org.apache.kafka.connect.runtime.Worker:284)
[2021-06-01 10:58:54,266] INFO Finished creating connector elasticsearch-sink (org.apache.kafka.connect.runtime.Worker:310)
[2021-06-01 10:58:54,267] INFO ElasticsearchSinkConnectorConfig values:
        batch.size = 2000
        behavior.on.malformed.documents = WARN
        behavior.on.null.values = FAIL
        compact.map.entries = true
        connection.compression = false
        connection.password = [hidden]
        connection.timeout.ms = 1000
        connection.url = [https://abcd.eu-west-1.es.amazonaws.com:443]
        connection.username = master
        drop.invalid.message = true
        elastic.https.ssl.cipher.suites = null
        elastic.https.ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        elastic.https.ssl.endpoint.identification.algorithm = https
        elastic.https.ssl.engine.factory.class = null
        elastic.https.ssl.key.password = null
        elastic.https.ssl.keymanager.algorithm = SunX509
        elastic.https.ssl.keystore.certificate.chain = null
        elastic.https.ssl.keystore.key = null
        elastic.https.ssl.keystore.location = null
        elastic.https.ssl.keystore.password = null
        elastic.https.ssl.keystore.type = JKS
        elastic.https.ssl.protocol = TLSv1.3
        elastic.https.ssl.provider = null
        elastic.https.ssl.secure.random.implementation = null
        elastic.https.ssl.trustmanager.algorithm = PKIX
        elastic.https.ssl.truststore.certificates = null
        elastic.https.ssl.truststore.location = null
        elastic.https.ssl.truststore.password = null
        elastic.https.ssl.truststore.type = JKS
        elastic.security.protocol = PLAINTEXT
        flush.timeout.ms = 180000
        kerberos.keytab.path = null
        kerberos.user.principal = null
        key.ignore = true
        linger.ms = 1
        max.buffered.records = 20000
        max.connection.idle.time.ms = 60000
        max.in.flight.requests = 5
        max.retries = 1000
        proxy.host =
        proxy.password = null
        proxy.port = 8080
        proxy.username =
        read.timeout.ms = 15000
        retry.backoff.ms = 100
        schema.ignore = false
        topic.key.ignore = []
        topic.schema.ignore = []
        write.method = INSERT
 (io.confluent.connect.elasticsearch.ElasticsearchSinkConnectorConfig:372)
[2021-06-01 10:58:54,269] INFO SinkConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.deadletterqueue.context.headers.enable = false
        errors.deadletterqueue.topic.name =
        errors.deadletterqueue.topic.replication.factor = 3
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        topics = [quickstart-events]
        topics.regex =
        transforms = [TimestampRouter]
        value.converter = null
 (org.apache.kafka.connect.runtime.SinkConnectorConfig:372)
[2021-06-01 10:58:54,270] INFO EnrichedConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.deadletterqueue.context.headers.enable = false
        errors.deadletterqueue.topic.name =
        errors.deadletterqueue.topic.replication.factor = 3
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        topics = [quickstart-events]
        topics.regex =
        transforms = [TimestampRouter]
        transforms.TimestampRouter.negate = false
        transforms.TimestampRouter.predicate =
        transforms.TimestampRouter.timestamp.format = YYYY.MM.dd
        transforms.TimestampRouter.topic.format = logstash-test-${timestamp}
        transforms.TimestampRouter.type = class org.apache.kafka.connect.transforms.TimestampRouter
        value.converter = null
 (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:372)
[2021-06-01 10:58:54,271] INFO Creating task elasticsearch-sink-0 (org.apache.kafka.connect.runtime.Worker:509)
[2021-06-01 10:58:54,273] INFO ConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        transforms = [TimestampRouter]
        value.converter = null
 (org.apache.kafka.connect.runtime.ConnectorConfig:372)
[2021-06-01 10:58:54,276] INFO EnrichedConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        transforms = [TimestampRouter]
        transforms.TimestampRouter.negate = false
        transforms.TimestampRouter.predicate =
        transforms.TimestampRouter.timestamp.format = YYYY.MM.dd
        transforms.TimestampRouter.topic.format = logstash-test-${timestamp}
        transforms.TimestampRouter.type = class org.apache.kafka.connect.transforms.TimestampRouter
        value.converter = null
 (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:372)
[2021-06-01 10:58:54,277] INFO TaskConfig values:
        task.class = class io.confluent.connect.elasticsearch.ElasticsearchSinkTask
 (org.apache.kafka.connect.runtime.TaskConfig:372)
[2021-06-01 10:58:54,277] INFO Instantiated task elasticsearch-sink-0 with version 11.0.4 of type io.confluent.connect.elasticsearch.ElasticsearchSinkTask (org.apache.kafka.connect.runtime.Worker:524)
[2021-06-01 10:58:54,278] INFO JsonConverterConfig values:
        converter.type = key
        decimal.format = BASE64
        schemas.cache.size = 1000
        schemas.enable = true
 (org.apache.kafka.connect.json.JsonConverterConfig:372)
[2021-06-01 10:58:54,278] INFO Set up the key converter class org.apache.kafka.connect.json.JsonConverter for task elasticsearch-sink-0 using the worker config (org.apache.kafka.connect.runtime.Worker:537)
[2021-06-01 10:58:54,278] INFO JsonConverterConfig values:
        converter.type = value
        decimal.format = BASE64
        schemas.cache.size = 1000
        schemas.enable = true
 (org.apache.kafka.connect.json.JsonConverterConfig:372)
[2021-06-01 10:58:54,279] INFO Set up the value converter class org.apache.kafka.connect.json.JsonConverter for task elasticsearch-sink-0 using the worker config (org.apache.kafka.connect.runtime.Worker:543)
[2021-06-01 10:58:54,279] INFO Set up the header converter class org.apache.kafka.connect.storage.SimpleHeaderConverter for task elasticsearch-sink-0 using the worker config (org.apache.kafka.connect.runtime.Worker:550)
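
Since the task's key and value converters are org.apache.kafka.connect.json.JsonConverter with schemas.enable = true (see the JsonConverterConfig blocks just above), messages produced to quickstart-events need to carry JsonConverter's schema/payload envelope to deserialize cleanly. A hypothetical example record value (field names are illustrative only) would look like:

{"schema":{"type":"struct","optional":false,"name":"example","fields":[{"field":"message","type":"string","optional":false}]},"payload":{"message":"hello"}}
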
[2021-06-01 10:58:54,282] INFO Initializing: org.apache.kafka.connect.runtime.TransformationChain{org.apache.kafka.connect.transforms.TimestampRouter} (org.apache.kafka.connect.runtime.Worker:632)
[2021-06-01 10:58:54,282] INFO SinkConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.deadletterqueue.context.headers.enable = false
        errors.deadletterqueue.topic.name =
        errors.deadletterqueue.topic.replication.factor = 3
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        topics = [quickstart-events]
        topics.regex =
        transforms = [TimestampRouter]
        value.converter = null
 (org.apache.kafka.connect.runtime.SinkConnectorConfig:372)
[2021-06-01 10:58:54,283] INFO EnrichedConnectorConfig values:
        config.action.reload = restart
        connector.class = io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
        errors.deadletterqueue.context.headers.enable = false
        errors.deadletterqueue.topic.name =
        errors.deadletterqueue.topic.replication.factor = 3
        errors.log.enable = false
        errors.log.include.messages = false
        errors.retry.delay.max.ms = 60000
        errors.retry.timeout = 0
        errors.tolerance = none
        header.converter = null
        key.converter = null
        name = elasticsearch-sink
        predicates = []
        tasks.max = 1
        topics = [quickstart-events]
        topics.regex =
        transforms = [TimestampRouter]
        transforms.TimestampRouter.negate = false
        transforms.TimestampRouter.predicate =
        transforms.TimestampRouter.timestamp.format = YYYY.MM.dd
        transforms.TimestampRouter.topic.format = logstash-test-${timestamp}
        transforms.TimestampRouter.type = class org.apache.kafka.connect.transforms.TimestampRouter
        value.converter = null
 (org.apache.kafka.connect.runtime.ConnectorConfig$EnrichedConnectorConfig:372)
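For reference, the sink connector properties file passed on the command line (absa-elasticsearch.properties) most likely contains entries along these lines; this is a reconstruction from the logged values above, not the actual file, and the converters are left to the worker defaults, which is why key.converter and value.converter are reported as null:

name=elasticsearch-sink
connector.class=io.confluent.connect.elasticsearch.ElasticsearchSinkConnector
tasks.max=1
topics=quickstart-events
transforms=TimestampRouter
transforms.TimestampRouter.type=org.apache.kafka.connect.transforms.TimestampRouter
transforms.TimestampRouter.timestamp.format=YYYY.MM.dd
transforms.TimestampRouter.topic.format=logstash-test-${timestamp}

The TimestampRouter transform rewrites each record's topic into a dated name, which the sink then uses as the Elasticsearch index (logstash-test-2021.06.01 later in this run). One caveat: TimestampRouter formats the date with java.text.SimpleDateFormat, where YYYY is the week-based year; yyyy.MM.dd is usually the intended pattern, although the two only differ around the turn of the year.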
[2021-06-01 10:58:54,289] INFO ConsumerConfig values:
        allow.auto.create.topics = true
        auto.commit.interval.ms = 5000
        auto.offset.reset = earliest
        bootstrap.servers = [localhost:9092]
        check.crcs = true
        client.dns.lookup = use_all_dns_ips
        client.id = connector-consumer-elasticsearch-sink-0
        client.rack =
        connections.max.idle.ms = 540000
        default.api.timeout.ms = 60000
        enable.auto.commit = false
        exclude.internal.topics = true
        fetch.max.bytes = 52428800
        fetch.max.wait.ms = 500
        fetch.min.bytes = 1
        group.id = connect-elasticsearch-sink
        group.instance.id = null
        heartbeat.interval.ms = 3000
        interceptor.classes = []
        internal.leave.group.on.close = true
        internal.throw.on.fetch.stable.offset.unsupported = false
        isolation.level = read_uncommitted
        key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
        max.partition.fetch.bytes = 1048576
        max.poll.interval.ms = 300000
        max.poll.records = 500
        metadata.max.age.ms = 300000
        metric.reporters = []
        metrics.num.samples = 2
        metrics.recording.level = INFO
        metrics.sample.window.ms = 30000
        partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
        receive.buffer.bytes = 65536
        reconnect.backoff.max.ms = 1000
        reconnect.backoff.ms = 50
        request.timeout.ms = 30000
        retry.backoff.ms = 100
        sasl.client.callback.handler.class = null
        sasl.jaas.config = null
        sasl.kerberos.kinit.cmd = /usr/bin/kinit
        sasl.kerberos.min.time.before.relogin = 60000
        sasl.kerberos.service.name = null
        sasl.kerberos.ticket.renew.jitter = 0.05
        sasl.kerberos.ticket.renew.window.factor = 0.8
        sasl.login.callback.handler.class = null
        sasl.login.class = null
        sasl.login.refresh.buffer.seconds = 300
        sasl.login.refresh.min.period.seconds = 60
        sasl.login.refresh.window.factor = 0.8
        sasl.login.refresh.window.jitter = 0.05
        sasl.mechanism = GSSAPI
        security.protocol = PLAINTEXT
        security.providers = null
        send.buffer.bytes = 131072
        session.timeout.ms = 10000
        socket.connection.setup.timeout.max.ms = 30000
        socket.connection.setup.timeout.ms = 10000
        ssl.cipher.suites = null
        ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        ssl.endpoint.identification.algorithm = https
        ssl.engine.factory.class = null
        ssl.key.password = null
        ssl.keymanager.algorithm = SunX509
        ssl.keystore.certificate.chain = null
        ssl.keystore.key = null
        ssl.keystore.location = null
        ssl.keystore.password = null
        ssl.keystore.type = JKS
        ssl.protocol = TLSv1.3
        ssl.provider = null
        ssl.secure.random.implementation = null
        ssl.trustmanager.algorithm = PKIX
        ssl.truststore.certificates = null
        ssl.truststore.location = null
        ssl.truststore.password = null
        ssl.truststore.type = JKS
        value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
 (org.apache.kafka.clients.consumer.ConsumerConfig:372)
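This consumer is created by the Connect framework for the sink task; values such as group.id=connect-elasticsearch-sink, client.id=connector-consumer-elasticsearch-sink-0 and enable.auto.commit=false are set by Connect itself, and the rest come from worker defaults. If any of them need tuning (for example max.poll.records), the usual place is the worker file with a consumer. prefix; a hedged example, assuming connect-standalone.properties is the worker file in use and the values are illustrative only:

# in C:/kafka/config/connect-standalone.properties (illustrative values)
consumer.max.poll.records=100
consumer.request.timeout.ms=60000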
[2021-06-01 10:58:54,318] WARN The configuration 'metrics.context.connect.kafka.cluster.id' was supplied but isn't a known config. (org.apache.kafka.clients.consumer.ConsumerConfig:380)
[2021-06-01 10:58:54,319] INFO Kafka version: 2.8.0 (org.apache.kafka.common.utils.AppInfoParser:119)
[2021-06-01 10:58:54,320] INFO Kafka commitId: ebb1d6e21cc92130 (org.apache.kafka.common.utils.AppInfoParser:120)
[2021-06-01 10:58:54,320] INFO Kafka startTimeMs: 1622537934318 (org.apache.kafka.common.utils.AppInfoParser:121)
[2021-06-01 10:58:54,326] INFO Created connector elasticsearch-sink (org.apache.kafka.connect.cli.ConnectStandalone:109)
[2021-06-01 10:58:54,327] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Subscribed to topic(s): quickstart-events (org.apache.kafka.clients.consumer.KafkaConsumer:965)
[2021-06-01 10:58:54,327] INFO Starting ElasticsearchSinkTask. (io.confluent.connect.elasticsearch.ElasticsearchSinkTask:53)
[2021-06-01 10:58:54,327] INFO ElasticsearchSinkConnectorConfig values:
        batch.size = 2000
        behavior.on.malformed.documents = WARN
        behavior.on.null.values = FAIL
        compact.map.entries = true
        connection.compression = false
        connection.password = [hidden]
        connection.timeout.ms = 1000
        connection.url = [https://abcd.eu-west-1.es.amazonaws.com:443]
        connection.username = master
        drop.invalid.message = true
        elastic.https.ssl.cipher.suites = null
        elastic.https.ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        elastic.https.ssl.endpoint.identification.algorithm = https
        elastic.https.ssl.engine.factory.class = null
        elastic.https.ssl.key.password = null
        elastic.https.ssl.keymanager.algorithm = SunX509
        elastic.https.ssl.keystore.certificate.chain = null
        elastic.https.ssl.keystore.key = null
        elastic.https.ssl.keystore.location = null
        elastic.https.ssl.keystore.password = null
        elastic.https.ssl.keystore.type = JKS
        elastic.https.ssl.protocol = TLSv1.3
        elastic.https.ssl.provider = null
        elastic.https.ssl.secure.random.implementation = null
        elastic.https.ssl.trustmanager.algorithm = PKIX
        elastic.https.ssl.truststore.certificates = null
        elastic.https.ssl.truststore.location = null
        elastic.https.ssl.truststore.password = null
        elastic.https.ssl.truststore.type = JKS
        elastic.security.protocol = PLAINTEXT
        flush.timeout.ms = 180000
        kerberos.keytab.path = null
        kerberos.user.principal = null
        key.ignore = true
        linger.ms = 1
        max.buffered.records = 20000
        max.connection.idle.time.ms = 60000
        max.in.flight.requests = 5
        max.retries = 1000
        proxy.host =
        proxy.password = null
        proxy.port = 8080
        proxy.username =
        read.timeout.ms = 15000
        retry.backoff.ms = 100
        schema.ignore = false
        topic.key.ignore = []
        topic.schema.ignore = []
        write.method = INSERT
 (io.confluent.connect.elasticsearch.ElasticsearchSinkConnectorConfig:372)
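The settings that matter most for what follows are the connection and timeout values: the sink writes to an AWS-hosted Elasticsearch endpoint over HTTPS with basic auth, waits at most read.timeout.ms=15000 for each bulk response, and sends up to batch.size=2000 records per bulk request. A sketch of the matching entries in the connector properties file (reconstructed from the dump; which of these were actually set in the file and which are connector defaults is not visible here):

connection.url=https://abcd.eu-west-1.es.amazonaws.com:443
connection.username=master
# connection.password is logged as [hidden]; supply the real value in the file
key.ignore=true
schema.ignore=false
behavior.on.malformed.documents=WARN
drop.invalid.message=true
batch.size=2000
connection.timeout.ms=1000
read.timeout.ms=15000
max.retries=1000
retry.backoff.ms=100
flush.timeout.ms=180000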
[2021-06-01 10:58:54,329] INFO JsonConverterConfig values:
        converter.type = value
        decimal.format = BASE64
        schemas.cache.size = 1000
        schemas.enable = false
 (org.apache.kafka.connect.json.JsonConverterConfig:372)
[2021-06-01 10:58:54,331] INFO Errant record reporter not configured. (io.confluent.connect.elasticsearch.ElasticsearchSinkTask:63)
[2021-06-01 10:58:54,340] INFO Using unsecured connection to [https://abcd.eu-west-1.es.amazonaws.com:443]. (io.confluent.connect.elasticsearch.ConfigCallbackHandler:112)
ERROR StatusLogger Log4j2 could not find a logging implementation. Please add log4j-core to the classpath. Using SimpleLogger to log to the console...
[2021-06-01 10:58:54,398] INFO Started ElasticsearchSinkTask. (io.confluent.connect.elasticsearch.ElasticsearchSinkTask:75)
[2021-06-01 10:58:54,398] INFO WorkerSinkTask{id=elasticsearch-sink-0} Sink task finished initialization and start (org.apache.kafka.connect.runtime.WorkerSinkTask:309)
[2021-06-01 10:58:54,411] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Cluster ID: XO_bTgfYRZeaBdWZdTGd-Q (org.apache.kafka.clients.Metadata:279)
[2021-06-01 10:58:54,412] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Discovered group coordinator MYPCHOSTNAME.comapy.domain.com:9092 (id: 2147483647 rack: null) (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:848)
[2021-06-01 10:58:54,414] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:538)
[2021-06-01 10:58:54,427] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] (Re-)joining group (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:538)
[2021-06-01 10:58:54,432] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Successfully joined group with generation Generation{generationId=36, memberId='connector-consumer-elasticsearch-sink-0-6dc85f42-a5fc-4cce-8324-8a75cc01f3d2', protocol='range'} (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:594)
[2021-06-01 10:58:54,434] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Finished assignment for group at generation 36: {connector-consumer-elasticsearch-sink-0-6dc85f42-a5fc-4cce-8324-8a75cc01f3d2=Assignment(partitions=[quickstart-events-0])} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:626)
[2021-06-01 10:58:54,443] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Successfully synced group in generation Generation{generationId=36, memberId='connector-consumer-elasticsearch-sink-0-6dc85f42-a5fc-4cce-8324-8a75cc01f3d2', protocol='range'} (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:758)
[2021-06-01 10:58:54,444] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Notifying assignor about the new Assignment(partitions=[quickstart-events-0]) (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:276)
[2021-06-01 10:58:54,445] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Adding newly assigned partitions: quickstart-events-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:288)
[2021-06-01 10:58:54,452] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Setting offset for partition quickstart-events-0 to the committed offset FetchPosition{offset=1342038, offsetEpoch=Optional.empty, currentLeader=LeaderAndEpoch{leader=Optional[MYPCHOSTNAME.comapy.domain.com:9092 (id: 0 rack: null)], epoch=0}} (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:820)
[2021-06-01 10:58:54,538] INFO Creating index logstash-test-2021.06.01. (io.confluent.connect.elasticsearch.ElasticsearchSinkTask:162)
[2021-06-01 10:59:24,894] WARN Bulk request 41 failed. Retrying request. (io.confluent.connect.elasticsearch.ElasticsearchClient:335)
org.apache.http.ConnectionClosedException: Connection is closed
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.endOfInput(HttpAsyncRequestExecutor.java:356)
        at org.apache.http.impl.nio.DefaultNHttpClientConnection.consumeInput(DefaultNHttpClientConnection.java:261)
        at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:81)
        at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.inputReady(AbstractIODispatch.java:121)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.readable(BaseIOReactor.java:162)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvent(AbstractIOReactor.java:337)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvents(AbstractIOReactor.java:315)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:276)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104)
        at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:591)
        at java.base/java.lang.Thread.run(Thread.java:831)
[2021-06-01 10:59:40,663] WARN Bulk request 42 failed. Retrying request. (io.confluent.connect.elasticsearch.ElasticsearchClient:335)
java.net.SocketTimeoutException: 15,000 milliseconds timeout on connection http-outgoing-6 [ACTIVE]
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.timeout(HttpAsyncRequestExecutor.java:387)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:92)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.timeout(AbstractIODispatch.java:175)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.sessionTimedOut(BaseIOReactor.java:263)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.timeoutCheck(AbstractIOReactor.java:492)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.validate(BaseIOReactor.java:213)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:280)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104)
        at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:591)
        at java.base/java.lang.Thread.run(Thread.java:831)
[2021-06-01 10:59:41,680] WARN Bulk request 43 failed. Retrying request. (io.confluent.connect.elasticsearch.ElasticsearchClient:335)
java.net.SocketTimeoutException: 15,000 milliseconds timeout on connection http-outgoing-7 [ACTIVE]
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.timeout(HttpAsyncRequestExecutor.java:387)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:92)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.timeout(AbstractIODispatch.java:175)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.sessionTimedOut(BaseIOReactor.java:263)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.timeoutCheck(AbstractIOReactor.java:492)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.validate(BaseIOReactor.java:213)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:280)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104)
        at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:591)
        at java.base/java.lang.Thread.run(Thread.java:831)
[2021-06-01 10:59:42,689] WARN Bulk request 44 failed. Retrying request. (io.confluent.connect.elasticsearch.ElasticsearchClient:335)
java.net.SocketTimeoutException: 15,000 milliseconds timeout on connection http-outgoing-8 [ACTIVE]
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.timeout(HttpAsyncRequestExecutor.java:387)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:92)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.timeout(AbstractIODispatch.java:175)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.sessionTimedOut(BaseIOReactor.java:263)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.timeoutCheck(AbstractIOReactor.java:492)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.validate(BaseIOReactor.java:213)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:280)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104)
        at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:591)
        at java.base/java.lang.Thread.run(Thread.java:831)
[2021-06-01 10:59:59,781] ERROR WorkerSinkTask{id=elasticsearch-sink-0} Task threw an uncaught and unrecoverable exception. Task is being killed and will not recover until manually restarted. Error: Bulk request failed. (org.apache.kafka.connect.runtime.WorkerSinkTask:607)
org.apache.kafka.connect.errors.ConnectException: Bulk request failed.
        at io.confluent.connect.elasticsearch.ElasticsearchClient$1.afterBulk(ElasticsearchClient.java:346)
        at org.elasticsearch.action.bulk.BulkRequestHandler$1.onFailure(BulkRequestHandler.java:76)
        at org.elasticsearch.action.bulk.Retry$RetryHandler.onFailure(Retry.java:122)
        at org.elasticsearch.client.RestHighLevelClient$1.onFailure(RestHighLevelClient.java:1603)
        at org.elasticsearch.client.RestClient$FailureTrackingResponseListener.onDefinitiveFailure(RestClient.java:580)
        at org.elasticsearch.client.RestClient$1.failed(RestClient.java:330)
        at org.apache.http.concurrent.BasicFuture.failed(BasicFuture.java:137)
        at org.apache.http.impl.nio.client.DefaultClientExchangeHandlerImpl.executionFailed(DefaultClientExchangeHandlerImpl.java:101)
        at org.apache.http.impl.nio.client.AbstractClientExchangeHandler.failed(AbstractClientExchangeHandler.java:426)
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.timeout(HttpAsyncRequestExecutor.java:387)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:92)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.timeout(AbstractIODispatch.java:175)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.sessionTimedOut(BaseIOReactor.java:263)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.timeoutCheck(AbstractIOReactor.java:492)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.validate(BaseIOReactor.java:213)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:280)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104)
        at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:591)
        at java.base/java.lang.Thread.run(Thread.java:831)
Caused by: java.lang.RuntimeException: error while performing request
        at org.elasticsearch.client.RestClient.extractAndWrapCause(RestClient.java:814)
        at org.elasticsearch.client.RestClient.performRequest(RestClient.java:225)
        at org.elasticsearch.client.RestClient.performRequest(RestClient.java:212)
        at org.elasticsearch.client.RestHighLevelClient.internalPerformRequest(RestHighLevelClient.java:1433)
        at org.elasticsearch.client.RestHighLevelClient.performRequest(RestHighLevelClient.java:1403)
        at org.elasticsearch.client.RestHighLevelClient.performRequestAndParseEntity(RestHighLevelClient.java:1373)
        at org.elasticsearch.client.RestHighLevelClient.bulk(RestHighLevelClient.java:477)
        at io.confluent.connect.elasticsearch.ElasticsearchClient$1.lambda$afterBulk$0(ElasticsearchClient.java:341)
        at io.confluent.connect.elasticsearch.RetryUtil.callWithRetries(RetryUtil.java:161)
        at io.confluent.connect.elasticsearch.RetryUtil.callWithRetries(RetryUtil.java:120)
        at io.confluent.connect.elasticsearch.ElasticsearchClient.callWithRetries(ElasticsearchClient.java:364)
        at io.confluent.connect.elasticsearch.ElasticsearchClient.access$500(ElasticsearchClient.java:65)
        at io.confluent.connect.elasticsearch.ElasticsearchClient$1.afterBulk(ElasticsearchClient.java:339)
        ... 19 more
Caused by: java.util.concurrent.TimeoutException: Connection lease request time out
        at org.apache.http.nio.pool.AbstractNIOConnPool.processPendingRequest(AbstractNIOConnPool.java:411)
        at org.apache.http.nio.pool.AbstractNIOConnPool.processNextPendingRequest(AbstractNIOConnPool.java:391)
        at org.apache.http.nio.pool.AbstractNIOConnPool.release(AbstractNIOConnPool.java:355)
        at org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager.releaseConnection(PoolingNHttpClientConnectionManager.java:391)
        at org.apache.http.impl.nio.client.AbstractClientExchangeHandler.releaseConnection(AbstractClientExchangeHandler.java:245)
        at org.apache.http.impl.nio.client.MainClientExec.responseCompleted(MainClientExec.java:387)
        at org.apache.http.impl.nio.client.DefaultClientExchangeHandlerImpl.responseCompleted(DefaultClientExchangeHandlerImpl.java:172)
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.processResponse(HttpAsyncRequestExecutor.java:448)
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.inputReady(HttpAsyncRequestExecutor.java:338)
        at org.apache.http.impl.nio.DefaultNHttpClientConnection.consumeInput(DefaultNHttpClientConnection.java:265)
        at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:81)
        at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.inputReady(AbstractIODispatch.java:121)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.readable(BaseIOReactor.java:162)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvent(AbstractIOReactor.java:337)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvents(AbstractIOReactor.java:315)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:276)
        ... 3 more
[2021-06-01 10:59:59,788] ERROR WorkerSinkTask{id=elasticsearch-sink-0} Task threw an uncaught and unrecoverable exception. Task is being killed and will not recover until manually restarted (org.apache.kafka.connect.runtime.WorkerTask:184)
org.apache.kafka.connect.errors.ConnectException: Exiting WorkerSinkTask due to unrecoverable exception.
        at org.apache.kafka.connect.runtime.WorkerSinkTask.deliverMessages(WorkerSinkTask.java:609)
        at org.apache.kafka.connect.runtime.WorkerSinkTask.poll(WorkerSinkTask.java:329)
        at org.apache.kafka.connect.runtime.WorkerSinkTask.iteration(WorkerSinkTask.java:232)
        at org.apache.kafka.connect.runtime.WorkerSinkTask.execute(WorkerSinkTask.java:201)
        at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:182)
        at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:231)
        at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
        at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
        at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:630)
        at java.base/java.lang.Thread.run(Thread.java:831)
Caused by: org.apache.kafka.connect.errors.ConnectException: Bulk request failed.
        at io.confluent.connect.elasticsearch.ElasticsearchClient$1.afterBulk(ElasticsearchClient.java:346)
        at org.elasticsearch.action.bulk.BulkRequestHandler$1.onFailure(BulkRequestHandler.java:76)
        at org.elasticsearch.action.bulk.Retry$RetryHandler.onFailure(Retry.java:122)
        at org.elasticsearch.client.RestHighLevelClient$1.onFailure(RestHighLevelClient.java:1603)
        at org.elasticsearch.client.RestClient$FailureTrackingResponseListener.onDefinitiveFailure(RestClient.java:580)
        at org.elasticsearch.client.RestClient$1.failed(RestClient.java:330)
        at org.apache.http.concurrent.BasicFuture.failed(BasicFuture.java:137)
        at org.apache.http.impl.nio.client.DefaultClientExchangeHandlerImpl.executionFailed(DefaultClientExchangeHandlerImpl.java:101)
        at org.apache.http.impl.nio.client.AbstractClientExchangeHandler.failed(AbstractClientExchangeHandler.java:426)
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.timeout(HttpAsyncRequestExecutor.java:387)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:92)
        at org.apache.http.impl.nio.client.InternalIODispatch.onTimeout(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.timeout(AbstractIODispatch.java:175)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.sessionTimedOut(BaseIOReactor.java:263)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.timeoutCheck(AbstractIOReactor.java:492)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.validate(BaseIOReactor.java:213)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:280)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.execute(BaseIOReactor.java:104)
        at org.apache.http.impl.nio.reactor.AbstractMultiworkerIOReactor$Worker.run(AbstractMultiworkerIOReactor.java:591)
        ... 1 more
Caused by: java.lang.RuntimeException: error while performing request
        at org.elasticsearch.client.RestClient.extractAndWrapCause(RestClient.java:814)
        at org.elasticsearch.client.RestClient.performRequest(RestClient.java:225)
        at org.elasticsearch.client.RestClient.performRequest(RestClient.java:212)
        at org.elasticsearch.client.RestHighLevelClient.internalPerformRequest(RestHighLevelClient.java:1433)
        at org.elasticsearch.client.RestHighLevelClient.performRequest(RestHighLevelClient.java:1403)
        at org.elasticsearch.client.RestHighLevelClient.performRequestAndParseEntity(RestHighLevelClient.java:1373)
        at org.elasticsearch.client.RestHighLevelClient.bulk(RestHighLevelClient.java:477)
        at io.confluent.connect.elasticsearch.ElasticsearchClient$1.lambda$afterBulk$0(ElasticsearchClient.java:341)
        at io.confluent.connect.elasticsearch.RetryUtil.callWithRetries(RetryUtil.java:161)
        at io.confluent.connect.elasticsearch.RetryUtil.callWithRetries(RetryUtil.java:120)
        at io.confluent.connect.elasticsearch.ElasticsearchClient.callWithRetries(ElasticsearchClient.java:364)
        at io.confluent.connect.elasticsearch.ElasticsearchClient.access$500(ElasticsearchClient.java:65)
        at io.confluent.connect.elasticsearch.ElasticsearchClient$1.afterBulk(ElasticsearchClient.java:339)
        ... 19 more
Caused by: java.util.concurrent.TimeoutException: Connection lease request time out
        at org.apache.http.nio.pool.AbstractNIOConnPool.processPendingRequest(AbstractNIOConnPool.java:411)
        at org.apache.http.nio.pool.AbstractNIOConnPool.processNextPendingRequest(AbstractNIOConnPool.java:391)
        at org.apache.http.nio.pool.AbstractNIOConnPool.release(AbstractNIOConnPool.java:355)
        at org.apache.http.impl.nio.conn.PoolingNHttpClientConnectionManager.releaseConnection(PoolingNHttpClientConnectionManager.java:391)
        at org.apache.http.impl.nio.client.AbstractClientExchangeHandler.releaseConnection(AbstractClientExchangeHandler.java:245)
        at org.apache.http.impl.nio.client.MainClientExec.responseCompleted(MainClientExec.java:387)
        at org.apache.http.impl.nio.client.DefaultClientExchangeHandlerImpl.responseCompleted(DefaultClientExchangeHandlerImpl.java:172)
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.processResponse(HttpAsyncRequestExecutor.java:448)
        at org.apache.http.nio.protocol.HttpAsyncRequestExecutor.inputReady(HttpAsyncRequestExecutor.java:338)
        at org.apache.http.impl.nio.DefaultNHttpClientConnection.consumeInput(DefaultNHttpClientConnection.java:265)
        at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:81)
        at org.apache.http.impl.nio.client.InternalIODispatch.onInputReady(InternalIODispatch.java:39)
        at org.apache.http.impl.nio.reactor.AbstractIODispatch.inputReady(AbstractIODispatch.java:121)
        at org.apache.http.impl.nio.reactor.BaseIOReactor.readable(BaseIOReactor.java:162)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvent(AbstractIOReactor.java:337)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.processEvents(AbstractIOReactor.java:315)
        at org.apache.http.impl.nio.reactor.AbstractIOReactor.execute(AbstractIOReactor.java:276)
        ... 3 more
[2021-06-01 10:59:59,793] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Revoke previously assigned partitions quickstart-events-0 (org.apache.kafka.clients.consumer.internals.ConsumerCoordinator:307)
[2021-06-01 10:59:59,793] INFO [Consumer clientId=connector-consumer-elasticsearch-sink-0, groupId=connect-elasticsearch-sink] Member connector-consumer-elasticsearch-sink-0-6dc85f42-a5fc-4cce-8324-8a75cc01f3d2 sending LeaveGroup request to coordinator MYPCHOSTNAME.comapy.domain.com:9092 (id: 2147483647 rack: null) due to the consumer is being closed (org.apache.kafka.clients.consumer.internals.AbstractCoordinator:1042)
[2021-06-01 10:59:59,798] INFO Metrics scheduler closed (org.apache.kafka.common.metrics.Metrics:659)
[2021-06-01 10:59:59,798] INFO Closing reporter org.apache.kafka.common.metrics.JmxReporter (org.apache.kafka.common.metrics.Metrics:663)
[2021-06-01 10:59:59,800] INFO Metrics reporters closed (org.apache.kafka.common.metrics.Metrics:669)
[2021-06-01 10:59:59,813] INFO App info kafka.consumer for connector-consumer-elasticsearch-sink-0 unregistered (org.apache.kafka.common.utils.AppInfoParser:83)
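To summarize the failure: bulk requests 41-44 to https://abcd.eu-west-1.es.amazonaws.com:443 first hit a closed connection and then repeatedly exceed the 15-second read timeout; the retries pile up until the HTTP connection pool reports "Connection lease request time out", the bulk fails for good, and the task is killed (it stays dead until restarted, as the WorkerSinkTask error states). Whether the cluster is overloaded, unreachable from this host, or simply slow cannot be determined from this log alone. If slow bulk responses turn out to be the cause, the knobs already visible in the config dump above are the usual first resort; a hedged sketch of such an adjustment in the connector properties file (values are illustrative, not prescriptive):

# give the cluster longer to answer and send smaller bulks
connection.timeout.ms=5000
read.timeout.ms=60000
batch.size=500

After editing the file, the standalone worker has to be restarted for the change to take effect, since standalone mode reads the connector properties at startup.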