Whenever there is a nested object in the Avro class, the schema is not saved and I always get an exception like the following:
org.apache.kafka.connect.errors.ConnectException: Tolerance exceeded in error handler
connect_1 | at org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator.execAndHandleError(RetryWithToleranceOperator.java:223)
connect_1 | at org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator.execute(RetryWithToleranceOperator.java:149)
connect_1 | at org.apache.kafka.connect.runtime.WorkerSourceTask.convertTransformedRecord(WorkerSourceTask.java:330)
connect_1 | at org.apache.kafka.connect.runtime.WorkerSourceTask.sendRecords(WorkerSourceTask.java:356)
connect_1 | at org.apache.kafka.connect.runtime.WorkerSourceTask.execute(WorkerSourceTask.java:258)
connect_1 | at org.apache.kafka.connect.runtime.WorkerTask.doRun(WorkerTask.java:188)
connect_1 | at org.apache.kafka.connect.runtime.WorkerTask.run(WorkerTask.java:243)
connect_1 | at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
connect_1 | at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
connect_1 | at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
connect_1 | at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
connect_1 | at java.base/java.lang.Thread.run(Thread.java:829)
connect_1 | Caused by: org.apache.avro.SchemaParseException: Can't redefine: io.confluent.connect.avro.ConnectDefault
connect_1 | at org.apache.avro.Schema$Names.put(Schema.java:1550)
connect_1 | at org.apache.avro.Schema$NamedSchema.writeNameRef(Schema.java:813)
connect_1 | at org.apache.avro.Schema$RecordSchema.toJson(Schema.java:975)
connect_1 | at org.apache.avro.Schema$UnionSchema.toJson(Schema.java:1242)
connect_1 | at org.apache.avro.Schema$RecordSchema.fieldsToJson(Schema.java:1003)
connect_1 | at org.apache.avro.Schema$RecordSchema.toJson(Schema.java:987)
connect_1 | at org.apache.avro.Schema.toString(Schema.java:426)
connect_1 | at org.apache.avro.Schema.toString(Schema.java:398)
connect_1 | at org.apache.avro.Schema.toString(Schema.java:389)
connect_1 | at io.apicurio.registry.serde.avro.AvroKafkaSerializer.getSchemaFromData(AvroKafkaSerializer.java:108)
connect_1 | at io.apicurio.registry.serde.AbstractKafkaSerializer.lambda$serialize$0(AbstractKafkaSerializer.java:90)
connect_1 | at io.apicurio.registry.serde.LazyLoadedParsedSchema.getRawSchema(LazyLoadedParsedSchema.java:55)
connect_1 | at io.apicurio.registry.serde.DefaultSchemaResolver.resolveSchema(DefaultSchemaResolver.java:81)
connect_1 | at io.apicurio.registry.serde.AbstractKafkaSerializer.serialize(AbstractKafkaSerializer.java:92)
connect_1 | at io.apicurio.registry.serde.AbstractKafkaSerializer.serialize(AbstractKafkaSerializer.java:79)
connect_1 | at io.apicurio.registry.utils.converter.SerdeBasedConverter.fromConnectData(SerdeBasedConverter.java:111)
connect_1 | at org.apache.kafka.connect.storage.Converter.fromConnectData(Converter.java:64)
connect_1 | at org.apache.kafka.connect.runtime.WorkerSourceTask.lambda$convertTransformedRecord$3(WorkerSourceTask.java:330)
connect_1 | at org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator.execAndRetry(RetryWithToleranceOperator.java:173)
connect_1 | at org.apache.kafka.connect.runtime.errors.RetryWithToleranceOperator.execAndHandleError(RetryWithToleranceOperator.java:207)
connect_1 | ... 11 more
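For illustration, here is a minimal sketch of the kind of nested Connect schema that ends up on this code path. All record and field names (com.example.Customer, address, and so on) are hypothetical placeholders, not the actual Debezium-generated schema. As far as I understand, ConnectDefault is the fallback Avro record name used for Connect structs that carry no schema name, so the sketch shows both an unnamed nested struct and an explicitly named one:

```java
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;

public class NestedStructExample {
    public static void main(String[] args) {
        // A nested struct WITHOUT an explicit name: when rendered as Avro,
        // the converter has to invent a record name for it (presumably the
        // "io.confluent.connect.avro.ConnectDefault" seen in the error).
        Schema unnamedAddress = SchemaBuilder.struct()
                .field("street", Schema.STRING_SCHEMA)
                .field("city", Schema.STRING_SCHEMA)
                .optional()
                .build();

        // The same nested struct WITH an explicit, unique (hypothetical) name,
        // which gives the generated Avro record a non-colliding name.
        Schema namedAddress = SchemaBuilder.struct()
                .name("com.example.Address")
                .field("street", Schema.STRING_SCHEMA)
                .field("city", Schema.STRING_SCHEMA)
                .optional()
                .build();

        // Outer record holding the nested struct.
        Schema customer = SchemaBuilder.struct()
                .name("com.example.Customer")
                .field("id", Schema.INT64_SCHEMA)
                .field("address", namedAddress)
                .build();

        Struct value = new Struct(customer)
                .put("id", 42L)
                .put("address", new Struct(namedAddress)
                        .put("street", "Main St")
                        .put("city", "Springfield"));

        System.out.println(customer);
        System.out.println(value);
    }
}
```

If the redefinition really comes from several unnamed nested structs all being rendered under the same default name, naming each nested struct explicitly (as with namedAddress above) should avoid the collision, but in my case the schema is produced by Debezium rather than built by hand.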
Tools used (converter wiring sketched below):
- Debezium
- Apicurio Schema Registry
- Avro format
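The converters are wired together roughly as below. This is a sketch rather than my exact configuration: the connector name, connector class, database connection settings (omitted here), and registry URL are placeholders, and the Apicurio properties shown (apicurio.registry.url, apicurio.registry.auto-register) are the ones I believe the AvroConverter accepts:

```json
{
  "name": "example-connector",
  "config": {
    "connector.class": "io.debezium.connector.postgresql.PostgresConnector",
    "key.converter": "io.apicurio.registry.utils.converter.AvroConverter",
    "key.converter.apicurio.registry.url": "http://apicurio:8080/apis/registry/v2",
    "key.converter.apicurio.registry.auto-register": "true",
    "value.converter": "io.apicurio.registry.utils.converter.AvroConverter",
    "value.converter.apicurio.registry.url": "http://apicurio:8080/apis/registry/v2",
    "value.converter.apicurio.registry.auto-register": "true"
  }
}
```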