From 9900ab346104bb1ef90c8ab95a55a5a2c7b54144 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Thu, 15 May 2025 16:54:39 +0200 Subject: [PATCH 01/38] Add initial code for KafkaJson and KafkaAvro request handlers. --- examples/pom.xml | 3 +- .../events/kafka-avro-event.json | 51 ++ .../events/kafka-json-event.json | 50 ++ examples/powertools-examples-kafka/pom.xml | 122 +++++ .../src/main/avro/AvroProduct.avsc | 10 + ...kaAvroConsumerDeserializationFunction.java | 53 ++ ...kaJsonConsumerDeserializationFunction.java | 34 ++ .../src/main/java/org/demo/kafka/Product.java | 63 +++ .../java/org/demo/kafka/avro/AvroProduct.java | 476 +++++++++++++++++ .../src/main/resources/log4j2.xml | 16 + .../powertools-examples-kafka/template.yaml | 44 ++ .../powertools-examples-kafka/tools/README.md | 29 ++ .../powertools-examples-kafka/tools/pom.xml | 55 ++ .../java/org/demo/kafka/avro/AvroProduct.java | 477 ++++++++++++++++++ .../demo/kafka/tools/GenerateAvroSamples.java | 121 +++++ pom.xml | 5 +- powertools-kafka/pom.xml | 100 ++++ .../kafka/KafkaAvroRequestHandler.java | 176 +++++++ .../kafka/KafkaJsonRequestHandler.java | 157 ++++++ 19 files changed, 2039 insertions(+), 3 deletions(-) create mode 100644 examples/powertools-examples-kafka/events/kafka-avro-event.json create mode 100644 examples/powertools-examples-kafka/events/kafka-json-event.json create mode 100644 examples/powertools-examples-kafka/pom.xml create mode 100644 examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java create mode 100644 examples/powertools-examples-kafka/src/main/resources/log4j2.xml create mode 100644 examples/powertools-examples-kafka/template.yaml create mode 100644 examples/powertools-examples-kafka/tools/README.md create mode 100644 examples/powertools-examples-kafka/tools/pom.xml create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java create mode 100644 powertools-kafka/pom.xml create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java diff --git a/examples/pom.xml b/examples/pom.xml index 1f985a9cf..9cd9cc825 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -37,6 +37,7 @@ powertools-examples-idempotency powertools-examples-parameters powertools-examples-serialization + powertools-examples-kafka powertools-examples-batch powertools-examples-validation powertools-examples-cloudformation @@ -56,4 +57,4 @@ - \ No newline at end of file + diff --git a/examples/powertools-examples-kafka/events/kafka-avro-event.json b/examples/powertools-examples-kafka/events/kafka-avro-event.json new file mode 100644 index 000000000..8d6ef2210 --- /dev/null +++ b/examples/powertools-examples-kafka/events/kafka-avro-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": 
"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "0g8MTGFwdG9wUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "1g8USGVhZHBob25lc0jhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/examples/powertools-examples-kafka/events/kafka-json-event.json b/examples/powertools-examples-kafka/events/kafka-json-event.json new file mode 100644 index 000000000..d85c40654 --- /dev/null +++ b/examples/powertools-examples-kafka/events/kafka-json-event.json @@ -0,0 +1,50 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "cmVjb3JkS2V5", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": null, + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml new file mode 100644 index 000000000..8f2228d1b --- /dev/null +++ b/examples/powertools-examples-kafka/pom.xml @@ -0,0 +1,122 @@ + + 4.0.0 + software.amazon.lambda.examples + 2.0.0-SNAPSHOT + powertools-examples-kafka + jar + Powertools for AWS Lambda (Java) - Examples - Kafka + + + 11 + 11 + 1.9.24 + 1.12.0 + + + + + software.amazon.lambda + powertools-logging-log4j + ${project.version} + + + software.amazon.lambda + powertools-metrics + ${project.version} + + + software.amazon.lambda + powertools-kafka + ${project.version} + + + com.amazonaws + aws-lambda-java-core + 1.2.3 + + + com.amazonaws + aws-lambda-java-events + 3.15.0 + + + org.aspectj + aspectjrt + ${aspectj.version} + + + 
org.apache.avro + avro + ${avro.version} + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 3.1.2 + + true + + + + dev.aspectj + aspectj-maven-plugin + 1.14 + + ${maven.compiler.source} + ${maven.compiler.target} + ${maven.compiler.target} + + + software.amazon.lambda + powertools-logging + + + software.amazon.lambda + powertools-metrics + + + + + + + compile + + + + + + org.aspectj + aspectjtools + ${aspectj.version} + + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-sources + + schema + + + ${project.basedir}/src/main/avro/ + ${project.basedir}/src/main/java/ + String + + + + + + + + + diff --git a/examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc b/examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc new file mode 100644 index 000000000..7155857ea --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/avro/AvroProduct.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "org.demo.kafka.avro", + "type": "record", + "name": "AvroProduct", + "fields": [ + {"name": "id", "type": "int"}, + {"name": "name", "type": "string"}, + {"name": "price", "type": "double"} + ] +} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java new file mode 100644 index 000000000..a999df6b3 --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java @@ -0,0 +1,53 @@ +package org.demo.kafka; + +import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.demo.kafka.avro.AvroProduct; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; + +import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; +import software.amazon.cloudwatchlogs.emf.model.Unit; +import software.amazon.lambda.powertools.kafka.KafkaAvroRequestHandler; +import software.amazon.lambda.powertools.logging.Logging; +import software.amazon.lambda.powertools.metrics.Metrics; +import software.amazon.lambda.powertools.metrics.MetricsUtils; + +public class KafkaAvroConsumerDeserializationFunction extends KafkaAvroRequestHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(KafkaAvroConsumerDeserializationFunction.class); + private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); + + @Logging + @Metrics + public String handleRecords(ConsumerRecords records, Context context) { + for (ConsumerRecord consumerRecord : records) { + LOGGER.info("{}", consumerRecord, entry("value", avroToMap(consumerRecord.value()))); + metrics.putMetric("ProcessedAvroRecord", 1, Unit.COUNT); + } + + return "OK"; + } + + // TODO: Helper method because Avro objects cannot be serialized by the Jackson ObjectMapper used in the Logging + // module + // entry("value", consumerRecord.value()) would fallback to a string instead of native json object. 
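+    // The map below mirrors the fields declared in src/main/avro/AvroProduct.avsc (id, name, price), so
+    // StructuredArguments.entry("value", ...) can log the product as a nested JSON object instead of a string.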
+ private Map avroToMap(AvroProduct avroProduct) { + if (avroProduct == null) { + return Collections.emptyMap(); + } + Map map = new HashMap<>(); + map.put("id", avroProduct.getId()); + map.put("name", avroProduct.getName()); + map.put("price", avroProduct.getPrice()); + return map; + } +} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java new file mode 100644 index 000000000..f9d4650ff --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java @@ -0,0 +1,34 @@ +package org.demo.kafka; + +import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; + +import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; +import software.amazon.cloudwatchlogs.emf.model.Unit; +import software.amazon.lambda.powertools.kafka.KafkaJsonRequestHandler; +import software.amazon.lambda.powertools.logging.Logging; +import software.amazon.lambda.powertools.metrics.Metrics; +import software.amazon.lambda.powertools.metrics.MetricsUtils; + +public class KafkaJsonConsumerDeserializationFunction extends KafkaJsonRequestHandler { + + private static final Logger LOGGER = LoggerFactory.getLogger(KafkaJsonConsumerDeserializationFunction.class); + private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); + + @Logging + @Metrics + public String handleRecords(ConsumerRecords records, Context context) { + for (ConsumerRecord consumerRecord : records) { + LOGGER.info("{}", consumerRecord, entry("value", consumerRecord.value())); + metrics.putMetric("ProcessedRecord", 1, Unit.COUNT); + } + + return "OK"; + } +} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java new file mode 100644 index 000000000..c6166090c --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/Product.java @@ -0,0 +1,63 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.demo.kafka; + +public class Product { + private long id; + private String name; + private double price; + + public Product() { + } + + public Product(long id, String name, double price) { + this.id = id; + this.name = name; + this.price = price; + } + + public long getId() { + return id; + } + + public void setId(long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public double getPrice() { + return price; + } + + public void setPrice(double price) { + this.price = price; + } + + @Override + public String toString() { + return "Product{" + + "id=" + id + + ", name='" + name + '\'' + + ", price=" + price + + '}'; + } +} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java new file mode 100644 index 000000000..fad7e2fbf --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/avro/AvroProduct.java @@ -0,0 +1,476 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package org.demo.kafka.avro; + +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class AvroProduct extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -2929699301240218341L; + + + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"org.demo.kafka.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"price\",\"type\":\"double\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this AvroProduct to a ByteBuffer. 
+ * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a AvroProduct from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a AvroProduct instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static AvroProduct fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private int id; + private java.lang.String name; + private double price; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroProduct() {} + + /** + * All-args constructor. + * @param id The new value for id + * @param name The new value for name + * @param price The new value for price + */ + public AvroProduct(java.lang.Integer id, java.lang.String name, java.lang.Double price) { + this.id = id; + this.name = name; + this.price = price; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } + + @Override + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return id; + case 1: return name; + case 2: return price; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: id = (java.lang.Integer)value$; break; + case 1: name = value$ != null ? value$.toString() : null; break; + case 2: price = (java.lang.Double)value$; break; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'id' field. + * @return The value of the 'id' field. + */ + public int getId() { + return id; + } + + + /** + * Sets the value of the 'id' field. + * @param value the value to set. + */ + public void setId(int value) { + this.id = value; + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public java.lang.String getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(java.lang.String value) { + this.name = value; + } + + /** + * Gets the value of the 'price' field. + * @return The value of the 'price' field. + */ + public double getPrice() { + return price; + } + + + /** + * Sets the value of the 'price' field. + * @param value the value to set. + */ + public void setPrice(double value) { + this.price = value; + } + + /** + * Creates a new AvroProduct RecordBuilder. + * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder() { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } + + /** + * Creates a new AvroProduct RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. 
+ * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct.Builder other) { + if (other == null) { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } else { + return new org.demo.kafka.avro.AvroProduct.Builder(other); + } + } + + /** + * Creates a new AvroProduct RecordBuilder by copying an existing AvroProduct instance. + * @param other The existing instance to copy. + * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct other) { + if (other == null) { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } else { + return new org.demo.kafka.avro.AvroProduct.Builder(other); + } + } + + /** + * RecordBuilder for AvroProduct instances. + */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private int id; + private java.lang.String name; + private double price; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(org.demo.kafka.avro.AvroProduct.Builder other) { + super(other); + if (isValidValue(fields()[0], other.id)) { + this.id = data().deepCopy(fields()[0].schema(), other.id); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.name)) { + this.name = data().deepCopy(fields()[1].schema(), other.name); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.price)) { + this.price = data().deepCopy(fields()[2].schema(), other.price); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } + + /** + * Creates a Builder by copying an existing AvroProduct instance + * @param other The existing instance to copy. + */ + private Builder(org.demo.kafka.avro.AvroProduct other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.id)) { + this.id = data().deepCopy(fields()[0].schema(), other.id); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.name)) { + this.name = data().deepCopy(fields()[1].schema(), other.name); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.price)) { + this.price = data().deepCopy(fields()[2].schema(), other.price); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'id' field. + * @return The value. + */ + public int getId() { + return id; + } + + + /** + * Sets the value of the 'id' field. + * @param value The value of 'id'. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder setId(int value) { + validate(fields()[0], value); + this.id = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'id' field has been set. + * @return True if the 'id' field has been set, false otherwise. + */ + public boolean hasId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'id' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearId() { + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public java.lang.String getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. 
+ */ + public org.demo.kafka.avro.AvroProduct.Builder setName(java.lang.String value) { + validate(fields()[1], value); + this.name = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearName() { + name = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'price' field. + * @return The value. + */ + public double getPrice() { + return price; + } + + + /** + * Sets the value of the 'price' field. + * @param value The value of 'price'. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder setPrice(double value) { + validate(fields()[2], value); + this.price = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'price' field has been set. + * @return True if the 'price' field has been set, false otherwise. + */ + public boolean hasPrice() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'price' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearPrice() { + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public AvroProduct build() { + try { + AvroProduct record = new AvroProduct(); + record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]); + record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]); + record.price = fieldSetFlags()[2] ? this.price : (java.lang.Double) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter + WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader + READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeInt(this.id); + + out.writeString(this.name); + + out.writeDouble(this.price); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.id = in.readInt(); + + this.name = in.readString(); + + this.price = in.readDouble(); + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.id = in.readInt(); + break; + + case 1: + this.name = in.readString(); + break; + + case 2: + this.price = in.readDouble(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + 
+ + + diff --git a/examples/powertools-examples-kafka/src/main/resources/log4j2.xml b/examples/powertools-examples-kafka/src/main/resources/log4j2.xml new file mode 100644 index 000000000..fe943d707 --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/resources/log4j2.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/examples/powertools-examples-kafka/template.yaml b/examples/powertools-examples-kafka/template.yaml new file mode 100644 index 000000000..d2ded572b --- /dev/null +++ b/examples/powertools-examples-kafka/template.yaml @@ -0,0 +1,44 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: > + Kafka Deserialization example with Kafka Lambda ESM + +Globals: + Function: + Timeout: 20 + Runtime: java11 + MemorySize: 512 + Tracing: Active + +Resources: + KafkaJsonConsumerDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . + Handler: org.demo.kafka.KafkaJsonConsumerDeserializationFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + POWERTOOLS_LOG_LEVEL: DEBUG + POWERTOOLS_SERVICE_NAME: KafkaJsonConsumerDeserialization + POWERTOOLS_METRICS_NAMESPACE: KafkaJsonConsumerDeserializationFunction + + KafkaAvroConsumerDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . + Handler: org.demo.kafka.KafkaAvroConsumerDeserializationFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + POWERTOOLS_LOG_LEVEL: DEBUG + POWERTOOLS_SERVICE_NAME: KafkaAvroConsumerDeserialization + POWERTOOLS_METRICS_NAMESPACE: KafkaAvroConsumerDeserializationFunction + +Outputs: + JsonFunction: + Description: "Kafka JSON Lambda Function ARN" + Value: !GetAtt KafkaJsonConsumerDeserializationFunction.Arn + AvroFunction: + Description: "Kafka Avro Lambda Function ARN" + Value: !GetAtt KafkaAvroConsumerDeserializationFunction.Arn diff --git a/examples/powertools-examples-kafka/tools/README.md b/examples/powertools-examples-kafka/tools/README.md new file mode 100644 index 000000000..0afca7933 --- /dev/null +++ b/examples/powertools-examples-kafka/tools/README.md @@ -0,0 +1,29 @@ +# Avro Sample Generator Tool + +This tool generates base64-encoded Avro serialized products for testing the Kafka Avro consumer function. + +## Usage + +Run the following Maven commands from this directory: + +```bash +# Generate Avro classes from schema +mvn generate-sources + +# Compile the code +mvn compile + +# Run the tool +mvn exec:java +``` + +The tool will output base64-encoded values for three different Avro products and an integer key. +You can copy these values into the `../events/kafka-avro-event.json` file to create a test event. + +## Output + +The tool generates: + +1. Three different Avro products (Laptop, Smartphone, Headphones) +2. An integer key (42) +3. 
A complete sample event structure that can be used directly diff --git a/examples/powertools-examples-kafka/tools/pom.xml b/examples/powertools-examples-kafka/tools/pom.xml new file mode 100644 index 000000000..419473cee --- /dev/null +++ b/examples/powertools-examples-kafka/tools/pom.xml @@ -0,0 +1,55 @@ + + + 4.0.0 + + software.amazon.lambda.examples + powertools-examples-kafka-tools + 2.0.0-SNAPSHOT + + + 11 + 11 + 1.12.0 + + + + + org.apache.avro + avro + ${avro.version} + + + + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-sources + + schema + + + ${project.basedir}/../src/main/avro/ + ${project.basedir}/src/main/java/ + String + + + + + + org.codehaus.mojo + exec-maven-plugin + 3.1.0 + + org.demo.kafka.tools.GenerateAvroSamples + + + + + diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java new file mode 100644 index 000000000..37a7e2c61 --- /dev/null +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java @@ -0,0 +1,477 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package org.demo.kafka.avro; + +import org.apache.avro.generic.GenericArray; +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class AvroProduct extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -2929699301240218341L; + + + public static final org.apache.avro.Schema SCHEMA$ = new org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"org.demo.kafka.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"price\",\"type\":\"double\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this AvroProduct to a ByteBuffer. 
+ * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a AvroProduct from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a AvroProduct instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static AvroProduct fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private int id; + private java.lang.String name; + private double price; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroProduct() {} + + /** + * All-args constructor. + * @param id The new value for id + * @param name The new value for name + * @param price The new value for price + */ + public AvroProduct(java.lang.Integer id, java.lang.String name, java.lang.Double price) { + this.id = id; + this.name = name; + this.price = price; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } + + @Override + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return id; + case 1: return name; + case 2: return price; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. + @Override + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: id = (java.lang.Integer)value$; break; + case 1: name = value$ != null ? value$.toString() : null; break; + case 2: price = (java.lang.Double)value$; break; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'id' field. + * @return The value of the 'id' field. + */ + public int getId() { + return id; + } + + + /** + * Sets the value of the 'id' field. + * @param value the value to set. + */ + public void setId(int value) { + this.id = value; + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public java.lang.String getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(java.lang.String value) { + this.name = value; + } + + /** + * Gets the value of the 'price' field. + * @return The value of the 'price' field. + */ + public double getPrice() { + return price; + } + + + /** + * Sets the value of the 'price' field. + * @param value the value to set. + */ + public void setPrice(double value) { + this.price = value; + } + + /** + * Creates a new AvroProduct RecordBuilder. + * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder() { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } + + /** + * Creates a new AvroProduct RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. 
+ * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct.Builder other) { + if (other == null) { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } else { + return new org.demo.kafka.avro.AvroProduct.Builder(other); + } + } + + /** + * Creates a new AvroProduct RecordBuilder by copying an existing AvroProduct instance. + * @param other The existing instance to copy. + * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct other) { + if (other == null) { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } else { + return new org.demo.kafka.avro.AvroProduct.Builder(other); + } + } + + /** + * RecordBuilder for AvroProduct instances. + */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private int id; + private java.lang.String name; + private double price; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. + */ + private Builder(org.demo.kafka.avro.AvroProduct.Builder other) { + super(other); + if (isValidValue(fields()[0], other.id)) { + this.id = data().deepCopy(fields()[0].schema(), other.id); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.name)) { + this.name = data().deepCopy(fields()[1].schema(), other.name); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.price)) { + this.price = data().deepCopy(fields()[2].schema(), other.price); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } + + /** + * Creates a Builder by copying an existing AvroProduct instance + * @param other The existing instance to copy. + */ + private Builder(org.demo.kafka.avro.AvroProduct other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.id)) { + this.id = data().deepCopy(fields()[0].schema(), other.id); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.name)) { + this.name = data().deepCopy(fields()[1].schema(), other.name); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.price)) { + this.price = data().deepCopy(fields()[2].schema(), other.price); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'id' field. + * @return The value. + */ + public int getId() { + return id; + } + + + /** + * Sets the value of the 'id' field. + * @param value The value of 'id'. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder setId(int value) { + validate(fields()[0], value); + this.id = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'id' field has been set. + * @return True if the 'id' field has been set, false otherwise. + */ + public boolean hasId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'id' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearId() { + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public java.lang.String getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. 
+ */ + public org.demo.kafka.avro.AvroProduct.Builder setName(java.lang.String value) { + validate(fields()[1], value); + this.name = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearName() { + name = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'price' field. + * @return The value. + */ + public double getPrice() { + return price; + } + + + /** + * Sets the value of the 'price' field. + * @param value The value of 'price'. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder setPrice(double value) { + validate(fields()[2], value); + this.price = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'price' field has been set. + * @return True if the 'price' field has been set, false otherwise. + */ + public boolean hasPrice() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'price' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearPrice() { + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public AvroProduct build() { + try { + AvroProduct record = new AvroProduct(); + record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]); + record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]); + record.price = fieldSetFlags()[2] ? this.price : (java.lang.Double) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter + WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader + READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeInt(this.id); + + out.writeString(this.name); + + out.writeDouble(this.price); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.id = in.readInt(); + + this.name = in.readString(); + + this.price = in.readDouble(); + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.id = in.readInt(); + break; + + case 1: + this.name = in.readString(); + break; + + case 2: + this.price = in.readDouble(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + 
+ + + diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java new file mode 100644 index 000000000..4bd6ebd13 --- /dev/null +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateAvroSamples.java @@ -0,0 +1,121 @@ +package org.demo.kafka.tools; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.Base64; + +import org.apache.avro.io.BinaryEncoder; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.specific.SpecificDatumWriter; +import org.demo.kafka.avro.AvroProduct; + +/** + * Utility class to generate base64-encoded Avro serialized products + * for use in test events. + */ +public class GenerateAvroSamples { + + public static void main(String[] args) throws IOException { + // Create three different products + AvroProduct product1 = new AvroProduct(1001, "Laptop", 999.99); + AvroProduct product2 = new AvroProduct(1002, "Smartphone", 599.99); + AvroProduct product3 = new AvroProduct(1003, "Headphones", 149.99); + + // Serialize and encode each product + String encodedProduct1 = serializeAndEncode(product1); + String encodedProduct2 = serializeAndEncode(product2); + String encodedProduct3 = serializeAndEncode(product3); + + // Serialize and encode an integer key + String encodedKey = serializeAndEncodeInteger(42); + + // Print the results + System.out.println("Base64 encoded Avro products for use in kafka-avro-event.json:"); + System.out.println("\nProduct 1 (with key):"); + System.out.println("key: \"" + encodedKey + "\","); + System.out.println("value: \"" + encodedProduct1 + "\","); + + System.out.println("\nProduct 2 (with key):"); + System.out.println("key: \"" + encodedKey + "\","); + System.out.println("value: \"" + encodedProduct2 + "\","); + + System.out.println("\nProduct 3 (without key):"); + System.out.println("key: null,"); + System.out.println("value: \"" + encodedProduct3 + "\","); + + // Print a sample event structure + System.out.println("\nSample event structure:"); + printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3); + } + + private static String serializeAndEncode(AvroProduct product) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null); + DatumWriter writer = new SpecificDatumWriter<>(AvroProduct.class); + + writer.write(product, encoder); + encoder.flush(); + + return Base64.getEncoder().encodeToString(baos.toByteArray()); + } + + private static String serializeAndEncodeInteger(Integer value) throws IOException { + // For simple types like integers, we'll just convert to string and encode + return Base64.getEncoder().encodeToString(value.toString().getBytes()); + } + + private static void printSampleEvent(String key, String product1, String product2, String product3) { + System.out.println("{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n" + + " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n" + + " \"records\": {\n" + + " \"mytopic-0\": [\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " 
\"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + key + "\",\n" + + " \"value\": \"" + product1 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 16,\n" + + " \"timestamp\": 1545084650988,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + key + "\",\n" + + " \"value\": \"" + product2 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 17,\n" + + " \"timestamp\": 1545084650989,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": \"" + product3 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"); + } +} diff --git a/pom.xml b/pom.xml index 0c12aa9c6..a8d58aab9 100644 --- a/pom.xml +++ b/pom.xml @@ -14,8 +14,8 @@ --> + xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" + xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 4.0.0 software.amazon.lambda @@ -43,6 +43,7 @@ powertools-common powertools-serialization + powertools-kafka powertools-logging powertools-logging/powertools-logging-log4j powertools-logging/powertools-logging-logback diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml new file mode 100644 index 000000000..b08c93ea6 --- /dev/null +++ b/powertools-kafka/pom.xml @@ -0,0 +1,100 @@ + + + + + 4.0.0 + + + powertools-parent + software.amazon.lambda + 2.0.0-SNAPSHOT + + + powertools-kafka + jar + + Powertools for AWS Lambda (Java) - Kafka Consumer + + + + + + + org.slf4j + slf4j-api + + + org.aspectj + aspectjrt + provided + + + com.amazonaws + aws-lambda-java-core + + + com.amazonaws + aws-lambda-java-events + + + org.apache.kafka + kafka-clients + 3.6.1 + + + com.fasterxml.jackson.core + jackson-databind + 2.15.2 + + + org.apache.avro + avro + 1.11.3 + + + + + org.junit.jupiter + junit-jupiter-api + test + + + org.slf4j + slf4j-simple + test + + + org.assertj + assertj-core + test + + + + + + + dev.aspectj + aspectj-maven-plugin + ${aspectj-maven-plugin.version} + + true + + + + + + diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java new file mode 100644 index 000000000..8b477b769 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java @@ -0,0 +1,176 @@ +package software.amazon.lambda.powertools.kafka; + +import java.io.IOException; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.Decoder; +import org.apache.avro.io.DecoderFactory; +import org.apache.avro.specific.SpecificDatumReader; +import org.apache.avro.specific.SpecificRecordBase; +import org.apache.kafka.clients.consumer.ConsumerRecord; +import 
org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.header.Headers; +import org.apache.kafka.common.header.internals.RecordHeaders; +import org.apache.kafka.common.record.TimestampType; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; +import com.amazonaws.services.lambda.runtime.events.KafkaEvent; + +/** + * A request handler for processing Kafka events with Avro-encoded data. + * + * @param The type of the key in the Kafka record + * @param The type of the value in the Kafka record + * @param The return type of the handler + */ +public abstract class KafkaAvroRequestHandler implements RequestHandler { + private final Class keyType; + private final Class valueType; + + @SuppressWarnings("unchecked") + protected KafkaAvroRequestHandler() { + Type superClass = getClass().getGenericSuperclass(); + ParameterizedType parameterizedType = (ParameterizedType) superClass; + Type[] typeArguments = parameterizedType.getActualTypeArguments(); + this.keyType = (Class) typeArguments[0]; + this.valueType = (Class) typeArguments[1]; + } + + @Override + public R handleRequest(KafkaEvent input, Context context) { + if (input == null || input.getRecords() == null) { + return handleRecords(ConsumerRecords.empty(), context); + } + + Map>> recordsMap = new HashMap<>(); + + for (Map.Entry> entry : input.getRecords().entrySet()) { + String topic = entry.getKey(); + + for (KafkaEvent.KafkaEventRecord record : entry.getValue()) { + ConsumerRecord consumerRecord = convertToConsumerRecord(topic, record); + + TopicPartition topicPartition = new TopicPartition(topic, record.getPartition()); + recordsMap.computeIfAbsent(topicPartition, k -> new ArrayList<>()).add(consumerRecord); + } + } + + return handleRecords(new ConsumerRecords<>(recordsMap), context); + } + + private ConsumerRecord convertToConsumerRecord(String topic, KafkaEvent.KafkaEventRecord record) { + K key = null; + V value = null; + int keySize = ConsumerRecord.NULL_SIZE; + int valueSize = ConsumerRecord.NULL_SIZE; + + if (record.getKey() != null) { + try { + byte[] decodedKeyBytes = Base64.getDecoder().decode(record.getKey()); + keySize = decodedKeyBytes.length; + key = deserialize(decodedKeyBytes, keyType); + } catch (Exception e) { + throw new RuntimeException("Failed to deserialize Kafka record key.", e); + } + } + + if (record.getValue() != null) { + try { + byte[] decodedValueBytes = Base64.getDecoder().decode(record.getValue()); + valueSize = decodedValueBytes.length; + value = deserialize(decodedValueBytes, valueType); + } catch (Exception e) { + throw new RuntimeException("Failed to deserialize Kafka record value.", e); + } + } + + Headers headers = new RecordHeaders(); + if (record.getHeaders() != null) { + for (Map headerMap : record.getHeaders()) { + for (Map.Entry header : headerMap.entrySet()) { + if (header.getValue() != null) { + headers.add(header.getKey(), header.getValue()); + } + } + } + } + + return new ConsumerRecord<>( + topic, + record.getPartition(), + record.getOffset(), + record.getTimestamp(), + // TODO: Do not hardcode + TimestampType.CREATE_TIME, + keySize, + valueSize, + key, + value, + headers, + java.util.Optional.empty()); + } + + @SuppressWarnings("unchecked") + private T deserialize(byte[] data, Class type) throws IOException { + // Handle primitive types and String + if (type == String.class) { + return (T) new String(data); + } else if (type == Integer.class || type == 
int.class) { + return (T) Integer.valueOf(new String(data)); + } else if (type == Long.class || type == long.class) { + return (T) Long.valueOf(new String(data)); + } else if (type == Double.class || type == double.class) { + return (T) Double.valueOf(new String(data)); + } else if (type == Float.class || type == float.class) { + return (T) Float.valueOf(new String(data)); + } else if (type == Boolean.class || type == boolean.class) { + return (T) Boolean.valueOf(new String(data)); + } else if (type == Byte.class || type == byte.class) { + return (T) Byte.valueOf(new String(data)); + } else if (type == Short.class || type == short.class) { + return (T) Short.valueOf(new String(data)); + } else if (type == Character.class || type == char.class) { + String str = new String(data); + if (!str.isEmpty()) { + return (T) Character.valueOf(str.charAt(0)); + } + throw new IllegalArgumentException("Cannot convert empty string to char"); + } else if (SpecificRecordBase.class.isAssignableFrom(type)) { + // Handle Avro specific record + try { + // Create a datum reader for the Avro record + DatumReader datumReader = new SpecificDatumReader<>(type); + + // Create a binary decoder for the data + Decoder decoder = DecoderFactory.get().binaryDecoder(data, null); + + // Read and return the record + return datumReader.read(null, decoder); + } catch (Exception e) { + throw new IOException("Failed to deserialize Avro data", e); + } + } else { + throw new IOException("Unsupported type for Avro deserialization: " + type.getName() + ". " + + "Avro deserialization requires a type of org.apache.avro.specific.SpecificRecord. " + + "Consider using an alternative Deserializer."); + } + } + + /** + * Handle the Kafka records. + * + * @param records ConsumerRecords containing deserialized Kafka ConsumerRecord objects + * @param context Lambda context + * @return Response of type R + */ + public abstract R handleRecords(ConsumerRecords records, Context context); +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java new file mode 100644 index 000000000..609831cf1 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java @@ -0,0 +1,157 @@ +package software.amazon.lambda.powertools.kafka; + +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.header.Headers; +import org.apache.kafka.common.header.internals.RecordHeaders; +import org.apache.kafka.common.record.TimestampType; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; +import com.amazonaws.services.lambda.runtime.events.KafkaEvent; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +public abstract class KafkaJsonRequestHandler implements RequestHandler { + private static final ObjectMapper objectMapper = new ObjectMapper(); + private final Class keyType; + private final Class valueType; + + @SuppressWarnings("unchecked") + protected KafkaJsonRequestHandler() { + Type 
superClass = getClass().getGenericSuperclass(); + ParameterizedType parameterizedType = (ParameterizedType) superClass; + Type[] typeArguments = parameterizedType.getActualTypeArguments(); + this.keyType = (Class) typeArguments[0]; + this.valueType = (Class) typeArguments[1]; + } + + @Override + public R handleRequest(KafkaEvent input, Context context) { + if (input == null || input.getRecords() == null) { + return handleRecords(ConsumerRecords.empty(), context); + } + + Map>> recordsMap = new HashMap<>(); + + for (Map.Entry> entry : input.getRecords().entrySet()) { + String topic = entry.getKey(); + + for (KafkaEvent.KafkaEventRecord record : entry.getValue()) { + ConsumerRecord consumerRecord = convertToConsumerRecord(topic, record); + + TopicPartition topicPartition = new TopicPartition(topic, record.getPartition()); + recordsMap.computeIfAbsent(topicPartition, k -> new ArrayList<>()).add(consumerRecord); + } + } + + return handleRecords(new ConsumerRecords<>(recordsMap), context); + } + + private ConsumerRecord convertToConsumerRecord(String topic, KafkaEvent.KafkaEventRecord record) { + K key = null; + V value = null; + int keySize = ConsumerRecord.NULL_SIZE; + int valueSize = ConsumerRecord.NULL_SIZE; + + if (record.getKey() != null) { + try { + byte[] decodedKeyBytes = Base64.getDecoder().decode(record.getKey()); + keySize = decodedKeyBytes.length; + key = deserialize(record.getKey(), keyType); + } catch (Exception e) { + throw new RuntimeException("Failed to deserialize Kafka record key.", e); + } + } + + if (record.getValue() != null) { + try { + byte[] decodedValueBytes = Base64.getDecoder().decode(record.getValue()); + valueSize = decodedValueBytes.length; + value = deserialize(record.getValue(), valueType); + } catch (Exception e) { + throw new RuntimeException("Failed to deserialize Kafka record value.", e); + } + } + + Headers headers = new RecordHeaders(); + if (record.getHeaders() != null) { + for (Map headerMap : record.getHeaders()) { + for (Map.Entry header : headerMap.entrySet()) { + if (header.getValue() != null) { + headers.add(header.getKey(), header.getValue()); + } + } + } + } + + return new ConsumerRecord<>( + topic, + record.getPartition(), + record.getOffset(), + record.getTimestamp(), + // TODO: Do not hardcode + TimestampType.CREATE_TIME, + keySize, + valueSize, + key, + value, + headers, + java.util.Optional.empty()); + } + + @SuppressWarnings("unchecked") + private T deserialize(String data, Class type) throws JsonProcessingException { + byte[] decodedBytes = Base64.getDecoder().decode(data); + + // Handle String type + if (type == String.class) { + return (T) new String(decodedBytes); + } + + // Handle primitive types and their wrappers + String decodedStr = new String(decodedBytes); + + if (type == Integer.class || type == int.class) { + return (T) Integer.valueOf(decodedStr); + } else if (type == Long.class || type == long.class) { + return (T) Long.valueOf(decodedStr); + } else if (type == Double.class || type == double.class) { + return (T) Double.valueOf(decodedStr); + } else if (type == Float.class || type == float.class) { + return (T) Float.valueOf(decodedStr); + } else if (type == Boolean.class || type == boolean.class) { + return (T) Boolean.valueOf(decodedStr); + } else if (type == Byte.class || type == byte.class) { + return (T) Byte.valueOf(decodedStr); + } else if (type == Short.class || type == short.class) { + return (T) Short.valueOf(decodedStr); + } else if (type == Character.class || type == char.class) { + if (decodedStr.length() > 0) { 
+ return (T) Character.valueOf(decodedStr.charAt(0)); + } + throw new IllegalArgumentException("Cannot convert empty string to char"); + } else { + // For all other types, use Jackson ObjectMapper + return objectMapper.readValue(decodedStr, type); + } + } + + /** + * Handle the Kafka records. + * + * @param records ConsumerRecords containing deserialized Kafka ConsumerRecord objects + * @param context Lambda context + * @return Response of type R + */ + public abstract R handleRecords(ConsumerRecords records, Context context); +} From 47cbc76a1e976f5a5f6d57b37daca05d554798bc Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Fri, 16 May 2025 13:57:56 +0200 Subject: [PATCH 02/38] Add deserialization via @Deserialization annotation. --- ...kaAvroConsumerDeserializationFunction.java | 4 +- ...kaJsonConsumerDeserializationFunction.java | 1 + ...kaAvroConsumerDeserializationFunction.java | 57 ++++ ...kaJsonConsumerDeserializationFunction.java | 39 +++ .../powertools-examples-kafka/template.yaml | 24 ++ powertools-kafka/pom.xml | 23 +- .../powertools/kafka/Deserialization.java | 32 +++ .../powertools/kafka/DeserializationType.java | 17 ++ .../kafka/PowertoolsSerializer.java | 53 ++++ .../kafka/internal/DeserializationUtils.java | 76 +++++ .../AbstractKafkaDeserializer.java | 269 ++++++++++++++++++ .../serializers/KafkaAvroDeserializer.java | 46 +++ .../serializers/KafkaJsonDeserializer.java | 27 ++ .../LambdaDefaultDeserializer.java | 33 +++ .../serializers/PowertoolsDeserializer.java | 27 ++ ...rvices.lambda.runtime.CustomPojoSerializer | 1 + 16 files changed, 719 insertions(+), 10 deletions(-) create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java create mode 100644 powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java index a999df6b3..ab4d0b90d 100644 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java +++ 
b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java @@ -26,6 +26,7 @@ public class KafkaAvroConsumerDeserializationFunction extends KafkaAvroRequestHa private static final Logger LOGGER = LoggerFactory.getLogger(KafkaAvroConsumerDeserializationFunction.class); private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); + @Override @Logging @Metrics public String handleRecords(ConsumerRecords records, Context context) { @@ -38,8 +39,7 @@ public String handleRecords(ConsumerRecords records, Contex } // TODO: Helper method because Avro objects cannot be serialized by the Jackson ObjectMapper used in the Logging - // module - // entry("value", consumerRecord.value()) would fallback to a string instead of native json object. + // module entry("value", consumerRecord.value()) would fallback to a string instead of native json object. private Map avroToMap(AvroProduct avroProduct) { if (avroProduct == null) { return Collections.emptyMap(); diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java index f9d4650ff..caeb65f20 100644 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java @@ -21,6 +21,7 @@ public class KafkaJsonConsumerDeserializationFunction extends KafkaJsonRequestHa private static final Logger LOGGER = LoggerFactory.getLogger(KafkaJsonConsumerDeserializationFunction.class); private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); + @Override @Logging @Metrics public String handleRecords(ConsumerRecords records, Context context) { diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java new file mode 100644 index 000000000..b5777315b --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java @@ -0,0 +1,57 @@ +package org.demo.kafka; + +import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.demo.kafka.avro.AvroProduct; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; +import software.amazon.cloudwatchlogs.emf.model.Unit; +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; +import software.amazon.lambda.powertools.logging.Logging; +import software.amazon.lambda.powertools.metrics.Metrics; +import software.amazon.lambda.powertools.metrics.MetricsUtils; + +public class NativeKafkaAvroConsumerDeserializationFunction + implements RequestHandler, String> { + + private static final Logger LOGGER = 
LoggerFactory.getLogger(NativeKafkaAvroConsumerDeserializationFunction.class); + private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); + + @Override + @Logging + @Metrics + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord consumerRecord : records) { + LOGGER.info("{}", consumerRecord, entry("value", avroToMap(consumerRecord.value()))); + metrics.putMetric("ProcessedAvroRecord", 1, Unit.COUNT); + } + + return "OK"; + } + + // TODO: Helper method because Avro objects cannot be serialized by the Jackson ObjectMapper used in the Logging + // module entry("value", consumerRecord.value()) would fallback to a string instead of native json object. + private Map avroToMap(AvroProduct avroProduct) { + if (avroProduct == null) { + return Collections.emptyMap(); + } + Map map = new HashMap<>(); + map.put("id", avroProduct.getId()); + map.put("name", avroProduct.getName()); + map.put("price", avroProduct.getPrice()); + return map; + } +} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java new file mode 100644 index 000000000..c5b8c4f3d --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java @@ -0,0 +1,39 @@ +package org.demo.kafka; + +import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; +import software.amazon.cloudwatchlogs.emf.model.Unit; +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; +import software.amazon.lambda.powertools.logging.Logging; +import software.amazon.lambda.powertools.metrics.Metrics; +import software.amazon.lambda.powertools.metrics.MetricsUtils; + +public class NativeKafkaJsonConsumerDeserializationFunction + implements RequestHandler, String> { + + private static final Logger LOGGER = LoggerFactory.getLogger(NativeKafkaJsonConsumerDeserializationFunction.class); + private final MetricsLogger metrics = MetricsUtils.metricsLogger(); + + @Override + @Logging + @Metrics + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords consumerRecords, Context context) { + for (ConsumerRecord consumerRecord : consumerRecords) { + LOGGER.info("{}", consumerRecord, entry("value", consumerRecord.value())); + metrics.putMetric("ProcessedRecord", 1, Unit.COUNT); + } + + return "OK"; + } +} diff --git a/examples/powertools-examples-kafka/template.yaml b/examples/powertools-examples-kafka/template.yaml index d2ded572b..60e167eb9 100644 --- a/examples/powertools-examples-kafka/template.yaml +++ b/examples/powertools-examples-kafka/template.yaml @@ -11,6 +11,30 @@ Globals: Tracing: Active Resources: + NativeKafkaJsonConsumerDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . 
+ Handler: org.demo.kafka.NativeKafkaJsonConsumerDeserializationFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + POWERTOOLS_LOG_LEVEL: DEBUG + POWERTOOLS_SERVICE_NAME: NativeKafkaJsonConsumerDeserialization + POWERTOOLS_METRICS_NAMESPACE: NativeKafkaJsonConsumerDeserialization + + NativeKafkaAvroConsumerDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . + Handler: org.demo.kafka.NativeKafkaAvroConsumerDeserializationFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + POWERTOOLS_LOG_LEVEL: DEBUG + POWERTOOLS_SERVICE_NAME: NativeKafkaAvroConsumerDeserialization + POWERTOOLS_METRICS_NAMESPACE: NativeKafkaAvroConsumerDeserialization + KafkaJsonConsumerDeserializationFunction: Type: AWS::Serverless::Function Properties: diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml index b08c93ea6..82b69cb45 100644 --- a/powertools-kafka/pom.xml +++ b/powertools-kafka/pom.xml @@ -28,9 +28,7 @@ jar Powertools for AWS Lambda (Java) - Kafka Consumer - - - + @@ -53,17 +51,21 @@ org.apache.kafka kafka-clients - 3.6.1 + 4.0.0 + + + org.apache.avro + avro + 1.12.0 com.fasterxml.jackson.core jackson-databind - 2.15.2 - org.apache.avro - avro - 1.11.3 + com.amazonaws + aws-lambda-java-serialization + 1.1.5 @@ -85,6 +87,11 @@ + + + src/main/resources + + dev.aspectj diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java new file mode 100644 index 000000000..698149ab0 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java @@ -0,0 +1,32 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package software.amazon.lambda.powertools.kafka; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Annotation to specify the deserialization type for Kafka messages. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.METHOD) +public @interface Deserialization { + /** + * The type of deserialization to use. + * @return the deserialization type + */ + DeserializationType type(); +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java new file mode 100644 index 000000000..f4f8177d6 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java @@ -0,0 +1,17 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. 
+ * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka; + +public enum DeserializationType { + LAMBDA_DEFAULT, KAFKA_JSON, KAFKA_AVRO +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java new file mode 100644 index 000000000..22c65f8ba --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java @@ -0,0 +1,53 @@ +package software.amazon.lambda.powertools.kafka; + +import java.io.InputStream; +import java.io.OutputStream; +import java.lang.reflect.Type; +import java.util.Map; + +import com.amazonaws.services.lambda.runtime.CustomPojoSerializer; +import com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory; + +import software.amazon.lambda.powertools.kafka.internal.DeserializationUtils; +import software.amazon.lambda.powertools.kafka.serializers.KafkaAvroDeserializer; +import software.amazon.lambda.powertools.kafka.serializers.KafkaJsonDeserializer; +import software.amazon.lambda.powertools.kafka.serializers.LambdaDefaultDeserializer; +import software.amazon.lambda.powertools.kafka.serializers.PowertoolsDeserializer; + +/** + * Custom Lambda serializer supporting Kafka events. It delegates to the appropriate deserializer based on the + * deserialization type. + * + * Kafka serializers need to be specified explicitly, otherwise, the default Lambda serializer from + * {@link com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory} will be used. 
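+ *
+ * <p>A minimal usage sketch, mirroring the example functions added in this change (the {@code Order} POJO,
+ * the class name, and the {@code doSomethingWith} call are illustrative placeholders, not part of this module):
+ *
+ * <pre>{@code
+ * public class OrderConsumerFunction implements RequestHandler<ConsumerRecords<String, Order>, String> {
+ *
+ *     @Override
+ *     @Deserialization(type = DeserializationType.KAFKA_JSON)
+ *     public String handleRequest(ConsumerRecords<String, Order> records, Context context) {
+ *         for (ConsumerRecord<String, Order> record : records) {
+ *             // value() is already deserialized into an Order instance
+ *             doSomethingWith(record.value());
+ *         }
+ *         return "OK";
+ *     }
+ * }
+ * }</pre>
+ *
+ * <p>The serializer itself is registered as a {@code CustomPojoSerializer} through the {@code META-INF/services}
+ * entry added in this module; the {@link Deserialization} annotation on the handler method selects which
+ * deserializer is applied.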
+ */ +public class PowertoolsSerializer implements CustomPojoSerializer { + private static final Map DESERIALIZERS = Map.of( + DeserializationType.KAFKA_JSON, new KafkaJsonDeserializer(), + DeserializationType.KAFKA_AVRO, new KafkaAvroDeserializer(), + DeserializationType.LAMBDA_DEFAULT, new LambdaDefaultDeserializer()); + + private final PowertoolsDeserializer deserializer; + + public PowertoolsSerializer() { + this.deserializer = DESERIALIZERS.getOrDefault( + DeserializationUtils.determineDeserializationType(), + new LambdaDefaultDeserializer()); + } + + @Override + public T fromJson(InputStream input, Type type) { + return deserializer.fromJson(input, type); + } + + @Override + public T fromJson(String input, Type type) { + return deserializer.fromJson(input, type); + } + + @Override + public void toJson(T value, OutputStream output, Type type) { + // This is the Lambda default Output serialization + JacksonFactory.getInstance().getSerializer(type).toJson(value, output); + } +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java new file mode 100644 index 000000000..938a65221 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java @@ -0,0 +1,76 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.internal; + +import java.lang.reflect.Method; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; + +/** + * Utility class to determine the deserialization type from Lambda request handler methods annotated with + * {@link Deserialization} utility. + * + * Relies on the Lambda _HANDLER environment variable to detect the currently active handler method. + */ +public final class DeserializationUtils { + private static final Logger LOGGER = LoggerFactory.getLogger(DeserializationUtils.class); + + private DeserializationUtils() { + } + + public static DeserializationType determineDeserializationType() { + try { + // Get the handler from the environment. 
It has a format like org.example.MyRequestHandler::handleRequest + String handler = System.getenv("_HANDLER"); + if (handler != null && handler.contains("::")) { + String className = handler.substring(0, handler.indexOf("::")); + String methodName = handler.substring(handler.indexOf("::") + 2); + + Class handlerClazz = Class.forName(className); + + // Only consider if it implements RequestHandler + if (RequestHandler.class.isAssignableFrom(handlerClazz)) { + // Look for deserialization type on annotation on handler method + for (Method method : handlerClazz.getDeclaredMethods()) { + if (method.getName().equals(methodName) && method.isAnnotationPresent(Deserialization.class)) { + Deserialization annotation = method.getAnnotation(Deserialization.class); + LOGGER.debug("Found deserialization type: {}", annotation.type()); + return annotation.type(); + } + } + } else { + LOGGER.warn("Candidate class for custom deserialization '{}'' does not implement RequestHandler. " + + "Ignoring.", className); + } + } else { + LOGGER.error( + "Cannot determine deserialization type for custom deserialization. " + + "Defaulting to standard. " + + "No valid handler found in environment variable _HANDLER: {}.", + handler); + } + } catch (Exception e) { + LOGGER.error( + "Cannot determine deserialization type for custom deserialization. Defaulting to standard.", + e); + } + + return DeserializationType.LAMBDA_DEFAULT; + } +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java new file mode 100644 index 000000000..92699b5e0 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -0,0 +1,269 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.serializers; + +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; +import java.util.ArrayList; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.header.Headers; +import org.apache.kafka.common.header.internals.RecordHeaders; +import org.apache.kafka.common.record.TimestampType; + +import com.amazonaws.services.lambda.runtime.events.KafkaEvent; +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * Abstract base class for Kafka deserializers that implements common functionality. 
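+ *
+ * Concretely, subclasses inherit the parsing of the incoming {@code KafkaEvent} JSON, the Base64 decoding of
+ * record keys and values, the handling of String and primitive wrapper types, and the reconstruction of Kafka
+ * {@code ConsumerRecords} grouped by topic and partition. Only {@code deserializeComplex} has to be provided
+ * for non-primitive payloads.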
+ */ +public abstract class AbstractKafkaDeserializer implements PowertoolsDeserializer { + protected static final ObjectMapper objectMapper = new ObjectMapper(); + + /** + * Deserialize JSON from InputStream into ConsumerRecords + * + * @param input InputStream containing JSON data + * @param type Type representing ConsumerRecords + * @param The type to deserialize to + * @return Deserialized ConsumerRecords object + * @throws IllegalArgumentException if type is not ConsumerRecords + */ + @SuppressWarnings("unchecked") + @Override + public T fromJson(InputStream input, Type type) { + if (!isConsumerRecordsType(type)) { + throw new IllegalArgumentException("Type must be ConsumerRecords when using this deserializer"); + } + + try { + // Parse the KafkaEvent from the input stream + KafkaEvent kafkaEvent = objectMapper.readValue(input, KafkaEvent.class); + + // Extract the key and value types from the ConsumerRecords type + ParameterizedType parameterizedType = (ParameterizedType) type; + Type[] typeArguments = parameterizedType.getActualTypeArguments(); + Class keyType = (Class) typeArguments[0]; + Class valueType = (Class) typeArguments[1]; + + // Convert KafkaEvent to ConsumerRecords + return (T) convertToConsumerRecords(kafkaEvent, keyType, valueType); + } catch (IOException e) { + throw new RuntimeException("Failed to deserialize JSON to ConsumerRecords", e); + } + } + + /** + * Deserialize JSON from String into ConsumerRecords + * + * @param input String containing JSON data + * @param type Type representing ConsumerRecords + * @param The type to deserialize to + * @return Deserialized ConsumerRecords object + * @throws IllegalArgumentException if type is not ConsumerRecords + */ + @SuppressWarnings("unchecked") + @Override + public T fromJson(String input, Type type) { + if (!isConsumerRecordsType(type)) { + throw new IllegalArgumentException("Type must be ConsumerRecords when using this deserializer"); + } + + try { + // Parse the KafkaEvent from the input string + KafkaEvent kafkaEvent = objectMapper.readValue(input, KafkaEvent.class); + + // Extract the key and value types from the ConsumerRecords type + ParameterizedType parameterizedType = (ParameterizedType) type; + Type[] typeArguments = parameterizedType.getActualTypeArguments(); + Class keyType = (Class) typeArguments[0]; + Class valueType = (Class) typeArguments[1]; + + // Convert KafkaEvent to ConsumerRecords + return (T) convertToConsumerRecords(kafkaEvent, keyType, valueType); + } catch (IOException e) { + throw new RuntimeException("Failed to deserialize JSON to ConsumerRecords", e); + } + } + + private boolean isConsumerRecordsType(Type type) { + if (!(type instanceof ParameterizedType)) { + return false; + } + + ParameterizedType parameterizedType = (ParameterizedType) type; + return parameterizedType.getRawType().equals(ConsumerRecords.class); + } + + private ConsumerRecords convertToConsumerRecords(KafkaEvent kafkaEvent, Class keyType, + Class valueType) { + + if (kafkaEvent == null || kafkaEvent.getRecords() == null) { + return ConsumerRecords.empty(); + } + + Map>> recordsMap = new HashMap<>(); + + for (Map.Entry> entry : kafkaEvent.getRecords().entrySet()) { + String topic = entry.getKey(); + + for (KafkaEvent.KafkaEventRecord eventRecord : entry.getValue()) { + ConsumerRecord consumerRecord = convertToConsumerRecord(topic, eventRecord, keyType, valueType); + + TopicPartition topicPartition = new TopicPartition(topic, eventRecord.getPartition()); + recordsMap.computeIfAbsent(topicPartition, k -> new 
ArrayList<>()).add(consumerRecord); + } + } + + return new ConsumerRecords<>(recordsMap, Map.of()); + } + + private ConsumerRecord convertToConsumerRecord( + String topic, + KafkaEvent.KafkaEventRecord eventRecord, + Class keyType, + Class valueType) { + + K key = null; + V value = null; + int keySize = ConsumerRecord.NULL_SIZE; + int valueSize = ConsumerRecord.NULL_SIZE; + + if (eventRecord.getKey() != null) { + try { + byte[] decodedKeyBytes = Base64.getDecoder().decode(eventRecord.getKey()); + keySize = decodedKeyBytes.length; + key = deserialize(decodedKeyBytes, keyType); + } catch (Exception e) { + throw new RuntimeException("Failed to deserialize Kafka record key.", e); + } + } + + if (eventRecord.getValue() != null) { + try { + byte[] decodedValueBytes = Base64.getDecoder().decode(eventRecord.getValue()); + valueSize = decodedValueBytes.length; + value = deserialize(decodedValueBytes, valueType); + } catch (Exception e) { + throw new RuntimeException("Failed to deserialize Kafka record value.", e); + } + } + + Headers headers = new RecordHeaders(); + if (eventRecord.getHeaders() != null) { + for (Map headerMap : eventRecord.getHeaders()) { + for (Map.Entry header : headerMap.entrySet()) { + if (header.getValue() != null) { + headers.add(header.getKey(), header.getValue()); + } + } + } + } + + return new ConsumerRecord<>( + topic, + eventRecord.getPartition(), + eventRecord.getOffset(), + eventRecord.getTimestamp(), + TimestampType.CREATE_TIME, + keySize, + valueSize, + key, + value, + headers, + Optional.empty()); + } + + /** + * Template method to be implemented by subclasses for specific deserialization logic + * for complex types (non-primitives). + * + * @param The type to deserialize to + * @param data The byte array to deserialize coming from the base64 decoded Kafka field + * @param type The class type to deserialize to + * @return The deserialized object + * @throws IOException If deserialization fails + */ + protected abstract T deserializeComplex(byte[] data, Class type) throws IOException; + + /** + * Main deserialize method that handles primitive types and delegates to subclasses for complex types. + * + * @param The type to deserialize to + * @param data The byte array to deserialize + * @param type The class type to deserialize to + * @return The deserialized object + * @throws IOException If deserialization fails + */ + private T deserialize(byte[] data, Class type) throws IOException { + // First try to deserialize as a primitive type + T result = deserializePrimitive(data, type); + if (result != null) { + return result; + } + + // Delegate to subclass for complex type deserialization + return deserializeComplex(data, type); + } + + /** + * Helper method for handling primitive types and String deserialization. 
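+ * Returning {@code null} rather than throwing lets the calling {@code deserialize} method fall back to the
+ * subclass-specific {@code deserializeComplex} implementation for any non-primitive type.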
+ * + * @param The type to deserialize to + * @param data The byte array to deserialize + * @param type The class type to deserialize to + * @return The deserialized primitive or String, or null if not a primitive or String + */ + @SuppressWarnings("unchecked") + private T deserializePrimitive(byte[] data, Class type) { + // Handle String type + if (type == String.class) { + return (T) new String(data); + } + + // Handle primitive types and their wrappers + String str = new String(data); + + if (type == Integer.class || type == int.class) { + return (T) Integer.valueOf(str); + } else if (type == Long.class || type == long.class) { + return (T) Long.valueOf(str); + } else if (type == Double.class || type == double.class) { + return (T) Double.valueOf(str); + } else if (type == Float.class || type == float.class) { + return (T) Float.valueOf(str); + } else if (type == Boolean.class || type == boolean.class) { + return (T) Boolean.valueOf(str); + } else if (type == Byte.class || type == byte.class) { + return (T) Byte.valueOf(str); + } else if (type == Short.class || type == short.class) { + return (T) Short.valueOf(str); + } else if (type == Character.class || type == char.class) { + if (!str.isEmpty()) { + return (T) Character.valueOf(str.charAt(0)); + } + throw new IllegalArgumentException("Cannot convert empty string to char"); + } + + return null; + } +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java new file mode 100644 index 000000000..c54bc3c26 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java @@ -0,0 +1,46 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.serializers; + +import java.io.IOException; + +import org.apache.avro.io.DatumReader; +import org.apache.avro.io.Decoder; +import org.apache.avro.io.DecoderFactory; +import org.apache.avro.specific.SpecificDatumReader; +import org.apache.avro.specific.SpecificRecordBase; + +/** + * Deserializer for Kafka records using Avro format. + */ +public class KafkaAvroDeserializer extends AbstractKafkaDeserializer { + + @Override + protected T deserializeComplex(byte[] data, Class type) throws IOException { + // If no Avro generated class is passed we cannot deserialize using Avro + if (SpecificRecordBase.class.isAssignableFrom(type)) { + try { + DatumReader datumReader = new SpecificDatumReader<>(type); + Decoder decoder = DecoderFactory.get().binaryDecoder(data, null); + + return datumReader.read(null, decoder); + } catch (Exception e) { + throw new IOException("Failed to deserialize Avro data.", e); + } + } else { + throw new IOException("Unsupported type for Avro deserialization: " + type.getName() + ". 
" + + "Avro deserialization requires a type of org.apache.avro.specific.SpecificRecord. " + + "Consider using an alternative Deserializer."); + } + } +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java new file mode 100644 index 000000000..0fbc64445 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.serializers; + +import java.io.IOException; + +/** + * Deserializer for Kafka records using JSON format. + */ +public class KafkaJsonDeserializer extends AbstractKafkaDeserializer { + + @Override + protected T deserializeComplex(byte[] data, Class type) throws IOException { + String decodedStr = new String(data); + return objectMapper.readValue(decodedStr, type); + } +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java new file mode 100644 index 000000000..a611759cf --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java @@ -0,0 +1,33 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.serializers; + +import java.io.InputStream; +import java.lang.reflect.Type; + +import com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory; + +public class LambdaDefaultDeserializer implements PowertoolsDeserializer { + + @SuppressWarnings("unchecked") + @Override + public T fromJson(InputStream input, Type type) { + return JacksonFactory.getInstance().getSerializer((Class) type).fromJson(input); + } + + @SuppressWarnings("unchecked") + @Override + public T fromJson(String input, Type type) { + return JacksonFactory.getInstance().getSerializer((Class) type).fromJson(input); + } +} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java new file mode 100644 index 000000000..7b4938647 --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.serializers; + +import java.io.InputStream; +import java.lang.reflect.Type; + +/** + * Interface for deserializers that can handle both String and InputStream inputs. + * + * Similar to {@link com.amazonaws.services.lambda.runtime.CustomPojoSerializer} but only for input serialization. + */ +public interface PowertoolsDeserializer { + T fromJson(InputStream input, Type type); + + T fromJson(String input, Type type); +} diff --git a/powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer b/powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer new file mode 100644 index 000000000..abc84b035 --- /dev/null +++ b/powertools-kafka/src/main/resources/META-INF/services/com.amazonaws.services.lambda.runtime.CustomPojoSerializer @@ -0,0 +1 @@ +software.amazon.lambda.powertools.kafka.PowertoolsSerializer From 680b97910e5f8b4790a49bfceb0001a97d712ff4 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Fri, 16 May 2025 14:47:11 +0200 Subject: [PATCH 03/38] Add TODOs in code. 
--- .../powertools/kafka/serializers/AbstractKafkaDeserializer.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index 92699b5e0..b12b88126 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -134,6 +134,7 @@ private ConsumerRecords convertToConsumerRecords(KafkaEvent kafkaEv } } + // TODO: Understand what nextOffsets is and if we need to use it. return new ConsumerRecords<>(recordsMap, Map.of()); } @@ -184,6 +185,7 @@ private ConsumerRecord convertToConsumerRecord( eventRecord.getPartition(), eventRecord.getOffset(), eventRecord.getTimestamp(), + // TODO: Do not hardcode this TimestampType.CREATE_TIME, keySize, valueSize, From 15f292352b86dfeb7336f2d650dca565f9c4817a Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Fri, 16 May 2025 16:27:05 +0200 Subject: [PATCH 04/38] Fix typos and make AbstractKafkaDeserializer package private. --- .../lambda/powertools/kafka/internal/DeserializationUtils.java | 2 +- .../powertools/kafka/serializers/AbstractKafkaDeserializer.java | 2 +- .../powertools/kafka/serializers/PowertoolsDeserializer.java | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java index 938a65221..b2704e5bb 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java @@ -55,7 +55,7 @@ public static DeserializationType determineDeserializationType() { } } } else { - LOGGER.warn("Candidate class for custom deserialization '{}'' does not implement RequestHandler. " + LOGGER.warn("Candidate class for custom deserialization '{}' does not implement RequestHandler. " + "Ignoring.", className); } } else { diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index b12b88126..a1f5cc6e0 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -36,7 +36,7 @@ /** * Abstract base class for Kafka deserializers that implements common functionality. 
*/ -public abstract class AbstractKafkaDeserializer implements PowertoolsDeserializer { +abstract class AbstractKafkaDeserializer implements PowertoolsDeserializer { protected static final ObjectMapper objectMapper = new ObjectMapper(); /** diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java index 7b4938647..1ac0ca0ba 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/PowertoolsDeserializer.java @@ -18,7 +18,7 @@ /** * Interface for deserializers that can handle both String and InputStream inputs. * - * Similar to {@link com.amazonaws.services.lambda.runtime.CustomPojoSerializer} but only for input serialization. + * Similar to {@link com.amazonaws.services.lambda.runtime.CustomPojoSerializer} but only for input deserialization. */ public interface PowertoolsDeserializer { T fromJson(InputStream input, Type type); From 6ac583ffddbf7f748dc4d7dbde6024a985c97dfe Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 21 May 2025 12:27:52 +0200 Subject: [PATCH 05/38] Remove request handler implementation in favor for @Deserialization annotation. --- .../kafka/KafkaAvroRequestHandler.java | 176 ------------------ .../kafka/KafkaJsonRequestHandler.java | 157 ---------------- 2 files changed, 333 deletions(-) delete mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java delete mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java deleted file mode 100644 index 8b477b769..000000000 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaAvroRequestHandler.java +++ /dev/null @@ -1,176 +0,0 @@ -package software.amazon.lambda.powertools.kafka; - -import java.io.IOException; -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.util.ArrayList; -import java.util.Base64; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.avro.io.DatumReader; -import org.apache.avro.io.Decoder; -import org.apache.avro.io.DecoderFactory; -import org.apache.avro.specific.SpecificDatumReader; -import org.apache.avro.specific.SpecificRecordBase; -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.common.TopicPartition; -import org.apache.kafka.common.header.Headers; -import org.apache.kafka.common.header.internals.RecordHeaders; -import org.apache.kafka.common.record.TimestampType; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; -import com.amazonaws.services.lambda.runtime.events.KafkaEvent; - -/** - * A request handler for processing Kafka events with Avro-encoded data. 
- * - * @param The type of the key in the Kafka record - * @param The type of the value in the Kafka record - * @param The return type of the handler - */ -public abstract class KafkaAvroRequestHandler implements RequestHandler { - private final Class keyType; - private final Class valueType; - - @SuppressWarnings("unchecked") - protected KafkaAvroRequestHandler() { - Type superClass = getClass().getGenericSuperclass(); - ParameterizedType parameterizedType = (ParameterizedType) superClass; - Type[] typeArguments = parameterizedType.getActualTypeArguments(); - this.keyType = (Class) typeArguments[0]; - this.valueType = (Class) typeArguments[1]; - } - - @Override - public R handleRequest(KafkaEvent input, Context context) { - if (input == null || input.getRecords() == null) { - return handleRecords(ConsumerRecords.empty(), context); - } - - Map>> recordsMap = new HashMap<>(); - - for (Map.Entry> entry : input.getRecords().entrySet()) { - String topic = entry.getKey(); - - for (KafkaEvent.KafkaEventRecord record : entry.getValue()) { - ConsumerRecord consumerRecord = convertToConsumerRecord(topic, record); - - TopicPartition topicPartition = new TopicPartition(topic, record.getPartition()); - recordsMap.computeIfAbsent(topicPartition, k -> new ArrayList<>()).add(consumerRecord); - } - } - - return handleRecords(new ConsumerRecords<>(recordsMap), context); - } - - private ConsumerRecord convertToConsumerRecord(String topic, KafkaEvent.KafkaEventRecord record) { - K key = null; - V value = null; - int keySize = ConsumerRecord.NULL_SIZE; - int valueSize = ConsumerRecord.NULL_SIZE; - - if (record.getKey() != null) { - try { - byte[] decodedKeyBytes = Base64.getDecoder().decode(record.getKey()); - keySize = decodedKeyBytes.length; - key = deserialize(decodedKeyBytes, keyType); - } catch (Exception e) { - throw new RuntimeException("Failed to deserialize Kafka record key.", e); - } - } - - if (record.getValue() != null) { - try { - byte[] decodedValueBytes = Base64.getDecoder().decode(record.getValue()); - valueSize = decodedValueBytes.length; - value = deserialize(decodedValueBytes, valueType); - } catch (Exception e) { - throw new RuntimeException("Failed to deserialize Kafka record value.", e); - } - } - - Headers headers = new RecordHeaders(); - if (record.getHeaders() != null) { - for (Map headerMap : record.getHeaders()) { - for (Map.Entry header : headerMap.entrySet()) { - if (header.getValue() != null) { - headers.add(header.getKey(), header.getValue()); - } - } - } - } - - return new ConsumerRecord<>( - topic, - record.getPartition(), - record.getOffset(), - record.getTimestamp(), - // TODO: Do not hardcode - TimestampType.CREATE_TIME, - keySize, - valueSize, - key, - value, - headers, - java.util.Optional.empty()); - } - - @SuppressWarnings("unchecked") - private T deserialize(byte[] data, Class type) throws IOException { - // Handle primitive types and String - if (type == String.class) { - return (T) new String(data); - } else if (type == Integer.class || type == int.class) { - return (T) Integer.valueOf(new String(data)); - } else if (type == Long.class || type == long.class) { - return (T) Long.valueOf(new String(data)); - } else if (type == Double.class || type == double.class) { - return (T) Double.valueOf(new String(data)); - } else if (type == Float.class || type == float.class) { - return (T) Float.valueOf(new String(data)); - } else if (type == Boolean.class || type == boolean.class) { - return (T) Boolean.valueOf(new String(data)); - } else if (type == Byte.class || type == 
byte.class) { - return (T) Byte.valueOf(new String(data)); - } else if (type == Short.class || type == short.class) { - return (T) Short.valueOf(new String(data)); - } else if (type == Character.class || type == char.class) { - String str = new String(data); - if (!str.isEmpty()) { - return (T) Character.valueOf(str.charAt(0)); - } - throw new IllegalArgumentException("Cannot convert empty string to char"); - } else if (SpecificRecordBase.class.isAssignableFrom(type)) { - // Handle Avro specific record - try { - // Create a datum reader for the Avro record - DatumReader datumReader = new SpecificDatumReader<>(type); - - // Create a binary decoder for the data - Decoder decoder = DecoderFactory.get().binaryDecoder(data, null); - - // Read and return the record - return datumReader.read(null, decoder); - } catch (Exception e) { - throw new IOException("Failed to deserialize Avro data", e); - } - } else { - throw new IOException("Unsupported type for Avro deserialization: " + type.getName() + ". " - + "Avro deserialization requires a type of org.apache.avro.specific.SpecificRecord. " - + "Consider using an alternative Deserializer."); - } - } - - /** - * Handle the Kafka records. - * - * @param records ConsumerRecords containing deserialized Kafka ConsumerRecord objects - * @param context Lambda context - * @return Response of type R - */ - public abstract R handleRecords(ConsumerRecords records, Context context); -} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java deleted file mode 100644 index 609831cf1..000000000 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/KafkaJsonRequestHandler.java +++ /dev/null @@ -1,157 +0,0 @@ -package software.amazon.lambda.powertools.kafka; - -import java.lang.reflect.ParameterizedType; -import java.lang.reflect.Type; -import java.util.ArrayList; -import java.util.Base64; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.apache.kafka.common.TopicPartition; -import org.apache.kafka.common.header.Headers; -import org.apache.kafka.common.header.internals.RecordHeaders; -import org.apache.kafka.common.record.TimestampType; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; -import com.amazonaws.services.lambda.runtime.events.KafkaEvent; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; - -public abstract class KafkaJsonRequestHandler implements RequestHandler { - private static final ObjectMapper objectMapper = new ObjectMapper(); - private final Class keyType; - private final Class valueType; - - @SuppressWarnings("unchecked") - protected KafkaJsonRequestHandler() { - Type superClass = getClass().getGenericSuperclass(); - ParameterizedType parameterizedType = (ParameterizedType) superClass; - Type[] typeArguments = parameterizedType.getActualTypeArguments(); - this.keyType = (Class) typeArguments[0]; - this.valueType = (Class) typeArguments[1]; - } - - @Override - public R handleRequest(KafkaEvent input, Context context) { - if (input == null || input.getRecords() == null) { - return handleRecords(ConsumerRecords.empty(), context); - } - - Map>> recordsMap = new HashMap<>(); - - for 
(Map.Entry> entry : input.getRecords().entrySet()) { - String topic = entry.getKey(); - - for (KafkaEvent.KafkaEventRecord record : entry.getValue()) { - ConsumerRecord consumerRecord = convertToConsumerRecord(topic, record); - - TopicPartition topicPartition = new TopicPartition(topic, record.getPartition()); - recordsMap.computeIfAbsent(topicPartition, k -> new ArrayList<>()).add(consumerRecord); - } - } - - return handleRecords(new ConsumerRecords<>(recordsMap), context); - } - - private ConsumerRecord convertToConsumerRecord(String topic, KafkaEvent.KafkaEventRecord record) { - K key = null; - V value = null; - int keySize = ConsumerRecord.NULL_SIZE; - int valueSize = ConsumerRecord.NULL_SIZE; - - if (record.getKey() != null) { - try { - byte[] decodedKeyBytes = Base64.getDecoder().decode(record.getKey()); - keySize = decodedKeyBytes.length; - key = deserialize(record.getKey(), keyType); - } catch (Exception e) { - throw new RuntimeException("Failed to deserialize Kafka record key.", e); - } - } - - if (record.getValue() != null) { - try { - byte[] decodedValueBytes = Base64.getDecoder().decode(record.getValue()); - valueSize = decodedValueBytes.length; - value = deserialize(record.getValue(), valueType); - } catch (Exception e) { - throw new RuntimeException("Failed to deserialize Kafka record value.", e); - } - } - - Headers headers = new RecordHeaders(); - if (record.getHeaders() != null) { - for (Map headerMap : record.getHeaders()) { - for (Map.Entry header : headerMap.entrySet()) { - if (header.getValue() != null) { - headers.add(header.getKey(), header.getValue()); - } - } - } - } - - return new ConsumerRecord<>( - topic, - record.getPartition(), - record.getOffset(), - record.getTimestamp(), - // TODO: Do not hardcode - TimestampType.CREATE_TIME, - keySize, - valueSize, - key, - value, - headers, - java.util.Optional.empty()); - } - - @SuppressWarnings("unchecked") - private T deserialize(String data, Class type) throws JsonProcessingException { - byte[] decodedBytes = Base64.getDecoder().decode(data); - - // Handle String type - if (type == String.class) { - return (T) new String(decodedBytes); - } - - // Handle primitive types and their wrappers - String decodedStr = new String(decodedBytes); - - if (type == Integer.class || type == int.class) { - return (T) Integer.valueOf(decodedStr); - } else if (type == Long.class || type == long.class) { - return (T) Long.valueOf(decodedStr); - } else if (type == Double.class || type == double.class) { - return (T) Double.valueOf(decodedStr); - } else if (type == Float.class || type == float.class) { - return (T) Float.valueOf(decodedStr); - } else if (type == Boolean.class || type == boolean.class) { - return (T) Boolean.valueOf(decodedStr); - } else if (type == Byte.class || type == byte.class) { - return (T) Byte.valueOf(decodedStr); - } else if (type == Short.class || type == short.class) { - return (T) Short.valueOf(decodedStr); - } else if (type == Character.class || type == char.class) { - if (decodedStr.length() > 0) { - return (T) Character.valueOf(decodedStr.charAt(0)); - } - throw new IllegalArgumentException("Cannot convert empty string to char"); - } else { - // For all other types, use Jackson ObjectMapper - return objectMapper.readValue(decodedStr, type); - } - } - - /** - * Handle the Kafka records. 
- * - * @param records ConsumerRecords containing deserialized Kafka ConsumerRecord objects - * @param context Lambda context - * @return Response of type R - */ - public abstract R handleRecords(ConsumerRecords records, Context context); -} From 92ae6ab1cd49cc9b8c67e7d08031aa0ec6b0711f Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 21 May 2025 12:37:21 +0200 Subject: [PATCH 06/38] Parse Timestamp type correctly. --- .../amazon/lambda/powertools/kafka/PowertoolsSerializer.java | 2 +- .../kafka/serializers/AbstractKafkaDeserializer.java | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java index 22c65f8ba..a5ed5b070 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java @@ -16,7 +16,7 @@ /** * Custom Lambda serializer supporting Kafka events. It delegates to the appropriate deserializer based on the - * deserialization type. + * deserialization type specified by {@link software.amazon.lambda.powertools.kafka.Deserialization} annotation. * * Kafka serializers need to be specified explicitly, otherwise, the default Lambda serializer from * {@link com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory} will be used. diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index a1f5cc6e0..3cdc69725 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -185,8 +185,7 @@ private ConsumerRecord convertToConsumerRecord( eventRecord.getPartition(), eventRecord.getOffset(), eventRecord.getTimestamp(), - // TODO: Do not hardcode this - TimestampType.CREATE_TIME, + TimestampType.valueOf(eventRecord.getTimestampType()), keySize, valueSize, key, From 8e13dd6d3fa1ae308e177855424a684f5c0c5fc2 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 21 May 2025 12:48:35 +0200 Subject: [PATCH 07/38] Remove custom RequestHandler implementation example. 
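[Editor's note] This patch drops the custom KafkaAvroRequestHandler/KafkaJsonRequestHandler base classes: handlers now implement the stock aws-lambda-java-core RequestHandler and opt into Powertools deserialization through the @Deserialization annotation, which the PowertoolsSerializer from the previous patch picks up. A condensed sketch of that handler style is shown below; it is not part of the patch, the class name is illustrative, and it assumes a simple Product POJO like the one used by the JSON example function.

```java
package org.demo.kafka;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;

import software.amazon.lambda.powertools.kafka.Deserialization;
import software.amazon.lambda.powertools.kafka.DeserializationType;

// Minimal sketch of the handler style this patch converges on: a plain RequestHandler whose
// input is already-deserialized ConsumerRecords, selected via the @Deserialization annotation.
public class MinimalJsonConsumerFunction implements RequestHandler<ConsumerRecords<String, Product>, String> {

    @Override
    @Deserialization(type = DeserializationType.KAFKA_JSON)
    public String handleRequest(ConsumerRecords<String, Product> records, Context context) {
        for (ConsumerRecord<String, Product> consumerRecord : records) {
            // Key and value arrive already typed; business logic goes here.
            System.out.println(consumerRecord.key() + " -> " + consumerRecord.value());
        }
        return "OK";
    }
}
```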
--- ...kaAvroConsumerDeserializationFunction.java | 10 +++- ...kaJsonConsumerDeserializationFunction.java | 14 +++-- ...kaAvroConsumerDeserializationFunction.java | 57 ------------------- ...kaJsonConsumerDeserializationFunction.java | 39 ------------- .../powertools-examples-kafka/template.yaml | 24 -------- .../serializers/KafkaJsonDeserializer.java | 1 + 6 files changed, 17 insertions(+), 128 deletions(-) delete mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java delete mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java index ab4d0b90d..a4dfb7732 100644 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java @@ -13,15 +13,18 @@ import org.slf4j.LoggerFactory; import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; import software.amazon.cloudwatchlogs.emf.model.Unit; -import software.amazon.lambda.powertools.kafka.KafkaAvroRequestHandler; +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; import software.amazon.lambda.powertools.logging.Logging; import software.amazon.lambda.powertools.metrics.Metrics; import software.amazon.lambda.powertools.metrics.MetricsUtils; -public class KafkaAvroConsumerDeserializationFunction extends KafkaAvroRequestHandler { +public class KafkaAvroConsumerDeserializationFunction + implements RequestHandler, String> { private static final Logger LOGGER = LoggerFactory.getLogger(KafkaAvroConsumerDeserializationFunction.class); private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); @@ -29,7 +32,8 @@ public class KafkaAvroConsumerDeserializationFunction extends KafkaAvroRequestHa @Override @Logging @Metrics - public String handleRecords(ConsumerRecords records, Context context) { + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord consumerRecord : records) { LOGGER.info("{}", consumerRecord, entry("value", avroToMap(consumerRecord.value()))); metrics.putMetric("ProcessedAvroRecord", 1, Unit.COUNT); diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java index caeb65f20..0922037bf 100644 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java @@ -8,24 +8,28 @@ import org.slf4j.LoggerFactory; import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; import software.amazon.cloudwatchlogs.emf.model.Unit; -import 
software.amazon.lambda.powertools.kafka.KafkaJsonRequestHandler; +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; import software.amazon.lambda.powertools.logging.Logging; import software.amazon.lambda.powertools.metrics.Metrics; import software.amazon.lambda.powertools.metrics.MetricsUtils; -public class KafkaJsonConsumerDeserializationFunction extends KafkaJsonRequestHandler { +public class KafkaJsonConsumerDeserializationFunction + implements RequestHandler, String> { private static final Logger LOGGER = LoggerFactory.getLogger(KafkaJsonConsumerDeserializationFunction.class); - private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); + private final MetricsLogger metrics = MetricsUtils.metricsLogger(); @Override @Logging @Metrics - public String handleRecords(ConsumerRecords records, Context context) { - for (ConsumerRecord consumerRecord : records) { + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords consumerRecords, Context context) { + for (ConsumerRecord consumerRecord : consumerRecords) { LOGGER.info("{}", consumerRecord, entry("value", consumerRecord.value())); metrics.putMetric("ProcessedRecord", 1, Unit.COUNT); } diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java deleted file mode 100644 index b5777315b..000000000 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaAvroConsumerDeserializationFunction.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.demo.kafka; - -import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.demo.kafka.avro.AvroProduct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; - -import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; -import software.amazon.cloudwatchlogs.emf.model.Unit; -import software.amazon.lambda.powertools.kafka.Deserialization; -import software.amazon.lambda.powertools.kafka.DeserializationType; -import software.amazon.lambda.powertools.logging.Logging; -import software.amazon.lambda.powertools.metrics.Metrics; -import software.amazon.lambda.powertools.metrics.MetricsUtils; - -public class NativeKafkaAvroConsumerDeserializationFunction - implements RequestHandler, String> { - - private static final Logger LOGGER = LoggerFactory.getLogger(NativeKafkaAvroConsumerDeserializationFunction.class); - private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); - - @Override - @Logging - @Metrics - @Deserialization(type = DeserializationType.KAFKA_AVRO) - public String handleRequest(ConsumerRecords records, Context context) { - for (ConsumerRecord consumerRecord : records) { - LOGGER.info("{}", consumerRecord, entry("value", avroToMap(consumerRecord.value()))); - metrics.putMetric("ProcessedAvroRecord", 1, Unit.COUNT); - } - - return "OK"; - } - - // TODO: Helper method because Avro objects cannot be serialized by the Jackson ObjectMapper used in the Logging - // module 
entry("value", consumerRecord.value()) would fallback to a string instead of native json object. - private Map avroToMap(AvroProduct avroProduct) { - if (avroProduct == null) { - return Collections.emptyMap(); - } - Map map = new HashMap<>(); - map.put("id", avroProduct.getId()); - map.put("name", avroProduct.getName()); - map.put("price", avroProduct.getPrice()); - return map; - } -} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java deleted file mode 100644 index c5b8c4f3d..000000000 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/NativeKafkaJsonConsumerDeserializationFunction.java +++ /dev/null @@ -1,39 +0,0 @@ -package org.demo.kafka; - -import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; - -import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; -import software.amazon.cloudwatchlogs.emf.model.Unit; -import software.amazon.lambda.powertools.kafka.Deserialization; -import software.amazon.lambda.powertools.kafka.DeserializationType; -import software.amazon.lambda.powertools.logging.Logging; -import software.amazon.lambda.powertools.metrics.Metrics; -import software.amazon.lambda.powertools.metrics.MetricsUtils; - -public class NativeKafkaJsonConsumerDeserializationFunction - implements RequestHandler, String> { - - private static final Logger LOGGER = LoggerFactory.getLogger(NativeKafkaJsonConsumerDeserializationFunction.class); - private final MetricsLogger metrics = MetricsUtils.metricsLogger(); - - @Override - @Logging - @Metrics - @Deserialization(type = DeserializationType.KAFKA_JSON) - public String handleRequest(ConsumerRecords consumerRecords, Context context) { - for (ConsumerRecord consumerRecord : consumerRecords) { - LOGGER.info("{}", consumerRecord, entry("value", consumerRecord.value())); - metrics.putMetric("ProcessedRecord", 1, Unit.COUNT); - } - - return "OK"; - } -} diff --git a/examples/powertools-examples-kafka/template.yaml b/examples/powertools-examples-kafka/template.yaml index 60e167eb9..d2ded572b 100644 --- a/examples/powertools-examples-kafka/template.yaml +++ b/examples/powertools-examples-kafka/template.yaml @@ -11,30 +11,6 @@ Globals: Tracing: Active Resources: - NativeKafkaJsonConsumerDeserializationFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: . - Handler: org.demo.kafka.NativeKafkaJsonConsumerDeserializationFunction::handleRequest - Environment: - Variables: - JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" - POWERTOOLS_LOG_LEVEL: DEBUG - POWERTOOLS_SERVICE_NAME: NativeKafkaJsonConsumerDeserialization - POWERTOOLS_METRICS_NAMESPACE: NativeKafkaJsonConsumerDeserialization - - NativeKafkaAvroConsumerDeserializationFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: . 
- Handler: org.demo.kafka.NativeKafkaAvroConsumerDeserializationFunction::handleRequest - Environment: - Variables: - JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" - POWERTOOLS_LOG_LEVEL: DEBUG - POWERTOOLS_SERVICE_NAME: NativeKafkaAvroConsumerDeserialization - POWERTOOLS_METRICS_NAMESPACE: NativeKafkaAvroConsumerDeserialization - KafkaJsonConsumerDeserializationFunction: Type: AWS::Serverless::Function Properties: diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java index 0fbc64445..d35c50de7 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java @@ -22,6 +22,7 @@ public class KafkaJsonDeserializer extends AbstractKafkaDeserializer { @Override protected T deserializeComplex(byte[] data, Class type) throws IOException { String decodedStr = new String(data); + return objectMapper.readValue(decodedStr, type); } } From 5ea6d49d5f692a0a7ba1b4a12b90b4b4be5735c9 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Mon, 26 May 2025 17:24:10 +0200 Subject: [PATCH 08/38] Make AspectJ version compatible with min version Java 11. --- examples/powertools-examples-kafka/pom.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml index 8f2228d1b..22ad6cb11 100644 --- a/examples/powertools-examples-kafka/pom.xml +++ b/examples/powertools-examples-kafka/pom.xml @@ -10,7 +10,7 @@ 11 11 - 1.9.24 + 1.9.20.1 1.12.0 From 73e64e499618bcba1a5dcc3894a38278bbbe3d78 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 27 May 2025 11:10:03 +0200 Subject: [PATCH 09/38] Clarify exception message when deserialization fails. 
--- .../kafka/serializers/AbstractKafkaDeserializer.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index 3cdc69725..f178fd988 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -68,7 +68,7 @@ public T fromJson(InputStream input, Type type) { // Convert KafkaEvent to ConsumerRecords return (T) convertToConsumerRecords(kafkaEvent, keyType, valueType); } catch (IOException e) { - throw new RuntimeException("Failed to deserialize JSON to ConsumerRecords", e); + throw new RuntimeException("Failed to deserialize Lambda handler input to ConsumerRecords", e); } } @@ -101,7 +101,7 @@ public T fromJson(String input, Type type) { // Convert KafkaEvent to ConsumerRecords return (T) convertToConsumerRecords(kafkaEvent, keyType, valueType); } catch (IOException e) { - throw new RuntimeException("Failed to deserialize JSON to ConsumerRecords", e); + throw new RuntimeException("Failed to deserialize Lambda handler input to ConsumerRecords", e); } } From cbe91816acb5e40f1333defb158aa083bafe687c Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 27 May 2025 12:18:57 +0200 Subject: [PATCH 10/38] Add more advanced JSON escpaing to JSONSerializer in logging module. --- .../logging/internal/LambdaEcsEncoderTest.java | 2 +- .../logging/internal/LambdaJsonEncoderTest.java | 2 +- .../powertools/logging/internal/JsonSerializer.java | 11 +++++++++-- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java index 5dcca2fb2..7e8977508 100644 --- a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java +++ b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaEcsEncoderTest.java @@ -150,7 +150,7 @@ void shouldLogException() { result = new String(encoded, StandardCharsets.UTF_8); // THEN (stack is logged with root cause first) - assertThat(result).contains("\"message\":\"Error\",\"error.message\":\"Unexpected value\",\"error.type\":\"java.lang.IllegalStateException\",\"error.stack_trace\":\"java.lang.IllegalStateException: Unexpected value\n"); + assertThat(result).contains("\"message\":\"Error\",\"error.message\":\"Unexpected value\",\"error.type\":\"java.lang.IllegalStateException\",\"error.stack_trace\":\"java.lang.IllegalStateException: Unexpected value\\n"); } private void setMDC() { diff --git a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java index 4a7067540..81e830045 100644 --- a/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java 
+++ b/powertools-logging/powertools-logging-logback/src/test/java/software/amazon/lambda/powertools/logging/internal/LambdaJsonEncoderTest.java @@ -419,7 +419,7 @@ void shouldLogException() { // THEN (stack is logged with root cause first) assertThat(result).contains("\"message\":\"Unexpected value\"") .contains("\"name\":\"java.lang.IllegalStateException\"") - .contains("\"stack\":\"java.lang.IllegalStateException: Unexpected value\n"); + .contains("\"stack\":\"java.lang.IllegalStateException: Unexpected value\\n"); } private void setupContext() { diff --git a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java index 82bc76a38..c69789519 100644 --- a/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java +++ b/powertools-logging/src/main/java/software/amazon/lambda/powertools/logging/internal/JsonSerializer.java @@ -84,8 +84,15 @@ public void writeString(String text) { if (text == null) { writeNull(); } else { - // Escape double quotes to avoid breaking JSON format - builder.append("\"").append(text.replace("\"", "\\\"")).append("\""); + // Escape special characters to avoid breaking JSON format + String escaped = text.replace("\\", "\\\\") + .replace("\"", "\\\"") + .replace("\n", "\\n") + .replace("\r", "\\r") + .replace("\t", "\\t") + .replace("\b", "\\b") + .replace("\f", "\\f"); + builder.append("\"").append(escaped).append("\""); } } From 64e7080668afd13c9f2eafcd96c1c3645b236784 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 27 May 2025 12:29:26 +0200 Subject: [PATCH 11/38] Add protobuf deserialization logic and fully working example. 
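[Editor's note] Before the protobuf additions below, a remark on the JsonSerializer escaping hunk in the patch above: the order of the replace calls is deliberate, since the backslash must be escaped before the control-character replacements, otherwise the backslashes those replacements introduce would themselves be doubled. The snippet below is a standalone re-statement of that replacement chain (not a call into the internal JsonSerializer API), and the stack frame in the example is illustrative.

```java
// Standalone re-statement of the replacement chain added to JsonSerializer.writeString above.
public class EscapeSketch {
    static String escape(String text) {
        return text.replace("\\", "\\\\")
                .replace("\"", "\\\"")
                .replace("\n", "\\n")
                .replace("\r", "\\r")
                .replace("\t", "\\t")
                .replace("\b", "\\b")
                .replace("\f", "\\f");
    }

    public static void main(String[] args) {
        // A multi-line stack trace becomes a single JSON string value, which is why the test
        // expectations above change from "... Unexpected value\n" to "... Unexpected value\\n".
        String stack = "java.lang.IllegalStateException: Unexpected value\n\tat com.example.Demo.run(Demo.java:42)";
        System.out.println("\"" + escape(stack) + "\"");
        // Prints: "java.lang.IllegalStateException: Unexpected value\n\tat com.example.Demo.run(Demo.java:42)"
    }
}
```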
--- .../events/kafka-protobuf-event.json | 51 ++ examples/powertools-examples-kafka/pom.xml | 27 + ...kaAvroConsumerDeserializationFunction.java | 4 +- ...otobufConsumerDeserializationFunction.java | 57 ++ .../demo/kafka/protobuf/ProtobufProduct.java | 636 ++++++++++++++++++ .../protobuf/ProtobufProductOrBuilder.java | 36 + .../protobuf/ProtobufProductOuterClass.java | 63 ++ .../src/main/proto/ProtobufProduct.proto | 13 + .../powertools-examples-kafka/template.yaml | 15 + .../powertools-examples-kafka/tools/README.md | 48 +- .../powertools-examples-kafka/tools/pom.xml | 44 +- .../java/org/demo/kafka/avro/AvroProduct.java | 1 - .../demo/kafka/protobuf/ProtobufProduct.java | 636 ++++++++++++++++++ .../protobuf/ProtobufProductOrBuilder.java | 36 + .../protobuf/ProtobufProductOuterClass.java | 63 ++ .../kafka/tools/GenerateProtobufSamples.java | 125 ++++ powertools-kafka/pom.xml | 23 +- .../powertools/kafka/DeserializationType.java | 2 +- .../kafka/PowertoolsSerializer.java | 2 + .../KafkaProtobufDeserializer.java | 43 ++ 20 files changed, 1899 insertions(+), 26 deletions(-) create mode 100644 examples/powertools-examples-kafka/events/kafka-protobuf-event.json create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java create mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java create mode 100644 examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java create mode 100644 powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java diff --git a/examples/powertools-examples-kafka/events/kafka-protobuf-event.json b/examples/powertools-examples-kafka/events/kafka-protobuf-event.json new file mode 100644 index 000000000..b3e0139e3 --- /dev/null +++ b/examples/powertools-examples-kafka/events/kafka-protobuf-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 
100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml index 22ad6cb11..37eeff610 100644 --- a/examples/powertools-examples-kafka/pom.xml +++ b/examples/powertools-examples-kafka/pom.xml @@ -12,6 +12,7 @@ 11 1.9.20.1 1.12.0 + 4.31.0 @@ -50,6 +51,11 @@ avro ${avro.version} + + com.google.protobuf + protobuf-java + ${protobuf.version} + @@ -115,6 +121,27 @@ + + io.github.ascopes + protobuf-maven-plugin + 3.3.0 + + + + generate + + generate-sources + + ${protobuf.version} + + ${project.basedir}/src/main/proto + + ${project.basedir}/src/main/java + false + + + + diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java index a4dfb7732..cf68ac8d7 100644 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java @@ -42,8 +42,8 @@ public String handleRequest(ConsumerRecords records, Contex return "OK"; } - // TODO: Helper method because Avro objects cannot be serialized by the Jackson ObjectMapper used in the Logging - // module entry("value", consumerRecord.value()) would fallback to a string instead of native json object. + // Avro objects cannot be serialized to JSON by Jackson Object Mapper used by powertools-logging. + // We convert to a map first to retrieve a meaningful representation. 
private Map avroToMap(AvroProduct avroProduct) { if (avroProduct == null) { return Collections.emptyMap(); diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java new file mode 100644 index 000000000..5fe048fab --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java @@ -0,0 +1,57 @@ +package org.demo.kafka; + +import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.demo.kafka.protobuf.ProtobufProduct; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; +import software.amazon.cloudwatchlogs.emf.model.Unit; +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; +import software.amazon.lambda.powertools.logging.Logging; +import software.amazon.lambda.powertools.metrics.Metrics; +import software.amazon.lambda.powertools.metrics.MetricsUtils; + +public class KafkaProtobufConsumerDeserializationFunction + implements RequestHandler, String> { + + private static final Logger LOGGER = LoggerFactory.getLogger(KafkaProtobufConsumerDeserializationFunction.class); + private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); + + @Override + @Logging + @Metrics + @Deserialization(type = DeserializationType.KAFKA_PROTOBUF) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord consumerRecord : records) { + LOGGER.info("{}", consumerRecord, entry("value", protobufToMap(consumerRecord.value()))); + metrics.putMetric("ProcessedProtobufRecord", 1, Unit.COUNT); + } + + return "OK"; + } + + // Protobuf Message objects cannot be serialized to JSON by Jackson Object Mapper used by powertools-logging. + // We convert to a map first to retrieve a meaningful representation. + private Map protobufToMap(ProtobufProduct protobufProduct) { + if (protobufProduct == null) { + return Collections.emptyMap(); + } + Map map = new HashMap<>(); + map.put("id", protobufProduct.getId()); + map.put("name", protobufProduct.getName()); + map.put("price", protobufProduct.getPrice()); + return map; + } +} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java new file mode 100644 index 000000000..6da9113fc --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java @@ -0,0 +1,636 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +/** + * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} + */ +@com.google.protobuf.Generated +public final class ProtobufProduct extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:org.demo.kafka.protobuf.ProtobufProduct) + ProtobufProductOrBuilder { +private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 31, + /* patch= */ 0, + /* suffix= */ "", + ProtobufProduct.class.getName()); + } + // Use ProtobufProduct.newBuilder() to construct. + private ProtobufProduct(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private ProtobufProduct() { + name_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); + } + + public static final int ID_FIELD_NUMBER = 1; + private int id_ = 0; + /** + * int32 id = 1; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + + public static final int NAME_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PRICE_FIELD_NUMBER = 3; + private double price_ = 0D; + /** + * double price = 3; + * @return The price. 
+ */ + @java.lang.Override + public double getPrice() { + return price_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (id_ != 0) { + output.writeInt32(1, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, name_); + } + if (java.lang.Double.doubleToRawLongBits(price_) != 0) { + output.writeDouble(3, price_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (id_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, name_); + } + if (java.lang.Double.doubleToRawLongBits(price_) != 0) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(3, price_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.demo.kafka.protobuf.ProtobufProduct)) { + return super.equals(obj); + } + org.demo.kafka.protobuf.ProtobufProduct other = (org.demo.kafka.protobuf.ProtobufProduct) obj; + + if (getId() + != other.getId()) return false; + if (!getName() + .equals(other.getName())) return false; + if (java.lang.Double.doubleToLongBits(getPrice()) + != java.lang.Double.doubleToLongBits( + other.getPrice())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + getId(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + PRICE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getPrice())); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.demo.kafka.protobuf.ProtobufProduct prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:org.demo.kafka.protobuf.ProtobufProduct) + org.demo.kafka.protobuf.ProtobufProductOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); + } + + // Construct using org.demo.kafka.protobuf.ProtobufProduct.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + id_ = 0; + name_ = ""; + price_ = 0D; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { + return org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance(); + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct build() { + org.demo.kafka.protobuf.ProtobufProduct result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct buildPartial() { + org.demo.kafka.protobuf.ProtobufProduct result = new org.demo.kafka.protobuf.ProtobufProduct(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.demo.kafka.protobuf.ProtobufProduct result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.id_ = id_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.price_ = price_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.demo.kafka.protobuf.ProtobufProduct) { + return mergeFrom((org.demo.kafka.protobuf.ProtobufProduct)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.demo.kafka.protobuf.ProtobufProduct other) { + if (other == org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance()) return this; + if (other.getId() != 0) { + setId(other.getId()); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (java.lang.Double.doubleToRawLongBits(other.getPrice()) != 0) { + setPrice(other.getPrice()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + id_ = input.readInt32(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 25: { + price_ = input.readDouble(); + bitField0_ |= 0x00000004; + break; + } // case 25 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int id_ ; + /** + * int32 id = 1; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + /** + * int32 id = 1; + * @param value The id to set. + * @return This builder for chaining. + */ + public Builder setId(int value) { + + id_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * int32 id = 1; + * @return This builder for chaining. + */ + public Builder clearId() { + bitField0_ = (bitField0_ & ~0x00000001); + id_ = 0; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 2; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string name = 2; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string name = 2; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private double price_ ; + /** + * double price = 3; + * @return The price. 
+ */ + @java.lang.Override + public double getPrice() { + return price_; + } + /** + * double price = 3; + * @param value The price to set. + * @return This builder for chaining. + */ + public Builder setPrice(double value) { + + price_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * double price = 3; + * @return This builder for chaining. + */ + public Builder clearPrice() { + bitField0_ = (bitField0_ & ~0x00000004); + price_ = 0D; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.demo.kafka.protobuf.ProtobufProduct) + } + + // @@protoc_insertion_point(class_scope:org.demo.kafka.protobuf.ProtobufProduct) + private static final org.demo.kafka.protobuf.ProtobufProduct DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.demo.kafka.protobuf.ProtobufProduct(); + } + + public static org.demo.kafka.protobuf.ProtobufProduct getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ProtobufProduct parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java new file mode 100644 index 000000000..9c1518db3 --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java @@ -0,0 +1,36 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +@com.google.protobuf.Generated +public interface ProtobufProductOrBuilder extends + // @@protoc_insertion_point(interface_extends:org.demo.kafka.protobuf.ProtobufProduct) + com.google.protobuf.MessageOrBuilder { + + /** + * int32 id = 1; + * @return The id. + */ + int getId(); + + /** + * string name = 2; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 2; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * double price = 3; + * @return The price. 
+ */ + double getPrice(); +} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java new file mode 100644 index 000000000..6a99f35ec --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java @@ -0,0 +1,63 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +@com.google.protobuf.Generated +public final class ProtobufProductOuterClass { + private ProtobufProductOuterClass() {} + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 31, + /* patch= */ 0, + /* suffix= */ "", + ProtobufProductOuterClass.class.getName()); + } + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\025ProtobufProduct.proto\022\027org.demo.kafka." 
+ + "protobuf\":\n\017ProtobufProduct\022\n\n\002id\030\001 \001(\005\022" + + "\014\n\004name\030\002 \001(\t\022\r\n\005price\030\003 \001(\001B6\n\027org.demo" + + ".kafka.protobufB\031ProtobufProductOuterCla" + + "ssP\001b\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor, + new java.lang.String[] { "Id", "Name", "Price", }); + descriptor.resolveAllFeaturesImmutable(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto b/examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto new file mode 100644 index 000000000..4d3338a6f --- /dev/null +++ b/examples/powertools-examples-kafka/src/main/proto/ProtobufProduct.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package org.demo.kafka.protobuf; + +option java_package = "org.demo.kafka.protobuf"; +option java_outer_classname = "ProtobufProductOuterClass"; +option java_multiple_files = true; + +message ProtobufProduct { + int32 id = 1; + string name = 2; + double price = 3; +} \ No newline at end of file diff --git a/examples/powertools-examples-kafka/template.yaml b/examples/powertools-examples-kafka/template.yaml index d2ded572b..e196ed508 100644 --- a/examples/powertools-examples-kafka/template.yaml +++ b/examples/powertools-examples-kafka/template.yaml @@ -35,6 +35,18 @@ Resources: POWERTOOLS_SERVICE_NAME: KafkaAvroConsumerDeserialization POWERTOOLS_METRICS_NAMESPACE: KafkaAvroConsumerDeserializationFunction + KafkaProtobufConsumerDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . + Handler: org.demo.kafka.KafkaProtobufConsumerDeserializationFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + POWERTOOLS_LOG_LEVEL: DEBUG + POWERTOOLS_SERVICE_NAME: KafkaProtobufConsumerDeserialization + POWERTOOLS_METRICS_NAMESPACE: KafkaProtobufConsumerDeserializationFunction + Outputs: JsonFunction: Description: "Kafka JSON Lambda Function ARN" @@ -42,3 +54,6 @@ Outputs: AvroFunction: Description: "Kafka Avro Lambda Function ARN" Value: !GetAtt KafkaAvroConsumerDeserializationFunction.Arn + ProtobufFunction: + Description: "Kafka Protobuf Lambda Function ARN" + Value: !GetAtt KafkaProtobufConsumerDeserializationFunction.Arn diff --git a/examples/powertools-examples-kafka/tools/README.md b/examples/powertools-examples-kafka/tools/README.md index 0afca7933..0497353ff 100644 --- a/examples/powertools-examples-kafka/tools/README.md +++ b/examples/powertools-examples-kafka/tools/README.md @@ -1,29 +1,55 @@ -# Avro Sample Generator Tool +# Kafka Sample Generator Tool -This tool generates base64-encoded Avro serialized products for testing the Kafka Avro consumer function. +This tool generates base64-encoded serialized products for testing the Kafka consumer functions with different serialization formats. 
+ +## Supported Formats + +- **Avro**: Generates base64-encoded Avro serialized products +- **Protobuf**: Generates base64-encoded Protobuf serialized products ## Usage Run the following Maven commands from this directory: ```bash -# Generate Avro classes from schema +# Generate Avro and Protobuf classes from schemas mvn generate-sources # Compile the code mvn compile +``` + +### Generate Avro Samples + +```bash +# Run the Avro sample generator +mvn exec:java -Dexec.mainClass="org.demo.kafka.tools.GenerateAvroSamples" +``` -# Run the tool -mvn exec:java +The tool will output base64-encoded values for Avro products that can be used in `../events/kafka-avro-event.json`. + +### Generate Protobuf Samples + +```bash +# Run the Protobuf sample generator +mvn exec:java -Dexec.mainClass="org.demo.kafka.tools.GenerateProtobufSamples" ``` -The tool will output base64-encoded values for three different Avro products and an integer key. -You can copy these values into the `../events/kafka-avro-event.json` file to create a test event. +The tool will output base64-encoded values for Protobuf products that can be used in `../events/kafka-protobuf-event.json`. ## Output -The tool generates: +Each generator produces: + +1. Three different products (Laptop, Smartphone, Headphones) +2. An integer key (42) and one entry with a nullish key to test for edge-cases +3. A complete sample event structure that can be used directly for testing + +## Example + +After generating the samples, you can copy the output into the respective event files: + +- `../events/kafka-avro-event.json` for Avro samples +- `../events/kafka-protobuf-event.json` for Protobuf samples -1. Three different Avro products (Laptop, Smartphone, Headphones) -2. An integer key (42) -3. A complete sample event structure that can be used directly +These event files can then be used to test the Lambda functions with the appropriate deserializer. 
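[Editor's note] The README above describes what the generators emit; as a rough illustration of the core step that produces those base64 strings, the sketch below builds one product, serializes it, and base64-encodes the bytes. It is only a sketch under stated assumptions (the class name and product values are illustrative, and the real tool is GenerateProtobufSamples, added later in this patch); it relies only on the generated ProtobufProduct builder shown above and the standard protobuf toByteArray() method.

```java
package org.demo.kafka.tools;

import java.nio.charset.StandardCharsets;
import java.util.Base64;

import org.demo.kafka.protobuf.ProtobufProduct;

// Sketch of how a base64 "value" for kafka-protobuf-event.json can be produced.
public class ProtobufSampleSketch {

    public static void main(String[] args) {
        ProtobufProduct laptop = ProtobufProduct.newBuilder()
                .setId(1001)            // illustrative values, not taken from the patch
                .setName("Laptop")
                .setPrice(999.99)
                .build();

        // Protobuf wire format -> base64, ready for the "value" field of the test event.
        String value = Base64.getEncoder().encodeToString(laptop.toByteArray());

        // Record keys in the sample events are base64 as well; "42" encodes to "NDI=".
        String key = Base64.getEncoder().encodeToString("42".getBytes(StandardCharsets.UTF_8));

        System.out.println("key:   " + key);
        System.out.println("value: " + value);
    }
}
```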
diff --git a/examples/powertools-examples-kafka/tools/pom.xml b/examples/powertools-examples-kafka/tools/pom.xml index 419473cee..b94be80f0 100644 --- a/examples/powertools-examples-kafka/tools/pom.xml +++ b/examples/powertools-examples-kafka/tools/pom.xml @@ -12,6 +12,7 @@ 11 11 1.12.0 + 4.31.0 @@ -20,6 +21,11 @@ avro ${avro.version} + + com.google.protobuf + protobuf-java + ${protobuf.version} + @@ -42,13 +48,45 @@ + + io.github.ascopes + protobuf-maven-plugin + 3.3.0 + + + + generate + + generate-sources + + ${protobuf.version} + + ${project.basedir}/../src/main/proto + + ${project.basedir}/src/main/java + false + + + + org.codehaus.mojo exec-maven-plugin 3.1.0 - - org.demo.kafka.tools.GenerateAvroSamples - + + + generate-avro-samples + + org.demo.kafka.tools.GenerateAvroSamples + + + + generate-protobuf-samples + + org.demo.kafka.tools.GenerateProtobufSamples + + + diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java index 37a7e2c61..fad7e2fbf 100644 --- a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/avro/AvroProduct.java @@ -5,7 +5,6 @@ */ package org.demo.kafka.avro; -import org.apache.avro.generic.GenericArray; import org.apache.avro.specific.SpecificData; import org.apache.avro.util.Utf8; import org.apache.avro.message.BinaryMessageEncoder; diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java new file mode 100644 index 000000000..6da9113fc --- /dev/null +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java @@ -0,0 +1,636 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +/** + * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} + */ +@com.google.protobuf.Generated +public final class ProtobufProduct extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:org.demo.kafka.protobuf.ProtobufProduct) + ProtobufProductOrBuilder { +private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 31, + /* patch= */ 0, + /* suffix= */ "", + ProtobufProduct.class.getName()); + } + // Use ProtobufProduct.newBuilder() to construct. 
+ private ProtobufProduct(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private ProtobufProduct() { + name_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); + } + + public static final int ID_FIELD_NUMBER = 1; + private int id_ = 0; + /** + * int32 id = 1; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + + public static final int NAME_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PRICE_FIELD_NUMBER = 3; + private double price_ = 0D; + /** + * double price = 3; + * @return The price. 
+ */ + @java.lang.Override + public double getPrice() { + return price_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (id_ != 0) { + output.writeInt32(1, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, name_); + } + if (java.lang.Double.doubleToRawLongBits(price_) != 0) { + output.writeDouble(3, price_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (id_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, name_); + } + if (java.lang.Double.doubleToRawLongBits(price_) != 0) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(3, price_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.demo.kafka.protobuf.ProtobufProduct)) { + return super.equals(obj); + } + org.demo.kafka.protobuf.ProtobufProduct other = (org.demo.kafka.protobuf.ProtobufProduct) obj; + + if (getId() + != other.getId()) return false; + if (!getName() + .equals(other.getName())) return false; + if (java.lang.Double.doubleToLongBits(getPrice()) + != java.lang.Double.doubleToLongBits( + other.getPrice())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + getId(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + PRICE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getPrice())); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.demo.kafka.protobuf.ProtobufProduct prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:org.demo.kafka.protobuf.ProtobufProduct) + org.demo.kafka.protobuf.ProtobufProductOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); + } + + // Construct using org.demo.kafka.protobuf.ProtobufProduct.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + id_ = 0; + name_ = ""; + price_ = 0D; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { + return org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance(); + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct build() { + org.demo.kafka.protobuf.ProtobufProduct result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct buildPartial() { + org.demo.kafka.protobuf.ProtobufProduct result = new org.demo.kafka.protobuf.ProtobufProduct(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.demo.kafka.protobuf.ProtobufProduct result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.id_ = id_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.price_ = price_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.demo.kafka.protobuf.ProtobufProduct) { + return mergeFrom((org.demo.kafka.protobuf.ProtobufProduct)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.demo.kafka.protobuf.ProtobufProduct other) { + if (other == org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance()) return this; + if (other.getId() != 0) { + setId(other.getId()); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (java.lang.Double.doubleToRawLongBits(other.getPrice()) != 0) { + setPrice(other.getPrice()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + id_ = input.readInt32(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 25: { + price_ = input.readDouble(); + bitField0_ |= 0x00000004; + break; + } // case 25 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int id_ ; + /** + * int32 id = 1; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + /** + * int32 id = 1; + * @param value The id to set. + * @return This builder for chaining. + */ + public Builder setId(int value) { + + id_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * int32 id = 1; + * @return This builder for chaining. + */ + public Builder clearId() { + bitField0_ = (bitField0_ & ~0x00000001); + id_ = 0; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 2; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string name = 2; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string name = 2; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private double price_ ; + /** + * double price = 3; + * @return The price. 
+ */ + @java.lang.Override + public double getPrice() { + return price_; + } + /** + * double price = 3; + * @param value The price to set. + * @return This builder for chaining. + */ + public Builder setPrice(double value) { + + price_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * double price = 3; + * @return This builder for chaining. + */ + public Builder clearPrice() { + bitField0_ = (bitField0_ & ~0x00000004); + price_ = 0D; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.demo.kafka.protobuf.ProtobufProduct) + } + + // @@protoc_insertion_point(class_scope:org.demo.kafka.protobuf.ProtobufProduct) + private static final org.demo.kafka.protobuf.ProtobufProduct DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.demo.kafka.protobuf.ProtobufProduct(); + } + + public static org.demo.kafka.protobuf.ProtobufProduct getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ProtobufProduct parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java new file mode 100644 index 000000000..9c1518db3 --- /dev/null +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java @@ -0,0 +1,36 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +@com.google.protobuf.Generated +public interface ProtobufProductOrBuilder extends + // @@protoc_insertion_point(interface_extends:org.demo.kafka.protobuf.ProtobufProduct) + com.google.protobuf.MessageOrBuilder { + + /** + * int32 id = 1; + * @return The id. + */ + int getId(); + + /** + * string name = 2; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 2; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * double price = 3; + * @return The price. 
+ */ + double getPrice(); +} diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java new file mode 100644 index 000000000..6a99f35ec --- /dev/null +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java @@ -0,0 +1,63 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +@com.google.protobuf.Generated +public final class ProtobufProductOuterClass { + private ProtobufProductOuterClass() {} + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 31, + /* patch= */ 0, + /* suffix= */ "", + ProtobufProductOuterClass.class.getName()); + } + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\025ProtobufProduct.proto\022\027org.demo.kafka." + + "protobuf\":\n\017ProtobufProduct\022\n\n\002id\030\001 \001(\005\022" + + "\014\n\004name\030\002 \001(\t\022\r\n\005price\030\003 \001(\001B6\n\027org.demo" + + ".kafka.protobufB\031ProtobufProductOuterCla" + + "ssP\001b\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor, + new java.lang.String[] { "Id", "Name", "Price", }); + descriptor.resolveAllFeaturesImmutable(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java new file mode 100644 index 000000000..ae078a28a --- /dev/null +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateProtobufSamples.java @@ -0,0 +1,125 @@ +package org.demo.kafka.tools; + +import java.io.IOException; +import java.util.Base64; + +import org.demo.kafka.protobuf.ProtobufProduct; + +/** + * Utility class to generate base64-encoded Protobuf serialized products + * for use in test events. 
+ */ +public class GenerateProtobufSamples { + + public static void main(String[] args) throws IOException { + // Create three different products + ProtobufProduct product1 = ProtobufProduct.newBuilder() + .setId(1001) + .setName("Laptop") + .setPrice(999.99) + .build(); + + ProtobufProduct product2 = ProtobufProduct.newBuilder() + .setId(1002) + .setName("Smartphone") + .setPrice(599.99) + .build(); + + ProtobufProduct product3 = ProtobufProduct.newBuilder() + .setId(1003) + .setName("Headphones") + .setPrice(149.99) + .build(); + + // Serialize and encode each product + String encodedProduct1 = serializeAndEncode(product1); + String encodedProduct2 = serializeAndEncode(product2); + String encodedProduct3 = serializeAndEncode(product3); + + // Serialize and encode an integer key + String encodedKey = serializeAndEncodeInteger(42); + + // Print the results + System.out.println("Base64 encoded Protobuf products for use in kafka-protobuf-event.json:"); + System.out.println("\nProduct 1 (with key):"); + System.out.println("key: \"" + encodedKey + "\","); + System.out.println("value: \"" + encodedProduct1 + "\","); + + System.out.println("\nProduct 2 (with key):"); + System.out.println("key: \"" + encodedKey + "\","); + System.out.println("value: \"" + encodedProduct2 + "\","); + + System.out.println("\nProduct 3 (without key):"); + System.out.println("key: null,"); + System.out.println("value: \"" + encodedProduct3 + "\","); + + // Print a sample event structure + System.out.println("\nSample event structure:"); + printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3); + } + + private static String serializeAndEncode(ProtobufProduct product) { + return Base64.getEncoder().encodeToString(product.toByteArray()); + } + + private static String serializeAndEncodeInteger(Integer value) { + // For simple types like integers, we'll just convert to string and encode + return Base64.getEncoder().encodeToString(value.toString().getBytes()); + } + + private static void printSampleEvent(String key, String product1, String product2, String product3) { + System.out.println("{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n" + + + " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n" + + + " \"records\": {\n" + + " \"mytopic-0\": [\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + key + "\",\n" + + " \"value\": \"" + product1 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 16,\n" + + " \"timestamp\": 1545084650988,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + key + "\",\n" + + " \"value\": \"" + product2 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 17,\n" + + " \"timestamp\": 1545084650989,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": \"" + 
product3 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"); + } +} diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml index 82b69cb45..df4e23979 100644 --- a/powertools-kafka/pom.xml +++ b/powertools-kafka/pom.xml @@ -30,16 +30,18 @@ Powertools for AWS Lambda (Java) - Kafka Consumer + + 4.0.0 + 1.12.0 + 4.31.0 + 1.1.5 + + org.slf4j slf4j-api - - org.aspectj - aspectjrt - provided - com.amazonaws aws-lambda-java-core @@ -51,12 +53,17 @@ org.apache.kafka kafka-clients - 4.0.0 + ${kafka-clients.version} org.apache.avro avro - 1.12.0 + ${avro.version} + + + com.google.protobuf + protobuf-java + ${protobuf.version} com.fasterxml.jackson.core @@ -65,7 +72,7 @@ com.amazonaws aws-lambda-java-serialization - 1.1.5 + ${lambda-serialization.version} diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java index f4f8177d6..a4ac95389 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/DeserializationType.java @@ -13,5 +13,5 @@ package software.amazon.lambda.powertools.kafka; public enum DeserializationType { - LAMBDA_DEFAULT, KAFKA_JSON, KAFKA_AVRO + LAMBDA_DEFAULT, KAFKA_JSON, KAFKA_AVRO, KAFKA_PROTOBUF } diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java index a5ed5b070..5a723721b 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java @@ -11,6 +11,7 @@ import software.amazon.lambda.powertools.kafka.internal.DeserializationUtils; import software.amazon.lambda.powertools.kafka.serializers.KafkaAvroDeserializer; import software.amazon.lambda.powertools.kafka.serializers.KafkaJsonDeserializer; +import software.amazon.lambda.powertools.kafka.serializers.KafkaProtobufDeserializer; import software.amazon.lambda.powertools.kafka.serializers.LambdaDefaultDeserializer; import software.amazon.lambda.powertools.kafka.serializers.PowertoolsDeserializer; @@ -25,6 +26,7 @@ public class PowertoolsSerializer implements CustomPojoSerializer { private static final Map DESERIALIZERS = Map.of( DeserializationType.KAFKA_JSON, new KafkaJsonDeserializer(), DeserializationType.KAFKA_AVRO, new KafkaAvroDeserializer(), + DeserializationType.KAFKA_PROTOBUF, new KafkaProtobufDeserializer(), DeserializationType.LAMBDA_DEFAULT, new LambdaDefaultDeserializer()); private final PowertoolsDeserializer deserializer; diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java new file mode 100644 index 000000000..a1db4e0db --- /dev/null +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java @@ -0,0 +1,43 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. 
+ * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.serializers; + +import java.io.IOException; +import com.google.protobuf.Message; +import com.google.protobuf.Parser; + +/** + * Deserializer for Kafka records using Protocol Buffers format. + */ +public class KafkaProtobufDeserializer extends AbstractKafkaDeserializer { + + @Override + @SuppressWarnings("unchecked") + protected T deserializeComplex(byte[] data, Class type) throws IOException { + // If no Protobuf generated class is passed we cannot deserialize using Protobuf + if (Message.class.isAssignableFrom(type)) { + try { + // Get the parser from the generated Protobuf class + Parser parser = (Parser) type.getMethod("parser").invoke(null); + Message message = parser.parseFrom(data); + return type.cast(message); + } catch (Exception e) { + throw new IOException("Failed to deserialize Protobuf data.", e); + } + } else { + throw new IOException("Unsupported type for Protobuf deserialization: " + type.getName() + ". " + + "Protobuf deserialization requires a type of com.google.protobuf.Message. " + + "Consider using an alternative Deserializer."); + } + } +} From f08142499c5c88b00af76c755f4e8407c2344f45 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 27 May 2025 13:03:05 +0200 Subject: [PATCH 12/38] Add Maven profile to compile a JAR with different dependency combinations. --- examples/powertools-examples-kafka/pom.xml | 94 ++++++++++++++++++++++ 1 file changed, 94 insertions(+) diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml index 37eeff610..4956505cb 100644 --- a/examples/powertools-examples-kafka/pom.xml +++ b/examples/powertools-examples-kafka/pom.xml @@ -69,6 +69,33 @@ true + + org.apache.maven.plugins + maven-shade-plugin + 3.6.0 + + + package + + shade + + + false + + + + + + + + + org.apache.logging.log4j + log4j-transform-maven-shade-plugin-extensions + 0.1.0 + + + dev.aspectj aspectj-maven-plugin @@ -146,4 +173,71 @@ + + + + base + + base + + + + + org.apache.avro + avro + ${avro.version} + provided + + + com.google.protobuf + protobuf-java + ${protobuf.version} + provided + + + + + + + avro-only + + avro-only + + + + com.google.protobuf + protobuf-java + ${protobuf.version} + provided + + + + + + + protobuf-only + + protobuf-only + + + + org.apache.avro + avro + ${avro.version} + provided + + + + + + + full + + true + + + full + + + From e11db9a45041e9dba116b6f2df67f8cda47d0006 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 28 May 2025 09:42:40 +0200 Subject: [PATCH 13/38] Add minimal kafka example. 
--- .../events/kafka-avro-event.json | 51 ++ .../events/kafka-json-event.json | 50 ++ .../events/kafka-protobuf-event.json | 51 ++ .../powertools-examples-kafka-minimal/pom.xml | 199 ++++++ .../src/main/avro/AvroProduct.avsc | 10 + .../java/org/demo/kafka/avro/AvroProduct.java | 476 +++++++++++++ .../KafkaAvroConsumerMinimalFunction.java | 29 + .../KafkaJsonConsumerMinimalFunction.java | 28 + .../KafkaProtobufConsumerMinimalFunction.java | 29 + .../demo/kafka/protobuf/ProtobufProduct.java | 636 ++++++++++++++++++ .../protobuf/ProtobufProductOrBuilder.java | 36 + .../protobuf/ProtobufProductOuterClass.java | 63 ++ .../src/main/proto/ProtobufProduct.proto | 13 + .../main/resources/simplelogger.properties | 6 + .../template.yaml | 50 ++ 15 files changed, 1727 insertions(+) create mode 100644 examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json create mode 100644 examples/powertools-examples-kafka-minimal/events/kafka-json-event.json create mode 100644 examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json create mode 100644 examples/powertools-examples-kafka-minimal/pom.xml create mode 100644 examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc create mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java create mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java create mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java create mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java create mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java create mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java create mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java create mode 100644 examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto create mode 100644 examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties create mode 100644 examples/powertools-examples-kafka-minimal/template.yaml diff --git a/examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json b/examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json new file mode 100644 index 000000000..8d6ef2210 --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "0g8MTGFwdG9wUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA", + "headers": [ + { + 
"headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "1g8USGVhZHBob25lc0jhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/examples/powertools-examples-kafka-minimal/events/kafka-json-event.json b/examples/powertools-examples-kafka-minimal/events/kafka-json-event.json new file mode 100644 index 000000000..d85c40654 --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/events/kafka-json-event.json @@ -0,0 +1,50 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "cmVjb3JkS2V5", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": null, + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json b/examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json new file mode 100644 index 000000000..b3e0139e3 --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git 
a/examples/powertools-examples-kafka-minimal/pom.xml b/examples/powertools-examples-kafka-minimal/pom.xml new file mode 100644 index 000000000..1a6ce1ebb --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/pom.xml @@ -0,0 +1,199 @@ + + + 4.0.0 + software.amazon.lambda.examples + 2.0.0-SNAPSHOT + powertools-examples-kafka-minimal + jar + Powertools for AWS Lambda (Java) - Examples - Kafka Minimal + + + 11 + 11 + 1.12.0 + 4.31.0 + 2.0.9 + + + + + + software.amazon.lambda + powertools-kafka + ${project.version} + + + com.amazonaws + aws-lambda-java-core + 1.2.3 + + + com.amazonaws + aws-lambda-java-events + 3.15.0 + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-simple + ${slf4j.version} + + + + org.apache.avro + avro + ${avro.version} + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + + + + + + org.apache.maven.plugins + maven-deploy-plugin + 3.1.2 + + true + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.6.0 + + + package + + shade + + + false + + + + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-sources + + schema + + + ${project.basedir}/src/main/avro/ + ${project.basedir}/src/main/java/ + String + + + + + + + io.github.ascopes + protobuf-maven-plugin + 3.3.0 + + + + generate + + generate-sources + + ${protobuf.version} + + ${project.basedir}/src/main/proto + + ${project.basedir}/src/main/java + false + + + + + + + + + + + base + + base + + + + + org.apache.avro + avro + ${avro.version} + provided + + + com.google.protobuf + protobuf-java + ${protobuf.version} + provided + + + + + + + avro-only + + avro-only + + + + com.google.protobuf + protobuf-java + ${protobuf.version} + provided + + + + + + + protobuf-only + + protobuf-only + + + + org.apache.avro + avro + ${avro.version} + provided + + + + + + + full + + true + + + full + + + + diff --git a/examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc b/examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc new file mode 100644 index 000000000..7155857ea --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "org.demo.kafka.avro", + "type": "record", + "name": "AvroProduct", + "fields": [ + {"name": "id", "type": "int"}, + {"name": "name", "type": "string"}, + {"name": "price", "type": "double"} + ] +} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java new file mode 100644 index 000000000..fad7e2fbf --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java @@ -0,0 +1,476 @@ +/** + * Autogenerated by Avro + * + * DO NOT EDIT DIRECTLY + */ +package org.demo.kafka.avro; + +import org.apache.avro.specific.SpecificData; +import org.apache.avro.util.Utf8; +import org.apache.avro.message.BinaryMessageEncoder; +import org.apache.avro.message.BinaryMessageDecoder; +import org.apache.avro.message.SchemaStore; + +@org.apache.avro.specific.AvroGenerated +public class AvroProduct extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { + private static final long serialVersionUID = -2929699301240218341L; + + + public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"org.demo.kafka.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"price\",\"type\":\"double\"}]}"); + public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } + + private static final SpecificData MODEL$ = new SpecificData(); + + private static final BinaryMessageEncoder ENCODER = + new BinaryMessageEncoder<>(MODEL$, SCHEMA$); + + private static final BinaryMessageDecoder DECODER = + new BinaryMessageDecoder<>(MODEL$, SCHEMA$); + + /** + * Return the BinaryMessageEncoder instance used by this class. + * @return the message encoder used by this class + */ + public static BinaryMessageEncoder getEncoder() { + return ENCODER; + } + + /** + * Return the BinaryMessageDecoder instance used by this class. + * @return the message decoder used by this class + */ + public static BinaryMessageDecoder getDecoder() { + return DECODER; + } + + /** + * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. + * @param resolver a {@link SchemaStore} used to find schemas by fingerprint + * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore + */ + public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { + return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); + } + + /** + * Serializes this AvroProduct to a ByteBuffer. + * @return a buffer holding the serialized data for this instance + * @throws java.io.IOException if this instance could not be serialized + */ + public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { + return ENCODER.encode(this); + } + + /** + * Deserializes a AvroProduct from a ByteBuffer. + * @param b a byte buffer holding serialized data for an instance of this class + * @return a AvroProduct instance decoded from the given buffer + * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class + */ + public static AvroProduct fromByteBuffer( + java.nio.ByteBuffer b) throws java.io.IOException { + return DECODER.decode(b); + } + + private int id; + private java.lang.String name; + private double price; + + /** + * Default constructor. Note that this does not initialize fields + * to their default values from the schema. If that is desired then + * one should use newBuilder(). + */ + public AvroProduct() {} + + /** + * All-args constructor. + * @param id The new value for id + * @param name The new value for name + * @param price The new value for price + */ + public AvroProduct(java.lang.Integer id, java.lang.String name, java.lang.Double price) { + this.id = id; + this.name = name; + this.price = price; + } + + @Override + public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } + + @Override + public org.apache.avro.Schema getSchema() { return SCHEMA$; } + + // Used by DatumWriter. Applications should not call. + @Override + public java.lang.Object get(int field$) { + switch (field$) { + case 0: return id; + case 1: return name; + case 2: return price; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + // Used by DatumReader. Applications should not call. 
+ @Override + @SuppressWarnings(value="unchecked") + public void put(int field$, java.lang.Object value$) { + switch (field$) { + case 0: id = (java.lang.Integer)value$; break; + case 1: name = value$ != null ? value$.toString() : null; break; + case 2: price = (java.lang.Double)value$; break; + default: throw new IndexOutOfBoundsException("Invalid index: " + field$); + } + } + + /** + * Gets the value of the 'id' field. + * @return The value of the 'id' field. + */ + public int getId() { + return id; + } + + + /** + * Sets the value of the 'id' field. + * @param value the value to set. + */ + public void setId(int value) { + this.id = value; + } + + /** + * Gets the value of the 'name' field. + * @return The value of the 'name' field. + */ + public java.lang.String getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value the value to set. + */ + public void setName(java.lang.String value) { + this.name = value; + } + + /** + * Gets the value of the 'price' field. + * @return The value of the 'price' field. + */ + public double getPrice() { + return price; + } + + + /** + * Sets the value of the 'price' field. + * @param value the value to set. + */ + public void setPrice(double value) { + this.price = value; + } + + /** + * Creates a new AvroProduct RecordBuilder. + * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder() { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } + + /** + * Creates a new AvroProduct RecordBuilder by copying an existing Builder. + * @param other The existing builder to copy. + * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct.Builder other) { + if (other == null) { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } else { + return new org.demo.kafka.avro.AvroProduct.Builder(other); + } + } + + /** + * Creates a new AvroProduct RecordBuilder by copying an existing AvroProduct instance. + * @param other The existing instance to copy. + * @return A new AvroProduct RecordBuilder + */ + public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct other) { + if (other == null) { + return new org.demo.kafka.avro.AvroProduct.Builder(); + } else { + return new org.demo.kafka.avro.AvroProduct.Builder(other); + } + } + + /** + * RecordBuilder for AvroProduct instances. + */ + @org.apache.avro.specific.AvroGenerated + public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase + implements org.apache.avro.data.RecordBuilder { + + private int id; + private java.lang.String name; + private double price; + + /** Creates a new Builder */ + private Builder() { + super(SCHEMA$, MODEL$); + } + + /** + * Creates a Builder by copying an existing Builder. + * @param other The existing Builder to copy. 
+ */ + private Builder(org.demo.kafka.avro.AvroProduct.Builder other) { + super(other); + if (isValidValue(fields()[0], other.id)) { + this.id = data().deepCopy(fields()[0].schema(), other.id); + fieldSetFlags()[0] = other.fieldSetFlags()[0]; + } + if (isValidValue(fields()[1], other.name)) { + this.name = data().deepCopy(fields()[1].schema(), other.name); + fieldSetFlags()[1] = other.fieldSetFlags()[1]; + } + if (isValidValue(fields()[2], other.price)) { + this.price = data().deepCopy(fields()[2].schema(), other.price); + fieldSetFlags()[2] = other.fieldSetFlags()[2]; + } + } + + /** + * Creates a Builder by copying an existing AvroProduct instance + * @param other The existing instance to copy. + */ + private Builder(org.demo.kafka.avro.AvroProduct other) { + super(SCHEMA$, MODEL$); + if (isValidValue(fields()[0], other.id)) { + this.id = data().deepCopy(fields()[0].schema(), other.id); + fieldSetFlags()[0] = true; + } + if (isValidValue(fields()[1], other.name)) { + this.name = data().deepCopy(fields()[1].schema(), other.name); + fieldSetFlags()[1] = true; + } + if (isValidValue(fields()[2], other.price)) { + this.price = data().deepCopy(fields()[2].schema(), other.price); + fieldSetFlags()[2] = true; + } + } + + /** + * Gets the value of the 'id' field. + * @return The value. + */ + public int getId() { + return id; + } + + + /** + * Sets the value of the 'id' field. + * @param value The value of 'id'. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder setId(int value) { + validate(fields()[0], value); + this.id = value; + fieldSetFlags()[0] = true; + return this; + } + + /** + * Checks whether the 'id' field has been set. + * @return True if the 'id' field has been set, false otherwise. + */ + public boolean hasId() { + return fieldSetFlags()[0]; + } + + + /** + * Clears the value of the 'id' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearId() { + fieldSetFlags()[0] = false; + return this; + } + + /** + * Gets the value of the 'name' field. + * @return The value. + */ + public java.lang.String getName() { + return name; + } + + + /** + * Sets the value of the 'name' field. + * @param value The value of 'name'. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder setName(java.lang.String value) { + validate(fields()[1], value); + this.name = value; + fieldSetFlags()[1] = true; + return this; + } + + /** + * Checks whether the 'name' field has been set. + * @return True if the 'name' field has been set, false otherwise. + */ + public boolean hasName() { + return fieldSetFlags()[1]; + } + + + /** + * Clears the value of the 'name' field. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder clearName() { + name = null; + fieldSetFlags()[1] = false; + return this; + } + + /** + * Gets the value of the 'price' field. + * @return The value. + */ + public double getPrice() { + return price; + } + + + /** + * Sets the value of the 'price' field. + * @param value The value of 'price'. + * @return This builder. + */ + public org.demo.kafka.avro.AvroProduct.Builder setPrice(double value) { + validate(fields()[2], value); + this.price = value; + fieldSetFlags()[2] = true; + return this; + } + + /** + * Checks whether the 'price' field has been set. + * @return True if the 'price' field has been set, false otherwise. + */ + public boolean hasPrice() { + return fieldSetFlags()[2]; + } + + + /** + * Clears the value of the 'price' field. + * @return This builder. 
+ */ + public org.demo.kafka.avro.AvroProduct.Builder clearPrice() { + fieldSetFlags()[2] = false; + return this; + } + + @Override + @SuppressWarnings("unchecked") + public AvroProduct build() { + try { + AvroProduct record = new AvroProduct(); + record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]); + record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]); + record.price = fieldSetFlags()[2] ? this.price : (java.lang.Double) defaultValue(fields()[2]); + return record; + } catch (org.apache.avro.AvroMissingFieldException e) { + throw e; + } catch (java.lang.Exception e) { + throw new org.apache.avro.AvroRuntimeException(e); + } + } + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumWriter + WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); + + @Override public void writeExternal(java.io.ObjectOutput out) + throws java.io.IOException { + WRITER$.write(this, SpecificData.getEncoder(out)); + } + + @SuppressWarnings("unchecked") + private static final org.apache.avro.io.DatumReader + READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); + + @Override public void readExternal(java.io.ObjectInput in) + throws java.io.IOException { + READER$.read(this, SpecificData.getDecoder(in)); + } + + @Override protected boolean hasCustomCoders() { return true; } + + @Override public void customEncode(org.apache.avro.io.Encoder out) + throws java.io.IOException + { + out.writeInt(this.id); + + out.writeString(this.name); + + out.writeDouble(this.price); + + } + + @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) + throws java.io.IOException + { + org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); + if (fieldOrder == null) { + this.id = in.readInt(); + + this.name = in.readString(); + + this.price = in.readDouble(); + + } else { + for (int i = 0; i < 3; i++) { + switch (fieldOrder[i].pos()) { + case 0: + this.id = in.readInt(); + break; + + case 1: + this.name = in.readString(); + break; + + case 2: + this.price = in.readDouble(); + break; + + default: + throw new java.io.IOException("Corrupt ResolvingDecoder."); + } + } + } + } +} + + + + + + + + + + diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java new file mode 100644 index 000000000..124173ace --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java @@ -0,0 +1,29 @@ +package org.demo.kafka.minimal; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.demo.kafka.avro.AvroProduct; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; + +public class KafkaAvroConsumerMinimalFunction + implements RequestHandler, String> { + + private static final Logger LOGGER = LoggerFactory.getLogger(KafkaAvroConsumerMinimalFunction.class); + + @Override + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords records, 
Context context) { + for (ConsumerRecord consumerRecord : records) { + LOGGER.info("Received record: {}", consumerRecord); + } + + return "OK"; + } +} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java new file mode 100644 index 000000000..cfd3ab81e --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java @@ -0,0 +1,28 @@ +package org.demo.kafka.minimal; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; + +public class KafkaJsonConsumerMinimalFunction + implements RequestHandler, String> { + + private static final Logger LOGGER = LoggerFactory.getLogger(KafkaJsonConsumerMinimalFunction.class); + + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord consumerRecord : records) { + LOGGER.info("Received record: {}", consumerRecord); + } + + return "OK"; + } +} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java new file mode 100644 index 000000000..6fef249ba --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java @@ -0,0 +1,29 @@ +package org.demo.kafka.minimal; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.demo.kafka.protobuf.ProtobufProduct; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; + +public class KafkaProtobufConsumerMinimalFunction + implements RequestHandler, String> { + + private static final Logger LOGGER = LoggerFactory.getLogger(KafkaProtobufConsumerMinimalFunction.class); + + @Override + @Deserialization(type = DeserializationType.KAFKA_PROTOBUF) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord consumerRecord : records) { + LOGGER.info("Received record: {}", consumerRecord); + } + + return "OK"; + } +} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java new file mode 100644 index 000000000..6da9113fc --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java @@ -0,0 +1,636 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! 
+// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +/** + * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} + */ +@com.google.protobuf.Generated +public final class ProtobufProduct extends + com.google.protobuf.GeneratedMessage implements + // @@protoc_insertion_point(message_implements:org.demo.kafka.protobuf.ProtobufProduct) + ProtobufProductOrBuilder { +private static final long serialVersionUID = 0L; + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 31, + /* patch= */ 0, + /* suffix= */ "", + ProtobufProduct.class.getName()); + } + // Use ProtobufProduct.newBuilder() to construct. + private ProtobufProduct(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + } + private ProtobufProduct() { + name_ = ""; + } + + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); + } + + public static final int ID_FIELD_NUMBER = 1; + private int id_ = 0; + /** + * int32 id = 1; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + + public static final int NAME_FIELD_NUMBER = 2; + @SuppressWarnings("serial") + private volatile java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + @java.lang.Override + public java.lang.String getName() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + @java.lang.Override + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + public static final int PRICE_FIELD_NUMBER = 3; + private double price_ = 0D; + /** + * double price = 3; + * @return The price. 
+ */ + @java.lang.Override + public double getPrice() { + return price_; + } + + private byte memoizedIsInitialized = -1; + @java.lang.Override + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized == 1) return true; + if (isInitialized == 0) return false; + + memoizedIsInitialized = 1; + return true; + } + + @java.lang.Override + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + if (id_ != 0) { + output.writeInt32(1, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + com.google.protobuf.GeneratedMessage.writeString(output, 2, name_); + } + if (java.lang.Double.doubleToRawLongBits(price_) != 0) { + output.writeDouble(3, price_); + } + getUnknownFields().writeTo(output); + } + + @java.lang.Override + public int getSerializedSize() { + int size = memoizedSize; + if (size != -1) return size; + + size = 0; + if (id_ != 0) { + size += com.google.protobuf.CodedOutputStream + .computeInt32Size(1, id_); + } + if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { + size += com.google.protobuf.GeneratedMessage.computeStringSize(2, name_); + } + if (java.lang.Double.doubleToRawLongBits(price_) != 0) { + size += com.google.protobuf.CodedOutputStream + .computeDoubleSize(3, price_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSize = size; + return size; + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.demo.kafka.protobuf.ProtobufProduct)) { + return super.equals(obj); + } + org.demo.kafka.protobuf.ProtobufProduct other = (org.demo.kafka.protobuf.ProtobufProduct) obj; + + if (getId() + != other.getId()) return false; + if (!getName() + .equals(other.getName())) return false; + if (java.lang.Double.doubleToLongBits(getPrice()) + != java.lang.Double.doubleToLongBits( + other.getPrice())) return false; + if (!getUnknownFields().equals(other.getUnknownFields())) return false; + return true; + } + + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptor().hashCode(); + hash = (37 * hash) + ID_FIELD_NUMBER; + hash = (53 * hash) + getId(); + hash = (37 * hash) + NAME_FIELD_NUMBER; + hash = (53 * hash) + getName().hashCode(); + hash = (37 * hash) + PRICE_FIELD_NUMBER; + hash = (53 * hash) + com.google.protobuf.Internal.hashLong( + java.lang.Double.doubleToLongBits(getPrice())); + hash = (29 * hash) + getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.nio.ByteBuffer data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.nio.ByteBuffer data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws 
com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input); + } + + public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseDelimitedWithIOException(PARSER, input, extensionRegistry); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input); + } + public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return com.google.protobuf.GeneratedMessage + .parseWithIOException(PARSER, input, extensionRegistry); + } + + @java.lang.Override + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder() { + return DEFAULT_INSTANCE.toBuilder(); + } + public static Builder newBuilder(org.demo.kafka.protobuf.ProtobufProduct prototype) { + return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); + } + @java.lang.Override + public Builder toBuilder() { + return this == DEFAULT_INSTANCE + ? 
new Builder() : new Builder().mergeFrom(this); + } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} + */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder implements + // @@protoc_insertion_point(builder_implements:org.demo.kafka.protobuf.ProtobufProduct) + org.demo.kafka.protobuf.ProtobufProductOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); + } + + // Construct using org.demo.kafka.protobuf.ProtobufProduct.newBuilder() + private Builder() { + + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + + } + @java.lang.Override + public Builder clear() { + super.clear(); + bitField0_ = 0; + id_ = 0; + name_ = ""; + price_ = 0D; + return this; + } + + @java.lang.Override + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { + return org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance(); + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct build() { + org.demo.kafka.protobuf.ProtobufProduct result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct buildPartial() { + org.demo.kafka.protobuf.ProtobufProduct result = new org.demo.kafka.protobuf.ProtobufProduct(this); + if (bitField0_ != 0) { buildPartial0(result); } + onBuilt(); + return result; + } + + private void buildPartial0(org.demo.kafka.protobuf.ProtobufProduct result) { + int from_bitField0_ = bitField0_; + if (((from_bitField0_ & 0x00000001) != 0)) { + result.id_ = id_; + } + if (((from_bitField0_ & 0x00000002) != 0)) { + result.name_ = name_; + } + if (((from_bitField0_ & 0x00000004) != 0)) { + result.price_ = price_; + } + } + + @java.lang.Override + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.demo.kafka.protobuf.ProtobufProduct) { + return mergeFrom((org.demo.kafka.protobuf.ProtobufProduct)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.demo.kafka.protobuf.ProtobufProduct other) { + if (other == org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance()) return this; + if (other.getId() != 0) { + setId(other.getId()); + } + if (!other.getName().isEmpty()) { + name_ = other.name_; + bitField0_ |= 0x00000002; + onChanged(); + } + if (java.lang.Double.doubleToRawLongBits(other.getPrice()) != 0) { + setPrice(other.getPrice()); + } + 
this.mergeUnknownFields(other.getUnknownFields()); + onChanged(); + return this; + } + + @java.lang.Override + public final boolean isInitialized() { + return true; + } + + @java.lang.Override + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + if (extensionRegistry == null) { + throw new java.lang.NullPointerException(); + } + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + case 8: { + id_ = input.readInt32(); + bitField0_ |= 0x00000001; + break; + } // case 8 + case 18: { + name_ = input.readStringRequireUtf8(); + bitField0_ |= 0x00000002; + break; + } // case 18 + case 25: { + price_ = input.readDouble(); + bitField0_ |= 0x00000004; + break; + } // case 25 + default: { + if (!super.parseUnknownField(input, extensionRegistry, tag)) { + done = true; // was an endgroup tag + } + break; + } // default: + } // switch (tag) + } // while (!done) + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.unwrapIOException(); + } finally { + onChanged(); + } // finally + return this; + } + private int bitField0_; + + private int id_ ; + /** + * int32 id = 1; + * @return The id. + */ + @java.lang.Override + public int getId() { + return id_; + } + /** + * int32 id = 1; + * @param value The id to set. + * @return This builder for chaining. + */ + public Builder setId(int value) { + + id_ = value; + bitField0_ |= 0x00000001; + onChanged(); + return this; + } + /** + * int32 id = 1; + * @return This builder for chaining. + */ + public Builder clearId() { + bitField0_ = (bitField0_ & ~0x00000001); + id_ = 0; + onChanged(); + return this; + } + + private java.lang.Object name_ = ""; + /** + * string name = 2; + * @return The name. + */ + public java.lang.String getName() { + java.lang.Object ref = name_; + if (!(ref instanceof java.lang.String)) { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + name_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * string name = 2; + * @return The bytes for name. + */ + public com.google.protobuf.ByteString + getNameBytes() { + java.lang.Object ref = name_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + name_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * string name = 2; + * @param value The name to set. + * @return This builder for chaining. + */ + public Builder setName( + java.lang.String value) { + if (value == null) { throw new NullPointerException(); } + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + /** + * string name = 2; + * @return This builder for chaining. + */ + public Builder clearName() { + name_ = getDefaultInstance().getName(); + bitField0_ = (bitField0_ & ~0x00000002); + onChanged(); + return this; + } + /** + * string name = 2; + * @param value The bytes for name to set. + * @return This builder for chaining. + */ + public Builder setNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { throw new NullPointerException(); } + checkByteStringIsUtf8(value); + name_ = value; + bitField0_ |= 0x00000002; + onChanged(); + return this; + } + + private double price_ ; + /** + * double price = 3; + * @return The price. 
+ */ + @java.lang.Override + public double getPrice() { + return price_; + } + /** + * double price = 3; + * @param value The price to set. + * @return This builder for chaining. + */ + public Builder setPrice(double value) { + + price_ = value; + bitField0_ |= 0x00000004; + onChanged(); + return this; + } + /** + * double price = 3; + * @return This builder for chaining. + */ + public Builder clearPrice() { + bitField0_ = (bitField0_ & ~0x00000004); + price_ = 0D; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:org.demo.kafka.protobuf.ProtobufProduct) + } + + // @@protoc_insertion_point(class_scope:org.demo.kafka.protobuf.ProtobufProduct) + private static final org.demo.kafka.protobuf.ProtobufProduct DEFAULT_INSTANCE; + static { + DEFAULT_INSTANCE = new org.demo.kafka.protobuf.ProtobufProduct(); + } + + public static org.demo.kafka.protobuf.ProtobufProduct getDefaultInstance() { + return DEFAULT_INSTANCE; + } + + private static final com.google.protobuf.Parser + PARSER = new com.google.protobuf.AbstractParser() { + @java.lang.Override + public ProtobufProduct parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + Builder builder = newBuilder(); + try { + builder.mergeFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(builder.buildPartial()); + } catch (com.google.protobuf.UninitializedMessageException e) { + throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); + } catch (java.io.IOException e) { + throw new com.google.protobuf.InvalidProtocolBufferException(e) + .setUnfinishedMessage(builder.buildPartial()); + } + return builder.buildPartial(); + } + }; + + public static com.google.protobuf.Parser parser() { + return PARSER; + } + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + @java.lang.Override + public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { + return DEFAULT_INSTANCE; + } + +} + diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java new file mode 100644 index 000000000..9c1518db3 --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java @@ -0,0 +1,36 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +@com.google.protobuf.Generated +public interface ProtobufProductOrBuilder extends + // @@protoc_insertion_point(interface_extends:org.demo.kafka.protobuf.ProtobufProduct) + com.google.protobuf.MessageOrBuilder { + + /** + * int32 id = 1; + * @return The id. + */ + int getId(); + + /** + * string name = 2; + * @return The name. + */ + java.lang.String getName(); + /** + * string name = 2; + * @return The bytes for name. + */ + com.google.protobuf.ByteString + getNameBytes(); + + /** + * double price = 3; + * @return The price. 
+ */ + double getPrice(); +} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java new file mode 100644 index 000000000..6a99f35ec --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java @@ -0,0 +1,63 @@ +// Generated by the protocol buffer compiler. DO NOT EDIT! +// NO CHECKED-IN PROTOBUF GENCODE +// source: ProtobufProduct.proto +// Protobuf Java Version: 4.31.0 + +package org.demo.kafka.protobuf; + +@com.google.protobuf.Generated +public final class ProtobufProductOuterClass { + private ProtobufProductOuterClass() {} + static { + com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( + com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, + /* major= */ 4, + /* minor= */ 31, + /* patch= */ 0, + /* suffix= */ "", + ProtobufProductOuterClass.class.getName()); + } + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistryLite registry) { + } + + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + registerAllExtensions( + (com.google.protobuf.ExtensionRegistryLite) registry); + } + static final com.google.protobuf.Descriptors.Descriptor + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; + static final + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\025ProtobufProduct.proto\022\027org.demo.kafka." 
+ + "protobuf\":\n\017ProtobufProduct\022\n\n\002id\030\001 \001(\005\022" + + "\014\n\004name\030\002 \001(\t\022\r\n\005price\030\003 \001(\001B6\n\027org.demo" + + ".kafka.protobufB\031ProtobufProductOuterCla" + + "ssP\001b\006proto3" + }; + descriptor = com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }); + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor, + new java.lang.String[] { "Id", "Name", "Price", }); + descriptor.resolveAllFeaturesImmutable(); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto b/examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto new file mode 100644 index 000000000..4d3338a6f --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package org.demo.kafka.protobuf; + +option java_package = "org.demo.kafka.protobuf"; +option java_outer_classname = "ProtobufProductOuterClass"; +option java_multiple_files = true; + +message ProtobufProduct { + int32 id = 1; + string name = 2; + double price = 3; +} \ No newline at end of file diff --git a/examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties b/examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties new file mode 100644 index 000000000..c4610d4ff --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties @@ -0,0 +1,6 @@ +org.slf4j.simpleLogger.defaultLogLevel=info +org.slf4j.simpleLogger.showDateTime=true +org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS +org.slf4j.simpleLogger.showThreadName=true +org.slf4j.simpleLogger.showLogName=true +org.slf4j.simpleLogger.showShortLogName=false \ No newline at end of file diff --git a/examples/powertools-examples-kafka-minimal/template.yaml b/examples/powertools-examples-kafka-minimal/template.yaml new file mode 100644 index 000000000..73b5933ca --- /dev/null +++ b/examples/powertools-examples-kafka-minimal/template.yaml @@ -0,0 +1,50 @@ +AWSTemplateFormatVersion: "2010-09-09" +Transform: AWS::Serverless-2016-10-31 +Description: > + Minimal Kafka Deserialization example with Kafka Lambda ESM + +Globals: + Function: + Timeout: 20 + Runtime: java11 + MemorySize: 512 + Tracing: Active + +Resources: + KafkaJsonConsumerMinimalFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . + Handler: org.demo.kafka.minimal.KafkaJsonConsumerMinimalFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + + KafkaAvroConsumerMinimalFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . + Handler: org.demo.kafka.minimal.KafkaAvroConsumerMinimalFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + + KafkaProtobufConsumerMinimalFunction: + Type: AWS::Serverless::Function + Properties: + CodeUri: . 
+ Handler: org.demo.kafka.minimal.KafkaProtobufConsumerMinimalFunction::handleRequest + Environment: + Variables: + JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" + +Outputs: + JsonFunction: + Description: "Kafka JSON Lambda Function ARN" + Value: !GetAtt KafkaJsonConsumerMinimalFunction.Arn + AvroFunction: + Description: "Kafka Avro Lambda Function ARN" + Value: !GetAtt KafkaAvroConsumerMinimalFunction.Arn + ProtobufFunction: + Description: "Kafka Protobuf Lambda Function ARN" + Value: !GetAtt KafkaProtobufConsumerMinimalFunction.Arn From 4282a77223a4a9c82b5e36c77cebde1f2ba64d7d Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 28 May 2025 09:44:48 +0200 Subject: [PATCH 14/38] Add missing copyright. --- .../lambda/powertools/kafka/Deserialization.java | 1 - .../powertools/kafka/PowertoolsSerializer.java | 12 ++++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java index 698149ab0..4b96c49db 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/Deserialization.java @@ -10,7 +10,6 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - package software.amazon.lambda.powertools.kafka; import java.lang.annotation.ElementType; diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java index 5a723721b..be8563b8e 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializer.java @@ -1,3 +1,15 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package software.amazon.lambda.powertools.kafka; import java.io.InputStream; From 64f7e18fc44f23535777548876a6e9a70b42d6ef Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 28 May 2025 18:52:42 +0200 Subject: [PATCH 15/38] Add unit tests for kafka utility. 
--- pom.xml | 12 +- powertools-kafka/pom.xml | 108 +++++ .../serializers/KafkaJsonDeserializer.java | 2 +- .../src/test/avro/TestProduct.avsc | 10 + .../powertools/kafka/DeserializationTest.java | 71 +++ .../kafka/DeserializationTypeTest.java | 50 ++ .../kafka/PowertoolsSerializerTest.java | 325 +++++++++++++ .../internal/DeserializationUtilsTest.java | 105 +++++ .../AbstractKafkaDeserializerTest.java | 436 ++++++++++++++++++ .../KafkaAvroDeserializerTest.java | 73 +++ .../KafkaJsonDeserializerTest.java | 66 +++ .../KafkaProtobufDeserializerTest.java | 75 +++ .../kafka/testutils/AvroHandler.java | 30 ++ .../kafka/testutils/DefaultHandler.java | 29 ++ .../kafka/testutils/JsonHandler.java | 29 ++ .../kafka/testutils/ProtobufHandler.java | 30 ++ .../kafka/testutils/TestProductPojo.java | 87 ++++ .../powertools/kafka/testutils/TestUtils.java | 75 +++ .../src/test/proto/TestProduct.proto | 13 + .../test/resources/simplelogger.properties | 13 + 20 files changed, 1636 insertions(+), 3 deletions(-) create mode 100644 powertools-kafka/src/test/avro/TestProduct.avsc create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java create mode 100644 powertools-kafka/src/test/proto/TestProduct.proto create mode 100644 powertools-kafka/src/test/resources/simplelogger.properties diff --git a/pom.xml b/pom.xml index a8d58aab9..d116f5110 100644 --- a/pom.xml +++ b/pom.xml @@ -102,7 +102,9 @@ 1.12.781 2.18.0 1.6.0 - 5.12.0 + 5.18.0 + 5.18.0 + 2.3.0 @@ -292,7 +294,7 @@ org.junit-pioneer junit-pioneer - 1.9.1 + ${junit-pioneer.version} test @@ -342,6 +344,12 @@ ${mockito.version} test + + org.mockito + mockito-junit-jupiter + ${mockito-junit-jupiter.version} + test + com.amazonaws aws-lambda-java-tests diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml index df4e23979..f267f69f3 100644 --- a/powertools-kafka/pom.xml +++ b/powertools-kafka/pom.xml @@ -81,6 
+81,31 @@ junit-jupiter-api test + + org.junit.jupiter + junit-jupiter-engine + test + + + org.junit.jupiter + junit-jupiter-params + test + + + org.mockito + mockito-junit-jupiter + test + + + org.mockito + mockito-core + test + + + org.junit-pioneer + junit-pioneer + test + org.slf4j slf4j-simple @@ -99,6 +124,11 @@ src/main/resources + + + src/test/resources + + dev.aspectj @@ -108,6 +138,84 @@ true + + org.apache.maven.plugins + maven-surefire-plugin + + + + + @{argLine} + --add-opens java.base/java.util=ALL-UNNAMED + --add-opens java.base/java.lang=ALL-UNNAMED + + + + + + org.apache.avro + avro-maven-plugin + ${avro.version} + + + generate-test-sources + generate-test-sources + + schema + + + ${project.basedir}/src/test/avro/ + ${project.basedir}/target/generated-test-sources/avro/ + String + ${project.basedir}/src/test/avro/ + ${project.basedir}/target/generated-test-sources/avro/ + + + + + + + io.github.ascopes + protobuf-maven-plugin + 3.3.0 + + + generate-test-sources + + generate-test + + generate-test-sources + + ${protobuf.version} + + ${project.basedir}/src/test/proto + + ${project.basedir}/target/generated-test-sources/protobuf + + + + + + + org.codehaus.mojo + build-helper-maven-plugin + 3.6.0 + + + add-test-source + generate-test-sources + + add-test-source + + + + ${project.basedir}/target/generated-test-sources/avro + ${project.basedir}/target/generated-test-sources/protobuf + + + + + diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java index d35c50de7..f7b09c75d 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java @@ -22,7 +22,7 @@ public class KafkaJsonDeserializer extends AbstractKafkaDeserializer { @Override protected T deserializeComplex(byte[] data, Class type) throws IOException { String decodedStr = new String(data); - + return objectMapper.readValue(decodedStr, type); } } diff --git a/powertools-kafka/src/test/avro/TestProduct.avsc b/powertools-kafka/src/test/avro/TestProduct.avsc new file mode 100644 index 000000000..aad903d40 --- /dev/null +++ b/powertools-kafka/src/test/avro/TestProduct.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "software.amazon.lambda.powertools.kafka.serializers.test.avro", + "type": "record", + "name": "TestProduct", + "fields": [ + {"name": "id", "type": "int"}, + {"name": "name", "type": "string"}, + {"name": "price", "type": "double"} + ] +} \ No newline at end of file diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java new file mode 100644 index 000000000..964498d99 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTest.java @@ -0,0 +1,71 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka; + +import static org.assertj.core.api.Assertions.assertThat; + +import java.lang.reflect.Method; + +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.junit.jupiter.api.Test; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +class DeserializationTest { + + @Test + void shouldHaveCorrectAnnotationRetention() { + // Given + Class annotationClass = Deserialization.class; + + // When/Then + assertThat(annotationClass.isAnnotation()).isTrue(); + assertThat(annotationClass.getAnnotation(java.lang.annotation.Retention.class).value()) + .isEqualTo(java.lang.annotation.RetentionPolicy.RUNTIME); + assertThat(annotationClass.getAnnotation(java.lang.annotation.Target.class).value()) + .contains(java.lang.annotation.ElementType.METHOD); + } + + @Test + void shouldHaveTypeMethod() throws NoSuchMethodException { + // Given + Class annotationClass = Deserialization.class; + + // When + java.lang.reflect.Method typeMethod = annotationClass.getMethod("type"); + + // Then + assertThat(typeMethod.getReturnType()).isEqualTo(DeserializationType.class); + } + + @Test + void shouldBeAccessibleReflectivelyAtRuntime() throws NoSuchMethodException, SecurityException { + // Given + class TestHandler implements RequestHandler, String> { + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords input, Context context) { + return "OK"; + } + } + + // When + Method handleRequestMethod = TestHandler.class.getMethod("handleRequest", ConsumerRecords.class, Context.class); + + // Then + Deserialization annotation = handleRequestMethod.getAnnotation(Deserialization.class); + assertThat(annotation).isNotNull(); + assertThat(annotation.type()).isEqualTo(DeserializationType.KAFKA_JSON); + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java new file mode 100644 index 000000000..6999b66d4 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/DeserializationTypeTest.java @@ -0,0 +1,50 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; + +// Mainly present to remind us to write unit tests once we add support for a new Deserializer. If we add a new type in +// the enum it will fail this test. +class DeserializationTypeTest { + + @Test + void shouldHaveExpectedEnumValues() { + // Given/When + DeserializationType[] values = DeserializationType.values(); + + // Then + assertThat(values).contains( + DeserializationType.LAMBDA_DEFAULT, + DeserializationType.KAFKA_JSON, + DeserializationType.KAFKA_AVRO, + DeserializationType.KAFKA_PROTOBUF); + } + + @Test + void shouldBeAbleToValueOf() { + // Given/When + DeserializationType jsonType = DeserializationType.valueOf("KAFKA_JSON"); + DeserializationType avroType = DeserializationType.valueOf("KAFKA_AVRO"); + DeserializationType protobufType = DeserializationType.valueOf("KAFKA_PROTOBUF"); + DeserializationType defaultType = DeserializationType.valueOf("LAMBDA_DEFAULT"); + + // Then + assertThat(jsonType).isEqualTo(DeserializationType.KAFKA_JSON); + assertThat(avroType).isEqualTo(DeserializationType.KAFKA_AVRO); + assertThat(protobufType).isEqualTo(DeserializationType.KAFKA_PROTOBUF); + assertThat(defaultType).isEqualTo(DeserializationType.LAMBDA_DEFAULT); + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java new file mode 100644 index 000000000..bdc36b6c7 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java @@ -0,0 +1,325 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka; + +import static org.assertj.core.api.Assertions.assertThat; +import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.createConsumerRecordsType; +import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.serializeAvro; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.lang.reflect.Type; +import java.util.Arrays; +import java.util.Base64; +import java.util.List; +import java.util.stream.Stream; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.TopicPartition; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; +import org.junitpioneer.jupiter.SetEnvironmentVariable; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import software.amazon.lambda.powertools.kafka.serializers.PowertoolsDeserializer; +import software.amazon.lambda.powertools.kafka.testutils.TestProductPojo; + +// This is testing the whole serializer end-to-end. More detailed serializer tests are placed in serializers folder. +@ExtendWith(MockitoExtension.class) +class PowertoolsSerializerTest { + + @Mock + private PowertoolsDeserializer mockDeserializer; + + private static final ObjectMapper objectMapper = new ObjectMapper(); + + // Helper for parameterized tests + static Stream inputTypes() { + return Stream.of(InputType.INPUT_STREAM, InputType.STRING); + } + + @ParameterizedTest + @MethodSource("inputTypes") + @SetEnvironmentVariable(key = "_HANDLER", value = "") + void shouldUseDefaultDeserializerWhenHandlerNotFound(InputType inputType) throws JsonProcessingException { + // When + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // Then + TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2")); + String json = objectMapper.writeValueAsString(product); + + // This will use the Lambda default deserializer (no Kafka logic) + TestProductPojo result; + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream input = new ByteArrayInputStream(json.getBytes()); + result = serializer.fromJson(input, TestProductPojo.class); + } else { + result = serializer.fromJson(json, TestProductPojo.class); + } + + assertThat(result.getId()).isEqualTo(123); + assertThat(result.getName()).isEqualTo("Test Product"); + assertThat(result.getPrice()).isEqualTo(99.99); + assertThat(result.getTags()).containsExactly("tag1", "tag2"); + } + + @ParameterizedTest + @MethodSource("inputTypes") + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.DefaultHandler::handleRequest") + void shouldUseLambdaDefaultDeserializer(InputType inputType) throws JsonProcessingException { + // When + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // Then + TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2")); + String json = objectMapper.writeValueAsString(product); + + // This will use the Lambda default deserializer (no Kafka logic) + TestProductPojo result; + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream input = new 
ByteArrayInputStream(json.getBytes()); + result = serializer.fromJson(input, TestProductPojo.class); + } else { + result = serializer.fromJson(json, TestProductPojo.class); + } + + assertThat(result.getId()).isEqualTo(123); + assertThat(result.getName()).isEqualTo("Test Product"); + assertThat(result.getPrice()).isEqualTo(99.99); + assertThat(result.getTags()).containsExactly("tag1", "tag2"); + } + + @ParameterizedTest + @MethodSource("inputTypes") + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler::handleRequest") + void shouldUseKafkaJsonDeserializer(InputType inputType) throws JsonProcessingException { + // When + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // Create a TestProductPojo and serialize it + TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2")); + String productJson = objectMapper.writeValueAsString(product); + String base64Value = Base64.getEncoder().encodeToString(productJson.getBytes()); + + // Then + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": \"" + base64Value + "\",\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + + Type type = createConsumerRecordsType(String.class, TestProductPojo.class); + + // This should use the KafkaJsonDeserializer + ConsumerRecords records; + + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream input = new ByteArrayInputStream(kafkaJson.getBytes()); + records = serializer.fromJson(input, type); + } else { + records = serializer.fromJson(kafkaJson, type); + } + + // Verify we got a valid ConsumerRecords object + assertThat(records).isNotNull(); + + // Get the record and verify its content + TopicPartition tp = new TopicPartition("test-topic-1", 0); + List> topicRecords = records.records(tp); + assertThat(topicRecords).hasSize(1); + + ConsumerRecord consumerRecord = topicRecords.get(0); + TestProductPojo deserializedProduct = consumerRecord.value(); + + assertThat(deserializedProduct.getId()).isEqualTo(123); + assertThat(deserializedProduct.getName()).isEqualTo("Test Product"); + assertThat(deserializedProduct.getPrice()).isEqualTo(99.99); + assertThat(deserializedProduct.getTags()).containsExactly("tag1", "tag2"); + } + + @ParameterizedTest + @MethodSource("inputTypes") + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.AvroHandler::handleRequest") + void shouldUseKafkaAvroDeserializer(InputType inputType) throws IOException { + // When + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // Create an Avro TestProduct and serialize it + software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct product = new software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct( + 123, "Test Product", 99.99); + String base64Value = Base64.getEncoder().encodeToString(serializeAvro(product)); + + // Then + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": 
\"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": \"" + base64Value + "\",\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + + Type type = createConsumerRecordsType(String.class, + software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct.class); + + // This should use the KafkaAvroDeserializer + ConsumerRecords records; + + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream input = new ByteArrayInputStream(kafkaJson.getBytes()); + records = serializer.fromJson(input, type); + } else { + records = serializer.fromJson(kafkaJson, type); + } + + // Verify we got a valid ConsumerRecords object + assertThat(records).isNotNull(); + + // Get the record and verify its content + TopicPartition tp = new TopicPartition("test-topic-1", 0); + List> topicRecords = records + .records(tp); + assertThat(topicRecords).hasSize(1); + + ConsumerRecord consumerRecord = topicRecords + .get(0); + software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct deserializedProduct = consumerRecord + .value(); + + assertThat(deserializedProduct.getId()).isEqualTo(123); + assertThat(deserializedProduct.getName()).isEqualTo("Test Product"); + assertThat(deserializedProduct.getPrice()).isEqualTo(99.99); + } + + @ParameterizedTest + @MethodSource("inputTypes") + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.ProtobufHandler::handleRequest") + void shouldUseKafkaProtobufDeserializer(InputType inputType) { + // When + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // Create a Protobuf TestProduct and serialize it + software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct product = software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct + .newBuilder() + .setId(123) + .setName("Test Product") + .setPrice(99.99) + .build(); + String base64Value = Base64.getEncoder().encodeToString(product.toByteArray()); + + // Then + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": \"" + base64Value + "\",\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + + Type type = createConsumerRecordsType(String.class, + software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct.class); + + // This should use the KafkaProtobufDeserializer + ConsumerRecords records; + + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream input = new ByteArrayInputStream(kafkaJson.getBytes()); + records = serializer.fromJson(input, type); + } else { + records = serializer.fromJson(kafkaJson, type); + } + + // Verify we got a valid ConsumerRecords object + assertThat(records).isNotNull(); + + // Get the record and verify its content + TopicPartition tp = new TopicPartition("test-topic-1", 0); + List> topicRecords = records + .records(tp); + assertThat(topicRecords).hasSize(1); + + ConsumerRecord consumerRecord = topicRecords + .get(0); + software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct deserializedProduct = consumerRecord + .value(); + + assertThat(deserializedProduct.getId()).isEqualTo(123); + assertThat(deserializedProduct.getName()).isEqualTo("Test Product"); + 
assertThat(deserializedProduct.getPrice()).isEqualTo(99.99); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "") + void shouldDelegateToJsonOutput() { + // Given + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // When + TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2")); + ByteArrayOutputStream output = new ByteArrayOutputStream(); + + // Then + serializer.toJson(product, output, TestProductPojo.class); + String json = output.toString(); + + // Verify the output is valid JSON + assertThat(json).contains("\"id\":123") + .contains("\"name\":\"Test Product\"") + .contains("\"price\":99.99") + .contains("\"tags\":[\"tag1\",\"tag2\"]"); + } + + private enum InputType { + INPUT_STREAM, STRING + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java new file mode 100644 index 000000000..a6f45ad7a --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java @@ -0,0 +1,105 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.internal; + +import static org.assertj.core.api.Assertions.assertThat; + +import org.junit.jupiter.api.Test; +import org.junitpioneer.jupiter.SetEnvironmentVariable; + +import software.amazon.lambda.powertools.kafka.DeserializationType; + +class DeserializationUtilsTest { + + // NOTE: We don't use a parameterized test here because this is not compatible with the @SetEnvironmentVariable + // annotation. 
+ @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "") + void shouldReturnDefaultDeserializationTypeWhenHandlerIsEmpty() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "InvalidHandlerFormat") + void shouldReturnDefaultDeserializationTypeWhenHandlerFormatIsInvalid() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "com.example.NonExistentClass::handleRequest") + void shouldReturnDefaultDeserializationTypeWhenClassNotFound() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "java.lang.String::toString") + void shouldReturnDefaultDeserializationTypeWhenClassIsNotRequestHandler() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.internal.DeserializationUtilsTest$TestHandler::nonExistentMethod") + void shouldReturnDefaultDeserializationTypeWhenMethodNotFound() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler::handleRequest") + void shouldReturnJsonDeserializationTypeFromAnnotation() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.KAFKA_JSON); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.AvroHandler::handleRequest") + void shouldReturnAvroDeserializationTypeFromAnnotation() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.KAFKA_AVRO); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.ProtobufHandler::handleRequest") + void shouldReturnProtobufDeserializationTypeFromAnnotation() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.KAFKA_PROTOBUF); + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java new file mode 100644 index 000000000..d250c69bc --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java @@ -0,0 +1,436 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.serializers; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.lang.reflect.Type; +import java.util.Base64; +import java.util.List; +import java.util.stream.Stream; + +import org.apache.kafka.clients.consumer.ConsumerRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; +import org.apache.kafka.common.TopicPartition; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.Arguments; +import org.junit.jupiter.params.provider.MethodSource; + +import com.fasterxml.jackson.databind.ObjectMapper; + +import software.amazon.lambda.powertools.kafka.testutils.TestProductPojo; +import software.amazon.lambda.powertools.kafka.testutils.TestUtils; + +class AbstractKafkaDeserializerTest { + + private TestDeserializer deserializer; + private static final ObjectMapper objectMapper = new ObjectMapper(); + + @BeforeEach + void setUp() { + deserializer = new TestDeserializer(); + } + + static Stream inputTypes() { + return Stream.of(InputType.INPUT_STREAM, InputType.STRING); + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldThrowExceptionWhenTypeIsNotConsumerRecords(InputType inputType) { + // Given + String json = "{}"; + + // When/Then + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(json.getBytes()); + assertThatThrownBy(() -> deserializer.fromJson(inputStream, String.class)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("Type must be ConsumerRecords"); + } else { + assertThatThrownBy(() -> deserializer.fromJson(json, String.class)) + .isInstanceOf(IllegalArgumentException.class) + .hasMessageContaining("Type must be ConsumerRecords"); + } + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldThrowExceptionWhenJsonIsInvalid(InputType inputType) { + // Given + String invalidJson = "{invalid json"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When/Then + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(invalidJson.getBytes()); + assertThatThrownBy(() -> deserializer.fromJson(inputStream, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Lambda handler input to ConsumerRecords"); + } else { + assertThatThrownBy(() -> deserializer.fromJson(invalidJson, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Lambda handler input to ConsumerRecords"); + } + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldThrowExceptionWhenKeyDeserializationFails(InputType inputType) { + // Given + // Create a Kafka event with invalid Base64 for the key + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": 
\"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"invalid-base64!\",\n" + + " \"value\": \"eyJrZXkiOiJ2YWx1ZSJ9\",\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When/Then + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + assertThatThrownBy(() -> deserializer.fromJson(inputStream, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Kafka record key"); + } else { + assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Kafka record key"); + } + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldThrowExceptionWhenValueDeserializationFails(InputType inputType) { + // Given + // Create a Kafka event with invalid Base64 for the value + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": \"invalid-base64!\",\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When/Then + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + assertThatThrownBy(() -> deserializer.fromJson(inputStream, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Kafka record value"); + } else { + assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Kafka record value"); + } + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldHandleNullKeyAndValue(InputType inputType) { + // Given + // Create a Kafka event with null key and value + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": null,\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When + ConsumerRecords records; + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + records = deserializer.fromJson(inputStream, type); + } else { + records = deserializer.fromJson(kafkaJson, type); + } + + // Then + assertThat(records).isNotNull(); + TopicPartition tp = new TopicPartition("test-topic-1", 0); + List> topicRecords = records.records(tp); + assertThat(topicRecords).hasSize(1); + + ConsumerRecord consumerRecord = topicRecords.get(0); + assertThat(consumerRecord.key()).isNull(); + assertThat(consumerRecord.value()).isNull(); + } + + @ParameterizedTest + @MethodSource("inputTypes") + void 
shouldHandleHeadersCorrectly(InputType inputType) { + // Given + // Create a Kafka event with headers + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": null,\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey1\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101, 49],\n" + + " \"headerKey2\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101, 50]\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When + ConsumerRecords records; + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + records = deserializer.fromJson(inputStream, type); + } else { + records = deserializer.fromJson(kafkaJson, type); + } + + // Then + assertThat(records).isNotNull(); + TopicPartition tp = new TopicPartition("test-topic-1", 0); + List> topicRecords = records.records(tp); + assertThat(topicRecords).hasSize(1); + + ConsumerRecord consumerRecord = topicRecords.get(0); + assertThat(consumerRecord.headers()).isNotNull(); + assertThat(consumerRecord.headers().toArray()).hasSize(2); + assertThat(new String(consumerRecord.headers().lastHeader("headerKey1").value())).isEqualTo("headerValue1"); + assertThat(new String(consumerRecord.headers().lastHeader("headerKey2").value())).isEqualTo("headerValue2"); + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldHandleEmptyRecords(InputType inputType) { + // Given + // Create a Kafka event with no records + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {}\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When + ConsumerRecords records; + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + records = deserializer.fromJson(inputStream, type); + } else { + records = deserializer.fromJson(kafkaJson, type); + } + + // Then + assertThat(records).isNotNull(); + assertThat(records.count()).isZero(); + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldHandleNullRecords(InputType inputType) { + // Given + // Create a Kafka event with null records + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\"\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When + ConsumerRecords records; + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + records = deserializer.fromJson(inputStream, type); + } else { + records = deserializer.fromJson(kafkaJson, type); + } + + // Then + assertThat(records).isNotNull(); + assertThat(records.count()).isZero(); + } + + static Stream primitiveTypesProvider() { + return Stream.of( + // For each primitive type, test with both INPUT_STREAM and STRING + Arguments.of("String-InputStream", String.class, "test-string", "test-string", InputType.INPUT_STREAM), + Arguments.of("String-String", String.class, "test-string", "test-string", InputType.STRING), + Arguments.of("Integer-InputStream", Integer.class, "123", 123, InputType.INPUT_STREAM), + 
Arguments.of("Integer-String", Integer.class, "123", 123, InputType.STRING), + Arguments.of("Long-InputStream", Long.class, "123456789", 123456789L, InputType.INPUT_STREAM), + Arguments.of("Long-String", Long.class, "123456789", 123456789L, InputType.STRING), + Arguments.of("Double-InputStream", Double.class, "123.456", 123.456, InputType.INPUT_STREAM), + Arguments.of("Double-String", Double.class, "123.456", 123.456, InputType.STRING), + Arguments.of("Float-InputStream", Float.class, "123.45", 123.45f, InputType.INPUT_STREAM), + Arguments.of("Float-String", Float.class, "123.45", 123.45f, InputType.STRING), + Arguments.of("Boolean-InputStream", Boolean.class, "true", true, InputType.INPUT_STREAM), + Arguments.of("Boolean-String", Boolean.class, "true", true, InputType.STRING), + Arguments.of("Byte-InputStream", Byte.class, "127", (byte) 127, InputType.INPUT_STREAM), + Arguments.of("Byte-String", Byte.class, "127", (byte) 127, InputType.STRING), + Arguments.of("Short-InputStream", Short.class, "32767", (short) 32767, InputType.INPUT_STREAM), + Arguments.of("Short-String", Short.class, "32767", (short) 32767, InputType.STRING), + Arguments.of("Character-InputStream", Character.class, "A", 'A', InputType.INPUT_STREAM), + Arguments.of("Character-String", Character.class, "A", 'A', InputType.STRING)); + } + + @ParameterizedTest(name = "Should handle {0}") + @MethodSource("primitiveTypesProvider") + void shouldHandlePrimitiveTypes(String testName, Class keyType, String keyValue, T expectedKey, + InputType inputType) throws IOException { + // Given + // Create a TestProductPojo and serialize it to JSON + TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, null); + String productJson = objectMapper.writeValueAsString(product); + String base64Value = Base64.getEncoder().encodeToString(productJson.getBytes()); + String base64Key = Base64.getEncoder().encodeToString(keyValue.getBytes()); + + // Create a Kafka event with primitive type for key + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + base64Key + "\",\n" + + " \"value\": \"" + base64Value + "\",\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(keyType, TestProductPojo.class); + + // When + ConsumerRecords records; + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + records = deserializer.fromJson(inputStream, type); + } else { + records = deserializer.fromJson(kafkaJson, type); + } + + // Then + assertThat(records).isNotNull(); + TopicPartition tp = new TopicPartition("test-topic-1", 0); + List> topicRecords = records.records(tp); + assertThat(topicRecords).hasSize(1); + + ConsumerRecord consumerRecord = topicRecords.get(0); + assertThat(consumerRecord.key()).isEqualTo(expectedKey); + assertThat(consumerRecord.value()).isNotNull(); + assertThat(consumerRecord.value().getId()).isEqualTo(123); + } + + @ParameterizedTest + @MethodSource("inputTypes") + void shouldThrowExceptionWhenConvertingEmptyStringToChar(InputType inputType) { + // Given + String base64EmptyString = Base64.getEncoder().encodeToString("".getBytes()); + String kafkaJson = "{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " 
\"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + base64EmptyString + "\",\n" + + " \"value\": null,\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(Character.class, TestProductPojo.class); + + // When/Then + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + assertThatThrownBy(() -> deserializer.fromJson(inputStream, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Kafka record key") + .hasRootCauseInstanceOf(IllegalArgumentException.class) + .hasRootCauseMessage("Cannot convert empty string to char"); + } else { + assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to deserialize Kafka record key") + .hasRootCauseInstanceOf(IllegalArgumentException.class) + .hasRootCauseMessage("Cannot convert empty string to char"); + } + } + + // Test implementation of AbstractKafkaDeserializer + private static class TestDeserializer extends AbstractKafkaDeserializer { + @Override + protected T deserializeComplex(byte[] data, Class type) throws IOException { + return objectMapper.readValue(data, type); + } + } + + enum InputType { + INPUT_STREAM, STRING + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java new file mode 100644 index 000000000..3abaed7dd --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java @@ -0,0 +1,73 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.serializers; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; +import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.serializeAvro; + +import java.io.IOException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct; + +class KafkaAvroDeserializerTest { + + private KafkaAvroDeserializer deserializer; + + @BeforeEach + void setUp() { + deserializer = new KafkaAvroDeserializer(); + } + + @Test + void shouldThrowExceptionWhenTypeIsNotAvroSpecificRecord() { + // Given + byte[] data = new byte[] { 1, 2, 3 }; + + // When/Then + assertThatThrownBy(() -> deserializer.deserializeComplex(data, String.class)) + .isInstanceOf(IOException.class) + .hasMessageContaining("Unsupported type for Avro deserialization"); + } + + @Test + void shouldDeserializeValidAvroData() throws IOException { + // Given + TestProduct product = new TestProduct(123, "Test Product", 99.99); + byte[] avroData = serializeAvro(product); + + // When + TestProduct result = deserializer.deserializeComplex(avroData, TestProduct.class); + + // Then + assertThat(result).isNotNull(); + assertThat(result.getId()).isEqualTo(123); + assertThat(result.getName()).isEqualTo("Test Product"); + assertThat(result.getPrice()).isEqualTo(99.99); + } + + @Test + void shouldThrowExceptionWhenDeserializingInvalidAvroData() { + // Given + byte[] invalidAvroData = new byte[] { 1, 2, 3, 4, 5 }; + + // When/Then + assertThatThrownBy(() -> deserializer.deserializeComplex(invalidAvroData, TestProduct.class)) + .isInstanceOf(IOException.class) + .hasMessageContaining("Failed to deserialize Avro data"); + } + +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java new file mode 100644 index 000000000..540db4b0a --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java @@ -0,0 +1,66 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.serializers; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.io.IOException; +import java.util.Arrays; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import software.amazon.lambda.powertools.kafka.testutils.TestProductPojo; + +class KafkaJsonDeserializerTest { + + private KafkaJsonDeserializer deserializer; + private static final ObjectMapper objectMapper = new ObjectMapper(); + + @BeforeEach + void setUp() { + deserializer = new KafkaJsonDeserializer(); + } + + @Test + void shouldThrowExceptionWhenTypeIsNotSupportedForJson() { + // Given + byte[] data = new byte[] { 1, 2, 3 }; + + // When/Then + assertThatThrownBy(() -> deserializer.deserializeComplex(data, Object.class)) + .isInstanceOf(JsonParseException.class); + } + + @Test + void shouldDeserializeValidJsonData() throws IOException { + // Given + TestProductPojo product = new TestProductPojo(123, "Test Product", 99.99, Arrays.asList("tag1", "tag2")); + byte[] jsonData = objectMapper.writeValueAsBytes(product); + + // When + TestProductPojo result = deserializer.deserializeComplex(jsonData, TestProductPojo.class); + + // Then + assertThat(result).isNotNull(); + assertThat(result.getId()).isEqualTo(123); + assertThat(result.getName()).isEqualTo("Test Product"); + assertThat(result.getPrice()).isEqualTo(99.99); + assertThat(result.getTags()).containsExactly("tag1", "tag2"); + } + +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java new file mode 100644 index 000000000..db949a3f7 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.serializers; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; + +import java.io.IOException; + +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct; + +class KafkaProtobufDeserializerTest { + + private KafkaProtobufDeserializer deserializer; + + @BeforeEach + void setUp() { + deserializer = new KafkaProtobufDeserializer(); + } + + @Test + void shouldThrowExceptionWhenTypeIsNotProtobufMessage() { + // Given + byte[] data = new byte[] { 1, 2, 3 }; + + // When/Then + assertThatThrownBy(() -> deserializer.deserializeComplex(data, String.class)) + .isInstanceOf(IOException.class) + .hasMessageContaining("Unsupported type for Protobuf deserialization"); + } + + @Test + void shouldDeserializeValidProtobufData() throws IOException { + // Given + TestProduct product = TestProduct.newBuilder() + .setId(123) + .setName("Test Product") + .setPrice(99.99) + .build(); + byte[] protobufData = product.toByteArray(); + + // When + TestProduct result = deserializer.deserializeComplex(protobufData, TestProduct.class); + + // Then + assertThat(result).isNotNull(); + assertThat(result.getId()).isEqualTo(123); + assertThat(result.getName()).isEqualTo("Test Product"); + assertThat(result.getPrice()).isEqualTo(99.99); + } + + @Test + void shouldThrowExceptionWhenDeserializingInvalidProtobufData() { + // Given + byte[] invalidProtobufData = new byte[] { 1, 2, 3, 4, 5 }; + + // When/Then + assertThatThrownBy(() -> deserializer.deserializeComplex(invalidProtobufData, TestProduct.class)) + .isInstanceOf(IOException.class) + .hasMessageContaining("Failed to deserialize Protobuf data"); + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java new file mode 100644 index 000000000..d0fc9c1ba --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/AvroHandler.java @@ -0,0 +1,30 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.testutils; + +import org.apache.kafka.clients.consumer.ConsumerRecords; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; +import software.amazon.lambda.powertools.kafka.serializers.test.avro.TestProduct; + +public class AvroHandler implements RequestHandler, String> { + @Override + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords input, Context context) { + return "OK"; + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java new file mode 100644 index 000000000..31e93d872 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/DefaultHandler.java @@ -0,0 +1,29 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.testutils; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; + +// This is a non-Kafka specific handler. Just a handler using default deserialization into a Pojo. Used for testing +// fallback to default Lambda serialization. +public class DefaultHandler implements RequestHandler { + @Override + @Deserialization(type = DeserializationType.LAMBDA_DEFAULT) + public String handleRequest(TestProductPojo input, Context context) { + return "OK"; + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java new file mode 100644 index 000000000..b6422f73c --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/JsonHandler.java @@ -0,0 +1,29 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.testutils; + +import org.apache.kafka.clients.consumer.ConsumerRecords; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; + +public class JsonHandler implements RequestHandler, String> { + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords input, Context context) { + return "OK"; + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java new file mode 100644 index 000000000..a4ce61765 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/ProtobufHandler.java @@ -0,0 +1,30 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.testutils; + +import org.apache.kafka.clients.consumer.ConsumerRecords; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +import software.amazon.lambda.powertools.kafka.Deserialization; +import software.amazon.lambda.powertools.kafka.DeserializationType; +import software.amazon.lambda.powertools.kafka.serializers.test.protobuf.TestProduct; + +public class ProtobufHandler implements RequestHandler, String> { + @Override + @Deserialization(type = DeserializationType.KAFKA_PROTOBUF) + public String handleRequest(ConsumerRecords input, Context context) { + return "OK"; + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java new file mode 100644 index 000000000..8cd261aef --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestProductPojo.java @@ -0,0 +1,87 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.testutils; + +import java.util.List; +import java.util.Objects; + +/** + * Simple POJO for testing JSON deserialization + */ +public class TestProductPojo { + private int id; + private String name; + private double price; + private List tags; + + // Default constructor required for Jackson + public TestProductPojo() { + } + + public TestProductPojo(int id, String name, double price, List tags) { + this.id = id; + this.name = name; + this.price = price; + this.tags = tags; + } + + public int getId() { + return id; + } + + public void setId(int id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public double getPrice() { + return price; + } + + public void setPrice(double price) { + this.price = price; + } + + public List getTags() { + return tags; + } + + public void setTags(List tags) { + this.tags = tags; + } + + @Override + public boolean equals(Object o) { + if (this == o) + return true; + if (o == null || getClass() != o.getClass()) + return false; + TestProductPojo that = (TestProductPojo) o; + return id == that.id && + Double.compare(that.price, price) == 0 && + Objects.equals(name, that.name) && + Objects.equals(tags, that.tags); + } + + @Override + public int hashCode() { + return Objects.hash(id, name, price, tags); + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java new file mode 100644 index 000000000..b905bdd27 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.testutils; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.lang.reflect.ParameterizedType; +import java.lang.reflect.Type; + +import org.apache.avro.io.BinaryEncoder; +import org.apache.avro.io.DatumWriter; +import org.apache.avro.io.EncoderFactory; +import org.apache.avro.specific.SpecificDatumWriter; +import org.apache.avro.specific.SpecificRecord; +import org.apache.kafka.clients.consumer.ConsumerRecords; + +/** + * Utility class for common test functions + */ +public class TestUtils { + + /** + * Helper method to create a ParameterizedType for ConsumerRecords + * + * @param keyClass The class for the key type + * @param valueClass The class for the value type + * @return A Type representing ConsumerRecords + */ + public static Type createConsumerRecordsType(final Class keyClass, final Class valueClass) { + return new ParameterizedType() { + @Override + public Type[] getActualTypeArguments() { + return new Type[] { keyClass, valueClass }; + } + + @Override + public Type getRawType() { + return ConsumerRecords.class; + } + + @Override + public Type getOwnerType() { + return null; + } + }; + } + + /** + * Helper method to serialize an Avro object + * + * @param The type of the Avro record + * @param record The Avro record to serialize + * @return The serialized bytes + * @throws IOException If serialization fails + */ + public static byte[] serializeAvro(T record) throws IOException { + ByteArrayOutputStream baos = new ByteArrayOutputStream(); + BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null); + @SuppressWarnings("unchecked") + DatumWriter writer = new SpecificDatumWriter<>((Class) record.getClass()); + writer.write(record, encoder); + encoder.flush(); + return baos.toByteArray(); + } +} \ No newline at end of file diff --git a/powertools-kafka/src/test/proto/TestProduct.proto b/powertools-kafka/src/test/proto/TestProduct.proto new file mode 100644 index 000000000..53c654494 --- /dev/null +++ b/powertools-kafka/src/test/proto/TestProduct.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package software.amazon.lambda.powertools.kafka.serializers.test.protobuf; + +option java_package = "software.amazon.lambda.powertools.kafka.serializers.test.protobuf"; +option java_outer_classname = "TestProductOuterClass"; +option java_multiple_files = true; + +message TestProduct { + int32 id = 1; + string name = 2; + double price = 3; +} \ No newline at end of file diff --git a/powertools-kafka/src/test/resources/simplelogger.properties b/powertools-kafka/src/test/resources/simplelogger.properties new file mode 100644 index 000000000..167581f74 --- /dev/null +++ b/powertools-kafka/src/test/resources/simplelogger.properties @@ -0,0 +1,13 @@ +# SLF4J Simple Logger configuration for tests +org.slf4j.simpleLogger.defaultLogLevel=debug +org.slf4j.simpleLogger.showDateTime=true +org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS +org.slf4j.simpleLogger.showThreadName=true +org.slf4j.simpleLogger.showLogName=true +org.slf4j.simpleLogger.showShortLogName=false + +# Redirect logs to a file instead of console to avoid bloated console output during tests +org.slf4j.simpleLogger.logFile=target/test.log + +# Set specific logger levels +org.slf4j.simpleLogger.log.software.amazon.lambda.powertools=debug From 1191c56444b88023343569683f5ed21b465938b5 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Thu, 29 May 2025 10:37:33 +0200 Subject: [PATCH 16/38] Add minimal kafka example to examples module in 
pom.xml. --- examples/pom.xml | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/pom.xml b/examples/pom.xml index 1065fb99f..dafc94e94 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -40,6 +40,7 @@ powertools-examples-parameters/sam-graalvm powertools-examples-serialization powertools-examples-kafka + powertools-examples-kafka-minimal powertools-examples-batch powertools-examples-validation powertools-examples-cloudformation From db9b98985f1020f1105734df637e0148d0b9b77a Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Thu, 29 May 2025 10:48:19 +0200 Subject: [PATCH 17/38] Add some comments. --- .../kafka/serializers/LambdaDefaultDeserializer.java | 6 ++++++ .../powertools/kafka/PowertoolsSerializerTest.java | 2 +- .../serializers/AbstractKafkaDeserializerTest.java | 1 + .../lambda/powertools/kafka/testutils/TestUtils.java | 10 +++++----- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java index a611759cf..bfc51e372 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java @@ -17,6 +17,12 @@ import com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory; +/** + * Default deserializer for Kafka events proxying to Lambda default behavior. + * + * This deserializer uses the default Jackson ObjectMapper to deserialize the event from + * {@link com.amazonaws.services.lambda.runtime.serialization}. + */ public class LambdaDefaultDeserializer implements PowertoolsDeserializer { @SuppressWarnings("unchecked") diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java index bdc36b6c7..4ad0d46ef 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java @@ -51,7 +51,7 @@ class PowertoolsSerializerTest { private static final ObjectMapper objectMapper = new ObjectMapper(); - // Helper for parameterized tests + // CustomPojoSerializer has fromJson(String input, ...) and fromJson(InputStream input, ...). We want to test both. static Stream inputTypes() { return Stream.of(InputType.INPUT_STREAM, InputType.STRING); } diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java index d250c69bc..9692c6b83 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java @@ -45,6 +45,7 @@ void setUp() { deserializer = new TestDeserializer(); } + // CustomPojoSerializer has fromJson(String input, ...) and fromJson(InputStream input, ...). We want to test both. 
static Stream inputTypes() { return Stream.of(InputType.INPUT_STREAM, InputType.STRING); } diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java index b905bdd27..33623a9b2 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/TestUtils.java @@ -59,17 +59,17 @@ public Type getOwnerType() { * Helper method to serialize an Avro object * * @param The type of the Avro record - * @param record The Avro record to serialize + * @param consumerRecord The Avro record to serialize * @return The serialized bytes * @throws IOException If serialization fails */ - public static byte[] serializeAvro(T record) throws IOException { + public static byte[] serializeAvro(T consumerRecord) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); BinaryEncoder encoder = EncoderFactory.get().binaryEncoder(baos, null); @SuppressWarnings("unchecked") - DatumWriter writer = new SpecificDatumWriter<>((Class) record.getClass()); - writer.write(record, encoder); + DatumWriter writer = new SpecificDatumWriter<>((Class) consumerRecord.getClass()); + writer.write(consumerRecord, encoder); encoder.flush(); return baos.toByteArray(); } -} \ No newline at end of file +} From b64dcbdc048ed74f2f731e66fb44c9298f930aec Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Fri, 6 Jun 2025 10:38:26 +0200 Subject: [PATCH 18/38] Update powertools-examples-kafka with README and make it more minimalistic. Remove powertools-examples-kafka-minimal. --- examples/pom.xml | 1 - .../events/kafka-avro-event.json | 51 -- .../events/kafka-json-event.json | 50 -- .../events/kafka-protobuf-event.json | 51 -- .../powertools-examples-kafka-minimal/pom.xml | 199 ------ .../src/main/avro/AvroProduct.avsc | 10 - .../java/org/demo/kafka/avro/AvroProduct.java | 476 ------------- .../KafkaJsonConsumerMinimalFunction.java | 28 - .../demo/kafka/protobuf/ProtobufProduct.java | 636 ------------------ .../protobuf/ProtobufProductOrBuilder.java | 36 - .../protobuf/ProtobufProductOuterClass.java | 63 -- .../src/main/proto/ProtobufProduct.proto | 13 - .../main/resources/simplelogger.properties | 6 - .../template.yaml | 50 -- examples/powertools-examples-kafka/README.md | 77 +++ .../events/kafka-json-event.json | 9 +- examples/powertools-examples-kafka/pom.xml | 50 +- .../kafka/AvroDeserializationFunction.java} | 18 +- ....java => JsonDeserializationFunction.java} | 22 +- ...kaAvroConsumerDeserializationFunction.java | 57 -- ...otobufConsumerDeserializationFunction.java | 57 -- .../ProtobufDeserializationFunction.java} | 17 +- .../powertools-examples-kafka/template.yaml | 24 +- .../powertools-examples-kafka/tools/README.md | 11 + .../powertools-examples-kafka/tools/pom.xml | 11 + .../demo/kafka/tools/GenerateJsonSamples.java | 126 ++++ 26 files changed, 294 insertions(+), 1855 deletions(-) delete mode 100644 examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json delete mode 100644 examples/powertools-examples-kafka-minimal/events/kafka-json-event.json delete mode 100644 examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json delete mode 100644 examples/powertools-examples-kafka-minimal/pom.xml delete mode 100644 examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc delete mode 100644 
examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java delete mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java delete mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java delete mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java delete mode 100644 examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java delete mode 100644 examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto delete mode 100644 examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties delete mode 100644 examples/powertools-examples-kafka-minimal/template.yaml create mode 100644 examples/powertools-examples-kafka/README.md rename examples/{powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java => powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java} (61%) rename examples/powertools-examples-kafka/src/main/java/org/demo/kafka/{KafkaJsonConsumerDeserializationFunction.java => JsonDeserializationFunction.java} (54%) delete mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java delete mode 100644 examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java rename examples/{powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java => powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java} (67%) create mode 100644 examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java diff --git a/examples/pom.xml b/examples/pom.xml index dafc94e94..1065fb99f 100644 --- a/examples/pom.xml +++ b/examples/pom.xml @@ -40,7 +40,6 @@ powertools-examples-parameters/sam-graalvm powertools-examples-serialization powertools-examples-kafka - powertools-examples-kafka-minimal powertools-examples-batch powertools-examples-validation powertools-examples-cloudformation diff --git a/examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json b/examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json deleted file mode 100644 index 8d6ef2210..000000000 --- a/examples/powertools-examples-kafka-minimal/events/kafka-avro-event.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "eventSource": "aws:kafka", - "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", - "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "records": { - "mytopic-0": [ - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "0g8MTGFwdG9wUrgehes/j0A=", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 16, - "timestamp": 1545084650988, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - 
}, - { - "topic": "mytopic", - "partition": 0, - "offset": 17, - "timestamp": 1545084650989, - "timestampType": "CREATE_TIME", - "key": null, - "value": "1g8USGVhZHBob25lc0jhehSuv2JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - } - ] - } -} diff --git a/examples/powertools-examples-kafka-minimal/events/kafka-json-event.json b/examples/powertools-examples-kafka-minimal/events/kafka-json-event.json deleted file mode 100644 index d85c40654..000000000 --- a/examples/powertools-examples-kafka-minimal/events/kafka-json-event.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "eventSource": "aws:kafka", - "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", - "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "records": { - "mytopic-0": [ - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": "cmVjb3JkS2V5", - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": null, - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - } - ] - } -} diff --git a/examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json b/examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json deleted file mode 100644 index b3e0139e3..000000000 --- a/examples/powertools-examples-kafka-minimal/events/kafka-protobuf-event.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "eventSource": "aws:kafka", - "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", - "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "records": { - "mytopic-0": [ - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 16, - "timestamp": 1545084650988, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 17, - "timestamp": 1545084650989, - "timestampType": "CREATE_TIME", - "key": null, - "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - } - ] - } -} diff --git a/examples/powertools-examples-kafka-minimal/pom.xml 
b/examples/powertools-examples-kafka-minimal/pom.xml deleted file mode 100644 index 1a6ce1ebb..000000000 --- a/examples/powertools-examples-kafka-minimal/pom.xml +++ /dev/null @@ -1,199 +0,0 @@ - - - 4.0.0 - software.amazon.lambda.examples - 2.0.0-SNAPSHOT - powertools-examples-kafka-minimal - jar - Powertools for AWS Lambda (Java) - Examples - Kafka Minimal - - - 11 - 11 - 1.12.0 - 4.31.0 - 2.0.9 - - - - - - software.amazon.lambda - powertools-kafka - ${project.version} - - - com.amazonaws - aws-lambda-java-core - 1.2.3 - - - com.amazonaws - aws-lambda-java-events - 3.15.0 - - - - org.slf4j - slf4j-api - ${slf4j.version} - - - org.slf4j - slf4j-simple - ${slf4j.version} - - - - org.apache.avro - avro - ${avro.version} - - - com.google.protobuf - protobuf-java - ${protobuf.version} - - - - - - - - org.apache.maven.plugins - maven-deploy-plugin - 3.1.2 - - true - - - - - org.apache.maven.plugins - maven-shade-plugin - 3.6.0 - - - package - - shade - - - false - - - - - - - org.apache.avro - avro-maven-plugin - ${avro.version} - - - generate-sources - - schema - - - ${project.basedir}/src/main/avro/ - ${project.basedir}/src/main/java/ - String - - - - - - - io.github.ascopes - protobuf-maven-plugin - 3.3.0 - - - - generate - - generate-sources - - ${protobuf.version} - - ${project.basedir}/src/main/proto - - ${project.basedir}/src/main/java - false - - - - - - - - - - - base - - base - - - - - org.apache.avro - avro - ${avro.version} - provided - - - com.google.protobuf - protobuf-java - ${protobuf.version} - provided - - - - - - - avro-only - - avro-only - - - - com.google.protobuf - protobuf-java - ${protobuf.version} - provided - - - - - - - protobuf-only - - protobuf-only - - - - org.apache.avro - avro - ${avro.version} - provided - - - - - - - full - - true - - - full - - - - diff --git a/examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc b/examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc deleted file mode 100644 index 7155857ea..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/avro/AvroProduct.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "namespace": "org.demo.kafka.avro", - "type": "record", - "name": "AvroProduct", - "fields": [ - {"name": "id", "type": "int"}, - {"name": "name", "type": "string"}, - {"name": "price", "type": "double"} - ] -} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java deleted file mode 100644 index fad7e2fbf..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/avro/AvroProduct.java +++ /dev/null @@ -1,476 +0,0 @@ -/** - * Autogenerated by Avro - * - * DO NOT EDIT DIRECTLY - */ -package org.demo.kafka.avro; - -import org.apache.avro.specific.SpecificData; -import org.apache.avro.util.Utf8; -import org.apache.avro.message.BinaryMessageEncoder; -import org.apache.avro.message.BinaryMessageDecoder; -import org.apache.avro.message.SchemaStore; - -@org.apache.avro.specific.AvroGenerated -public class AvroProduct extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord { - private static final long serialVersionUID = -2929699301240218341L; - - - public static final org.apache.avro.Schema SCHEMA$ = new 
org.apache.avro.Schema.Parser().parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"org.demo.kafka.avro\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":{\"type\":\"string\",\"avro.java.string\":\"String\"}},{\"name\":\"price\",\"type\":\"double\"}]}"); - public static org.apache.avro.Schema getClassSchema() { return SCHEMA$; } - - private static final SpecificData MODEL$ = new SpecificData(); - - private static final BinaryMessageEncoder ENCODER = - new BinaryMessageEncoder<>(MODEL$, SCHEMA$); - - private static final BinaryMessageDecoder DECODER = - new BinaryMessageDecoder<>(MODEL$, SCHEMA$); - - /** - * Return the BinaryMessageEncoder instance used by this class. - * @return the message encoder used by this class - */ - public static BinaryMessageEncoder getEncoder() { - return ENCODER; - } - - /** - * Return the BinaryMessageDecoder instance used by this class. - * @return the message decoder used by this class - */ - public static BinaryMessageDecoder getDecoder() { - return DECODER; - } - - /** - * Create a new BinaryMessageDecoder instance for this class that uses the specified {@link SchemaStore}. - * @param resolver a {@link SchemaStore} used to find schemas by fingerprint - * @return a BinaryMessageDecoder instance for this class backed by the given SchemaStore - */ - public static BinaryMessageDecoder createDecoder(SchemaStore resolver) { - return new BinaryMessageDecoder<>(MODEL$, SCHEMA$, resolver); - } - - /** - * Serializes this AvroProduct to a ByteBuffer. - * @return a buffer holding the serialized data for this instance - * @throws java.io.IOException if this instance could not be serialized - */ - public java.nio.ByteBuffer toByteBuffer() throws java.io.IOException { - return ENCODER.encode(this); - } - - /** - * Deserializes a AvroProduct from a ByteBuffer. - * @param b a byte buffer holding serialized data for an instance of this class - * @return a AvroProduct instance decoded from the given buffer - * @throws java.io.IOException if the given bytes could not be deserialized into an instance of this class - */ - public static AvroProduct fromByteBuffer( - java.nio.ByteBuffer b) throws java.io.IOException { - return DECODER.decode(b); - } - - private int id; - private java.lang.String name; - private double price; - - /** - * Default constructor. Note that this does not initialize fields - * to their default values from the schema. If that is desired then - * one should use newBuilder(). - */ - public AvroProduct() {} - - /** - * All-args constructor. - * @param id The new value for id - * @param name The new value for name - * @param price The new value for price - */ - public AvroProduct(java.lang.Integer id, java.lang.String name, java.lang.Double price) { - this.id = id; - this.name = name; - this.price = price; - } - - @Override - public org.apache.avro.specific.SpecificData getSpecificData() { return MODEL$; } - - @Override - public org.apache.avro.Schema getSchema() { return SCHEMA$; } - - // Used by DatumWriter. Applications should not call. - @Override - public java.lang.Object get(int field$) { - switch (field$) { - case 0: return id; - case 1: return name; - case 2: return price; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); - } - } - - // Used by DatumReader. Applications should not call. 
- @Override - @SuppressWarnings(value="unchecked") - public void put(int field$, java.lang.Object value$) { - switch (field$) { - case 0: id = (java.lang.Integer)value$; break; - case 1: name = value$ != null ? value$.toString() : null; break; - case 2: price = (java.lang.Double)value$; break; - default: throw new IndexOutOfBoundsException("Invalid index: " + field$); - } - } - - /** - * Gets the value of the 'id' field. - * @return The value of the 'id' field. - */ - public int getId() { - return id; - } - - - /** - * Sets the value of the 'id' field. - * @param value the value to set. - */ - public void setId(int value) { - this.id = value; - } - - /** - * Gets the value of the 'name' field. - * @return The value of the 'name' field. - */ - public java.lang.String getName() { - return name; - } - - - /** - * Sets the value of the 'name' field. - * @param value the value to set. - */ - public void setName(java.lang.String value) { - this.name = value; - } - - /** - * Gets the value of the 'price' field. - * @return The value of the 'price' field. - */ - public double getPrice() { - return price; - } - - - /** - * Sets the value of the 'price' field. - * @param value the value to set. - */ - public void setPrice(double value) { - this.price = value; - } - - /** - * Creates a new AvroProduct RecordBuilder. - * @return A new AvroProduct RecordBuilder - */ - public static org.demo.kafka.avro.AvroProduct.Builder newBuilder() { - return new org.demo.kafka.avro.AvroProduct.Builder(); - } - - /** - * Creates a new AvroProduct RecordBuilder by copying an existing Builder. - * @param other The existing builder to copy. - * @return A new AvroProduct RecordBuilder - */ - public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct.Builder other) { - if (other == null) { - return new org.demo.kafka.avro.AvroProduct.Builder(); - } else { - return new org.demo.kafka.avro.AvroProduct.Builder(other); - } - } - - /** - * Creates a new AvroProduct RecordBuilder by copying an existing AvroProduct instance. - * @param other The existing instance to copy. - * @return A new AvroProduct RecordBuilder - */ - public static org.demo.kafka.avro.AvroProduct.Builder newBuilder(org.demo.kafka.avro.AvroProduct other) { - if (other == null) { - return new org.demo.kafka.avro.AvroProduct.Builder(); - } else { - return new org.demo.kafka.avro.AvroProduct.Builder(other); - } - } - - /** - * RecordBuilder for AvroProduct instances. - */ - @org.apache.avro.specific.AvroGenerated - public static class Builder extends org.apache.avro.specific.SpecificRecordBuilderBase - implements org.apache.avro.data.RecordBuilder { - - private int id; - private java.lang.String name; - private double price; - - /** Creates a new Builder */ - private Builder() { - super(SCHEMA$, MODEL$); - } - - /** - * Creates a Builder by copying an existing Builder. - * @param other The existing Builder to copy. 
- */ - private Builder(org.demo.kafka.avro.AvroProduct.Builder other) { - super(other); - if (isValidValue(fields()[0], other.id)) { - this.id = data().deepCopy(fields()[0].schema(), other.id); - fieldSetFlags()[0] = other.fieldSetFlags()[0]; - } - if (isValidValue(fields()[1], other.name)) { - this.name = data().deepCopy(fields()[1].schema(), other.name); - fieldSetFlags()[1] = other.fieldSetFlags()[1]; - } - if (isValidValue(fields()[2], other.price)) { - this.price = data().deepCopy(fields()[2].schema(), other.price); - fieldSetFlags()[2] = other.fieldSetFlags()[2]; - } - } - - /** - * Creates a Builder by copying an existing AvroProduct instance - * @param other The existing instance to copy. - */ - private Builder(org.demo.kafka.avro.AvroProduct other) { - super(SCHEMA$, MODEL$); - if (isValidValue(fields()[0], other.id)) { - this.id = data().deepCopy(fields()[0].schema(), other.id); - fieldSetFlags()[0] = true; - } - if (isValidValue(fields()[1], other.name)) { - this.name = data().deepCopy(fields()[1].schema(), other.name); - fieldSetFlags()[1] = true; - } - if (isValidValue(fields()[2], other.price)) { - this.price = data().deepCopy(fields()[2].schema(), other.price); - fieldSetFlags()[2] = true; - } - } - - /** - * Gets the value of the 'id' field. - * @return The value. - */ - public int getId() { - return id; - } - - - /** - * Sets the value of the 'id' field. - * @param value The value of 'id'. - * @return This builder. - */ - public org.demo.kafka.avro.AvroProduct.Builder setId(int value) { - validate(fields()[0], value); - this.id = value; - fieldSetFlags()[0] = true; - return this; - } - - /** - * Checks whether the 'id' field has been set. - * @return True if the 'id' field has been set, false otherwise. - */ - public boolean hasId() { - return fieldSetFlags()[0]; - } - - - /** - * Clears the value of the 'id' field. - * @return This builder. - */ - public org.demo.kafka.avro.AvroProduct.Builder clearId() { - fieldSetFlags()[0] = false; - return this; - } - - /** - * Gets the value of the 'name' field. - * @return The value. - */ - public java.lang.String getName() { - return name; - } - - - /** - * Sets the value of the 'name' field. - * @param value The value of 'name'. - * @return This builder. - */ - public org.demo.kafka.avro.AvroProduct.Builder setName(java.lang.String value) { - validate(fields()[1], value); - this.name = value; - fieldSetFlags()[1] = true; - return this; - } - - /** - * Checks whether the 'name' field has been set. - * @return True if the 'name' field has been set, false otherwise. - */ - public boolean hasName() { - return fieldSetFlags()[1]; - } - - - /** - * Clears the value of the 'name' field. - * @return This builder. - */ - public org.demo.kafka.avro.AvroProduct.Builder clearName() { - name = null; - fieldSetFlags()[1] = false; - return this; - } - - /** - * Gets the value of the 'price' field. - * @return The value. - */ - public double getPrice() { - return price; - } - - - /** - * Sets the value of the 'price' field. - * @param value The value of 'price'. - * @return This builder. - */ - public org.demo.kafka.avro.AvroProduct.Builder setPrice(double value) { - validate(fields()[2], value); - this.price = value; - fieldSetFlags()[2] = true; - return this; - } - - /** - * Checks whether the 'price' field has been set. - * @return True if the 'price' field has been set, false otherwise. - */ - public boolean hasPrice() { - return fieldSetFlags()[2]; - } - - - /** - * Clears the value of the 'price' field. - * @return This builder. 
- */ - public org.demo.kafka.avro.AvroProduct.Builder clearPrice() { - fieldSetFlags()[2] = false; - return this; - } - - @Override - @SuppressWarnings("unchecked") - public AvroProduct build() { - try { - AvroProduct record = new AvroProduct(); - record.id = fieldSetFlags()[0] ? this.id : (java.lang.Integer) defaultValue(fields()[0]); - record.name = fieldSetFlags()[1] ? this.name : (java.lang.String) defaultValue(fields()[1]); - record.price = fieldSetFlags()[2] ? this.price : (java.lang.Double) defaultValue(fields()[2]); - return record; - } catch (org.apache.avro.AvroMissingFieldException e) { - throw e; - } catch (java.lang.Exception e) { - throw new org.apache.avro.AvroRuntimeException(e); - } - } - } - - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumWriter - WRITER$ = (org.apache.avro.io.DatumWriter)MODEL$.createDatumWriter(SCHEMA$); - - @Override public void writeExternal(java.io.ObjectOutput out) - throws java.io.IOException { - WRITER$.write(this, SpecificData.getEncoder(out)); - } - - @SuppressWarnings("unchecked") - private static final org.apache.avro.io.DatumReader - READER$ = (org.apache.avro.io.DatumReader)MODEL$.createDatumReader(SCHEMA$); - - @Override public void readExternal(java.io.ObjectInput in) - throws java.io.IOException { - READER$.read(this, SpecificData.getDecoder(in)); - } - - @Override protected boolean hasCustomCoders() { return true; } - - @Override public void customEncode(org.apache.avro.io.Encoder out) - throws java.io.IOException - { - out.writeInt(this.id); - - out.writeString(this.name); - - out.writeDouble(this.price); - - } - - @Override public void customDecode(org.apache.avro.io.ResolvingDecoder in) - throws java.io.IOException - { - org.apache.avro.Schema.Field[] fieldOrder = in.readFieldOrderIfDiff(); - if (fieldOrder == null) { - this.id = in.readInt(); - - this.name = in.readString(); - - this.price = in.readDouble(); - - } else { - for (int i = 0; i < 3; i++) { - switch (fieldOrder[i].pos()) { - case 0: - this.id = in.readInt(); - break; - - case 1: - this.name = in.readString(); - break; - - case 2: - this.price = in.readDouble(); - break; - - default: - throw new java.io.IOException("Corrupt ResolvingDecoder."); - } - } - } - } -} - - - - - - - - - - diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java deleted file mode 100644 index cfd3ab81e..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaJsonConsumerMinimalFunction.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.demo.kafka.minimal; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; - -import software.amazon.lambda.powertools.kafka.Deserialization; -import software.amazon.lambda.powertools.kafka.DeserializationType; - -public class KafkaJsonConsumerMinimalFunction - implements RequestHandler, String> { - - private static final Logger LOGGER = LoggerFactory.getLogger(KafkaJsonConsumerMinimalFunction.class); - - @Override - @Deserialization(type = DeserializationType.KAFKA_JSON) - public String handleRequest(ConsumerRecords records, Context context) { - for 
(ConsumerRecord consumerRecord : records) { - LOGGER.info("Received record: {}", consumerRecord); - } - - return "OK"; - } -} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java deleted file mode 100644 index 6da9113fc..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProduct.java +++ /dev/null @@ -1,636 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// NO CHECKED-IN PROTOBUF GENCODE -// source: ProtobufProduct.proto -// Protobuf Java Version: 4.31.0 - -package org.demo.kafka.protobuf; - -/** - * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} - */ -@com.google.protobuf.Generated -public final class ProtobufProduct extends - com.google.protobuf.GeneratedMessage implements - // @@protoc_insertion_point(message_implements:org.demo.kafka.protobuf.ProtobufProduct) - ProtobufProductOrBuilder { -private static final long serialVersionUID = 0L; - static { - com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( - com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, - /* major= */ 4, - /* minor= */ 31, - /* patch= */ 0, - /* suffix= */ "", - ProtobufProduct.class.getName()); - } - // Use ProtobufProduct.newBuilder() to construct. - private ProtobufProduct(com.google.protobuf.GeneratedMessage.Builder builder) { - super(builder); - } - private ProtobufProduct() { - name_ = ""; - } - - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); - } - - public static final int ID_FIELD_NUMBER = 1; - private int id_ = 0; - /** - * int32 id = 1; - * @return The id. - */ - @java.lang.Override - public int getId() { - return id_; - } - - public static final int NAME_FIELD_NUMBER = 2; - @SuppressWarnings("serial") - private volatile java.lang.Object name_ = ""; - /** - * string name = 2; - * @return The name. - */ - @java.lang.Override - public java.lang.String getName() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - return (java.lang.String) ref; - } else { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } - } - /** - * string name = 2; - * @return The bytes for name. - */ - @java.lang.Override - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof java.lang.String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - - public static final int PRICE_FIELD_NUMBER = 3; - private double price_ = 0D; - /** - * double price = 3; - * @return The price. 
- */ - @java.lang.Override - public double getPrice() { - return price_; - } - - private byte memoizedIsInitialized = -1; - @java.lang.Override - public final boolean isInitialized() { - byte isInitialized = memoizedIsInitialized; - if (isInitialized == 1) return true; - if (isInitialized == 0) return false; - - memoizedIsInitialized = 1; - return true; - } - - @java.lang.Override - public void writeTo(com.google.protobuf.CodedOutputStream output) - throws java.io.IOException { - if (id_ != 0) { - output.writeInt32(1, id_); - } - if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { - com.google.protobuf.GeneratedMessage.writeString(output, 2, name_); - } - if (java.lang.Double.doubleToRawLongBits(price_) != 0) { - output.writeDouble(3, price_); - } - getUnknownFields().writeTo(output); - } - - @java.lang.Override - public int getSerializedSize() { - int size = memoizedSize; - if (size != -1) return size; - - size = 0; - if (id_ != 0) { - size += com.google.protobuf.CodedOutputStream - .computeInt32Size(1, id_); - } - if (!com.google.protobuf.GeneratedMessage.isStringEmpty(name_)) { - size += com.google.protobuf.GeneratedMessage.computeStringSize(2, name_); - } - if (java.lang.Double.doubleToRawLongBits(price_) != 0) { - size += com.google.protobuf.CodedOutputStream - .computeDoubleSize(3, price_); - } - size += getUnknownFields().getSerializedSize(); - memoizedSize = size; - return size; - } - - @java.lang.Override - public boolean equals(final java.lang.Object obj) { - if (obj == this) { - return true; - } - if (!(obj instanceof org.demo.kafka.protobuf.ProtobufProduct)) { - return super.equals(obj); - } - org.demo.kafka.protobuf.ProtobufProduct other = (org.demo.kafka.protobuf.ProtobufProduct) obj; - - if (getId() - != other.getId()) return false; - if (!getName() - .equals(other.getName())) return false; - if (java.lang.Double.doubleToLongBits(getPrice()) - != java.lang.Double.doubleToLongBits( - other.getPrice())) return false; - if (!getUnknownFields().equals(other.getUnknownFields())) return false; - return true; - } - - @java.lang.Override - public int hashCode() { - if (memoizedHashCode != 0) { - return memoizedHashCode; - } - int hash = 41; - hash = (19 * hash) + getDescriptor().hashCode(); - hash = (37 * hash) + ID_FIELD_NUMBER; - hash = (53 * hash) + getId(); - hash = (37 * hash) + NAME_FIELD_NUMBER; - hash = (53 * hash) + getName().hashCode(); - hash = (37 * hash) + PRICE_FIELD_NUMBER; - hash = (53 * hash) + com.google.protobuf.Internal.hashLong( - java.lang.Double.doubleToLongBits(getPrice())); - hash = (29 * hash) + getUnknownFields().hashCode(); - memoizedHashCode = hash; - return hash; - } - - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - java.nio.ByteBuffer data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - java.nio.ByteBuffer data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - com.google.protobuf.ByteString data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - com.google.protobuf.ByteString data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws 
com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(byte[] data) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - byte[] data, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - return PARSER.parseFrom(data, extensionRegistry); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessage - .parseWithIOException(PARSER, input); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessage - .parseWithIOException(PARSER, input, extensionRegistry); - } - - public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom(java.io.InputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessage - .parseDelimitedWithIOException(PARSER, input); - } - - public static org.demo.kafka.protobuf.ProtobufProduct parseDelimitedFrom( - java.io.InputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessage - .parseDelimitedWithIOException(PARSER, input, extensionRegistry); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - com.google.protobuf.CodedInputStream input) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessage - .parseWithIOException(PARSER, input); - } - public static org.demo.kafka.protobuf.ProtobufProduct parseFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - return com.google.protobuf.GeneratedMessage - .parseWithIOException(PARSER, input, extensionRegistry); - } - - @java.lang.Override - public Builder newBuilderForType() { return newBuilder(); } - public static Builder newBuilder() { - return DEFAULT_INSTANCE.toBuilder(); - } - public static Builder newBuilder(org.demo.kafka.protobuf.ProtobufProduct prototype) { - return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); - } - @java.lang.Override - public Builder toBuilder() { - return this == DEFAULT_INSTANCE - ? 
new Builder() : new Builder().mergeFrom(this); - } - - @java.lang.Override - protected Builder newBuilderForType( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - Builder builder = new Builder(parent); - return builder; - } - /** - * Protobuf type {@code org.demo.kafka.protobuf.ProtobufProduct} - */ - public static final class Builder extends - com.google.protobuf.GeneratedMessage.Builder implements - // @@protoc_insertion_point(builder_implements:org.demo.kafka.protobuf.ProtobufProduct) - org.demo.kafka.protobuf.ProtobufProductOrBuilder { - public static final com.google.protobuf.Descriptors.Descriptor - getDescriptor() { - return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; - } - - @java.lang.Override - protected com.google.protobuf.GeneratedMessage.FieldAccessorTable - internalGetFieldAccessorTable() { - return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable - .ensureFieldAccessorsInitialized( - org.demo.kafka.protobuf.ProtobufProduct.class, org.demo.kafka.protobuf.ProtobufProduct.Builder.class); - } - - // Construct using org.demo.kafka.protobuf.ProtobufProduct.newBuilder() - private Builder() { - - } - - private Builder( - com.google.protobuf.GeneratedMessage.BuilderParent parent) { - super(parent); - - } - @java.lang.Override - public Builder clear() { - super.clear(); - bitField0_ = 0; - id_ = 0; - name_ = ""; - price_ = 0D; - return this; - } - - @java.lang.Override - public com.google.protobuf.Descriptors.Descriptor - getDescriptorForType() { - return org.demo.kafka.protobuf.ProtobufProductOuterClass.internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; - } - - @java.lang.Override - public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { - return org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance(); - } - - @java.lang.Override - public org.demo.kafka.protobuf.ProtobufProduct build() { - org.demo.kafka.protobuf.ProtobufProduct result = buildPartial(); - if (!result.isInitialized()) { - throw newUninitializedMessageException(result); - } - return result; - } - - @java.lang.Override - public org.demo.kafka.protobuf.ProtobufProduct buildPartial() { - org.demo.kafka.protobuf.ProtobufProduct result = new org.demo.kafka.protobuf.ProtobufProduct(this); - if (bitField0_ != 0) { buildPartial0(result); } - onBuilt(); - return result; - } - - private void buildPartial0(org.demo.kafka.protobuf.ProtobufProduct result) { - int from_bitField0_ = bitField0_; - if (((from_bitField0_ & 0x00000001) != 0)) { - result.id_ = id_; - } - if (((from_bitField0_ & 0x00000002) != 0)) { - result.name_ = name_; - } - if (((from_bitField0_ & 0x00000004) != 0)) { - result.price_ = price_; - } - } - - @java.lang.Override - public Builder mergeFrom(com.google.protobuf.Message other) { - if (other instanceof org.demo.kafka.protobuf.ProtobufProduct) { - return mergeFrom((org.demo.kafka.protobuf.ProtobufProduct)other); - } else { - super.mergeFrom(other); - return this; - } - } - - public Builder mergeFrom(org.demo.kafka.protobuf.ProtobufProduct other) { - if (other == org.demo.kafka.protobuf.ProtobufProduct.getDefaultInstance()) return this; - if (other.getId() != 0) { - setId(other.getId()); - } - if (!other.getName().isEmpty()) { - name_ = other.name_; - bitField0_ |= 0x00000002; - onChanged(); - } - if (java.lang.Double.doubleToRawLongBits(other.getPrice()) != 0) { - setPrice(other.getPrice()); - } - 
this.mergeUnknownFields(other.getUnknownFields()); - onChanged(); - return this; - } - - @java.lang.Override - public final boolean isInitialized() { - return true; - } - - @java.lang.Override - public Builder mergeFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws java.io.IOException { - if (extensionRegistry == null) { - throw new java.lang.NullPointerException(); - } - try { - boolean done = false; - while (!done) { - int tag = input.readTag(); - switch (tag) { - case 0: - done = true; - break; - case 8: { - id_ = input.readInt32(); - bitField0_ |= 0x00000001; - break; - } // case 8 - case 18: { - name_ = input.readStringRequireUtf8(); - bitField0_ |= 0x00000002; - break; - } // case 18 - case 25: { - price_ = input.readDouble(); - bitField0_ |= 0x00000004; - break; - } // case 25 - default: { - if (!super.parseUnknownField(input, extensionRegistry, tag)) { - done = true; // was an endgroup tag - } - break; - } // default: - } // switch (tag) - } // while (!done) - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.unwrapIOException(); - } finally { - onChanged(); - } // finally - return this; - } - private int bitField0_; - - private int id_ ; - /** - * int32 id = 1; - * @return The id. - */ - @java.lang.Override - public int getId() { - return id_; - } - /** - * int32 id = 1; - * @param value The id to set. - * @return This builder for chaining. - */ - public Builder setId(int value) { - - id_ = value; - bitField0_ |= 0x00000001; - onChanged(); - return this; - } - /** - * int32 id = 1; - * @return This builder for chaining. - */ - public Builder clearId() { - bitField0_ = (bitField0_ & ~0x00000001); - id_ = 0; - onChanged(); - return this; - } - - private java.lang.Object name_ = ""; - /** - * string name = 2; - * @return The name. - */ - public java.lang.String getName() { - java.lang.Object ref = name_; - if (!(ref instanceof java.lang.String)) { - com.google.protobuf.ByteString bs = - (com.google.protobuf.ByteString) ref; - java.lang.String s = bs.toStringUtf8(); - name_ = s; - return s; - } else { - return (java.lang.String) ref; - } - } - /** - * string name = 2; - * @return The bytes for name. - */ - public com.google.protobuf.ByteString - getNameBytes() { - java.lang.Object ref = name_; - if (ref instanceof String) { - com.google.protobuf.ByteString b = - com.google.protobuf.ByteString.copyFromUtf8( - (java.lang.String) ref); - name_ = b; - return b; - } else { - return (com.google.protobuf.ByteString) ref; - } - } - /** - * string name = 2; - * @param value The name to set. - * @return This builder for chaining. - */ - public Builder setName( - java.lang.String value) { - if (value == null) { throw new NullPointerException(); } - name_ = value; - bitField0_ |= 0x00000002; - onChanged(); - return this; - } - /** - * string name = 2; - * @return This builder for chaining. - */ - public Builder clearName() { - name_ = getDefaultInstance().getName(); - bitField0_ = (bitField0_ & ~0x00000002); - onChanged(); - return this; - } - /** - * string name = 2; - * @param value The bytes for name to set. - * @return This builder for chaining. - */ - public Builder setNameBytes( - com.google.protobuf.ByteString value) { - if (value == null) { throw new NullPointerException(); } - checkByteStringIsUtf8(value); - name_ = value; - bitField0_ |= 0x00000002; - onChanged(); - return this; - } - - private double price_ ; - /** - * double price = 3; - * @return The price. 
- */ - @java.lang.Override - public double getPrice() { - return price_; - } - /** - * double price = 3; - * @param value The price to set. - * @return This builder for chaining. - */ - public Builder setPrice(double value) { - - price_ = value; - bitField0_ |= 0x00000004; - onChanged(); - return this; - } - /** - * double price = 3; - * @return This builder for chaining. - */ - public Builder clearPrice() { - bitField0_ = (bitField0_ & ~0x00000004); - price_ = 0D; - onChanged(); - return this; - } - - // @@protoc_insertion_point(builder_scope:org.demo.kafka.protobuf.ProtobufProduct) - } - - // @@protoc_insertion_point(class_scope:org.demo.kafka.protobuf.ProtobufProduct) - private static final org.demo.kafka.protobuf.ProtobufProduct DEFAULT_INSTANCE; - static { - DEFAULT_INSTANCE = new org.demo.kafka.protobuf.ProtobufProduct(); - } - - public static org.demo.kafka.protobuf.ProtobufProduct getDefaultInstance() { - return DEFAULT_INSTANCE; - } - - private static final com.google.protobuf.Parser - PARSER = new com.google.protobuf.AbstractParser() { - @java.lang.Override - public ProtobufProduct parsePartialFrom( - com.google.protobuf.CodedInputStream input, - com.google.protobuf.ExtensionRegistryLite extensionRegistry) - throws com.google.protobuf.InvalidProtocolBufferException { - Builder builder = newBuilder(); - try { - builder.mergeFrom(input, extensionRegistry); - } catch (com.google.protobuf.InvalidProtocolBufferException e) { - throw e.setUnfinishedMessage(builder.buildPartial()); - } catch (com.google.protobuf.UninitializedMessageException e) { - throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); - } catch (java.io.IOException e) { - throw new com.google.protobuf.InvalidProtocolBufferException(e) - .setUnfinishedMessage(builder.buildPartial()); - } - return builder.buildPartial(); - } - }; - - public static com.google.protobuf.Parser parser() { - return PARSER; - } - - @java.lang.Override - public com.google.protobuf.Parser getParserForType() { - return PARSER; - } - - @java.lang.Override - public org.demo.kafka.protobuf.ProtobufProduct getDefaultInstanceForType() { - return DEFAULT_INSTANCE; - } - -} - diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java deleted file mode 100644 index 9c1518db3..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOrBuilder.java +++ /dev/null @@ -1,36 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// NO CHECKED-IN PROTOBUF GENCODE -// source: ProtobufProduct.proto -// Protobuf Java Version: 4.31.0 - -package org.demo.kafka.protobuf; - -@com.google.protobuf.Generated -public interface ProtobufProductOrBuilder extends - // @@protoc_insertion_point(interface_extends:org.demo.kafka.protobuf.ProtobufProduct) - com.google.protobuf.MessageOrBuilder { - - /** - * int32 id = 1; - * @return The id. - */ - int getId(); - - /** - * string name = 2; - * @return The name. - */ - java.lang.String getName(); - /** - * string name = 2; - * @return The bytes for name. - */ - com.google.protobuf.ByteString - getNameBytes(); - - /** - * double price = 3; - * @return The price. 
- */ - double getPrice(); -} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java b/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java deleted file mode 100644 index 6a99f35ec..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/protobuf/ProtobufProductOuterClass.java +++ /dev/null @@ -1,63 +0,0 @@ -// Generated by the protocol buffer compiler. DO NOT EDIT! -// NO CHECKED-IN PROTOBUF GENCODE -// source: ProtobufProduct.proto -// Protobuf Java Version: 4.31.0 - -package org.demo.kafka.protobuf; - -@com.google.protobuf.Generated -public final class ProtobufProductOuterClass { - private ProtobufProductOuterClass() {} - static { - com.google.protobuf.RuntimeVersion.validateProtobufGencodeVersion( - com.google.protobuf.RuntimeVersion.RuntimeDomain.PUBLIC, - /* major= */ 4, - /* minor= */ 31, - /* patch= */ 0, - /* suffix= */ "", - ProtobufProductOuterClass.class.getName()); - } - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistryLite registry) { - } - - public static void registerAllExtensions( - com.google.protobuf.ExtensionRegistry registry) { - registerAllExtensions( - (com.google.protobuf.ExtensionRegistryLite) registry); - } - static final com.google.protobuf.Descriptors.Descriptor - internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor; - static final - com.google.protobuf.GeneratedMessage.FieldAccessorTable - internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable; - - public static com.google.protobuf.Descriptors.FileDescriptor - getDescriptor() { - return descriptor; - } - private static com.google.protobuf.Descriptors.FileDescriptor - descriptor; - static { - java.lang.String[] descriptorData = { - "\n\025ProtobufProduct.proto\022\027org.demo.kafka." 
+ - "protobuf\":\n\017ProtobufProduct\022\n\n\002id\030\001 \001(\005\022" + - "\014\n\004name\030\002 \001(\t\022\r\n\005price\030\003 \001(\001B6\n\027org.demo" + - ".kafka.protobufB\031ProtobufProductOuterCla" + - "ssP\001b\006proto3" - }; - descriptor = com.google.protobuf.Descriptors.FileDescriptor - .internalBuildGeneratedFileFrom(descriptorData, - new com.google.protobuf.Descriptors.FileDescriptor[] { - }); - internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor = - getDescriptor().getMessageTypes().get(0); - internal_static_org_demo_kafka_protobuf_ProtobufProduct_fieldAccessorTable = new - com.google.protobuf.GeneratedMessage.FieldAccessorTable( - internal_static_org_demo_kafka_protobuf_ProtobufProduct_descriptor, - new java.lang.String[] { "Id", "Name", "Price", }); - descriptor.resolveAllFeaturesImmutable(); - } - - // @@protoc_insertion_point(outer_class_scope) -} diff --git a/examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto b/examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto deleted file mode 100644 index 4d3338a6f..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/proto/ProtobufProduct.proto +++ /dev/null @@ -1,13 +0,0 @@ -syntax = "proto3"; - -package org.demo.kafka.protobuf; - -option java_package = "org.demo.kafka.protobuf"; -option java_outer_classname = "ProtobufProductOuterClass"; -option java_multiple_files = true; - -message ProtobufProduct { - int32 id = 1; - string name = 2; - double price = 3; -} \ No newline at end of file diff --git a/examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties b/examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties deleted file mode 100644 index c4610d4ff..000000000 --- a/examples/powertools-examples-kafka-minimal/src/main/resources/simplelogger.properties +++ /dev/null @@ -1,6 +0,0 @@ -org.slf4j.simpleLogger.defaultLogLevel=info -org.slf4j.simpleLogger.showDateTime=true -org.slf4j.simpleLogger.dateTimeFormat=yyyy-MM-dd HH:mm:ss.SSS -org.slf4j.simpleLogger.showThreadName=true -org.slf4j.simpleLogger.showLogName=true -org.slf4j.simpleLogger.showShortLogName=false \ No newline at end of file diff --git a/examples/powertools-examples-kafka-minimal/template.yaml b/examples/powertools-examples-kafka-minimal/template.yaml deleted file mode 100644 index 73b5933ca..000000000 --- a/examples/powertools-examples-kafka-minimal/template.yaml +++ /dev/null @@ -1,50 +0,0 @@ -AWSTemplateFormatVersion: "2010-09-09" -Transform: AWS::Serverless-2016-10-31 -Description: > - Minimal Kafka Deserialization example with Kafka Lambda ESM - -Globals: - Function: - Timeout: 20 - Runtime: java11 - MemorySize: 512 - Tracing: Active - -Resources: - KafkaJsonConsumerMinimalFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: . - Handler: org.demo.kafka.minimal.KafkaJsonConsumerMinimalFunction::handleRequest - Environment: - Variables: - JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" - - KafkaAvroConsumerMinimalFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: . - Handler: org.demo.kafka.minimal.KafkaAvroConsumerMinimalFunction::handleRequest - Environment: - Variables: - JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" - - KafkaProtobufConsumerMinimalFunction: - Type: AWS::Serverless::Function - Properties: - CodeUri: . 
- Handler: org.demo.kafka.minimal.KafkaProtobufConsumerMinimalFunction::handleRequest - Environment: - Variables: - JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" - -Outputs: - JsonFunction: - Description: "Kafka JSON Lambda Function ARN" - Value: !GetAtt KafkaJsonConsumerMinimalFunction.Arn - AvroFunction: - Description: "Kafka Avro Lambda Function ARN" - Value: !GetAtt KafkaAvroConsumerMinimalFunction.Arn - ProtobufFunction: - Description: "Kafka Protobuf Lambda Function ARN" - Value: !GetAtt KafkaProtobufConsumerMinimalFunction.Arn diff --git a/examples/powertools-examples-kafka/README.md b/examples/powertools-examples-kafka/README.md new file mode 100644 index 000000000..76cd81cb9 --- /dev/null +++ b/examples/powertools-examples-kafka/README.md @@ -0,0 +1,77 @@ +# Powertools for AWS Lambda (Java) - Kafka Example + +This project demonstrates how to use Powertools for AWS Lambda (Java) to deserialize Kafka Lambda events directly into strongly typed Kafka ConsumerRecords using different serialization formats. + +## Overview + +The example showcases automatic deserialization of Kafka Lambda events into ConsumerRecords using three formats: +- JSON - Using standard JSON serialization +- Avro - Using Apache Avro schema-based serialization +- Protobuf - Using Google Protocol Buffers serialization + +Each format has its own Lambda function handler that demonstrates how to use the `@Deserialization` annotation with the appropriate `DeserializationType`, eliminating the need to handle complex deserialization logic manually. + +## Build and Deploy + +### Prerequisites +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-sam-cli-install.html) +- Java 11+ +- Maven + +### Build + +```bash +# Build the application +sam build +``` + +### Deploy + +```bash +# Deploy the application to AWS +sam deploy --guided +``` + +During the guided deployment, you'll be prompted to provide values for required parameters. After deployment, SAM will output the ARNs of the deployed Lambda functions. + +### Build with Different Serialization Formats + +The project includes Maven profiles to build with different serialization formats: + +```bash +# Build with JSON only (no Avro or Protobuf) +mvn clean package -P base + +# Build with Avro only +mvn clean package -P avro-only + +# Build with Protobuf only +mvn clean package -P protobuf-only + +# Build with all formats (default) +mvn clean package -P full +``` + +## Testing + +The `events` directory contains sample events for each serialization format: +- `kafka-json-event.json` - Sample event with JSON-serialized products +- `kafka-avro-event.json` - Sample event with Avro-serialized products +- `kafka-protobuf-event.json` - Sample event with Protobuf-serialized products + +You can use these events to test the Lambda functions: + +```bash +# Test the JSON deserialization function +sam local invoke JsonDeserializationFunction --event events/kafka-json-event.json + +# Test the Avro deserialization function +sam local invoke AvroDeserializationFunction --event events/kafka-avro-event.json + +# Test the Protobuf deserialization function +sam local invoke ProtobufDeserializationFunction --event events/kafka-protobuf-event.json +``` + +## Sample Generator Tool + +The project includes a tool to generate sample JSON, Avro, and Protobuf serialized data. See the [tools/README.md](tools/README.md) for more information. 
\ No newline at end of file diff --git a/examples/powertools-examples-kafka/events/kafka-json-event.json b/examples/powertools-examples-kafka/events/kafka-json-event.json index d85c40654..7ffb9a3a6 100644 --- a/examples/powertools-examples-kafka/events/kafka-json-event.json +++ b/examples/powertools-examples-kafka/events/kafka-json-event.json @@ -10,8 +10,8 @@ "offset": 15, "timestamp": 1545084650987, "timestampType": "CREATE_TIME", - "key": "cmVjb3JkS2V5", - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "key": "NDI=", + "value": "eyJwcmljZSI6OTk5Ljk5LCJuYW1lIjoiTGFwdG9wIiwiaWQiOjEwMDF9", "headers": [ { "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] @@ -24,7 +24,8 @@ "offset": 15, "timestamp": 1545084650987, "timestampType": "CREATE_TIME", - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "key": "NDI=", + "value": "eyJwcmljZSI6NTk5Ljk5LCJuYW1lIjoiU21hcnRwaG9uZSIsImlkIjoxMDAyfQ==", "headers": [ { "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] @@ -38,7 +39,7 @@ "timestamp": 1545084650987, "timestampType": "CREATE_TIME", "key": null, - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "value": "eyJwcmljZSI6MTQ5Ljk5LCJuYW1lIjoiSGVhZHBob25lcyIsImlkIjoxMDAzfQ==", "headers": [ { "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml index 4956505cb..55239c473 100644 --- a/examples/powertools-examples-kafka/pom.xml +++ b/examples/powertools-examples-kafka/pom.xml @@ -16,36 +16,11 @@ - - software.amazon.lambda - powertools-logging-log4j - ${project.version} - - - software.amazon.lambda - powertools-metrics - ${project.version} - software.amazon.lambda powertools-kafka ${project.version} - - com.amazonaws - aws-lambda-java-core - 1.2.3 - - - com.amazonaws - aws-lambda-java-events - 3.15.0 - - - org.aspectj - aspectjrt - ${aspectj.version} - org.apache.avro avro @@ -56,6 +31,18 @@ protobuf-java ${protobuf.version} + + + + software.amazon.lambda + powertools-logging-log4j + ${project.version} + + + org.aspectj + aspectjrt + ${aspectj.version} + @@ -64,7 +51,7 @@ org.apache.maven.plugins maven-deploy-plugin - 3.1.2 + 3.1.4 true @@ -92,7 +79,7 @@ org.apache.logging.log4j log4j-transform-maven-shade-plugin-extensions - 0.1.0 + 0.2.0 @@ -109,10 +96,6 @@ software.amazon.lambda powertools-logging - - software.amazon.lambda - powertools-metrics - @@ -130,6 +113,7 @@ + org.apache.avro avro-maven-plugin @@ -148,6 +132,7 @@ + io.github.ascopes protobuf-maven-plugin @@ -170,11 +155,10 @@ - - + base diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java similarity index 61% rename from examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java rename to examples/powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java index 124173ace..72f383eef 100644 --- a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaAvroConsumerMinimalFunction.java +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/AvroDeserializationFunction.java @@ -1,4 +1,4 @@ -package org.demo.kafka.minimal; +package org.demo.kafka; import 
org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -11,19 +11,27 @@ import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; +import software.amazon.lambda.powertools.logging.Logging; -public class KafkaAvroConsumerMinimalFunction - implements RequestHandler, String> { +public class AvroDeserializationFunction implements RequestHandler, String> { - private static final Logger LOGGER = LoggerFactory.getLogger(KafkaAvroConsumerMinimalFunction.class); + private static final Logger LOGGER = LoggerFactory.getLogger(AvroDeserializationFunction.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_AVRO) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord consumerRecord : records) { - LOGGER.info("Received record: {}", consumerRecord); + LOGGER.info("ConsumerRecord: {}", consumerRecord); + + AvroProduct product = consumerRecord.value(); + LOGGER.info("AvroProduct: {}", product); + + String key = consumerRecord.key(); + LOGGER.info("Key: {}", key); } return "OK"; } + } diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/JsonDeserializationFunction.java similarity index 54% rename from examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java rename to examples/powertools-examples-kafka/src/main/java/org/demo/kafka/JsonDeserializationFunction.java index 0922037bf..c1d7f13ae 100644 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaJsonConsumerDeserializationFunction.java +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/JsonDeserializationFunction.java @@ -1,7 +1,5 @@ package org.demo.kafka; -import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; - import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.slf4j.Logger; @@ -10,28 +8,26 @@ import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; -import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; -import software.amazon.cloudwatchlogs.emf.model.Unit; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; import software.amazon.lambda.powertools.logging.Logging; -import software.amazon.lambda.powertools.metrics.Metrics; -import software.amazon.lambda.powertools.metrics.MetricsUtils; -public class KafkaJsonConsumerDeserializationFunction - implements RequestHandler, String> { +public class JsonDeserializationFunction implements RequestHandler, String> { - private static final Logger LOGGER = LoggerFactory.getLogger(KafkaJsonConsumerDeserializationFunction.class); - private final MetricsLogger metrics = MetricsUtils.metricsLogger(); + private static final Logger LOGGER = LoggerFactory.getLogger(JsonDeserializationFunction.class); @Override @Logging - @Metrics @Deserialization(type = DeserializationType.KAFKA_JSON) public String handleRequest(ConsumerRecords consumerRecords, Context context) { for (ConsumerRecord consumerRecord : consumerRecords) { - LOGGER.info("{}", consumerRecord, entry("value", consumerRecord.value())); - metrics.putMetric("ProcessedRecord", 1, Unit.COUNT); + 
LOGGER.info("ConsumerRecord: {}", consumerRecord); + + Product product = consumerRecord.value(); + LOGGER.info("Product: {}", product); + + String key = consumerRecord.key(); + LOGGER.info("Key: {}", key); } return "OK"; diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java deleted file mode 100644 index cf68ac8d7..000000000 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaAvroConsumerDeserializationFunction.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.demo.kafka; - -import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.demo.kafka.avro.AvroProduct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; - -import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; -import software.amazon.cloudwatchlogs.emf.model.Unit; -import software.amazon.lambda.powertools.kafka.Deserialization; -import software.amazon.lambda.powertools.kafka.DeserializationType; -import software.amazon.lambda.powertools.logging.Logging; -import software.amazon.lambda.powertools.metrics.Metrics; -import software.amazon.lambda.powertools.metrics.MetricsUtils; - -public class KafkaAvroConsumerDeserializationFunction - implements RequestHandler, String> { - - private static final Logger LOGGER = LoggerFactory.getLogger(KafkaAvroConsumerDeserializationFunction.class); - private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); - - @Override - @Logging - @Metrics - @Deserialization(type = DeserializationType.KAFKA_AVRO) - public String handleRequest(ConsumerRecords records, Context context) { - for (ConsumerRecord consumerRecord : records) { - LOGGER.info("{}", consumerRecord, entry("value", avroToMap(consumerRecord.value()))); - metrics.putMetric("ProcessedAvroRecord", 1, Unit.COUNT); - } - - return "OK"; - } - - // Avro objects cannot be serialized to JSON by Jackson Object Mapper used by powertools-logging. - // We convert to a map first to retrieve a meaningful representation. 
- private Map avroToMap(AvroProduct avroProduct) { - if (avroProduct == null) { - return Collections.emptyMap(); - } - Map map = new HashMap<>(); - map.put("id", avroProduct.getId()); - map.put("name", avroProduct.getName()); - map.put("price", avroProduct.getPrice()); - return map; - } -} diff --git a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java deleted file mode 100644 index 5fe048fab..000000000 --- a/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/KafkaProtobufConsumerDeserializationFunction.java +++ /dev/null @@ -1,57 +0,0 @@ -package org.demo.kafka; - -import static software.amazon.lambda.powertools.logging.argument.StructuredArguments.entry; - -import java.util.Collections; -import java.util.HashMap; -import java.util.Map; - -import org.apache.kafka.clients.consumer.ConsumerRecord; -import org.apache.kafka.clients.consumer.ConsumerRecords; -import org.demo.kafka.protobuf.ProtobufProduct; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import com.amazonaws.services.lambda.runtime.Context; -import com.amazonaws.services.lambda.runtime.RequestHandler; - -import software.amazon.cloudwatchlogs.emf.logger.MetricsLogger; -import software.amazon.cloudwatchlogs.emf.model.Unit; -import software.amazon.lambda.powertools.kafka.Deserialization; -import software.amazon.lambda.powertools.kafka.DeserializationType; -import software.amazon.lambda.powertools.logging.Logging; -import software.amazon.lambda.powertools.metrics.Metrics; -import software.amazon.lambda.powertools.metrics.MetricsUtils; - -public class KafkaProtobufConsumerDeserializationFunction - implements RequestHandler, String> { - - private static final Logger LOGGER = LoggerFactory.getLogger(KafkaProtobufConsumerDeserializationFunction.class); - private static final MetricsLogger metrics = MetricsUtils.metricsLogger(); - - @Override - @Logging - @Metrics - @Deserialization(type = DeserializationType.KAFKA_PROTOBUF) - public String handleRequest(ConsumerRecords records, Context context) { - for (ConsumerRecord consumerRecord : records) { - LOGGER.info("{}", consumerRecord, entry("value", protobufToMap(consumerRecord.value()))); - metrics.putMetric("ProcessedProtobufRecord", 1, Unit.COUNT); - } - - return "OK"; - } - - // Protobuf Message objects cannot be serialized to JSON by Jackson Object Mapper used by powertools-logging. - // We convert to a map first to retrieve a meaningful representation. 
- private Map protobufToMap(ProtobufProduct protobufProduct) { - if (protobufProduct == null) { - return Collections.emptyMap(); - } - Map map = new HashMap<>(); - map.put("id", protobufProduct.getId()); - map.put("name", protobufProduct.getName()); - map.put("price", protobufProduct.getPrice()); - return map; - } -} diff --git a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java similarity index 67% rename from examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java rename to examples/powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java index 6fef249ba..1978e8890 100644 --- a/examples/powertools-examples-kafka-minimal/src/main/java/org/demo/kafka/minimal/KafkaProtobufConsumerMinimalFunction.java +++ b/examples/powertools-examples-kafka/src/main/java/org/demo/kafka/ProtobufDeserializationFunction.java @@ -1,4 +1,4 @@ -package org.demo.kafka.minimal; +package org.demo.kafka; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; @@ -11,19 +11,28 @@ import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; +import software.amazon.lambda.powertools.logging.Logging; -public class KafkaProtobufConsumerMinimalFunction +public class ProtobufDeserializationFunction implements RequestHandler, String> { - private static final Logger LOGGER = LoggerFactory.getLogger(KafkaProtobufConsumerMinimalFunction.class); + private static final Logger LOGGER = LoggerFactory.getLogger(ProtobufDeserializationFunction.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_PROTOBUF) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord consumerRecord : records) { - LOGGER.info("Received record: {}", consumerRecord); + LOGGER.info("ConsumerRecord: {}", consumerRecord); + + ProtobufProduct product = consumerRecord.value(); + LOGGER.info("ProtobufProduct: {}", product); + + String key = consumerRecord.key(); + LOGGER.info("Key: {}", key); } return "OK"; } + } diff --git a/examples/powertools-examples-kafka/template.yaml b/examples/powertools-examples-kafka/template.yaml index e196ed508..4d422c1f2 100644 --- a/examples/powertools-examples-kafka/template.yaml +++ b/examples/powertools-examples-kafka/template.yaml @@ -11,41 +11,41 @@ Globals: Tracing: Active Resources: - KafkaJsonConsumerDeserializationFunction: + JsonDeserializationFunction: Type: AWS::Serverless::Function Properties: CodeUri: . - Handler: org.demo.kafka.KafkaJsonConsumerDeserializationFunction::handleRequest + Handler: org.demo.kafka.JsonDeserializationFunction::handleRequest Environment: Variables: JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" POWERTOOLS_LOG_LEVEL: DEBUG - POWERTOOLS_SERVICE_NAME: KafkaJsonConsumerDeserialization - POWERTOOLS_METRICS_NAMESPACE: KafkaJsonConsumerDeserializationFunction + POWERTOOLS_SERVICE_NAME: JsonDeserialization + POWERTOOLS_METRICS_NAMESPACE: JsonDeserializationFunction - KafkaAvroConsumerDeserializationFunction: + AvroDeserializationFunction: Type: AWS::Serverless::Function Properties: CodeUri: . 
- Handler: org.demo.kafka.KafkaAvroConsumerDeserializationFunction::handleRequest + Handler: org.demo.kafka.AvroDeserializationFunction::handleRequest Environment: Variables: JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" POWERTOOLS_LOG_LEVEL: DEBUG - POWERTOOLS_SERVICE_NAME: KafkaAvroConsumerDeserialization - POWERTOOLS_METRICS_NAMESPACE: KafkaAvroConsumerDeserializationFunction + POWERTOOLS_SERVICE_NAME: AvroDeserialization + POWERTOOLS_METRICS_NAMESPACE: AvroDeserializationFunction - KafkaProtobufConsumerDeserializationFunction: + ProtobufDeserializationFunction: Type: AWS::Serverless::Function Properties: CodeUri: . - Handler: org.demo.kafka.KafkaProtobufConsumerDeserializationFunction::handleRequest + Handler: org.demo.kafka.ProtobufDeserializationFunction::handleRequest Environment: Variables: JAVA_TOOL_OPTIONS: "-XX:+TieredCompilation -XX:TieredStopAtLevel=1" POWERTOOLS_LOG_LEVEL: DEBUG - POWERTOOLS_SERVICE_NAME: KafkaProtobufConsumerDeserialization - POWERTOOLS_METRICS_NAMESPACE: KafkaProtobufConsumerDeserializationFunction + POWERTOOLS_SERVICE_NAME: ProtobufDeserialization + POWERTOOLS_METRICS_NAMESPACE: ProtobufDeserializationFunction Outputs: JsonFunction: diff --git a/examples/powertools-examples-kafka/tools/README.md b/examples/powertools-examples-kafka/tools/README.md index 0497353ff..53d07b0c4 100644 --- a/examples/powertools-examples-kafka/tools/README.md +++ b/examples/powertools-examples-kafka/tools/README.md @@ -4,6 +4,7 @@ This tool generates base64-encoded serialized products for testing the Kafka con ## Supported Formats +- **JSON**: Generates base64-encoded JSON serialized products - **Avro**: Generates base64-encoded Avro serialized products - **Protobuf**: Generates base64-encoded Protobuf serialized products @@ -19,6 +20,15 @@ mvn generate-sources mvn compile ``` +### Generate JSON Samples + +```bash +# Run the JSON sample generator +mvn exec:java -Dexec.mainClass="org.demo.kafka.tools.GenerateJsonSamples" +``` + +The tool will output base64-encoded values for JSON products that can be used in `../events/kafka-json-event.json`. 
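+
+For example, the first record in `../events/kafka-json-event.json` uses output of this form (the exact value bytes can differ between runs, since JSON field ordering is not fixed):
+
+```
+key: "NDI=",
+value: "eyJwcmljZSI6OTk5Ljk5LCJuYW1lIjoiTGFwdG9wIiwiaWQiOjEwMDF9",
+```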
+ ### Generate Avro Samples ```bash @@ -49,6 +59,7 @@ Each generator produces: After generating the samples, you can copy the output into the respective event files: +- `../events/kafka-json-event.json` for JSON samples - `../events/kafka-avro-event.json` for Avro samples - `../events/kafka-protobuf-event.json` for Protobuf samples diff --git a/examples/powertools-examples-kafka/tools/pom.xml b/examples/powertools-examples-kafka/tools/pom.xml index b94be80f0..52b234359 100644 --- a/examples/powertools-examples-kafka/tools/pom.xml +++ b/examples/powertools-examples-kafka/tools/pom.xml @@ -26,6 +26,11 @@ protobuf-java ${protobuf.version} + + com.fasterxml.jackson.core + jackson-databind + 2.19.0 + @@ -74,6 +79,12 @@ exec-maven-plugin 3.1.0 + + generate-json-samples + + org.demo.kafka.tools.GenerateJsonSamples + + generate-avro-samples diff --git a/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java new file mode 100644 index 000000000..a4fd6565a --- /dev/null +++ b/examples/powertools-examples-kafka/tools/src/main/java/org/demo/kafka/tools/GenerateJsonSamples.java @@ -0,0 +1,126 @@ +package org.demo.kafka.tools; + +import java.io.IOException; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; + +import com.fasterxml.jackson.databind.ObjectMapper; + +/** + * Utility class to generate base64-encoded JSON serialized products + * for use in test events. + */ +public class GenerateJsonSamples { + + public static void main(String[] args) throws IOException { + // Create three different products + Map product1 = new HashMap<>(); + product1.put("id", 1001); + product1.put("name", "Laptop"); + product1.put("price", 999.99); + + Map product2 = new HashMap<>(); + product2.put("id", 1002); + product2.put("name", "Smartphone"); + product2.put("price", 599.99); + + Map product3 = new HashMap<>(); + product3.put("id", 1003); + product3.put("name", "Headphones"); + product3.put("price", 149.99); + + // Serialize and encode each product + String encodedProduct1 = serializeAndEncode(product1); + String encodedProduct2 = serializeAndEncode(product2); + String encodedProduct3 = serializeAndEncode(product3); + + // Serialize and encode an integer key + String encodedKey = serializeAndEncodeInteger(42); + + // Print the results + System.out.println("Base64 encoded JSON products for use in kafka-json-event.json:"); + System.out.println("\nProduct 1 (with key):"); + System.out.println("key: \"" + encodedKey + "\","); + System.out.println("value: \"" + encodedProduct1 + "\","); + + System.out.println("\nProduct 2 (with key):"); + System.out.println("key: \"" + encodedKey + "\","); + System.out.println("value: \"" + encodedProduct2 + "\","); + + System.out.println("\nProduct 3 (without key):"); + System.out.println("key: null,"); + System.out.println("value: \"" + encodedProduct3 + "\","); + + // Print a sample event structure + System.out.println("\nSample event structure:"); + printSampleEvent(encodedKey, encodedProduct1, encodedProduct2, encodedProduct3); + } + + private static String serializeAndEncode(Map product) throws IOException { + ObjectMapper mapper = new ObjectMapper(); + String json = mapper.writeValueAsString(product); + return Base64.getEncoder().encodeToString(json.getBytes()); + } + + private static String serializeAndEncodeInteger(Integer value) { + // For simple types like integers, we'll just convert to string and encode + 
return Base64.getEncoder().encodeToString(value.toString().getBytes()); + } + + private static void printSampleEvent(String key, String product1, String product2, String product3) { + System.out.println("{\n" + + " \"eventSource\": \"aws:kafka\",\n" + + " \"eventSourceArn\": \"arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4\",\n" + + + " \"bootstrapServers\": \"b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092\",\n" + + + " \"records\": {\n" + + " \"mytopic-0\": [\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + key + "\",\n" + + " \"value\": \"" + product1 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": \"" + key + "\",\n" + + " \"value\": \"" + product2 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " },\n" + + " {\n" + + " \"topic\": \"mytopic\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": \"" + product3 + "\",\n" + + " \"headers\": [\n" + + " {\n" + + " \"headerKey\": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]\n" + + " }\n" + + " ]\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"); + } +} From 4624c12a292f24b79a4e0066bee189c1b1f5ff45 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Fri, 6 Jun 2025 10:58:33 +0200 Subject: [PATCH 19/38] Implement PR feedback from Karthik. --- .../powertools/kafka/internal/DeserializationUtils.java | 2 +- .../kafka/serializers/AbstractKafkaDeserializer.java | 9 ++++----- .../kafka/serializers/KafkaAvroDeserializer.java | 2 +- .../kafka/serializers/KafkaJsonDeserializer.java | 2 +- .../kafka/serializers/KafkaProtobufDeserializer.java | 2 +- .../kafka/serializers/AbstractKafkaDeserializerTest.java | 2 +- .../kafka/serializers/KafkaAvroDeserializerTest.java | 6 +++--- .../kafka/serializers/KafkaJsonDeserializerTest.java | 4 ++-- .../kafka/serializers/KafkaProtobufDeserializerTest.java | 6 +++--- 9 files changed, 17 insertions(+), 18 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java index b2704e5bb..4a1c13399 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java @@ -66,7 +66,7 @@ public static DeserializationType determineDeserializationType() { handler); } } catch (Exception e) { - LOGGER.error( + LOGGER.warn( "Cannot determine deserialization type for custom deserialization. 
Defaulting to standard.", e); } diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index f178fd988..48e696c6f 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -134,7 +134,6 @@ private ConsumerRecords convertToConsumerRecords(KafkaEvent kafkaEv } } - // TODO: Understand what nextOffsets is and if we need to use it. return new ConsumerRecords<>(recordsMap, Map.of()); } @@ -146,13 +145,14 @@ private ConsumerRecord convertToConsumerRecord( K key = null; V value = null; + + // We set these to NULL_SIZE since they are not relevant in the Lambda environment due to ESM pre-processing. int keySize = ConsumerRecord.NULL_SIZE; int valueSize = ConsumerRecord.NULL_SIZE; if (eventRecord.getKey() != null) { try { byte[] decodedKeyBytes = Base64.getDecoder().decode(eventRecord.getKey()); - keySize = decodedKeyBytes.length; key = deserialize(decodedKeyBytes, keyType); } catch (Exception e) { throw new RuntimeException("Failed to deserialize Kafka record key.", e); @@ -162,7 +162,6 @@ private ConsumerRecord convertToConsumerRecord( if (eventRecord.getValue() != null) { try { byte[] decodedValueBytes = Base64.getDecoder().decode(eventRecord.getValue()); - valueSize = decodedValueBytes.length; value = deserialize(decodedValueBytes, valueType); } catch (Exception e) { throw new RuntimeException("Failed to deserialize Kafka record value.", e); @@ -204,7 +203,7 @@ private ConsumerRecord convertToConsumerRecord( * @return The deserialized object * @throws IOException If deserialization fails */ - protected abstract T deserializeComplex(byte[] data, Class type) throws IOException; + protected abstract T deserializeObject(byte[] data, Class type) throws IOException; /** * Main deserialize method that handles primitive types and delegates to subclasses for complex types. 
@@ -223,7 +222,7 @@ private T deserialize(byte[] data, Class type) throws IOException { } // Delegate to subclass for complex type deserialization - return deserializeComplex(data, type); + return deserializeObject(data, type); } /** diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java index c54bc3c26..ddf09d4ff 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializer.java @@ -26,7 +26,7 @@ public class KafkaAvroDeserializer extends AbstractKafkaDeserializer { @Override - protected T deserializeComplex(byte[] data, Class type) throws IOException { + protected T deserializeObject(byte[] data, Class type) throws IOException { // If no Avro generated class is passed we cannot deserialize using Avro if (SpecificRecordBase.class.isAssignableFrom(type)) { try { diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java index f7b09c75d..e2280364a 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java @@ -20,7 +20,7 @@ public class KafkaJsonDeserializer extends AbstractKafkaDeserializer { @Override - protected T deserializeComplex(byte[] data, Class type) throws IOException { + protected T deserializeObject(byte[] data, Class type) throws IOException { String decodedStr = new String(data); return objectMapper.readValue(decodedStr, type); diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java index a1db4e0db..025f203c4 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializer.java @@ -23,7 +23,7 @@ public class KafkaProtobufDeserializer extends AbstractKafkaDeserializer { @Override @SuppressWarnings("unchecked") - protected T deserializeComplex(byte[] data, Class type) throws IOException { + protected T deserializeObject(byte[] data, Class type) throws IOException { // If no Protobuf generated class is passed we cannot deserialize using Protobuf if (Message.class.isAssignableFrom(type)) { try { diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java index 9692c6b83..55b7bd210 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java @@ -426,7 +426,7 @@ void shouldThrowExceptionWhenConvertingEmptyStringToChar(InputType inputType) { // Test 
implementation of AbstractKafkaDeserializer private static class TestDeserializer extends AbstractKafkaDeserializer { @Override - protected T deserializeComplex(byte[] data, Class type) throws IOException { + protected T deserializeObject(byte[] data, Class type) throws IOException { return objectMapper.readValue(data, type); } } diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java index 3abaed7dd..a0b59b136 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaAvroDeserializerTest.java @@ -38,7 +38,7 @@ void shouldThrowExceptionWhenTypeIsNotAvroSpecificRecord() { byte[] data = new byte[] { 1, 2, 3 }; // When/Then - assertThatThrownBy(() -> deserializer.deserializeComplex(data, String.class)) + assertThatThrownBy(() -> deserializer.deserializeObject(data, String.class)) .isInstanceOf(IOException.class) .hasMessageContaining("Unsupported type for Avro deserialization"); } @@ -50,7 +50,7 @@ void shouldDeserializeValidAvroData() throws IOException { byte[] avroData = serializeAvro(product); // When - TestProduct result = deserializer.deserializeComplex(avroData, TestProduct.class); + TestProduct result = deserializer.deserializeObject(avroData, TestProduct.class); // Then assertThat(result).isNotNull(); @@ -65,7 +65,7 @@ void shouldThrowExceptionWhenDeserializingInvalidAvroData() { byte[] invalidAvroData = new byte[] { 1, 2, 3, 4, 5 }; // When/Then - assertThatThrownBy(() -> deserializer.deserializeComplex(invalidAvroData, TestProduct.class)) + assertThatThrownBy(() -> deserializer.deserializeObject(invalidAvroData, TestProduct.class)) .isInstanceOf(IOException.class) .hasMessageContaining("Failed to deserialize Avro data"); } diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java index 540db4b0a..0cfb2498b 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializerTest.java @@ -42,7 +42,7 @@ void shouldThrowExceptionWhenTypeIsNotSupportedForJson() { byte[] data = new byte[] { 1, 2, 3 }; // When/Then - assertThatThrownBy(() -> deserializer.deserializeComplex(data, Object.class)) + assertThatThrownBy(() -> deserializer.deserializeObject(data, Object.class)) .isInstanceOf(JsonParseException.class); } @@ -53,7 +53,7 @@ void shouldDeserializeValidJsonData() throws IOException { byte[] jsonData = objectMapper.writeValueAsBytes(product); // When - TestProductPojo result = deserializer.deserializeComplex(jsonData, TestProductPojo.class); + TestProductPojo result = deserializer.deserializeObject(jsonData, TestProductPojo.class); // Then assertThat(result).isNotNull(); diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java index db949a3f7..2d506de4b 100644 --- 
a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/KafkaProtobufDeserializerTest.java @@ -37,7 +37,7 @@ void shouldThrowExceptionWhenTypeIsNotProtobufMessage() { byte[] data = new byte[] { 1, 2, 3 }; // When/Then - assertThatThrownBy(() -> deserializer.deserializeComplex(data, String.class)) + assertThatThrownBy(() -> deserializer.deserializeObject(data, String.class)) .isInstanceOf(IOException.class) .hasMessageContaining("Unsupported type for Protobuf deserialization"); } @@ -53,7 +53,7 @@ void shouldDeserializeValidProtobufData() throws IOException { byte[] protobufData = product.toByteArray(); // When - TestProduct result = deserializer.deserializeComplex(protobufData, TestProduct.class); + TestProduct result = deserializer.deserializeObject(protobufData, TestProduct.class); // Then assertThat(result).isNotNull(); @@ -68,7 +68,7 @@ void shouldThrowExceptionWhenDeserializingInvalidProtobufData() { byte[] invalidProtobufData = new byte[] { 1, 2, 3, 4, 5 }; // When/Then - assertThatThrownBy(() -> deserializer.deserializeComplex(invalidProtobufData, TestProduct.class)) + assertThatThrownBy(() -> deserializer.deserializeObject(invalidProtobufData, TestProduct.class)) .isInstanceOf(IOException.class) .hasMessageContaining("Failed to deserialize Protobuf data"); } From 598cc27218548a176194d3b3b08d751c1022b31a Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Fri, 6 Jun 2025 17:07:08 +0200 Subject: [PATCH 20/38] Fix SAM outputs. --- examples/powertools-examples-kafka/template.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/powertools-examples-kafka/template.yaml b/examples/powertools-examples-kafka/template.yaml index 4d422c1f2..509b13ca3 100644 --- a/examples/powertools-examples-kafka/template.yaml +++ b/examples/powertools-examples-kafka/template.yaml @@ -50,10 +50,10 @@ Resources: Outputs: JsonFunction: Description: "Kafka JSON Lambda Function ARN" - Value: !GetAtt KafkaJsonConsumerDeserializationFunction.Arn + Value: !GetAtt JsonDeserializationFunction.Arn AvroFunction: Description: "Kafka Avro Lambda Function ARN" - Value: !GetAtt KafkaAvroConsumerDeserializationFunction.Arn + Value: !GetAtt AvroDeserializationFunction.Arn ProtobufFunction: Description: "Kafka Protobuf Lambda Function ARN" - Value: !GetAtt KafkaProtobufConsumerDeserializationFunction.Arn + Value: !GetAtt ProtobufDeserializationFunction.Arn From 92f6f8fe124c9096eaa76f6ef230c24b780edfa6 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Fri, 6 Jun 2025 18:38:19 +0200 Subject: [PATCH 21/38] Do not fail on unknown properties when deserializating into KafkaEvent. 
--- .../kafka/serializers/AbstractKafkaDeserializer.java | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index 48e696c6f..6658106fd 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -31,13 +31,15 @@ import org.apache.kafka.common.record.TimestampType; import com.amazonaws.services.lambda.runtime.events.KafkaEvent; +import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; /** * Abstract base class for Kafka deserializers that implements common functionality. */ abstract class AbstractKafkaDeserializer implements PowertoolsDeserializer { - protected static final ObjectMapper objectMapper = new ObjectMapper(); + protected static final ObjectMapper objectMapper = new ObjectMapper() + .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); /** * Deserialize JSON from InputStream into ConsumerRecords From 77845af874b1f6d0ceee18f04894bad05e5be127 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Mon, 16 Jun 2025 11:17:22 +0200 Subject: [PATCH 22/38] Allow customers to bring their own kafka-clients dependency. --- examples/powertools-examples-kafka/pom.xml | 5 +++++ powertools-kafka/pom.xml | 1 + .../AbstractKafkaDeserializer.java | 20 ++++++++++++++++++- 3 files changed, 25 insertions(+), 1 deletion(-) diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml index 55239c473..68c1a7900 100644 --- a/examples/powertools-examples-kafka/pom.xml +++ b/examples/powertools-examples-kafka/pom.xml @@ -21,6 +21,11 @@ powertools-kafka ${project.version} + + org.apache.kafka + kafka-clients + 4.0.0 + org.apache.avro avro diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml index f267f69f3..cc7fd302d 100644 --- a/powertools-kafka/pom.xml +++ b/powertools-kafka/pom.xml @@ -54,6 +54,7 @@ org.apache.kafka kafka-clients ${kafka-clients.version} + provided org.apache.avro diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index 6658106fd..c74b209ba 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -136,7 +136,25 @@ private ConsumerRecords convertToConsumerRecords(KafkaEvent kafkaEv } } - return new ConsumerRecords<>(recordsMap, Map.of()); + return createConsumerRecords(recordsMap); + } + + /** + * Creates ConsumerRecords with compatibility for both Kafka 3.x.x and 4.x.x. 
+ * + * @param Key type + * @param Value type + * @param records Map of records by topic partition + * @return ConsumerRecords instance + */ + protected ConsumerRecords createConsumerRecords(Map>> records) { + try { + // Try to use the Kafka 4.x.x constructor with nextOffsets parameter + return new ConsumerRecords<>(records, Map.of()); + } catch (NoSuchMethodError e) { + // Fall back to Kafka 3.x.x constructor if 4.x.x is not available + return new ConsumerRecords<>(records); + } } private ConsumerRecord convertToConsumerRecord( From e4875d82c3b05a744fd69fb3a503c96796f05c7d Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Mon, 16 Jun 2025 13:47:27 +0200 Subject: [PATCH 23/38] Add Kafka utility documentation. --- docs/utilities/kafka.md | 949 ++++++++++++++++++++++++++++++++++++++++ mkdocs.yml | 7 +- 2 files changed, 954 insertions(+), 2 deletions(-) create mode 100644 docs/utilities/kafka.md diff --git a/docs/utilities/kafka.md b/docs/utilities/kafka.md new file mode 100644 index 000000000..82d86d5ea --- /dev/null +++ b/docs/utilities/kafka.md @@ -0,0 +1,949 @@ +--- +title: Kafka +description: Utility +status: new +--- + + + +The Kafka utility transparently handles message deserialization, provides an intuitive developer experience, and integrates seamlessly with the rest of the Powertools for AWS Lambda ecosystem. + +```mermaid +flowchart LR + KafkaTopic["Kafka Topic"] --> MSK["Amazon MSK"] + KafkaTopic --> MSKServerless["Amazon MSK Serverless"] + KafkaTopic --> SelfHosted["Self-hosted Kafka"] + MSK --> EventSourceMapping["Event Source Mapping"] + MSKServerless --> EventSourceMapping + SelfHosted --> EventSourceMapping + EventSourceMapping --> Lambda["Lambda Function"] + Lambda --> KafkaUtility["Kafka Utility"] + KafkaUtility --> Deserialization["Deserialization"] + Deserialization --> YourLogic["Your Business Logic"] +``` + +## Key features + +- Automatic deserialization of Kafka messages (JSON, Avro, and Protocol Buffers) +- Simplified event record handling with familiar Kafka `ConsumerRecords` interface +- Support for key and value deserialization +- Support for ESM with and without Schema Registry integration +- Proper error handling for deserialization issues + +## Terminology + +**Event Source Mapping (ESM)** A Lambda feature that reads from streaming sources (like Kafka) and invokes your Lambda function. It manages polling, batching, and error handling automatically, eliminating the need for consumer management code. + +**Record Key and Value** A Kafka messages contain two important parts: an optional key that determines the partition and a value containing the actual message data. Both are base64-encoded in Lambda events and can be independently deserialized. + +**Deserialization** Is the process of converting binary data (base64-encoded in Lambda events) into usable Java objects according to a specific format like JSON, Avro, or Protocol Buffers. Powertools handles this conversion automatically. + +**DeserializationType enum** Contains parameters that tell Powertools how to interpret message data, including the format type (JSON, Avro, Protocol Buffers). + +**Schema Registry** Is a centralized service that stores and validates schemas, ensuring producers and consumers maintain compatibility when message formats evolve over time. + +## Moving from traditional Kafka consumers + +Lambda processes Kafka messages as discrete events rather than continuous streams, requiring a different approach to consumer development that Powertools for AWS helps standardize. 

| Aspect                | Traditional Kafka Consumers          | Lambda Kafka Consumer                                           |
| --------------------- | ------------------------------------ | --------------------------------------------------------------- |
| **Model**             | Pull-based (you poll for messages)   | Push-based (Lambda invoked with messages)                        |
| **Scaling**           | Manual scaling configuration         | Automatic scaling to partition count                             |
| **State**             | Long-running application with state  | Stateless, ephemeral executions                                  |
| **Offsets**           | Manual offset management             | Automatic offset commitment                                      |
| **Schema Validation** | Client-side schema validation        | Optional Schema Registry integration with Event Source Mapping   |
| **Error Handling**    | Per-message retry control            | Batch-level retry policies                                       |

## Getting started

### Installation

Add the Powertools for AWS Lambda Kafka dependency to your project:

=== "Maven"

    ```xml
    <dependency>
        <groupId>software.amazon.lambda</groupId>
        <artifactId>powertools-kafka</artifactId>
        <version>{{ powertools.version }}</version>
    </dependency>
    <!-- Kafka clients dependency - compatibility works for >= 3.0.0 -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>4.0.0</version>
    </dependency>
    ```

=== "Gradle"

    ```gradle
    dependencies {
        implementation 'software.amazon.lambda:powertools-kafka:{{ powertools.version }}'
        // Kafka clients dependency - compatibility works for >= 3.0.0
        implementation 'org.apache.kafka:kafka-clients:4.0.0'
    }
    ```

### Required resources

To use the Kafka utility, you need an AWS Lambda function configured with a Kafka event source. This can be Amazon MSK, MSK Serverless, or a self-hosted Kafka cluster.

=== "getting_started_with_msk.yaml"

    ```yaml
    AWSTemplateFormatVersion: '2010-09-09'
    Transform: AWS::Serverless-2016-10-31
    Resources:
      KafkaConsumerFunction:
        Type: AWS::Serverless::Function
        Properties:
          Handler: org.example.KafkaHandler::handleRequest
          Runtime: java21
          Timeout: 30
          Events:
            MSKEvent:
              Type: MSK
              Properties:
                StartingPosition: LATEST
                Stream: !GetAtt MyMSKCluster.Arn
                Topics:
                  - my-topic-1
                  - my-topic-2
          Policies:
            - AWSLambdaMSKExecutionRole
    ```

### Using ESM with Schema Registry

The Event Source Mapping configuration determines which mode is used. With `JSON` mode, Lambda converts all messages to JSON before invoking your function. With `SOURCE` mode, Lambda preserves the original format, requiring your function to handle the appropriate deserialization.

Powertools for AWS supports both Schema Registry integration modes in your Event Source Mapping configuration.

### Processing Kafka events

The Kafka utility transforms raw Lambda Kafka events into an intuitive format for processing. To handle messages effectively, you'll need to configure the `@Deserialization` annotation that matches your data format.

???+ tip "Using Avro is recommended"
    We recommend Avro for production Kafka implementations due to its schema evolution capabilities, compact binary format, and integration with Schema Registry. This offers better type safety and forward/backward compatibility compared to JSON.
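
The handlers below assume simple domain types. In the Avro and Protocol Buffers tabs, the `User` type is generated from its schema, while the JSON tab only needs a plain POJO that Jackson can bind to. A minimal sketch of such a POJO (field names assumed from the getters used in the examples) could look like this:

```java
public class User {
    private String name;
    private int age;

    // Jackson requires a no-args constructor for deserialization
    public User() {
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public int getAge() {
        return age;
    }

    public void setAge(int age) {
        this.age = age;
    }
}
```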
+ +=== "Avro Messages" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class AvroKafkaHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + User user = record.value(); // User class is auto-generated from Avro schema + System.out.printf("Processing user: %s, age %d%n", user.getName(), user.getAge()); + } + return "OK"; + } + } + ``` + +=== "Protocol Buffers" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class ProtobufKafkaHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_PROTOBUF) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + UserProto.User user = record.value(); // UserProto.User class is auto-generated from Protocol Buffer schema + System.out.printf("Processing user: %s, age %d%n", user.getName(), user.getAge()); + } + return "OK"; + } + } + ``` + +=== "JSON Messages" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class JsonKafkaHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + User user = record.value(); // Deserialized JSON object + System.out.printf("Processing user: %s, age %d%n", user.getName(), user.getAge()); + } + return "OK"; + } + } + ``` + + +???+ tip "Full examples on GitHub" + A full example including how to generate Avro and Protobuf Java classes can be found on GitHub at [https://github.com/aws-powertools/powertools-lambda-java/tree/main/examples/powertools-examples-kafka](https://github.com/aws-powertools/powertools-lambda-java/tree/main/examples/powertools-examples-kafka). + +### Deserializing keys and values + +The `@Deserialization` annotation deserializes both keys and values based on your type configuration. This flexibility allows you to work with different data formats in the same message. 
+ +=== "Key and Value Deserialization" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class KeyValueKafkaHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + // Access both deserialized components + ProductKey key = record.key(); // ProductKey class is auto-generated from Avro schema + ProductInfo product = record.value(); // ProductInfo class is auto-generated from Avro schema + + System.out.printf("Processing product ID: %s%n", key.getProductId()); + System.out.printf("Product: %s - $%.2f%n", product.getName(), product.getPrice()); + } + return "OK"; + } + } + ``` + +=== "Value-Only Deserialization" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class ValueOnlyKafkaHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + // Key remains as string (if present) + String key = record.key(); + if (key != null) { + System.out.printf("Message key: %s%n", key); + } + + // Value is deserialized as JSON + Order order = record.value(); + System.out.printf("Order #%s - Total: $%.2f%n", order.getOrderId(), order.getTotal()); + } + return "OK"; + } + } + ``` + +### Handling primitive types + +When working with primitive data types (strings, integers, etc.) rather than structured objects, you can use any deserialization type such as `KAFKA_JSON`. Simply place the primitive type like `Integer` or `String` in the `ConsumerRecords` generic type parameters, and the library will automatically handle primitive type deserialization. + + +???+ tip "Common pattern: Keys with primitive values" + Using primitive types (strings, integers) as Kafka message keys is a common pattern for partitioning and identifying messages. Powertools automatically handles these primitive keys without requiring special configuration, making it easy to implement this popular design pattern. 
+ +=== "Primitive key" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class PrimitiveKeyHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + // Key is automatically deserialized as Integer + Integer key = record.key(); + + // Value is deserialized as JSON + Customer customer = record.value(); + + System.out.printf("Key: %d%n", key); + System.out.printf("Name: %s%n", customer.getName()); + System.out.printf("Email: %s%n", customer.getEmail()); + } + return "OK"; + } + } + ``` + +=== "Primitive key and value" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class PrimitiveHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + // Key is automatically deserialized as String + String key = record.key(); + + // Value is automatically deserialized as String + String value = record.value(); + + System.out.printf("Key: %s%n", key); + System.out.printf("Value: %s%n", value); + } + return "OK"; + } + } + ``` + +### Message format support and comparison + +The Kafka utility supports multiple serialization formats to match your existing Kafka implementation. Choose the format that best suits your needs based on performance, schema evolution requirements, and ecosystem compatibility. + + +???+ tip "Selecting the right format" + For new applications, consider Avro or Protocol Buffers over JSON. Both provide schema validation, evolution support, and significantly better performance with smaller message sizes. Avro is particularly well-suited for Kafka due to its built-in schema evolution capabilities. 
+ +=== "Supported Formats" + + | Format | DeserializationType | Description | Required Dependencies | + |--------|---------------------|-------------|----------------------| + | **JSON** | `KAFKA_JSON` | Human-readable text format | Jackson | + | **Avro** | `KAFKA_AVRO` | Compact binary format with schema | Apache Avro | + | **Protocol Buffers** | `KAFKA_PROTOBUF` | Efficient binary format | Protocol Buffers | + | **Lambda Default** | `LAMBDA_DEFAULT` | Uses Lambda's built-in deserialization (equivalent to removing the @Deserialization annotation) | None | + +=== "Format Comparison" + + | Feature | JSON | Avro | Protocol Buffers | + |---------|------|------|-----------------| + | **Schema Definition** | Optional | Required schema file | Required .proto file | + | **Schema Evolution** | None | Strong support | Strong support | + | **Size Efficiency** | Low | High | Highest | + | **Processing Speed** | Slower | Fast | Fastest | + | **Human Readability** | High | Low | Low | + | **Implementation Complexity** | Low | Medium | Medium | + | **Additional Dependencies** | None | Apache Avro | Protocol Buffers | + +Choose the serialization format that best fits your needs: + +- **JSON**: Best for simplicity and when schema flexibility is important +- **Avro**: Best for systems with evolving schemas and when compatibility is critical +- **Protocol Buffers**: Best for performance-critical systems with structured data +- **Lambda Default**: Best for simple string-based messages or when using Lambda's built-in deserialization + +## Advanced + +### Accessing record metadata + +Each Kafka record contains important metadata that you can access alongside the deserialized message content. This metadata helps with message processing, troubleshooting, and implementing advanced patterns like exactly-once processing. 
+ +=== "Working with Record Metadata" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.apache.kafka.common.header.Header; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + + public class MetadataKafkaHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + // Log record coordinates for tracing + System.out.printf("Processing message from topic '%s'%n", record.topic()); + System.out.printf(" Partition: %d, Offset: %d%n", record.partition(), record.offset()); + System.out.printf(" Produced at: %d%n", record.timestamp()); + + // Process message headers + if (record.headers() != null) { + for (Header header : record.headers()) { + System.out.printf(" Header: %s = %s%n", + header.key(), new String(header.value())); + } + } + + // Access the Avro deserialized message content + Customer customer = record.value(); // Customer class is auto-generated from Avro schema + System.out.printf("Processing order for: %s%n", customer.getName()); + System.out.printf("Order total: $%.2f%n", customer.getOrderTotal()); + } + return "OK"; + } + } + ``` + +#### Available metadata properties + +| Property | Description | Example Use Case | +| ----------------- | ----------------------------------------------- | ------------------------------------------- | +| `topic()` | Topic name the record was published to | Routing logic in multi-topic consumers | +| `partition()` | Kafka partition number | Tracking message distribution | +| `offset()` | Position in the partition | De-duplication, exactly-once processing | +| `timestamp()` | Unix timestamp when record was created | Event timing analysis | +| `timestampType()` | Timestamp type (CREATE_TIME or LOG_APPEND_TIME) | Data lineage verification | +| `headers()` | Key-value pairs attached to the message | Cross-cutting concerns like correlation IDs | +| `key()` | Deserialized message key | Customer ID or entity identifier | +| `value()` | Deserialized message content | The actual business data | + +### Error handling + +Handle errors gracefully when processing Kafka messages to ensure your application maintains resilience and provides clear diagnostic information. The Kafka utility integrates with standard Java exception handling patterns. + + +!!! info "Treating Deserialization errors" + Read [Deserialization failures](#deserialization-failures). Deserialization failures will fail the whole batch and do not execute your handler. 
+ +=== "Error Handling" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.metrics.FlushMetrics; + import software.amazon.lambda.powertools.metrics.Metrics; + import software.amazon.lambda.powertools.metrics.MetricsFactory; + import software.amazon.lambda.powertools.metrics.model.MetricUnit; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; + + public class ErrorHandlingKafkaHandler implements RequestHandler, String> { + + private static final Logger logger = LoggerFactory.getLogger(ErrorHandlingKafkaHandler.class); + private static final Metrics metrics = MetricsFactory.getMetricsInstance(); + + @Override + @FlushMetrics(namespace = "KafkaProcessing", service = "order-processing") + @Deserialization(type = DeserializationType.KAFKA_AVRO) + public String handleRequest(ConsumerRecords records, Context context) { + metrics.addMetric("TotalRecords", records.count(), MetricUnit.COUNT); + int successfulRecords = 0; + int failedRecords = 0; + + for (ConsumerRecord record : records) { + try { + Order order = record.value(); // Order class is auto-generated from Avro schema + processOrder(order); + successfulRecords++; + metrics.addMetric("ProcessedRecords", 1, MetricUnit.COUNT); + + } catch (Exception e) { + failedRecords++; + logger.error("Error processing Kafka message from topic: {}, partition: {}, offset: {}", + record.topic(), record.partition(), record.offset(), e); + metrics.addMetric("ProcessingErrors", 1, MetricUnit.COUNT); + // Optionally send to DLQ or error topic + sendToDlq(record); + } + } + + return String.format("Processed %d records successfully, %d failed", + successfulRecords, failedRecords); + } + + private void processOrder(Order order) { + // Your business logic here + System.out.printf("Processing order: %s%n", order.getOrderId()); + } + + private void sendToDlq(ConsumerRecord record) { + // Implementation to send failed records to dead letter queue + } + } + ``` + +### Integrating with Idempotency + +When processing Kafka messages in Lambda, failed batches can result in message reprocessing. The idempotency utility prevents duplicate processing by tracking which messages have already been handled, ensuring each message is processed exactly once. + +The Idempotency utility automatically stores the result of each successful operation, returning the cached result if the same message is processed again, which prevents potentially harmful duplicate operations like double-charging customers or double-counting metrics. 
+ +=== "Idempotent Kafka Processing" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.idempotency.Idempotency; + import software.amazon.lambda.powertools.idempotency.IdempotencyConfig; + import software.amazon.lambda.powertools.idempotency.Idempotent; + import software.amazon.lambda.powertools.idempotency.persistence.dynamodb.DynamoDBPersistenceStore; + + public class IdempotentKafkaHandler implements RequestHandler, String> { + + public IdempotentKafkaHandler() { + // Configure idempotency with DynamoDB persistence store + Idempotency.config() + .withPersistenceStore( + DynamoDBPersistenceStore.builder() + .withTableName("IdempotencyTable") + .build()) + .configure(); + } + + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + // Payment class deserialized from JSON + Payment payment = record.value(); + + // Process each message with idempotency protection + processPayment(payment); + } + return "OK"; + } + + @Idempotent + private void processPayment(Payment payment) { + System.out.printf("Processing payment %s%n", payment.getPaymentId()); + + // Your business logic here + PaymentService.process(payment.getPaymentId(), payment.getCustomerId(), payment.getAmount()); + } + } + ``` + + +???+ tip "Ensuring exactly-once processing" + The @Idempotent annotation will use the JSON representation of the Payment object to make sure that the same object is only processed exactly once. Even if a batch fails and Lambda retries the messages, each unique payment will be processed exactly once. + +### Best practices + +#### Batch size configuration + +The number of Kafka records processed per Lambda invocation is controlled by your Event Source Mapping configuration. Properly sized batches optimize cost and performance. + +=== "Batch size configuration" + + ```yaml + Resources: + OrderProcessingFunction: + Type: AWS::Serverless::Function + Properties: + Handler: org.example.OrderHandler::handleRequest + Runtime: java21 + Events: + KafkaEvent: + Type: MSK + Properties: + Stream: !GetAtt OrdersMSKCluster.Arn + Topics: + - order-events + - payment-events + # Configuration for optimal throughput/latency balance + BatchSize: 100 + MaximumBatchingWindowInSeconds: 5 + StartingPosition: LATEST + # Enable partial batch success reporting + FunctionResponseTypes: + - ReportBatchItemFailures + ``` + +Different workloads benefit from different batch configurations: + +- **High-volume, simple processing**: Use larger batches (100-500 records) with short timeout +- **Complex processing with database operations**: Use smaller batches (10-50 records) +- **Mixed message sizes**: Set appropriate batching window (1-5 seconds) to handle variability + +#### Cross-language compatibility + +When using binary serialization formats across multiple programming languages, ensure consistent schema handling to prevent deserialization failures. 
+ +=== "Using Python naming convention" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.RequestHandler; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import software.amazon.lambda.powertools.kafka.Deserialization; + import software.amazon.lambda.powertools.kafka.DeserializationType; + import com.fasterxml.jackson.annotation.JsonProperty; + import java.time.Instant; + + public class CrossLanguageKafkaHandler implements RequestHandler, String> { + + @Override + @Deserialization(type = DeserializationType.KAFKA_JSON) + public String handleRequest(ConsumerRecords records, Context context) { + for (ConsumerRecord record : records) { + OrderEvent order = record.value(); // OrderEvent class handles JSON with Python field names + System.out.printf("Processing order %s from %s%n", + order.getOrderId(), order.getOrderDate()); + } + return "OK"; + } + } + + // Example class that handles Python snake_case field names + public class OrderEvent { + @JsonProperty("order_id") + private String orderId; + + @JsonProperty("customer_id") + private String customerId; + + @JsonProperty("total_amount") + private double totalAmount; + + @JsonProperty("order_date") + private long orderDateMillis; + + // Getters and setters + public String getOrderId() { return orderId; } + public void setOrderId(String orderId) { this.orderId = orderId; } + + public String getCustomerId() { return customerId; } + public void setCustomerId(String customerId) { this.customerId = customerId; } + + public double getTotalAmount() { return totalAmount; } + public void setTotalAmount(double totalAmount) { this.totalAmount = totalAmount; } + + public Instant getOrderDate() { + return Instant.ofEpochMilli(orderDateMillis); + } + public void setOrderDate(long orderDateMillis) { + this.orderDateMillis = orderDateMillis; + } + } + ``` + +Common cross-language challenges to address: + +- **Field naming conventions**: camelCase in Java vs snake_case in Python +- **Date/time**: representation differences +- **Numeric precision handling**: especially decimals + +### Troubleshooting + +#### Deserialization failures + +The Java Kafka utility registers a [custom Lambda serializer](https://docs.aws.amazon.com/lambda/latest/dg/java-custom-serialization.html) that performs **eager deserialization** of all records in the batch before your handler method is invoked. + +This means that if any record in the batch fails deserialization, a `RuntimeException` will be thrown with a concrete error message explaining why deserialization failed, and your handler method will never be called. + +**Key implications:** + +- **Batch-level failure**: If one record fails deserialization, the entire batch fails +- **Early failure detection**: Deserialization errors are caught before your business logic runs +- **Clear error messages**: The `RuntimeException` provides specific details about what went wrong +- **No partial processing**: You cannot process some records while skipping failed ones within the same batch + +**Example of deserialization failure:** + +```java +// If any record in the batch has invalid Avro data, you'll see: +// RuntimeException: Failed to deserialize Kafka record: Invalid Avro schema for record at offset 12345 +``` + + +!!! warning "Handler method not invoked on deserialization failure" + When deserialization fails, your `handleRequest` method will not be invoked at all. 
The `RuntimeException` is thrown before your handler code runs, preventing any processing of the batch. + +**Handling deserialization failures:** + +Since deserialization happens before your handler is called, you cannot catch these exceptions within your handler method. Instead, configure your Event Source Mapping with appropriate error handling: + +- **Dead Letter Queue (DLQ)**: Configure a DLQ to capture failed batches for later analysis +- **Maximum Retry Attempts**: Set appropriate retry limits to avoid infinite retries +- **Batch Size**: Use smaller batch sizes to minimize the impact of individual record failures + +```yaml +# Example SAM template configuration for error handling +Events: + KafkaEvent: + Type: MSK + Properties: + # ... other properties + BatchSize: 10 # Smaller batches reduce failure impact + MaximumRetryAttempts: 3 + DestinationConfig: + OnFailure: + Type: SQS + Destination: !GetAtt DeadLetterQueue.Arn +``` + +#### Schema compatibility issues + +Schema compatibility issues often manifest as successful connections but failed deserialization. Common causes include: + +- **Schema evolution without backward compatibility**: New producer schema is incompatible with consumer schema +- **Field type mismatches**: For example, a field changed from String to Integer across systems +- **Missing required fields**: Fields required by the consumer schema but absent in the message +- **Default value discrepancies**: Different handling of default values between languages + +When using Schema Registry, verify schema compatibility rules are properly configured for your topics and that all applications use the same registry. + +#### Memory and timeout optimization + +Lambda functions processing Kafka messages may encounter resource constraints, particularly with large batches or complex processing logic. + +For memory errors: + +- Increase Lambda memory allocation, which also provides more CPU resources +- Process fewer records per batch by adjusting the `BatchSize` parameter in your event source mapping +- Consider optimizing your message format to reduce memory footprint + +For timeout issues: + +- Extend your Lambda function timeout setting to accommodate processing time +- Implement chunked or asynchronous processing patterns for time-consuming operations +- Monitor and optimize database operations, external API calls, or other I/O operations in your handler + + +???+ tip "Monitoring memory usage" + Use CloudWatch metrics to track your function's memory utilization. If it consistently exceeds 80% of allocated memory, consider increasing the memory allocation or optimizing your code. + +## Kafka workflow + +### Using ESM with Schema Registry validation (SOURCE) + +
+```mermaid +sequenceDiagram + participant Kafka + participant ESM as Event Source Mapping + participant SchemaRegistry as Schema Registry + participant Lambda + participant KafkaUtility + participant YourCode + Kafka->>+ESM: Send batch of records + ESM->>+SchemaRegistry: Validate schema + SchemaRegistry-->>-ESM: Confirm schema is valid + ESM->>+Lambda: Invoke with validated records (still encoded) + Lambda->>+KafkaUtility: Pass Kafka event + KafkaUtility->>KafkaUtility: Parse event structure + loop For each record + KafkaUtility->>KafkaUtility: Decode base64 data + KafkaUtility->>KafkaUtility: Deserialize based on DeserializationType + end + KafkaUtility->>+YourCode: Provide ConsumerRecords + YourCode->>YourCode: Process records + YourCode-->>-KafkaUtility: Return result + KafkaUtility-->>-Lambda: Pass result back + Lambda-->>-ESM: Return response + ESM-->>-Kafka: Acknowledge processed batch +``` +
+ +### Using ESM with Schema Registry deserialization (JSON) + +
+```mermaid +sequenceDiagram + participant Kafka + participant ESM as Event Source Mapping + participant SchemaRegistry as Schema Registry + participant Lambda + participant KafkaUtility + participant YourCode + Kafka->>+ESM: Send batch of records + ESM->>+SchemaRegistry: Validate and deserialize + SchemaRegistry->>SchemaRegistry: Deserialize records + SchemaRegistry-->>-ESM: Return deserialized data + ESM->>+Lambda: Invoke with pre-deserialized JSON records + Lambda->>+KafkaUtility: Pass Kafka event + KafkaUtility->>KafkaUtility: Parse event structure + loop For each record + KafkaUtility->>KafkaUtility: Record is already deserialized + end + KafkaUtility->>+YourCode: Provide ConsumerRecords + YourCode->>YourCode: Process records + YourCode-->>-KafkaUtility: Return result + KafkaUtility-->>-Lambda: Pass result back + Lambda-->>-ESM: Return response + ESM-->>-Kafka: Acknowledge processed batch +``` +
+ +### Using ESM without Schema Registry integration + +
+```mermaid +sequenceDiagram + participant Kafka + participant Lambda + participant KafkaUtility + participant YourCode + Kafka->>+Lambda: Invoke with batch of records (direct integration) + Lambda->>+KafkaUtility: Pass raw Kafka event + KafkaUtility->>KafkaUtility: Parse event structure + loop For each record + KafkaUtility->>KafkaUtility: Decode base64 data + KafkaUtility->>KafkaUtility: Deserialize based on DeserializationType + end + KafkaUtility->>+YourCode: Provide ConsumerRecords + YourCode->>YourCode: Process records + YourCode-->>-KafkaUtility: Return result + KafkaUtility-->>-Lambda: Pass result back + Lambda-->>-Kafka: Acknowledge processed batch +``` +
+ +## Testing your code + +Testing Kafka consumer functions is straightforward with JUnit. You can construct Kafka `ConsumerRecords` in the default way provided by the kafka-clients library without needing a real Kafka cluster. + +=== "Testing your code" + + ```java + package org.example; + + import com.amazonaws.services.lambda.runtime.Context; + import com.amazonaws.services.lambda.runtime.events.KafkaEvent; + import org.apache.kafka.clients.consumer.ConsumerRecord; + import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.apache.kafka.common.TopicPartition; + import org.junit.jupiter.api.Test; + import org.junit.jupiter.api.extension.ExtendWith; + import org.mockito.Mock; + import org.mockito.junit.jupiter.MockitoExtension; + import java.util.*; + + import static org.junit.jupiter.api.Assertions.*; + import static org.mockito.Mockito.*; + + @ExtendWith(MockitoExtension.class) + class KafkaHandlerTest { + + @Mock + private Context context; + + @Test + void testProcessJsonMessage() { + // Create a test Kafka event with JSON data + Order testOrder = new Order("12345", 99.95); + ConsumerRecord record = new ConsumerRecord<>( + "orders-topic", 0, 15L, null, testOrder); + + Map>> recordsMap = new HashMap<>(); + recordsMap.put(new TopicPartition("orders-topic", 0), Arrays.asList(record)); + ConsumerRecords records = new ConsumerRecords<>(recordsMap); + + // Create handler and invoke + JsonKafkaHandler handler = new JsonKafkaHandler(); + String response = handler.handleRequest(records, context); + + // Verify the response + assertEquals("OK", response); + } + + @Test + void testProcessMultipleRecords() { + // Create a test event with multiple records + Customer customer1 = new Customer("A1", "Alice"); + Customer customer2 = new Customer("B2", "Bob"); + + List> recordList = Arrays.asList( + new ConsumerRecord<>("customers-topic", 0, 10L, null, customer1), + new ConsumerRecord<>("customers-topic", 0, 11L, null, customer2) + ); + + Map>> recordsMap = new HashMap<>(); + recordsMap.put(new TopicPartition("customers-topic", 0), recordList); + ConsumerRecords records = new ConsumerRecords<>(recordsMap); + + // Create handler and invoke + JsonKafkaHandler handler = new JsonKafkaHandler(); + String response = handler.handleRequest(records, context); + + // Verify the response + assertEquals("OK", response); + } + } + ``` + +## Extra Resources + +### Lambda Custom Serializers Compatibility + +This Kafka utility uses [Lambda custom serializers](https://docs.aws.amazon.com/lambda/latest/dg/java-custom-serialization.html) to provide automatic deserialization of Kafka messages. + +**Important compatibility considerations:** + +- **Existing custom serializers**: This utility will not be compatible if you already use your own custom Lambda serializer in your project +- **Non-Kafka handlers**: Installing this library will not affect default Lambda serialization behavior for non-Kafka related handlers +- **Kafka-specific**: The custom serialization only applies to handlers annotated with `@Deserialization` +- **Lambda default fallback**: Using `@Deserialization(type = DeserializationType.LAMBDA_DEFAULT)` will proxy to Lambda's default serialization behavior + +**Need help with compatibility?** + +If you are blocked from adopting this utility due to existing custom serializers or other compatibility concerns, please contact us with your specific use-cases. We'd like to understand your requirements and explore potential solutions. 
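
As a reference for the `LAMBDA_DEFAULT` fallback mentioned above, the following minimal sketch keeps Lambda's built-in serialization for a Kafka handler. The class name is illustrative; keys and values arrive exactly as in the raw event, so any decoding is left to your code:

```java
package org.example;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import com.amazonaws.services.lambda.runtime.events.KafkaEvent;
import software.amazon.lambda.powertools.kafka.Deserialization;
import software.amazon.lambda.powertools.kafka.DeserializationType;

public class DefaultSerializationHandler implements RequestHandler<KafkaEvent, String> {

    @Override
    @Deserialization(type = DeserializationType.LAMBDA_DEFAULT)
    public String handleRequest(KafkaEvent event, Context context) {
        // Records are provided as the plain KafkaEvent produced by Lambda's default
        // serialization; keys and values remain base64-encoded strings.
        event.getRecords().forEach((topicPartition, records) ->
                records.forEach(record ->
                        System.out.printf("topic=%s, offset=%d%n", record.getTopic(), record.getOffset())));
        return "OK";
    }
}
```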
+ +For more information about Lambda custom serialization, see the [official AWS documentation](https://docs.aws.amazon.com/lambda/latest/dg/java-custom-serialization.html). diff --git a/mkdocs.yml b/mkdocs.yml index 82a32d49c..07be3c175 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -15,8 +15,9 @@ nav: - Utilities: - utilities/idempotency.md - utilities/parameters.md - - utilities/large_messages.md - utilities/batch.md + - utilities/kafka.md + - utilities/large_messages.md - utilities/validation.md - utilities/custom_resources.md - utilities/serialization.md @@ -101,8 +102,9 @@ plugins: Utilities: - utilities/idempotency.md - utilities/parameters.md - - utilities/large_messages.md - utilities/batch.md + - utilities/kafka.md + - utilities/large_messages.md - utilities/validation.md - utilities/custom_resources.md - utilities/serialization.md @@ -115,6 +117,7 @@ extra_css: extra_javascript: - javascript/aws-amplify.min.js - javascript/extra.js + - https://docs.powertools.aws.dev/shared/mermaid.min.js extra: powertools: From 767109bc5fe35ea9648516c744bef8cc53c8a82a Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Mon, 16 Jun 2025 13:50:35 +0200 Subject: [PATCH 24/38] Update project version consistently to 2.0.0. --- examples/powertools-examples-kafka/pom.xml | 2 +- examples/powertools-examples-kafka/tools/pom.xml | 2 +- powertools-e2e-tests/handlers/batch/pom.xml | 2 +- powertools-e2e-tests/handlers/idempotency/pom.xml | 2 +- powertools-e2e-tests/handlers/largemessage/pom.xml | 2 +- powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml | 2 +- powertools-e2e-tests/handlers/logging/pom.xml | 2 +- powertools-e2e-tests/handlers/metrics/pom.xml | 2 +- powertools-e2e-tests/handlers/parameters/pom.xml | 2 +- powertools-e2e-tests/handlers/pom.xml | 4 ++-- powertools-e2e-tests/handlers/tracing/pom.xml | 2 +- powertools-e2e-tests/handlers/validation-alb-event/pom.xml | 2 +- powertools-e2e-tests/handlers/validation-apigw-event/pom.xml | 2 +- powertools-kafka/pom.xml | 2 +- 14 files changed, 15 insertions(+), 15 deletions(-) diff --git a/examples/powertools-examples-kafka/pom.xml b/examples/powertools-examples-kafka/pom.xml index 68c1a7900..a745ac75d 100644 --- a/examples/powertools-examples-kafka/pom.xml +++ b/examples/powertools-examples-kafka/pom.xml @@ -2,7 +2,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> 4.0.0 software.amazon.lambda.examples - 2.0.0-SNAPSHOT + 2.0.0 powertools-examples-kafka jar Powertools for AWS Lambda (Java) - Examples - Kafka diff --git a/examples/powertools-examples-kafka/tools/pom.xml b/examples/powertools-examples-kafka/tools/pom.xml index 52b234359..97231e5bd 100644 --- a/examples/powertools-examples-kafka/tools/pom.xml +++ b/examples/powertools-examples-kafka/tools/pom.xml @@ -6,7 +6,7 @@ software.amazon.lambda.examples powertools-examples-kafka-tools - 2.0.0-SNAPSHOT + 2.0.0 11 diff --git a/powertools-e2e-tests/handlers/batch/pom.xml b/powertools-e2e-tests/handlers/batch/pom.xml index a36d464ea..3b7238b4e 100644 --- a/powertools-e2e-tests/handlers/batch/pom.xml +++ b/powertools-e2e-tests/handlers/batch/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-batch diff --git a/powertools-e2e-tests/handlers/idempotency/pom.xml b/powertools-e2e-tests/handlers/idempotency/pom.xml index e3a67a5b5..dfa97225a 100644 --- a/powertools-e2e-tests/handlers/idempotency/pom.xml +++ b/powertools-e2e-tests/handlers/idempotency/pom.xml @@ -5,7 +5,7 @@ 
software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-idempotency diff --git a/powertools-e2e-tests/handlers/largemessage/pom.xml b/powertools-e2e-tests/handlers/largemessage/pom.xml index 0728404bf..ce3fbbdd5 100644 --- a/powertools-e2e-tests/handlers/largemessage/pom.xml +++ b/powertools-e2e-tests/handlers/largemessage/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-largemessage diff --git a/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml b/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml index b57063346..e9e87da2b 100644 --- a/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml +++ b/powertools-e2e-tests/handlers/largemessage_idempotent/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-large-msg-idempotent diff --git a/powertools-e2e-tests/handlers/logging/pom.xml b/powertools-e2e-tests/handlers/logging/pom.xml index 88feda09b..62f2f7530 100644 --- a/powertools-e2e-tests/handlers/logging/pom.xml +++ b/powertools-e2e-tests/handlers/logging/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-logging diff --git a/powertools-e2e-tests/handlers/metrics/pom.xml b/powertools-e2e-tests/handlers/metrics/pom.xml index 68059e67e..e543c2cd0 100644 --- a/powertools-e2e-tests/handlers/metrics/pom.xml +++ b/powertools-e2e-tests/handlers/metrics/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-metrics diff --git a/powertools-e2e-tests/handlers/parameters/pom.xml b/powertools-e2e-tests/handlers/parameters/pom.xml index 2d6a9a06a..471e79d8f 100644 --- a/powertools-e2e-tests/handlers/parameters/pom.xml +++ b/powertools-e2e-tests/handlers/parameters/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-parameters diff --git a/powertools-e2e-tests/handlers/pom.xml b/powertools-e2e-tests/handlers/pom.xml index b55cf436a..988ae3d55 100644 --- a/powertools-e2e-tests/handlers/pom.xml +++ b/powertools-e2e-tests/handlers/pom.xml @@ -4,13 +4,13 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 pom Handlers for End-to-End tests Fake handlers that use Powertools for AWS Lambda (Java). 
- 2.0.0-SNAPSHOT + 2.0.0 UTF-8 11 11 diff --git a/powertools-e2e-tests/handlers/tracing/pom.xml b/powertools-e2e-tests/handlers/tracing/pom.xml index b96fcef0a..b1bc14c05 100644 --- a/powertools-e2e-tests/handlers/tracing/pom.xml +++ b/powertools-e2e-tests/handlers/tracing/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-tracing diff --git a/powertools-e2e-tests/handlers/validation-alb-event/pom.xml b/powertools-e2e-tests/handlers/validation-alb-event/pom.xml index be50094c1..36695b9a4 100644 --- a/powertools-e2e-tests/handlers/validation-alb-event/pom.xml +++ b/powertools-e2e-tests/handlers/validation-alb-event/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-validation-alb-event diff --git a/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml b/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml index f204a8a9f..8bb927778 100644 --- a/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml +++ b/powertools-e2e-tests/handlers/validation-apigw-event/pom.xml @@ -5,7 +5,7 @@ software.amazon.lambda e2e-test-handlers-parent - 2.0.0-SNAPSHOT + 2.0.0 e2e-test-handler-validation-apigw-event diff --git a/powertools-kafka/pom.xml b/powertools-kafka/pom.xml index cc7fd302d..f5b80012c 100644 --- a/powertools-kafka/pom.xml +++ b/powertools-kafka/pom.xml @@ -21,7 +21,7 @@ powertools-parent software.amazon.lambda - 2.0.0-SNAPSHOT + 2.0.0 powertools-kafka From 3e6a8b7a47b430a6366d78a543f71d9b6865f6b1 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 17 Jun 2025 13:48:28 +0200 Subject: [PATCH 25/38] fix: Fix bug where abbreviated _HANDLER env var did not detect the Deserialization annotation. --- .../kafka/internal/DeserializationUtils.java | 15 +++++-- .../internal/DeserializationUtilsTest.java | 40 +++++++++++++++++++ 2 files changed, 52 insertions(+), 3 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java index 4a1c13399..f08426a05 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java @@ -37,10 +37,19 @@ private DeserializationUtils() { public static DeserializationType determineDeserializationType() { try { // Get the handler from the environment. 
It has a format like org.example.MyRequestHandler::handleRequest + // or can be abbreviated as just org.example.MyRequestHandler (defaulting to handleRequest) String handler = System.getenv("_HANDLER"); - if (handler != null && handler.contains("::")) { - String className = handler.substring(0, handler.indexOf("::")); - String methodName = handler.substring(handler.indexOf("::") + 2); + String className; + String methodName = "handleRequest"; // Default method name + + if (handler != null && !handler.trim().isEmpty()) { + if (handler.contains("::")) { + className = handler.substring(0, handler.indexOf("::")); + methodName = handler.substring(handler.indexOf("::") + 2); + } else { + // Handle the case where method name is omitted + className = handler; + } Class handlerClazz = Class.forName(className); diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java index a6f45ad7a..21f38d9ab 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtilsTest.java @@ -33,6 +33,16 @@ void shouldReturnDefaultDeserializationTypeWhenHandlerIsEmpty() { assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT); } + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = " ") + void shouldReturnDefaultDeserializationTypeWhenHandlerIsWhitespaceOnly() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.LAMBDA_DEFAULT); + } + @Test @SetEnvironmentVariable(key = "_HANDLER", value = "InvalidHandlerFormat") void shouldReturnDefaultDeserializationTypeWhenHandlerFormatIsInvalid() { @@ -102,4 +112,34 @@ void shouldReturnProtobufDeserializationTypeFromAnnotation() { // Then assertThat(type).isEqualTo(DeserializationType.KAFKA_PROTOBUF); } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler") + void shouldReturnJsonDeserializationTypeFromAnnotationWithAbbreviatedHandler() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.KAFKA_JSON); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.AvroHandler") + void shouldReturnAvroDeserializationTypeFromAnnotationWithAbbreviatedHandler() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.KAFKA_AVRO); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.ProtobufHandler") + void shouldReturnProtobufDeserializationTypeFromAnnotationWithAbbreviatedHandler() { + // When + DeserializationType type = DeserializationUtils.determineDeserializationType(); + + // Then + assertThat(type).isEqualTo(DeserializationType.KAFKA_PROTOBUF); + } } From ef04849d0cb5be47753b77a87bcb2a5d6db98a5f Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 17 Jun 2025 15:28:20 +0200 Subject: [PATCH 26/38] fix: Bug when trying to deserialize a type into itself for Lambda default behavior. We can just return the type itself. 
Relevant for simple String and InputStream handlers. --- .../LambdaDefaultDeserializer.java | 14 ++++++-- .../kafka/PowertoolsSerializerTest.java | 34 +++++++++++++++++++ .../kafka/testutils/InputStreamHandler.java | 30 ++++++++++++++++ .../kafka/testutils/StringHandler.java | 23 +++++++++++++ 4 files changed, 99 insertions(+), 2 deletions(-) create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java create mode 100644 powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java index bfc51e372..b1018693d 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java @@ -28,12 +28,22 @@ public class LambdaDefaultDeserializer implements PowertoolsDeserializer { @SuppressWarnings("unchecked") @Override public T fromJson(InputStream input, Type type) { - return JacksonFactory.getInstance().getSerializer((Class) type).fromJson(input); + // If the target type does not require conversion, simply return the value itself + if (type.equals(InputStream.class)) { + return (T) input; + } + + return (T) JacksonFactory.getInstance().getSerializer(type).fromJson(input); } @SuppressWarnings("unchecked") @Override public T fromJson(String input, Type type) { - return JacksonFactory.getInstance().getSerializer((Class) type).fromJson(input); + // If the target type does not require conversion, simply return the value itself + if (type.equals(String.class)) { + return (T) input; + } + + return (T) JacksonFactory.getInstance().getSerializer(type).fromJson(input); } } diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java index 4ad0d46ef..3aa28c442 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java @@ -19,6 +19,7 @@ import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.io.InputStream; import java.lang.reflect.Type; import java.util.Arrays; import java.util.Base64; @@ -108,6 +109,39 @@ void shouldUseLambdaDefaultDeserializer(InputType inputType) throws JsonProcessi assertThat(result.getTags()).containsExactly("tag1", "tag2"); } + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.StringHandler::handleRequest") + void shouldHandleStringInputType() { + // When + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // Then + String testInput = "This is a test string"; + + // This should directly return the input string + String result = serializer.fromJson(testInput, String.class); + + assertThat(result).isEqualTo(testInput); + } + + @Test + @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.InputStreamHandler::handleRequest") + void shouldHandleInputStreamType() throws IOException { + // 
When + PowertoolsSerializer serializer = new PowertoolsSerializer(); + + // Then + String testInput = "This is a test string"; + ByteArrayInputStream inputStream = new ByteArrayInputStream(testInput.getBytes()); + + // This should return the input stream directly + InputStream result = serializer.fromJson(inputStream, InputStream.class); + + // Read the content to verify it's the same + String resultString = new String(result.readAllBytes()); + assertThat(resultString).isEqualTo(testInput); + } + @ParameterizedTest @MethodSource("inputTypes") @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler::handleRequest") diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java new file mode 100644 index 000000000..63e225ab8 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/InputStreamHandler.java @@ -0,0 +1,30 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package software.amazon.lambda.powertools.kafka.testutils; + +import java.io.IOException; +import java.io.InputStream; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +public class InputStreamHandler implements RequestHandler { + @Override + public String handleRequest(InputStream input, Context context) { + try { + return new String(input.readAllBytes()); + } catch (IOException e) { + throw new RuntimeException("Failed to read input stream", e); + } + } +} diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java new file mode 100644 index 000000000..3ac5649f1 --- /dev/null +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/testutils/StringHandler.java @@ -0,0 +1,23 @@ +/* + * Copyright 2023 Amazon.com, Inc. or its affiliates. + * Licensed under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package software.amazon.lambda.powertools.kafka.testutils; + +import com.amazonaws.services.lambda.runtime.Context; +import com.amazonaws.services.lambda.runtime.RequestHandler; + +public class StringHandler implements RequestHandler { + @Override + public String handleRequest(String input, Context context) { + return input; + } +} From 6da89a305b4c8607a24c901143bfb160326c49dc Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 17 Jun 2025 16:33:16 +0200 Subject: [PATCH 27/38] When falling back to Lambda default, handle conversion between InputStream and String. --- .../LambdaDefaultDeserializer.java | 15 +++++ .../kafka/PowertoolsSerializerTest.java | 58 +++++++++++++++++++ 2 files changed, 73 insertions(+) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java index b1018693d..582f858ae 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java @@ -12,6 +12,7 @@ */ package software.amazon.lambda.powertools.kafka.serializers; +import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Type; @@ -33,6 +34,15 @@ public T fromJson(InputStream input, Type type) { return (T) input; } + // If the target type is String, read the input stream as a String + if (type.equals(String.class)) { + try { + return (T) new String(input.readAllBytes()); + } catch (IOException e) { + throw new RuntimeException("Failed to read input stream as String", e); + } + } + return (T) JacksonFactory.getInstance().getSerializer(type).fromJson(input); } @@ -44,6 +54,11 @@ public T fromJson(String input, Type type) { return (T) input; } + // If the target type is InputStream, read the input stream as a String + if (type.equals(InputStream.class)) { + return (T) new String(input).getBytes(); + } + return (T) JacksonFactory.getInstance().getSerializer(type).fromJson(input); } } diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java index 3aa28c442..6ce57ecd5 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/PowertoolsSerializerTest.java @@ -13,6 +13,7 @@ package software.amazon.lambda.powertools.kafka; import static org.assertj.core.api.Assertions.assertThat; +import static org.assertj.core.api.Assertions.assertThatThrownBy; import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.createConsumerRecordsType; import static software.amazon.lambda.powertools.kafka.testutils.TestUtils.serializeAvro; @@ -40,6 +41,7 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; +import software.amazon.lambda.powertools.kafka.serializers.LambdaDefaultDeserializer; import software.amazon.lambda.powertools.kafka.serializers.PowertoolsDeserializer; import software.amazon.lambda.powertools.kafka.testutils.TestProductPojo; @@ -142,6 +144,62 @@ void shouldHandleInputStreamType() throws IOException { assertThat(resultString).isEqualTo(testInput); } + @Test + void 
shouldConvertInputStreamToString() { + // When + LambdaDefaultDeserializer deserializer = new LambdaDefaultDeserializer(); + + // Then + String expected = "This is a test string"; + ByteArrayInputStream inputStream = new ByteArrayInputStream(expected.getBytes()); + + // Convert InputStream to String + String result = deserializer.fromJson(inputStream, String.class); + + // Verify the result + assertThat(result).isEqualTo(expected); + } + + @Test + void shouldThrowRuntimeExceptionWhenInputStreamIsInvalid() { + // When + LambdaDefaultDeserializer deserializer = new LambdaDefaultDeserializer(); + + // Create a problematic InputStream that throws IOException when read + InputStream problematicStream = new InputStream() { + @Override + public int read() throws IOException { + throw new IOException("Simulated IO error"); + } + + @Override + public byte[] readAllBytes() throws IOException { + throw new IOException("Simulated IO error"); + } + }; + + // Then + assertThatThrownBy(() -> deserializer.fromJson(problematicStream, String.class)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Failed to read input stream as String"); + } + + @Test + void shouldConvertStringToByteArray() { + // When + LambdaDefaultDeserializer deserializer = new LambdaDefaultDeserializer(); + + // Then + String input = "This is a test string"; + + // Convert String to InputStream + byte[] result = deserializer.fromJson(input, InputStream.class); + + // Verify the result + String resultString = new String(result); + assertThat(resultString).isEqualTo(input); + } + @ParameterizedTest @MethodSource("inputTypes") @SetEnvironmentVariable(key = "_HANDLER", value = "software.amazon.lambda.powertools.kafka.testutils.JsonHandler::handleRequest") From 2be14dd2669f82dd56bee3aab9e5c0001cc78f60 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Tue, 17 Jun 2025 17:10:17 +0200 Subject: [PATCH 28/38] Raise a runtime exception when the KafkaEvent is invalid. --- .../AbstractKafkaDeserializer.java | 12 +++++-- .../AbstractKafkaDeserializerTest.java | 36 +++++++++++++++++++ 2 files changed, 45 insertions(+), 3 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index c74b209ba..209b49cab 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -118,8 +118,13 @@ private boolean isConsumerRecordsType(Type type) { private ConsumerRecords convertToConsumerRecords(KafkaEvent kafkaEvent, Class keyType, Class valueType) { + // Validate that this is actually a Kafka event by checking for required properties + if (kafkaEvent == null || kafkaEvent.getEventSource() == null) { + throw new RuntimeException( + "Failed to deserialize Lambda handler input to ConsumerRecords: Input is not a valid Kafka event."); + } - if (kafkaEvent == null || kafkaEvent.getRecords() == null) { + if (kafkaEvent.getRecords() == null) { return ConsumerRecords.empty(); } @@ -138,7 +143,7 @@ private ConsumerRecords convertToConsumerRecords(KafkaEvent kafkaEv return createConsumerRecords(recordsMap); } - + /** * Creates ConsumerRecords with compatibility for both Kafka 3.x.x and 4.x.x. 
* @@ -147,7 +152,8 @@ private ConsumerRecords convertToConsumerRecords(KafkaEvent kafkaEv * @param records Map of records by topic partition * @return ConsumerRecords instance */ - protected ConsumerRecords createConsumerRecords(Map>> records) { + protected ConsumerRecords createConsumerRecords( + Map>> records) { try { // Try to use the Kafka 4.x.x constructor with nextOffsets parameter return new ConsumerRecords<>(records, Map.of()); diff --git a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java index 55b7bd210..512058bca 100644 --- a/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java +++ b/powertools-kafka/src/test/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializerTest.java @@ -307,6 +307,42 @@ void shouldHandleNullRecords(InputType inputType) { assertThat(records.count()).isZero(); } + @ParameterizedTest + @MethodSource("inputTypes") + void shouldThrowExceptionWhenEventSourceIsNull(InputType inputType) { + // Given + // Create a JSON without eventSource property + String kafkaJson = "{\n" + + " \"records\": {\n" + + " \"test-topic-1\": [\n" + + " {\n" + + " \"topic\": \"test-topic-1\",\n" + + " \"partition\": 0,\n" + + " \"offset\": 15,\n" + + " \"timestamp\": 1545084650987,\n" + + " \"timestampType\": \"CREATE_TIME\",\n" + + " \"key\": null,\n" + + " \"value\": null,\n" + + " \"headers\": []\n" + + " }\n" + + " ]\n" + + " }\n" + + "}"; + Type type = TestUtils.createConsumerRecordsType(String.class, TestProductPojo.class); + + // When/Then + if (inputType == InputType.INPUT_STREAM) { + ByteArrayInputStream inputStream = new ByteArrayInputStream(kafkaJson.getBytes()); + assertThatThrownBy(() -> deserializer.fromJson(inputStream, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Input is not a valid Kafka event"); + } else { + assertThatThrownBy(() -> deserializer.fromJson(kafkaJson, type)) + .isInstanceOf(RuntimeException.class) + .hasMessageContaining("Input is not a valid Kafka event"); + } + } + static Stream primitiveTypesProvider() { return Stream.of( // For each primitive type, test with both INPUT_STREAM and STRING From 04cf14a2e85b2380a35642ab2e49791e461c110d Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Mon, 16 Jun 2025 16:59:54 +0200 Subject: [PATCH 29/38] docs: Announce deprecation of v1 --- docs/processes/versioning.md | 7 ++++--- docs/upgrade.md | 5 ++++- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/docs/processes/versioning.md b/docs/processes/versioning.md index 8b12e0fa9..bbb60f507 100644 --- a/docs/processes/versioning.md +++ b/docs/processes/versioning.md @@ -55,6 +55,7 @@ To see the list of available major versions of Powertools for AWS Lambda and whe ### Version support matrix -| SDK | Major version | Current Phase | General Availability Date | Notes | -| -------------------------------- | ------------- | -------------------- | ------------------------- | ------------------------------------------------------------------------------------------------- | -| Powertools for AWS Lambda (Java) | 1.x | General Availability | 11/04/2020 | See [Release notes](https://github.com/aws-powertools/powertools-lambda-java/releases/tag/v1.0.0) | +| SDK | Major version | Current Phase | General Availability Date | Notes | +| -------------------------------- | 
------------- | -------------------- | ------------------------- | ------------------------------------------------------------------------------------------------------------------- | +| Powertools for AWS Lambda (Java) | 2.x | General Availability | 06/12/2025 | See [Release notes](https://github.com/aws-powertools/powertools-lambda-java/releases/tag/v2.0.0) | +| Powertools for AWS Lambda (Java) | 1.x | Maintenance | 11/04/2020 | End-of-support: December 12, 2025. See [upgrade guide](https://docs.powertools.aws.dev/lambda/java/latest/upgrade/) | diff --git a/docs/upgrade.md b/docs/upgrade.md index d1388d95b..5b6d16d99 100644 --- a/docs/upgrade.md +++ b/docs/upgrade.md @@ -5,7 +5,10 @@ description: Guide to update between major Powertools for AWS Lambda (Java) vers ## End of support v1 - + +!!! warning "End of support notice" + On December 12th, 2025, Powertools for AWS Lambda (Java) v1 will reach end of support and will no longer receive updates or releases. If you are still using v1, we strongly recommend you to read our upgrade guide and update to the latest version. + Given our commitment to all of our customers using Powertools for AWS Lambda (Java), we will keep [Maven Central](https://central.sonatype.com/search?q=powertools){target="\_blank"} `v1` releases and a `v1` documentation archive to prevent any disruption. From f02c8fde8baa44706bcb6352bb22d1219d7ec572 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Mon, 16 Jun 2025 17:00:49 +0200 Subject: [PATCH 30/38] fix(metrics): Do not flush when no metrics were added to avoid printing root-level _aws dict (#1891) * fix(metrics): Do not flush when no metrics were added to avoid printing root-level _aws dict. * Fix pmd linting failures. --- .../powertools/metrics/internal/EmfMetricsLogger.java | 3 ++- .../metrics/internal/EmfMetricsLoggerTest.java | 10 ++++++---- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java b/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java index a55e1da5a..1eedd270d 100644 --- a/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java +++ b/powertools-metrics/src/main/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLogger.java @@ -164,8 +164,9 @@ public void flush() { } else { LOGGER.warn("No metrics were emitted"); } + } else { + emfLogger.flush(); } - emfLogger.flush(); } @Override diff --git a/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java b/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java index 1b7106ece..a4fc0d61c 100644 --- a/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java +++ b/powertools-metrics/src/test/java/software/amazon/lambda/powertools/metrics/internal/EmfMetricsLoggerTest.java @@ -51,7 +51,7 @@ class EmfMetricsLoggerTest { private Metrics metrics; private final ObjectMapper objectMapper = new ObjectMapper(); - private final PrintStream standardOut = System.out; + private static final PrintStream standardOut = System.out; private final ByteArrayOutputStream outputStreamCaptor = new ByteArrayOutputStream(); @BeforeEach @@ -180,7 +180,7 @@ void shouldAddDimension() throws Exception { JsonNode dimensions = 
rootNode.get("_aws").get("CloudWatchMetrics").get(0).get("Dimensions").get(0); boolean hasDimension = false; for (JsonNode dimension : dimensions) { - if (dimension.asText().equals("CustomDimension")) { + if ("CustomDimension".equals(dimension.asText())) { hasDimension = true; break; } @@ -233,9 +233,9 @@ void shouldAddDimensionSet() throws Exception { boolean hasDim2 = false; for (JsonNode dimension : dimensions) { String dimName = dimension.asText(); - if (dimName.equals("Dim1")) { + if ("Dim1".equals(dimName)) { hasDim1 = true; - } else if (dimName.equals("Dim2")) { + } else if ("Dim2".equals(dimName)) { hasDim2 = true; } } @@ -348,6 +348,8 @@ void shouldLogWarningOnEmptyMetrics() throws Exception { // Read the log file and check for the warning String logContent = new String(Files.readAllBytes(logFile.toPath()), StandardCharsets.UTF_8); assertThat(logContent).contains("No metrics were emitted"); + // No EMF output should be generated + assertThat(outputStreamCaptor.toString().trim()).isEmpty(); } @Test From 3c7635759eff673e213124b368e3270fe48868ab Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 18 Jun 2025 11:45:47 +0200 Subject: [PATCH 31/38] Rename docs to Kafka Consumer and add line highlights for code examples. --- docs/utilities/kafka.md | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/docs/utilities/kafka.md b/docs/utilities/kafka.md index 82d86d5ea..c5ef8bf9c 100644 --- a/docs/utilities/kafka.md +++ b/docs/utilities/kafka.md @@ -1,5 +1,5 @@ --- -title: Kafka +title: Kafka Consumer description: Utility status: new --- @@ -132,7 +132,7 @@ The Kafka utility transforms raw Lambda Kafka events into an intuitive format fo === "Avro Messages" - ```java + ```java hl_lines="13 16" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -158,7 +158,7 @@ The Kafka utility transforms raw Lambda Kafka events into an intuitive format fo === "Protocol Buffers" - ```java + ```java hl_lines="13 16" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -184,7 +184,7 @@ The Kafka utility transforms raw Lambda Kafka events into an intuitive format fo === "JSON Messages" - ```java + ```java hl_lines="13 16" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -200,7 +200,7 @@ The Kafka utility transforms raw Lambda Kafka events into an intuitive format fo @Deserialization(type = DeserializationType.KAFKA_JSON) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { - User user = record.value(); // Deserialized JSON object + User user = record.value(); // Deserialized JSON object into User POJO System.out.printf("Processing user: %s, age %d%n", user.getName(), user.getAge()); } return "OK"; @@ -218,7 +218,7 @@ The `@Deserialization` annotation deserializes both keys and values based on you === "Key and Value Deserialization" - ```java + ```java hl_lines="17" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -248,7 +248,7 @@ The `@Deserialization` annotation deserializes both keys and values based on you === "Value-Only Deserialization" - ```java + ```java hl_lines="17" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -289,7 +289,7 @@ When working with primitive data types (strings, integers, etc.) 
rather than str === "Primitive key" - ```java + ```java hl_lines="17 19" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -322,7 +322,7 @@ When working with primitive data types (strings, integers, etc.) rather than str === "Primitive key and value" - ```java + ```java hl_lines="17 20" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -367,7 +367,7 @@ The Kafka utility supports multiple serialization formats to match your existing | **JSON** | `KAFKA_JSON` | Human-readable text format | Jackson | | **Avro** | `KAFKA_AVRO` | Compact binary format with schema | Apache Avro | | **Protocol Buffers** | `KAFKA_PROTOBUF` | Efficient binary format | Protocol Buffers | - | **Lambda Default** | `LAMBDA_DEFAULT` | Uses Lambda's built-in deserialization (equivalent to removing the @Deserialization annotation) | None | + | **Lambda Default** | `LAMBDA_DEFAULT` | Uses Lambda's built-in deserialization (equivalent to removing the `@Deserialization` annotation) | None | === "Format Comparison" @@ -579,7 +579,7 @@ The Idempotency utility automatically stores the result of each successful opera ???+ tip "Ensuring exactly-once processing" - The @Idempotent annotation will use the JSON representation of the Payment object to make sure that the same object is only processed exactly once. Even if a batch fails and Lambda retries the messages, each unique payment will be processed exactly once. + The `@Idempotent` annotation will use the JSON representation of the Payment object to make sure that the same object is only processed exactly once. Even if a batch fails and Lambda retries the messages, each unique payment will be processed exactly once. ### Best practices @@ -625,7 +625,7 @@ When using binary serialization formats across multiple programming languages, e === "Using Python naming convention" - ```java + ```java hl_lines="28 31 34 35 37 38 51" package org.example; import com.amazonaws.services.lambda.runtime.Context; @@ -821,7 +821,9 @@ sequenceDiagram Lambda->>+KafkaUtility: Pass Kafka event KafkaUtility->>KafkaUtility: Parse event structure loop For each record + KafkaUtility->>KafkaUtility: Decode base64 data KafkaUtility->>KafkaUtility: Record is already deserialized + KafkaUtility->>KafkaUtility: Map to POJO (if specified) end KafkaUtility->>+YourCode: Provide ConsumerRecords YourCode->>YourCode: Process records From ffebe8ce1dff071d1e8e21b7e17914b9f4b9b95e Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 18 Jun 2025 12:14:34 +0200 Subject: [PATCH 32/38] Fix Spotbug issues. 
--- .../kafka/serializers/AbstractKafkaDeserializer.java | 5 +++-- .../powertools/kafka/serializers/KafkaJsonDeserializer.java | 3 ++- .../kafka/serializers/LambdaDefaultDeserializer.java | 5 +++-- 3 files changed, 8 insertions(+), 5 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index 209b49cab..637dc4948 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -16,6 +16,7 @@ import java.io.InputStream; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Base64; import java.util.HashMap; @@ -263,11 +264,11 @@ private T deserialize(byte[] data, Class type) throws IOException { private T deserializePrimitive(byte[] data, Class type) { // Handle String type if (type == String.class) { - return (T) new String(data); + return (T) new String(data, StandardCharsets.UTF_8); } // Handle primitive types and their wrappers - String str = new String(data); + String str = new String(data, StandardCharsets.UTF_8); if (type == Integer.class || type == int.class) { return (T) Integer.valueOf(str); diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java index e2280364a..ed64f3786 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/KafkaJsonDeserializer.java @@ -13,6 +13,7 @@ package software.amazon.lambda.powertools.kafka.serializers; import java.io.IOException; +import java.nio.charset.StandardCharsets; /** * Deserializer for Kafka records using JSON format. 
@@ -21,7 +22,7 @@ public class KafkaJsonDeserializer extends AbstractKafkaDeserializer { @Override protected T deserializeObject(byte[] data, Class type) throws IOException { - String decodedStr = new String(data); + String decodedStr = new String(data, StandardCharsets.UTF_8); return objectMapper.readValue(decodedStr, type); } diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java index 582f858ae..a7ea15d2f 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/LambdaDefaultDeserializer.java @@ -15,6 +15,7 @@ import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Type; +import java.nio.charset.StandardCharsets; import com.amazonaws.services.lambda.runtime.serialization.factories.JacksonFactory; @@ -37,7 +38,7 @@ public T fromJson(InputStream input, Type type) { // If the target type is String, read the input stream as a String if (type.equals(String.class)) { try { - return (T) new String(input.readAllBytes()); + return (T) new String(input.readAllBytes(), StandardCharsets.UTF_8); } catch (IOException e) { throw new RuntimeException("Failed to read input stream as String", e); } @@ -56,7 +57,7 @@ public T fromJson(String input, Type type) { // If the target type is InputStream, read the input stream as a String if (type.equals(InputStream.class)) { - return (T) new String(input).getBytes(); + return (T) input.getBytes(StandardCharsets.UTF_8); } return (T) JacksonFactory.getInstance().getSerializer(type).fromJson(input); From 55c860a43212a1ebf08a799c2e2fe0bc8c76d652 Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 18 Jun 2025 12:37:24 +0200 Subject: [PATCH 33/38] Reduce cognitive complexity of DeserializationUtils making it more modular and representing handler information in a simple HandlerInfo class. --- .../kafka/internal/DeserializationUtils.java | 91 +++++++++++-------- 1 file changed, 51 insertions(+), 40 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java index f08426a05..1d2fe9aca 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/internal/DeserializationUtils.java @@ -35,51 +35,62 @@ private DeserializationUtils() { } public static DeserializationType determineDeserializationType() { + String handler = System.getenv("_HANDLER"); + if (handler == null || handler.trim().isEmpty()) { + LOGGER.error("Cannot determine deserialization type. No valid handler found in _HANDLER: {}", handler); + return DeserializationType.LAMBDA_DEFAULT; + } + try { - // Get the handler from the environment. 
It has a format like org.example.MyRequestHandler::handleRequest - // or can be abbreviated as just org.example.MyRequestHandler (defaulting to handleRequest) - String handler = System.getenv("_HANDLER"); - String className; - String methodName = "handleRequest"; // Default method name - - if (handler != null && !handler.trim().isEmpty()) { - if (handler.contains("::")) { - className = handler.substring(0, handler.indexOf("::")); - methodName = handler.substring(handler.indexOf("::") + 2); - } else { - // Handle the case where method name is omitted - className = handler; - } - - Class handlerClazz = Class.forName(className); - - // Only consider if it implements RequestHandler - if (RequestHandler.class.isAssignableFrom(handlerClazz)) { - // Look for deserialization type on annotation on handler method - for (Method method : handlerClazz.getDeclaredMethods()) { - if (method.getName().equals(methodName) && method.isAnnotationPresent(Deserialization.class)) { - Deserialization annotation = method.getAnnotation(Deserialization.class); - LOGGER.debug("Found deserialization type: {}", annotation.type()); - return annotation.type(); - } - } - } else { - LOGGER.warn("Candidate class for custom deserialization '{}' does not implement RequestHandler. " - + "Ignoring.", className); - } - } else { - LOGGER.error( - "Cannot determine deserialization type for custom deserialization. " - + "Defaulting to standard. " - + "No valid handler found in environment variable _HANDLER: {}.", - handler); + HandlerInfo handlerInfo = parseHandler(handler); + Class handlerClazz = Class.forName(handlerInfo.className); + + if (!RequestHandler.class.isAssignableFrom(handlerClazz)) { + LOGGER.warn("Class '{}' does not implement RequestHandler. Ignoring.", handlerInfo.className); + return DeserializationType.LAMBDA_DEFAULT; } + + return findDeserializationType(handlerClazz, handlerInfo.methodName); } catch (Exception e) { - LOGGER.warn( - "Cannot determine deserialization type for custom deserialization. Defaulting to standard.", - e); + LOGGER.warn("Cannot determine deserialization type. Defaulting to standard.", e); + return DeserializationType.LAMBDA_DEFAULT; + } + } + + private static HandlerInfo parseHandler(String handler) { + if (handler.contains("::")) { + int separatorIndex = handler.indexOf("::"); + String className = handler.substring(0, separatorIndex); + String methodName = handler.substring(separatorIndex + 2); + return new HandlerInfo(className, methodName); + } + + return new HandlerInfo(handler); + } + + private static DeserializationType findDeserializationType(Class handlerClass, String methodName) { + for (Method method : handlerClass.getDeclaredMethods()) { + if (method.getName().equals(methodName) && method.isAnnotationPresent(Deserialization.class)) { + Deserialization annotation = method.getAnnotation(Deserialization.class); + LOGGER.debug("Found deserialization type: {}", annotation.type()); + return annotation.type(); + } } return DeserializationType.LAMBDA_DEFAULT; } + + private static class HandlerInfo { + final String className; + final String methodName; + + HandlerInfo(String className) { + this(className, "handleRequest"); + } + + HandlerInfo(String className, String methodName) { + this.className = className; + this.methodName = methodName; + } + } } From 03e5b111bfda4ea3317a8991ef09cd5de9c8fe8e Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 18 Jun 2025 12:44:12 +0200 Subject: [PATCH 34/38] Reduce cognitive complexity of AbstractKafkaDeserializer. 
--- .../AbstractKafkaDeserializer.java | 64 +++++++++---------- 1 file changed, 31 insertions(+), 33 deletions(-) diff --git a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java index 637dc4948..8d0fc8f61 100644 --- a/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java +++ b/powertools-kafka/src/main/java/software/amazon/lambda/powertools/kafka/serializers/AbstractKafkaDeserializer.java @@ -170,31 +170,40 @@ private ConsumerRecord convertToConsumerRecord( Class keyType, Class valueType) { - K key = null; - V value = null; - - // We set these to NULL_SIZE since they are not relevant in the Lambda environment due to ESM pre-processing. - int keySize = ConsumerRecord.NULL_SIZE; - int valueSize = ConsumerRecord.NULL_SIZE; - - if (eventRecord.getKey() != null) { - try { - byte[] decodedKeyBytes = Base64.getDecoder().decode(eventRecord.getKey()); - key = deserialize(decodedKeyBytes, keyType); - } catch (Exception e) { - throw new RuntimeException("Failed to deserialize Kafka record key.", e); - } + K key = deserializeField(eventRecord.getKey(), keyType, "key"); + V value = deserializeField(eventRecord.getValue(), valueType, "value"); + Headers headers = extractHeaders(eventRecord); + + return new ConsumerRecord<>( + topic, + eventRecord.getPartition(), + eventRecord.getOffset(), + eventRecord.getTimestamp(), + TimestampType.valueOf(eventRecord.getTimestampType()), + // We set these to NULL_SIZE since they are not relevant in the Lambda environment due to ESM + // pre-processing. + ConsumerRecord.NULL_SIZE, + ConsumerRecord.NULL_SIZE, + key, + value, + headers, + Optional.empty()); + } + + private T deserializeField(String encodedData, Class type, String fieldName) { + if (encodedData == null) { + return null; } - if (eventRecord.getValue() != null) { - try { - byte[] decodedValueBytes = Base64.getDecoder().decode(eventRecord.getValue()); - value = deserialize(decodedValueBytes, valueType); - } catch (Exception e) { - throw new RuntimeException("Failed to deserialize Kafka record value.", e); - } + try { + byte[] decodedBytes = Base64.getDecoder().decode(encodedData); + return deserialize(decodedBytes, type); + } catch (Exception e) { + throw new RuntimeException("Failed to deserialize Kafka record " + fieldName + ".", e); } + } + private Headers extractHeaders(KafkaEvent.KafkaEventRecord eventRecord) { Headers headers = new RecordHeaders(); if (eventRecord.getHeaders() != null) { for (Map headerMap : eventRecord.getHeaders()) { @@ -206,18 +215,7 @@ private ConsumerRecord convertToConsumerRecord( } } - return new ConsumerRecord<>( - topic, - eventRecord.getPartition(), - eventRecord.getOffset(), - eventRecord.getTimestamp(), - TimestampType.valueOf(eventRecord.getTimestampType()), - keySize, - valueSize, - key, - value, - headers, - Optional.empty()); + return headers; } /** From a5689e9d3b0a1ffd06b35f96dd3efdbf4aa2e7ac Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 18 Jun 2025 13:56:36 +0200 Subject: [PATCH 35/38] Enable removal policy DESTROY on e2e test for kinesis streams and SQS queues to avoid exceeding account limit. 
--- .../amazon/lambda/powertools/testutils/Infrastructure.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java b/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java index 143409989..07d816112 100644 --- a/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java +++ b/powertools-e2e-tests/src/test/java/software/amazon/lambda/powertools/testutils/Infrastructure.java @@ -290,6 +290,7 @@ private Stack createStackWithLambda() { .queueName(queue) .visibilityTimeout(Duration.seconds(timeout * 6)) .retentionPeriod(Duration.seconds(timeout * 6)) + .removalPolicy(RemovalPolicy.DESTROY) .build(); DeadLetterQueue.builder() .queue(sqsQueue) @@ -314,6 +315,7 @@ private Stack createStackWithLambda() { .create(e2eStack, "KinesisStream") .streamMode(StreamMode.ON_DEMAND) .streamName(kinesisStream) + .removalPolicy(RemovalPolicy.DESTROY) .build(); stream.grantRead(function); From 8f12c045a0df4d3bd4e2e1c965f5977e65cdc74a Mon Sep 17 00:00:00 2001 From: Philipp Page Date: Wed, 18 Jun 2025 17:21:57 +0200 Subject: [PATCH 36/38] Replace System.out with Powertools Logging. --- docs/utilities/kafka.md | 114 +++++++++++++++++++++++++++++----------- 1 file changed, 82 insertions(+), 32 deletions(-) diff --git a/docs/utilities/kafka.md b/docs/utilities/kafka.md index c5ef8bf9c..b54fa5254 100644 --- a/docs/utilities/kafka.md +++ b/docs/utilities/kafka.md @@ -132,24 +132,29 @@ The Kafka utility transforms raw Lambda Kafka events into an intuitive format fo === "Avro Messages" - ```java hl_lines="13 16" + ```java hl_lines="18 21" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class AvroKafkaHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(AvroKafkaHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_AVRO) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { User user = record.value(); // User class is auto-generated from Avro schema - System.out.printf("Processing user: %s, age %d%n", user.getName(), user.getAge()); + LOGGER.info("Processing user: {}, age {}", user.getName(), user.getAge()); } return "OK"; } @@ -158,24 +163,29 @@ The Kafka utility transforms raw Lambda Kafka events into an intuitive format fo === "Protocol Buffers" - ```java hl_lines="13 16" + ```java hl_lines="18 21" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class ProtobufKafkaHandler implements 
RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(ProtobufKafkaHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_PROTOBUF) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { UserProto.User user = record.value(); // UserProto.User class is auto-generated from Protocol Buffer schema - System.out.printf("Processing user: %s, age %d%n", user.getName(), user.getAge()); + LOGGER.info("Processing user: {}, age {}", user.getName(), user.getAge()); } return "OK"; } @@ -184,24 +194,29 @@ The Kafka utility transforms raw Lambda Kafka events into an intuitive format fo === "JSON Messages" - ```java hl_lines="13 16" + ```java hl_lines="18 21" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class JsonKafkaHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(JsonKafkaHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_JSON) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { User user = record.value(); // Deserialized JSON object into User POJO - System.out.printf("Processing user: %s, age %d%n", user.getName(), user.getAge()); + LOGGER.info("Processing user: {}, age {}", user.getName(), user.getAge()); } return "OK"; } @@ -218,19 +233,24 @@ The `@Deserialization` annotation deserializes both keys and values based on you === "Key and Value Deserialization" - ```java hl_lines="17" + ```java hl_lines="22" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class KeyValueKafkaHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(KeyValueKafkaHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_AVRO) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { @@ -238,8 +258,8 @@ The `@Deserialization` annotation deserializes both keys and values based on you ProductKey key = record.key(); // ProductKey class is auto-generated from Avro schema ProductInfo product = record.value(); // ProductInfo class is auto-generated from Avro schema - System.out.printf("Processing product ID: %s%n", key.getProductId()); - System.out.printf("Product: %s - $%.2f%n", product.getName(), product.getPrice()); + LOGGER.info("Processing product ID: {}", key.getProductId()); + LOGGER.info("Product: {} - ${}", product.getName(), product.getPrice()); } return "OK"; } @@ -248,31 +268,36 @@ The `@Deserialization` 
annotation deserializes both keys and values based on you === "Value-Only Deserialization" - ```java hl_lines="17" + ```java hl_lines="22" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class ValueOnlyKafkaHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(ValueOnlyKafkaHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_JSON) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { // Key remains as string (if present) String key = record.key(); if (key != null) { - System.out.printf("Message key: %s%n", key); + LOGGER.info("Message key: {}", key); } // Value is deserialized as JSON Order order = record.value(); - System.out.printf("Order #%s - Total: $%.2f%n", order.getOrderId(), order.getTotal()); + LOGGER.info("Order #{} - Total: ${}", order.getOrderId(), order.getTotal()); } return "OK"; } @@ -289,19 +314,24 @@ When working with primitive data types (strings, integers, etc.) rather than str === "Primitive key" - ```java hl_lines="17 19" + ```java hl_lines="18 22" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class PrimitiveKeyHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(PrimitiveKeyHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_JSON) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { @@ -311,9 +341,9 @@ When working with primitive data types (strings, integers, etc.) rather than str // Value is deserialized as JSON Customer customer = record.value(); - System.out.printf("Key: %d%n", key); - System.out.printf("Name: %s%n", customer.getName()); - System.out.printf("Email: %s%n", customer.getEmail()); + LOGGER.info("Key: {}", key); + LOGGER.info("Name: {}", customer.getName()); + LOGGER.info("Email: {}", customer.getEmail()); } return "OK"; } @@ -322,19 +352,24 @@ When working with primitive data types (strings, integers, etc.) 
rather than str === "Primitive key and value" - ```java hl_lines="17 20" + ```java hl_lines="18 22" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class PrimitiveHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(PrimitiveHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_JSON) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { @@ -344,8 +379,8 @@ When working with primitive data types (strings, integers, etc.) rather than str // Value is automatically deserialized as String String value = record.value(); - System.out.printf("Key: %s%n", key); - System.out.printf("Value: %s%n", value); + LOGGER.info("Key: {}", key); + LOGGER.info("Value: {}", value); } return "OK"; } @@ -404,32 +439,37 @@ Each Kafka record contains important metadata that you can access alongside the import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.common.header.Header; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; + import software.amazon.lambda.powertools.logging.Logging; public class MetadataKafkaHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(MetadataKafkaHandler.class); @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_AVRO) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { // Log record coordinates for tracing - System.out.printf("Processing message from topic '%s'%n", record.topic()); - System.out.printf(" Partition: %d, Offset: %d%n", record.partition(), record.offset()); - System.out.printf(" Produced at: %d%n", record.timestamp()); + LOGGER.info("Processing message from topic '{}'", record.topic()); + LOGGER.info(" Partition: {}, Offset: {}", record.partition(), record.offset()); + LOGGER.info(" Produced at: {}", record.timestamp()); // Process message headers if (record.headers() != null) { for (Header header : record.headers()) { - System.out.printf(" Header: %s = %s%n", + LOGGER.info(" Header: {} = {}", header.key(), new String(header.value())); } } // Access the Avro deserialized message content Customer customer = record.value(); // Customer class is auto-generated from Avro schema - System.out.printf("Processing order for: %s%n", customer.getName()); - System.out.printf("Order total: $%.2f%n", customer.getOrderTotal()); + LOGGER.info("Processing order for: {}", customer.getName()); + LOGGER.info("Order total: ${}", customer.getOrderTotal()); } return "OK"; } @@ -477,10 +517,11 @@ Handle errors gracefully when processing Kafka messages to ensure your applicati public class ErrorHandlingKafkaHandler implements RequestHandler, String> { - private static final Logger logger = LoggerFactory.getLogger(ErrorHandlingKafkaHandler.class); 
+ private static final Logger LOGGER = LoggerFactory.getLogger(ErrorHandlingKafkaHandler.class); private static final Metrics metrics = MetricsFactory.getMetricsInstance(); @Override + @Logging @FlushMetrics(namespace = "KafkaProcessing", service = "order-processing") @Deserialization(type = DeserializationType.KAFKA_AVRO) public String handleRequest(ConsumerRecords records, Context context) { @@ -494,10 +535,9 @@ Handle errors gracefully when processing Kafka messages to ensure your applicati processOrder(order); successfulRecords++; metrics.addMetric("ProcessedRecords", 1, MetricUnit.COUNT); - } catch (Exception e) { failedRecords++; - logger.error("Error processing Kafka message from topic: {}, partition: {}, offset: {}", + LOGGER.error("Error processing Kafka message from topic: {}, partition: {}, offset: {}", record.topic(), record.partition(), record.offset(), e); metrics.addMetric("ProcessingErrors", 1, MetricUnit.COUNT); // Optionally send to DLQ or error topic @@ -511,7 +551,7 @@ Handle errors gracefully when processing Kafka messages to ensure your applicati private void processOrder(Order order) { // Your business logic here - System.out.printf("Processing order: %s%n", order.getOrderId()); + LOGGER.info("Processing order: {}", order.getOrderId()); } private void sendToDlq(ConsumerRecord record) { @@ -535,14 +575,18 @@ The Idempotency utility automatically stores the result of each successful opera import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; + import org.slf4j.Logger; + import org.slf4j.LoggerFactory; import software.amazon.lambda.powertools.kafka.Deserialization; import software.amazon.lambda.powertools.kafka.DeserializationType; import software.amazon.lambda.powertools.idempotency.Idempotency; import software.amazon.lambda.powertools.idempotency.IdempotencyConfig; import software.amazon.lambda.powertools.idempotency.Idempotent; import software.amazon.lambda.powertools.idempotency.persistence.dynamodb.DynamoDBPersistenceStore; + import software.amazon.lambda.powertools.logging.Logging; public class IdempotentKafkaHandler implements RequestHandler, String> { + private static final Logger LOGGER = LoggerFactory.getLogger(IdempotentKafkaHandler.class); public IdempotentKafkaHandler() { // Configure idempotency with DynamoDB persistence store @@ -555,6 +599,7 @@ The Idempotency utility automatically stores the result of each successful opera } @Override + @Logging @Deserialization(type = DeserializationType.KAFKA_JSON) public String handleRequest(ConsumerRecords records, Context context) { for (ConsumerRecord record : records) { @@ -569,7 +614,7 @@ The Idempotency utility automatically stores the result of each successful opera @Idempotent private void processPayment(Payment payment) { - System.out.printf("Processing payment %s%n", payment.getPaymentId()); + LOGGER.info("Processing payment {}", payment.getPaymentId()); // Your business logic here PaymentService.process(payment.getPaymentId(), payment.getCustomerId(), payment.getAmount()); @@ -625,26 +670,31 @@ When using binary serialization formats across multiple programming languages, e === "Using Python naming convention" - ```java hl_lines="28 31 34 35 37 38 51" + ```java hl_lines="33 36 39 42 56" package org.example; import com.amazonaws.services.lambda.runtime.Context; import com.amazonaws.services.lambda.runtime.RequestHandler; import org.apache.kafka.clients.consumer.ConsumerRecord; import 
org.apache.kafka.clients.consumer.ConsumerRecords;
+    import org.slf4j.Logger;
+    import org.slf4j.LoggerFactory;
     import software.amazon.lambda.powertools.kafka.Deserialization;
     import software.amazon.lambda.powertools.kafka.DeserializationType;
+    import software.amazon.lambda.powertools.logging.Logging;
     import com.fasterxml.jackson.annotation.JsonProperty;
     import java.time.Instant;
     public class CrossLanguageKafkaHandler implements RequestHandler<ConsumerRecords<String, OrderEvent>, String> {
+        private static final Logger LOGGER = LoggerFactory.getLogger(CrossLanguageKafkaHandler.class);
         @Override
+        @Logging
         @Deserialization(type = DeserializationType.KAFKA_JSON)
         public String handleRequest(ConsumerRecords<String, OrderEvent> records, Context context) {
             for (ConsumerRecord<String, OrderEvent> record : records) {
                 OrderEvent order = record.value(); // OrderEvent class handles JSON with Python field names
-                System.out.printf("Processing order %s from %s%n",
+                LOGGER.info("Processing order {} from {}",
                     order.getOrderId(), order.getOrderDate());
             }
             return "OK";

From 335279e7b723c3fffc93ddfbbfa05476978d050c Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Wed, 18 Jun 2025 17:25:27 +0200
Subject: [PATCH 37/38] Add notice about kafka-clients compatibility.

---
 docs/utilities/kafka.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/utilities/kafka.md b/docs/utilities/kafka.md
index b54fa5254..c1bcbbe49 100644
--- a/docs/utilities/kafka.md
+++ b/docs/utilities/kafka.md
@@ -59,7 +59,7 @@ Lambda processes Kafka messages as discrete events rather than continuous stream
 ### Installation
-Add the Powertools for AWS Lambda Kafka dependency to your project:
+Add the Powertools for AWS Lambda Kafka dependency to your project. Make sure to also add the `kafka-clients` library as a dependency. The utility supports `kafka-clients >= 3.0.0`.
 === "Maven"

From e9654a9a362b32cd90dd09b3e69e1210380de5c5 Mon Sep 17 00:00:00 2001
From: Philipp Page
Date: Wed, 18 Jun 2025 17:29:22 +0200
Subject: [PATCH 38/38] Add sentence stating that Avro / Protobuf classes can be autogenerated.

---
 docs/utilities/kafka.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/utilities/kafka.md b/docs/utilities/kafka.md
index c1bcbbe49..da179bc5c 100644
--- a/docs/utilities/kafka.md
+++ b/docs/utilities/kafka.md
@@ -124,7 +124,7 @@ Powertools for AWS supports both Schema Registry integration modes in your Event
 ### Processing Kafka events
-The Kafka utility transforms raw Lambda Kafka events into an intuitive format for processing. To handle messages effectively, you'll need to configure the `@Deserialization` annotation that matches your data format.
+The Kafka utility transforms raw Lambda Kafka events into an intuitive format for processing. To handle messages effectively, you'll need to configure the `@Deserialization` annotation that matches your data format. Based on the deserializer you choose, incoming records are transformed directly into your business objects, which can be classes auto-generated from Avro or Protobuf schemas, or simple POJOs.
 ???+ tip "Using Avro is recommended"
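
To make the POJO statement added in PATCH 38/38 concrete: with `DeserializationType.KAFKA_JSON`, no generated classes are needed at all. The sketch below is illustrative only, not part of the patch series. The `JsonPojoHandler` and `Product` names, the field set (`id`, `name`, `price`), and the assumption that the JSON deserializer maps payload fields onto bean-style properties are all hypothetical choices for this example; the annotation usage mirrors the handlers shown in the diffs above.

```java
package org.example;

import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.lambda.powertools.kafka.Deserialization;
import software.amazon.lambda.powertools.kafka.DeserializationType;

public class JsonPojoHandler implements RequestHandler<ConsumerRecords<String, Product>, String> {
    private static final Logger LOGGER = LoggerFactory.getLogger(JsonPojoHandler.class);

    @Override
    @Deserialization(type = DeserializationType.KAFKA_JSON)
    public String handleRequest(ConsumerRecords<String, Product> records, Context context) {
        for (ConsumerRecord<String, Product> record : records) {
            // JSON value deserialized into the plain POJO below, no Avro/Protobuf classes required
            Product product = record.value();
            LOGGER.info("Product {} ({}) costs {}", product.getId(), product.getName(), product.getPrice());
        }
        return "OK";
    }
}

// Hypothetical POJO: any bean-style class whose property names match the JSON payload fields.
class Product {
    private long id;
    private String name;
    private double price;

    public long getId() { return id; }
    public void setId(long id) { this.id = id; }
    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
    public double getPrice() { return price; }
    public void setPrice(double price) { this.price = price; }
}
```

A top-level package-private `Product` is used here only to keep the sketch self-contained in one file; in a real project the POJO would live in its own source file, exactly like the generated Avro or Protobuf classes referenced elsewhere in this series.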