From 1e64f81b214dcd2268addc81492d67f34b27155e Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Fri, 6 Jun 2025 10:07:55 +0100 Subject: [PATCH 01/35] feat(kafka): add Avro serialization support and implement Kafka event handling --- libraries/AWS.Lambda.Powertools.sln | 30 +++ .../AWS.Lambda.Powertools.Kafka.csproj | 20 ++ .../AWS.Lambda.Powertools.Kafka/KafkaEvent.cs | 23 ++ .../PowertoolsKafkaAvroSerializer.cs | 216 ++++++++++++++++++ .../src/AWS.Lambda.Powertools.Kafka/Readme.md | 1 + libraries/src/Directory.Packages.props | 1 + .../AWS.Lambda.Powertools.Kafka.Tests.csproj | 57 +++++ .../Powertools/Kafka/Tests/AvroProduct.cs | 86 +++++++ .../Avro/AvroProduct.avsc | 10 + .../Avro/HandlerTests.cs | 140 ++++++++++++ .../PowertoolsKafkaAvroSerializerTests.cs | 49 ++++ .../Avro/kafka-avro-event.json | 51 +++++ .../Json/kafka-json-event.json | 50 ++++ .../PowertoolsLambdaKafkaSerializerTests.cs | 12 + .../Protobuf/kafka-protobuf-event.json | 51 +++++ .../Readme.md | 17 ++ 16 files changed, 814 insertions(+) create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/Readme.md create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroProduct.avsc create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/kafka-avro-event.json create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/kafka-json-event.json create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-event.json create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md diff --git a/libraries/AWS.Lambda.Powertools.sln b/libraries/AWS.Lambda.Powertools.sln index c3056d147..cc18e136a 100644 --- a/libraries/AWS.Lambda.Powertools.sln +++ b/libraries/AWS.Lambda.Powertools.sln @@ -113,6 +113,10 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Event EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore", "src\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore.csproj", "{8A22F22E-D10A-4897-A89A-DC76C267F6BB}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka", "src\AWS.Lambda.Powertools.Kafka\AWS.Lambda.Powertools.Kafka.csproj", "{5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Tests", "tests\AWS.Lambda.Powertools.Kafka.Tests\AWS.Lambda.Powertools.Kafka.Tests.csproj", "{FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}" +EndProject Global 
GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -618,6 +622,30 @@ Global {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x64.Build.0 = Release|Any CPU {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x86.ActiveCfg = Release|Any CPU {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x86.Build.0 = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x64.ActiveCfg = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x64.Build.0 = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x86.ActiveCfg = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x86.Build.0 = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|Any CPU.Build.0 = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x64.ActiveCfg = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x64.Build.0 = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x86.ActiveCfg = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x86.Build.0 = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x64.ActiveCfg = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x64.Build.0 = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x86.ActiveCfg = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x86.Build.0 = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|Any CPU.Build.0 = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x64.ActiveCfg = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x64.Build.0 = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.ActiveCfg = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution @@ -671,5 +699,7 @@ Global {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} {281F7EB5-ACE5-458F-BC88-46A8899DF3BA} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} {8A22F22E-D10A-4897-A89A-DC76C267F6BB} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645} = {1CFF5568-8486-475F-81F6-06105C437528} EndGlobalSection EndGlobal diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj new file mode 100644 index 000000000..476f85faf --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj @@ -0,0 +1,20 @@ + + + + + AWS.Lambda.Powertools.Kafka + Powertools for AWS Lambda (.NET) - Kafka consumer package. 
+ AWS.Lambda.Powertools.Kafka + AWS.Lambda.Powertools.Kafka + net8.0 + false + enable + enable + + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs new file mode 100644 index 000000000..db6fcbd35 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs @@ -0,0 +1,23 @@ +using System.Text.Json; + +namespace AWS.Lambda.Powertools.Kafka; + +public class KafkaEvent +{ + public string EventSource { get; set; } + public string EventSourceArn { get; set; } + public string BootstrapServers { get; set; } + public Dictionary>> Records { get; set; } = new(); +} + +public class KafkaRecord +{ + public string Topic { get; set; } + public int Partition { get; set; } + public long Offset { get; set; } + public long Timestamp { get; set; } + public string TimestampType { get; set; } + public string Key { get; set; } + public T Value { get; set; } + public Dictionary Headers { get; set; } +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs new file mode 100644 index 000000000..9f3d1a2ab --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -0,0 +1,216 @@ +using Amazon.Lambda.Core; +using Avro; +using Avro.IO; +using Avro.Specific; +using System; +using System.Collections.Generic; +using System.IO; +using System.Reflection; +using System.Text; +using System.Text.Json; + +namespace AWS.Lambda.Powertools.Kafka; + +public class PowertoolsKafkaAvroSerializer : ILambdaSerializer +{ + private readonly JsonSerializerOptions _jsonOptions = new() + { + PropertyNameCaseInsensitive = true + }; + + public T Deserialize(Stream requestStream) + { + using var reader = new StreamReader(requestStream); + var json = reader.ReadToEnd(); + + var targetType = typeof(T); + + if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(KafkaEvent<>)) + { + var payloadType = targetType.GetGenericArguments()[0]; + using var document = JsonDocument.Parse(json); + var root = document.RootElement; + + // Create the correctly typed instance + var typedEvent = Activator.CreateInstance(targetType); + + // Set basic properties + if (root.TryGetProperty("eventSource", out var eventSource)) + targetType.GetProperty("EventSource").SetValue(typedEvent, eventSource.GetString()); + + if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) + targetType.GetProperty("EventSourceArn").SetValue(typedEvent, eventSourceArn.GetString()); + + if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) + targetType.GetProperty("BootstrapServers").SetValue(typedEvent, bootstrapServers.GetString()); + + // Get the schema for Avro deserialization + Schema schema = GetAvroSchema(payloadType); + + // Create records dictionary with correct generic type + var dictType = typeof(Dictionary<,>).MakeGenericType( + typeof(string), + typeof(List<>).MakeGenericType(typeof(KafkaRecord<>).MakeGenericType(payloadType)) + ); + var records = Activator.CreateInstance(dictType); + var dictAddMethod = dictType.GetMethod("Add"); + + if (root.TryGetProperty("records", out var recordsElement)) + { + foreach (var topicPartition in recordsElement.EnumerateObject()) + { + string topicName = topicPartition.Name; + + // Create list of records with correct generic type + var listType = 
typeof(List<>).MakeGenericType(typeof(KafkaRecord<>).MakeGenericType(payloadType)); + var recordsList = Activator.CreateInstance(listType); + var listAddMethod = listType.GetMethod("Add"); + + foreach (var recordElement in topicPartition.Value.EnumerateArray()) + { + // Create record instance of correct type + var recordType = typeof(KafkaRecord<>).MakeGenericType(payloadType); + var record = Activator.CreateInstance(recordType); + + // Set basic properties + SetProperty(recordType, record, "Topic", recordElement, "topic"); + SetProperty(recordType, record, "Partition", recordElement, "partition"); + SetProperty(recordType, record, "Offset", recordElement, "offset"); + SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); + SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); + + // Handle key - base64 decode if present + if (recordElement.TryGetProperty("key", out var keyElement) && + keyElement.ValueKind == JsonValueKind.String) + { + string base64Key = keyElement.GetString(); + recordType.GetProperty("Key").SetValue(record, base64Key); + + // Base64 decode the key + if (!string.IsNullOrEmpty(base64Key)) + { + try + { + byte[] keyBytes = Convert.FromBase64String(base64Key); + string decodedKey = Encoding.UTF8.GetString(keyBytes); + recordType.GetProperty("Key").SetValue(record, decodedKey); + } + catch (Exception) + { + // If decoding fails, leave it as is + } + } + } + + // Handle Avro value + if (recordElement.TryGetProperty("value", out var value) && + value.ValueKind == JsonValueKind.String) + { + string base64Value = value.GetString(); + // recordType.GetProperty("Value").SetValue(record, base64Value); + + // Deserialize Avro data + try + { + var deserializedValue = DeserializeAvroValue(base64Value, schema); + recordType.GetProperty("Value").SetValue(record, deserializedValue); + } + catch (Exception ex) + { + throw new Exception($"Failed to deserialize Avro data: {ex.Message}", ex); + } + } + + if (recordElement.TryGetProperty("headers", out var headersElement) && + headersElement.ValueKind == JsonValueKind.Array) + { + var decodedHeaders = new Dictionary(); + + foreach (var headerObj in headersElement.EnumerateArray()) + { + foreach (var header in headerObj.EnumerateObject()) + { + string headerKey = header.Name; + if (header.Value.ValueKind == JsonValueKind.Array) + { + // Convert integer array to byte array + byte[] headerBytes = new byte[header.Value.GetArrayLength()]; + int i = 0; + foreach (var byteVal in header.Value.EnumerateArray()) + { + headerBytes[i++] = (byte)byteVal.GetInt32(); + } + + // Decode as UTF-8 string + string headerValue = Encoding.UTF8.GetString(headerBytes); + decodedHeaders[headerKey] = headerValue; + } + } + } + + recordType.GetProperty("Headers").SetValue(record, decodedHeaders); + } + + // Add to records list + listAddMethod.Invoke(recordsList, new[] { record }); + } + + // Add topic records to dictionary + dictAddMethod.Invoke(records, new[] { topicName, recordsList }); + } + } + + targetType.GetProperty("Records").SetValue(typedEvent, records); + return (T)typedEvent; + } + + return JsonSerializer.Deserialize(json, _jsonOptions); + } + + + private void SetProperty(Type type, object instance, string propertyName, + JsonElement element, string jsonPropertyName) + { + if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || + jsonValue.ValueKind == JsonValueKind.Null) + return; + + var property = type.GetProperty(propertyName); + var propertyType = property.PropertyType; + + object value; + if 
(propertyType == typeof(int)) value = jsonValue.GetInt32(); + else if (propertyType == typeof(long)) value = jsonValue.GetInt64(); + else if (propertyType == typeof(double)) value = jsonValue.GetDouble(); + else if (propertyType == typeof(string)) value = jsonValue.GetString(); + else return; + + property.SetValue(instance, value); + } + + private Schema GetAvroSchema(Type payloadType) + { + var schemaField = payloadType.GetField("_SCHEMA", + BindingFlags.Public | BindingFlags.Static); + + if (schemaField == null) + throw new InvalidOperationException($"No Avro schema found for type {payloadType.Name}"); + + return schemaField.GetValue(null) as Schema; + } + + private object DeserializeAvroValue(string base64Value, Schema schema) + { + byte[] avroBytes = Convert.FromBase64String(base64Value); + using var stream = new MemoryStream(avroBytes); + var decoder = new BinaryDecoder(stream); + var reader = new SpecificDatumReader(schema, schema); + return reader.Read(null, decoder); + } + + public void Serialize(T response, Stream responseStream) + { + using var writer = new StreamWriter(responseStream); + writer.Write(JsonSerializer.Serialize(response, _jsonOptions)); + } +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka/Readme.md new file mode 100644 index 000000000..16da5ccb4 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/Readme.md @@ -0,0 +1 @@ +# Powertools for AWS Lambda (.NET) - Kafka \ No newline at end of file diff --git a/libraries/src/Directory.Packages.props b/libraries/src/Directory.Packages.props index a4421f6fc..becc44d4f 100644 --- a/libraries/src/Directory.Packages.props +++ b/libraries/src/Directory.Packages.props @@ -5,6 +5,7 @@ + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj new file mode 100644 index 000000000..aa28ba64c --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj @@ -0,0 +1,57 @@ + + + + + + AWS.Lambda.Powertools.Kafka.Tests + AWS.Lambda.Powertools.Kafka.Tests + net8.0 + enable + enable + + false + true + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + PreserveNewest + + + + PreserveNewest + + + + PreserveNewest + + + + PreserveNewest + + + + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs new file mode 100644 index 000000000..f1c6aa8d4 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs @@ -0,0 +1,86 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + 
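+ // The Powertools Avro serializer locates this type's schema at runtime via the public static _SCHEMA field declared below.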
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroProduct : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"AWS.Lambda.Powertools.Kafka.Te" + + "sts\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"string\"},{\"name" + + "\":\"price\",\"type\":\"double\"}]}"); + private int _id; + private string _name; + private double _price; + public virtual global::Avro.Schema Schema + { + get + { + return AvroProduct._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public string name + { + get + { + return this._name; + } + set + { + this._name = value; + } + } + public double price + { + get + { + return this._price; + } + set + { + this._price = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.name; + case 2: return this.price; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.name = (System.String)fieldValue; break; + case 2: this.price = (System.Double)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroProduct.avsc b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroProduct.avsc new file mode 100644 index 000000000..60b8ed002 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroProduct.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroProduct", + "fields": [ + {"name": "id", "type": "int"}, + {"name": "name", "type": "string"}, + {"name": "price", "type": "double"} + ] +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs new file mode 100644 index 000000000..3b3f46a74 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -0,0 +1,140 @@ +using System.Text; +using Amazon.Lambda.Core; +using Amazon.Lambda.TestUtilities; +using Avro.IO; +using Avro.Specific; + +namespace AWS.Lambda.Powertools.Kafka.Tests; + + +public class KafkaHandlerTests +{ + [Fact] + public async Task Handler_ProcessesKafkaEvent_Successfully() + { + // Arrange + var kafkaJson = GetMockKafkaEvent(); + var mockContext = new TestLambdaContext(); + var serializer = new PowertoolsKafkaAvroSerializer(); + + // Convert JSON string to stream for deserialization + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); + + // Act - Deserialize and process + var kafkaEvent = serializer.Deserialize>(stream); + var response = await Handler(kafkaEvent, mockContext); + + // Assert + Assert.Equal("Successfully processed Kafka events", response); + + // Verify event structure + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Single(kafkaEvent.Records); + + // Verify record content + var records = kafkaEvent.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record + var firstRecord = 
records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + + // Verify deserialized value + var product = firstRecord.Value; + Assert.Equal("Laptop", product.name); + Assert.Equal(999.99, product.price); + + // Verify decoded key and headers + Assert.Equal("42", firstRecord.Key); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); + } + + private string GetMockKafkaEvent() + { + // For testing, we'll create base64-encoded Avro data for our test products + var laptop = new AvroProduct { name = "Laptop", price = 999.99 }; + var smartphone = new AvroProduct { name = "Smartphone", price = 499.99 }; + var headphones = new AvroProduct { name = "Headphones", price = 99.99 }; + + // Convert to base64-encoded Avro + string laptopBase64 = ConvertToAvroBase64(laptop); + string smartphoneBase64 = ConvertToAvroBase64(smartphone); + string headphonesBase64 = ConvertToAvroBase64(headphones); + + // Create mock Kafka event JSON + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""NDI="", + ""value"": ""{laptopBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 16, + ""timestamp"": 1545084650988, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""NDI="", + ""value"": ""{smartphoneBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 17, + ""timestamp"": 1545084650989, + ""timestampType"": ""CREATE_TIME"", + ""key"": null, + ""value"": ""{headphonesBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + private string ConvertToAvroBase64(AvroProduct product) + { + using var stream = new MemoryStream(); + var encoder = new BinaryEncoder(stream); + var writer = new SpecificDatumWriter(AvroProduct._SCHEMA); + + writer.Write(product, encoder); + encoder.Flush(); + + return Convert.ToBase64String(stream.ToArray()); + } + + // Define the test handler method + private async Task Handler(KafkaEvent kafkaEvent, ILambdaContext context) + { + foreach (var topicRecords in kafkaEvent.Records) + { + foreach (var record in topicRecords.Value) + { + var product = record.Value; + context.Logger.LogInformation($"Processing {product.name} at ${product.price}"); + } + } + + return "Successfully processed Kafka events"; + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs new file mode 100644 index 000000000..44026d544 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -0,0 +1,49 @@ +using System.Text; +using Avro; +using Avro.Generic; +using Avro.IO; +using Avro.Specific; + +namespace 
AWS.Lambda.Powertools.Kafka.Tests; + +public class PowertoolsKafkaAvroSerializerTests +{ + [Fact] + public void Deserialize_KafkaEventWithAvroPayload_DeserializesToCorrectType() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + string kafkaEventJson = File.ReadAllText("Avro/kafka-avro-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("aws:kafka", result.EventSource); + + // Verify records were deserialized + Assert.True(result.Records.ContainsKey("mytopic-0")); + var records = result.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record's content + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + Assert.Equal("42", firstRecord.Key); + + // Verify deserialized Avro value + var product = firstRecord.Value; + Assert.Equal("Laptop", product.name); + Assert.Equal(1001, product.id); + Assert.Equal(999.99000000000001, product.price); + + // Verify second record + var secondRecord = records[1]; + var smartphone = secondRecord.Value; + Assert.Equal("Smartphone", smartphone.name); + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/kafka-avro-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/kafka-avro-event.json new file mode 100644 index 000000000..8d6ef2210 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/kafka-avro-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "0g8MTGFwdG9wUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "1g8USGVhZHBob25lc0jhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/kafka-json-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/kafka-json-event.json new file mode 100644 index 000000000..d85c40654 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/kafka-json-event.json @@ -0,0 +1,50 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": 
"mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "cmVjb3JkS2V5", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": null, + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs new file mode 100644 index 000000000..17ee9eca8 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs @@ -0,0 +1,12 @@ +using Amazon.Lambda.Core; +using Avro; +using Avro.IO; +using Avro.Specific; +using System; +using System.IO; +using System.Text.Json; +using Xunit; + +namespace AWS.Lambda.Powertools.Kafka.Tests +{ +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-event.json new file mode 100644 index 000000000..b3e0139e3 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md new file mode 100644 index 000000000..317c34a26 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md @@ -0,0 +1,17 @@ +# Avro + +```bash +dotnet tool install --global Apache.Avro.Tools + +cd tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/ +avrogen -s AvroProduct.avsc ./ +``` + +```xml + + + 
+ + + +``` From 7ff8953661bebe0fa09db68f7b0d25e4a38e17b3 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Fri, 6 Jun 2025 11:14:31 +0100 Subject: [PATCH 02/35] feat(tests): enhance Kafka event handling tests and implement IEnumerable for KafkaEvent --- .../InternalsVisibleTo.cs | 18 +++ .../AWS.Lambda.Powertools.Kafka/KafkaEvent.cs | 22 ++- .../PowertoolsKafkaAvroSerializer.cs | 50 +++--- .../Avro/HandlerTests.cs | 153 +++++++++++++++--- .../PowertoolsKafkaAvroSerializerTests.cs | 34 ++++ 5 files changed, 232 insertions(+), 45 deletions(-) create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/InternalsVisibleTo.cs diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/InternalsVisibleTo.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/InternalsVisibleTo.cs new file mode 100644 index 000000000..35c17ea16 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/InternalsVisibleTo.cs @@ -0,0 +1,18 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("AWS.Lambda.Powertools.Kafka.Tests")] \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs index db6fcbd35..a2ba2424a 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs @@ -1,13 +1,31 @@ +using System.Collections; using System.Text.Json; namespace AWS.Lambda.Powertools.Kafka; -public class KafkaEvent +public class KafkaEvent : IEnumerable> { public string EventSource { get; set; } public string EventSourceArn { get; set; } public string BootstrapServers { get; set; } - public Dictionary>> Records { get; set; } = new(); + internal Dictionary>> Records { get; set; } = new(); + + public IEnumerator> GetEnumerator() + { + foreach (var topicRecords in Records) + { + foreach (var record in topicRecords.Value) + { + yield return record; + } + } + } + + // Implement non-generic IEnumerable (required) + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } } public class KafkaRecord diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs index 9f3d1a2ab..ae0f2b92e 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -36,7 +36,9 @@ public T Deserialize(Stream requestStream) // Set basic properties if (root.TryGetProperty("eventSource", out var eventSource)) - targetType.GetProperty("EventSource").SetValue(typedEvent, eventSource.GetString()); + targetType.GetProperty("EventSource", + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, eventSource.GetString()); if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) 
targetType.GetProperty("EventSourceArn").SetValue(typedEvent, eventSourceArn.GetString()); @@ -120,12 +122,12 @@ public T Deserialize(Stream requestStream) throw new Exception($"Failed to deserialize Avro data: {ex.Message}", ex); } } - - if (recordElement.TryGetProperty("headers", out var headersElement) && + + if (recordElement.TryGetProperty("headers", out var headersElement) && headersElement.ValueKind == JsonValueKind.Array) { var decodedHeaders = new Dictionary(); - + foreach (var headerObj in headersElement.EnumerateArray()) { foreach (var header in headerObj.EnumerateObject()) @@ -140,15 +142,20 @@ public T Deserialize(Stream requestStream) { headerBytes[i++] = (byte)byteVal.GetInt32(); } - + // Decode as UTF-8 string string headerValue = Encoding.UTF8.GetString(headerBytes); decodedHeaders[headerKey] = headerValue; } } } - - recordType.GetProperty("Headers").SetValue(record, decodedHeaders); + + var headersProperty = recordType.GetProperty("Headers", + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + if (headersProperty != null) + { + headersProperty.SetValue(record, decodedHeaders); + } } // Add to records list @@ -160,45 +167,48 @@ public T Deserialize(Stream requestStream) } } - targetType.GetProperty("Records").SetValue(typedEvent, records); + targetType.GetProperty("Records",BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)?.SetValue(typedEvent, records); return (T)typedEvent; } return JsonSerializer.Deserialize(json, _jsonOptions); } - - private void SetProperty(Type type, object instance, string propertyName, + + private void SetProperty(Type type, object instance, string propertyName, JsonElement element, string jsonPropertyName) { - if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || + if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || jsonValue.ValueKind == JsonValueKind.Null) return; - - var property = type.GetProperty(propertyName); + + // Add BindingFlags to find internal properties too + var property = type.GetProperty(propertyName, + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + if (property == null) return; var propertyType = property.PropertyType; - + object value; if (propertyType == typeof(int)) value = jsonValue.GetInt32(); else if (propertyType == typeof(long)) value = jsonValue.GetInt64(); else if (propertyType == typeof(double)) value = jsonValue.GetDouble(); else if (propertyType == typeof(string)) value = jsonValue.GetString(); else return; - + property.SetValue(instance, value); } - + private Schema GetAvroSchema(Type payloadType) { - var schemaField = payloadType.GetField("_SCHEMA", + var schemaField = payloadType.GetField("_SCHEMA", BindingFlags.Public | BindingFlags.Static); - + if (schemaField == null) throw new InvalidOperationException($"No Avro schema found for type {payloadType.Name}"); - + return schemaField.GetValue(null) as Schema; } - + private object DeserializeAvroValue(string base64Value, Schema schema) { byte[] avroBytes = Convert.FromBase64String(base64Value); diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs index 3b3f46a74..c90682f76 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -6,7 +6,6 @@ namespace AWS.Lambda.Powertools.Kafka.Tests; - public class KafkaHandlerTests { [Fact] @@ -16,53 +15,53 @@ public async Task 
Handler_ProcessesKafkaEvent_Successfully() var kafkaJson = GetMockKafkaEvent(); var mockContext = new TestLambdaContext(); var serializer = new PowertoolsKafkaAvroSerializer(); - + // Convert JSON string to stream for deserialization using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); - + // Act - Deserialize and process var kafkaEvent = serializer.Deserialize>(stream); var response = await Handler(kafkaEvent, mockContext); - + // Assert Assert.Equal("Successfully processed Kafka events", response); - + // Verify event structure Assert.Equal("aws:kafka", kafkaEvent.EventSource); Assert.Single(kafkaEvent.Records); - + // Verify record content var records = kafkaEvent.Records["mytopic-0"]; Assert.Equal(3, records.Count); - + // Verify first record var firstRecord = records[0]; Assert.Equal("mytopic", firstRecord.Topic); Assert.Equal(0, firstRecord.Partition); Assert.Equal(15, firstRecord.Offset); - + // Verify deserialized value var product = firstRecord.Value; Assert.Equal("Laptop", product.name); Assert.Equal(999.99, product.price); - + // Verify decoded key and headers Assert.Equal("42", firstRecord.Key); Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); } - + private string GetMockKafkaEvent() { // For testing, we'll create base64-encoded Avro data for our test products var laptop = new AvroProduct { name = "Laptop", price = 999.99 }; var smartphone = new AvroProduct { name = "Smartphone", price = 499.99 }; var headphones = new AvroProduct { name = "Headphones", price = 99.99 }; - + // Convert to base64-encoded Avro string laptopBase64 = ConvertToAvroBase64(laptop); string smartphoneBase64 = ConvertToAvroBase64(smartphone); string headphonesBase64 = ConvertToAvroBase64(headphones); - + // Create mock Kafka event JSON return @$"{{ ""eventSource"": ""aws:kafka"", @@ -110,31 +109,139 @@ private string GetMockKafkaEvent() }} }}"; } - + private string ConvertToAvroBase64(AvroProduct product) { using var stream = new MemoryStream(); var encoder = new BinaryEncoder(stream); var writer = new SpecificDatumWriter(AvroProduct._SCHEMA); - + writer.Write(product, encoder); encoder.Flush(); - + return Convert.ToBase64String(stream.ToArray()); } - + // Define the test handler method - private async Task Handler(KafkaEvent kafkaEvent, ILambdaContext context) + private async Task Handler(KafkaEvent records, ILambdaContext context) { - foreach (var topicRecords in kafkaEvent.Records) + foreach (var record in records) { - foreach (var record in topicRecords.Value) - { - var product = record.Value; - context.Logger.LogInformation($"Processing {product.name} at ${product.price}"); - } + var product = record.Value; + context.Logger.LogInformation($"Processing {product.name} at ${product.price}"); } return "Successfully processed Kafka events"; } + + [Fact] + public async Task Handler_ProcessesMultipleTopics_WithNestedLoops() + { + // Arrange + var kafkaJson = GetMockMultiTopicKafkaEvent(); + var mockContext = new TestLambdaContext(); + var serializer = new PowertoolsKafkaAvroSerializer(); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); + + // Act + var kafkaEvent = serializer.Deserialize>(stream); + var response = await HandlerWithNestedLoops(kafkaEvent, mockContext); + + // Assert + Assert.Equal("Successfully processed Kafka events from multiple topics", response); + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + + // Check that we have two topics + Assert.Equal(2, kafkaEvent.Records.Count); + 
Assert.True(kafkaEvent.Records.ContainsKey("mytopic-0")); + Assert.True(kafkaEvent.Records.ContainsKey("anothertopic-0")); + + // Verify records count + Assert.Equal(2, kafkaEvent.Records["mytopic-0"].Count); + Assert.Equal(1, kafkaEvent.Records["anothertopic-0"].Count); + + // Verify first record's content + var firstRecord = kafkaEvent.Records["mytopic-0"][0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal("Laptop", firstRecord.Value.name); + + // Verify the record in the second topic + var secondTopicRecord = kafkaEvent.Records["anothertopic-0"][0]; + Assert.Equal("anothertopic", secondTopicRecord.Topic); + Assert.Equal("Headphones", secondTopicRecord.Value.name); + } + + private string GetMockMultiTopicKafkaEvent() + { + // Create test products + var laptop = new AvroProduct { name = "Laptop", price = 999.99 }; + var smartphone = new AvroProduct { name = "Smartphone", price = 499.99 }; + var headphones = new AvroProduct { name = "Headphones", price = 99.99 }; + + // Convert to base64-encoded Avro + string laptopBase64 = ConvertToAvroBase64(laptop); + string smartphoneBase64 = ConvertToAvroBase64(smartphone); + string headphonesBase64 = ConvertToAvroBase64(headphones); + + // Create mock Kafka event JSON with multiple topics + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""NDI="", + ""value"": ""{laptopBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 16, + ""timestamp"": 1545084650988, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""NDI="", + ""value"": ""{smartphoneBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ], + ""anothertopic-0"": [ + {{ + ""topic"": ""anothertopic"", + ""partition"": 0, + ""offset"": 17, + ""timestamp"": 1545084650989, + ""timestampType"": ""CREATE_TIME"", + ""key"": null, + ""value"": ""{headphonesBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + private async Task HandlerWithNestedLoops(KafkaEvent kafkaEvent, ILambdaContext context) + { + foreach (var record in kafkaEvent) + { + var product = record.Value; + context.Logger.LogInformation($"Processing {product.name} at ${product.price} from topic {record.Topic}"); + } + + return "Successfully processed Kafka events from multiple topics"; + } } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs index 44026d544..dd49a5597 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -46,4 +46,38 @@ public void Deserialize_KafkaEventWithAvroPayload_DeserializesToCorrectType() var smartphone = secondRecord.Value; Assert.Equal("Smartphone", 
smartphone.name); } + + [Fact] + public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + string kafkaEventJson = File.ReadAllText("Avro/kafka-avro-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert - Test enumeration + int count = 0; + var products = new List(); + + // Directly iterate over KafkaEvent + foreach (var record in result) + { + count++; + products.Add(record.Value.name); + } + + // Verify correct count and values + Assert.Equal(3, count); + Assert.Contains("Laptop", products); + Assert.Contains("Smartphone", products); + Assert.Equal(3, products.Count); + + // Get first record directly through Linq extension + var firstRecord = result.First(); + Assert.Equal("Laptop", firstRecord.Value.name); + Assert.Equal(1001, firstRecord.Value.id); + } } \ No newline at end of file From ea82e0abd6577463cb73841631877d2914aa2269 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Fri, 6 Jun 2025 11:16:18 +0100 Subject: [PATCH 03/35] feat(kafka): rename to ConsumerRecord and ConsumerRecords classes --- .../ConsumerRecord.cs | 13 +++++++++++++ .../{KafkaEvent.cs => ConsumerRecords.cs} | 18 +++--------------- .../PowertoolsKafkaAvroSerializer.cs | 8 ++++---- .../Avro/HandlerTests.cs | 10 +++++----- .../Avro/PowertoolsKafkaAvroSerializerTests.cs | 6 +++--- 5 files changed, 28 insertions(+), 27 deletions(-) create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs rename libraries/src/AWS.Lambda.Powertools.Kafka/{KafkaEvent.cs => ConsumerRecords.cs} (51%) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs new file mode 100644 index 000000000..0c9a24d17 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs @@ -0,0 +1,13 @@ +namespace AWS.Lambda.Powertools.Kafka; + +public class ConsumerRecord +{ + public string Topic { get; set; } + public int Partition { get; set; } + public long Offset { get; set; } + public long Timestamp { get; set; } + public string TimestampType { get; set; } + public string Key { get; set; } + public T Value { get; set; } + public Dictionary Headers { get; set; } +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs similarity index 51% rename from libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs rename to libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs index a2ba2424a..111ea6fff 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/KafkaEvent.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs @@ -3,14 +3,14 @@ namespace AWS.Lambda.Powertools.Kafka; -public class KafkaEvent : IEnumerable> +public class ConsumerRecords : IEnumerable> { public string EventSource { get; set; } public string EventSourceArn { get; set; } public string BootstrapServers { get; set; } - internal Dictionary>> Records { get; set; } = new(); + internal Dictionary>> Records { get; set; } = new(); - public IEnumerator> GetEnumerator() + public IEnumerator> GetEnumerator() { foreach (var topicRecords in Records) { @@ -26,16 +26,4 @@ IEnumerator IEnumerable.GetEnumerator() { return GetEnumerator(); } -} - -public class KafkaRecord -{ - public string Topic { get; set; } - public int Partition { 
get; set; } - public long Offset { get; set; } - public long Timestamp { get; set; } - public string TimestampType { get; set; } - public string Key { get; set; } - public T Value { get; set; } - public Dictionary Headers { get; set; } } \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs index ae0f2b92e..a39746244 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -25,7 +25,7 @@ public T Deserialize(Stream requestStream) var targetType = typeof(T); - if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(KafkaEvent<>)) + if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(ConsumerRecords<>)) { var payloadType = targetType.GetGenericArguments()[0]; using var document = JsonDocument.Parse(json); @@ -52,7 +52,7 @@ public T Deserialize(Stream requestStream) // Create records dictionary with correct generic type var dictType = typeof(Dictionary<,>).MakeGenericType( typeof(string), - typeof(List<>).MakeGenericType(typeof(KafkaRecord<>).MakeGenericType(payloadType)) + typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType)) ); var records = Activator.CreateInstance(dictType); var dictAddMethod = dictType.GetMethod("Add"); @@ -64,14 +64,14 @@ public T Deserialize(Stream requestStream) string topicName = topicPartition.Name; // Create list of records with correct generic type - var listType = typeof(List<>).MakeGenericType(typeof(KafkaRecord<>).MakeGenericType(payloadType)); + var listType = typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType)); var recordsList = Activator.CreateInstance(listType); var listAddMethod = listType.GetMethod("Add"); foreach (var recordElement in topicPartition.Value.EnumerateArray()) { // Create record instance of correct type - var recordType = typeof(KafkaRecord<>).MakeGenericType(payloadType); + var recordType = typeof(ConsumerRecord<>).MakeGenericType(payloadType); var record = Activator.CreateInstance(recordType); // Set basic properties diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs index c90682f76..b11a2924b 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -20,7 +20,7 @@ public async Task Handler_ProcessesKafkaEvent_Successfully() using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); // Act - Deserialize and process - var kafkaEvent = serializer.Deserialize>(stream); + var kafkaEvent = serializer.Deserialize>(stream); var response = await Handler(kafkaEvent, mockContext); // Assert @@ -123,7 +123,7 @@ private string ConvertToAvroBase64(AvroProduct product) } // Define the test handler method - private async Task Handler(KafkaEvent records, ILambdaContext context) + private async Task Handler(ConsumerRecords records, ILambdaContext context) { foreach (var record in records) { @@ -145,7 +145,7 @@ public async Task Handler_ProcessesMultipleTopics_WithNestedLoops() using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); // Act - var kafkaEvent = serializer.Deserialize>(stream); + var kafkaEvent = serializer.Deserialize>(stream); var response = 
await HandlerWithNestedLoops(kafkaEvent, mockContext); // Assert @@ -234,9 +234,9 @@ private string GetMockMultiTopicKafkaEvent() }}"; } - private async Task HandlerWithNestedLoops(KafkaEvent kafkaEvent, ILambdaContext context) + private async Task HandlerWithNestedLoops(ConsumerRecords consumerRecords, ILambdaContext context) { - foreach (var record in kafkaEvent) + foreach (var record in consumerRecords) { var product = record.Value; context.Logger.LogInformation($"Processing {product.name} at ${product.price} from topic {record.Topic}"); diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs index dd49a5597..3297ce325 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -17,7 +17,7 @@ public void Deserialize_KafkaEventWithAvroPayload_DeserializesToCorrectType() using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); // Act - var result = serializer.Deserialize>(stream); + var result = serializer.Deserialize>(stream); // Assert Assert.NotNull(result); @@ -56,13 +56,13 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); // Act - var result = serializer.Deserialize>(stream); + var result = serializer.Deserialize>(stream); // Assert - Test enumeration int count = 0; var products = new List(); - // Directly iterate over KafkaEvent + // Directly iterate over ConsumerRecords foreach (var record in result) { count++; From 802f5a47497cd9dd69fe37cda81a584946332d02 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Fri, 6 Jun 2025 11:33:36 +0100 Subject: [PATCH 04/35] feat(kafka): enhance ConsumerRecord and ConsumerRecords with internal setters and XML documentation --- .../ConsumerRecord.cs | 53 ++++++++++++--- .../ConsumerRecords.cs | 30 +++++++-- .../PowertoolsKafkaAvroSerializer.cs | 67 +++++++++++++++++-- 3 files changed, 130 insertions(+), 20 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs index 0c9a24d17..6f8dc42b8 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs @@ -1,13 +1,48 @@ namespace AWS.Lambda.Powertools.Kafka; +/// +/// Represents a single record consumed from a Kafka topic. +/// +/// The type of the record's value. public class ConsumerRecord { - public string Topic { get; set; } - public int Partition { get; set; } - public long Offset { get; set; } - public long Timestamp { get; set; } - public string TimestampType { get; set; } - public string Key { get; set; } - public T Value { get; set; } - public Dictionary Headers { get; set; } -} \ No newline at end of file + /// + /// Gets the Kafka topic name from which the record was consumed. + /// + public string Topic { get; internal set; } + + /// + /// Gets the Kafka partition from which the record was consumed. + /// + public int Partition { get; internal set; } + + /// + /// Gets the offset of the record within its Kafka partition. + /// + public long Offset { get; internal set; } + + /// + /// Gets the timestamp of the record (typically in Unix time). 
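+ /// In the bundled sample events this is a millisecond epoch value such as 1545084650987.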
+ /// + public long Timestamp { get; internal set; } + + /// + /// Gets the type of timestamp (e.g., "CREATE_TIME" or "LOG_APPEND_TIME"). + /// + public string TimestampType { get; internal set; } + + /// + /// Gets the key of the record (often used for partitioning). + /// + public string Key { get; internal set; } + + /// + /// Gets the deserialized value of the record. + /// + public T Value { get; internal set; } + + /// + /// Gets the headers associated with the record. + /// + public Dictionary Headers { get; internal set; } +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs index 111ea6fff..9f43a6a71 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs @@ -1,15 +1,35 @@ using System.Collections; -using System.Text.Json; namespace AWS.Lambda.Powertools.Kafka; +/// +/// Represents a collection of Kafka consumer records that can be enumerated. +/// Contains event metadata and records organized by topics. +/// +/// The type of the record values. public class ConsumerRecords : IEnumerable> { - public string EventSource { get; set; } - public string EventSourceArn { get; set; } - public string BootstrapServers { get; set; } + /// + /// Gets the event source (typically "aws:kafka"). + /// + public string EventSource { get; internal set; } = null!; + + /// + /// Gets the ARN of the event source (MSK cluster or Self-managed Kafka). + /// + public string EventSourceArn { get; internal set; } = null!; + + /// + /// Gets the Kafka bootstrap servers connection string. + /// + public string BootstrapServers { get; internal set; } = null!; + internal Dictionary>> Records { get; set; } = new(); - + + /// + /// Returns an enumerator that iterates through all consumer records across all topics. + /// + /// An enumerator of ConsumerRecord<T> objects. public IEnumerator> GetEnumerator() { foreach (var topicRecords in Records) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs index a39746244..efb759157 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -2,15 +2,35 @@ using Avro; using Avro.IO; using Avro.Specific; -using System; -using System.Collections.Generic; -using System.IO; using System.Reflection; using System.Text; using System.Text.Json; namespace AWS.Lambda.Powertools.Kafka; +/// +/// A Lambda serializer for Kafka events that handles Avro-formatted data. +/// This serializer automatically deserializes the Avro binary format from base64-encoded strings +/// in Kafka records and converts them to strongly-typed objects. 
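+/// Register it once at the assembly level and Lambda routes all handler input through it, as the example below shows.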
+/// +/// +/// +/// [assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))] +/// +/// // Your Lambda handler will receive properly deserialized objects +/// public class Function +/// { +/// public void Handler(ConsumerRecords<Customer> records, ILambdaContext context) +/// { +/// foreach (var record in records) +/// { +/// Customer customer = record.Value; +/// context.Logger.LogInformation($"Processed customer {customer.Name}, age {customer.Age}"); +/// } +/// } +/// } +/// +/// public class PowertoolsKafkaAvroSerializer : ILambdaSerializer { private readonly JsonSerializerOptions _jsonOptions = new() @@ -18,6 +38,13 @@ public class PowertoolsKafkaAvroSerializer : ILambdaSerializer PropertyNameCaseInsensitive = true }; + /// + /// Deserializes the Lambda input stream into the specified type. + /// Specializes in handling Kafka events with Avro-serialized payloads. + /// + /// The type to deserialize to. For Kafka events, typically ConsumerRecords<TPayload>. + /// The stream containing the serialized Lambda event. + /// The deserialized object of type T. public T Deserialize(Stream requestStream) { using var reader = new StreamReader(requestStream); @@ -64,7 +91,8 @@ public T Deserialize(Stream requestStream) string topicName = topicPartition.Name; // Create list of records with correct generic type - var listType = typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType)); + var listType = + typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType)); var recordsList = Activator.CreateInstance(listType); var listAddMethod = listType.GetMethod("Add"); @@ -167,14 +195,22 @@ public T Deserialize(Stream requestStream) } } - targetType.GetProperty("Records",BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)?.SetValue(typedEvent, records); + targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, records); return (T)typedEvent; } return JsonSerializer.Deserialize(json, _jsonOptions); } - + /// + /// Sets a property value on an object instance from a JsonElement. + /// + /// The type of the object. + /// The object instance. + /// The name of the property to set. + /// The JsonElement containing the source data. + /// The property name within the JsonElement. private void SetProperty(Type type, object instance, string propertyName, JsonElement element, string jsonPropertyName) { @@ -198,6 +234,13 @@ private void SetProperty(Type type, object instance, string propertyName, property.SetValue(instance, value); } + /// + /// Gets the Avro schema for the specified type. + /// The type must have a public static _SCHEMA field defined. + /// + /// The type to get the Avro schema for. + /// The Avro Schema object. + /// Thrown if no schema is found for the type. private Schema GetAvroSchema(Type payloadType) { var schemaField = payloadType.GetField("_SCHEMA", @@ -209,6 +252,12 @@ private Schema GetAvroSchema(Type payloadType) return schemaField.GetValue(null) as Schema; } + /// + /// Deserializes a base64-encoded Avro binary value into an object. + /// + /// The base64-encoded Avro binary data. + /// The Avro schema to use for deserialization. + /// The deserialized object. 
private object DeserializeAvroValue(string base64Value, Schema schema) { byte[] avroBytes = Convert.FromBase64String(base64Value); @@ -218,6 +267,12 @@ private object DeserializeAvroValue(string base64Value, Schema schema) return reader.Read(null, decoder); } + /// + /// Serializes an object to JSON and writes it to the provided stream. + /// + /// The type of object to serialize. + /// The object to serialize. + /// The stream to write the serialized data to. public void Serialize(T response, Stream responseStream) { using var writer = new StreamWriter(responseStream); From a2169daef9df18bad50f43baa343f5a03a68823d Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Fri, 6 Jun 2025 11:45:42 +0100 Subject: [PATCH 05/35] feat(kafka): improve ConsumerRecord properties with nullability checks and enhance Avro deserialization error handling --- .../ConsumerRecord.cs | 25 +++++-- .../PowertoolsKafkaAvroSerializer.cs | 69 +++++++++++-------- 2 files changed, 60 insertions(+), 34 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs index 6f8dc42b8..ba613bff0 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs @@ -4,12 +4,23 @@ namespace AWS.Lambda.Powertools.Kafka; /// Represents a single record consumed from a Kafka topic. /// /// The type of the record's value. +/// +/// +/// var record = new ConsumerRecord<Customer> +/// { +/// Topic = "customers", +/// Partition = 0, +/// Offset = 42, +/// Value = new Customer { Id = 123, Name = "John Doe" } +/// }; +/// +/// public class ConsumerRecord { /// - /// Gets the Kafka topic name from which the record was consumed. + /// Gets or sets the Kafka topic name from which the record was consumed. /// - public string Topic { get; internal set; } + public string Topic { get; internal set; } = null!; /// /// Gets the Kafka partition from which the record was consumed. @@ -29,20 +40,20 @@ public class ConsumerRecord /// /// Gets the type of timestamp (e.g., "CREATE_TIME" or "LOG_APPEND_TIME"). /// - public string TimestampType { get; internal set; } + public string TimestampType { get; internal set; } = null!; /// /// Gets the key of the record (often used for partitioning). /// - public string Key { get; internal set; } + public string Key { get; internal set; } = null!; /// /// Gets the deserialized value of the record. /// - public T Value { get; internal set; } + public T Value { get; internal set; } = default!; /// /// Gets the headers associated with the record. /// - public Dictionary Headers { get; internal set; } -} + public Dictionary Headers { get; internal set; } = null!; +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs index efb759157..91c4992f7 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -59,7 +59,8 @@ public T Deserialize(Stream requestStream) var root = document.RootElement; // Create the correctly typed instance - var typedEvent = Activator.CreateInstance(targetType); + var typedEvent = Activator.CreateInstance(targetType) ?? 
+ throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); // Set basic properties if (root.TryGetProperty("eventSource", out var eventSource)) @@ -68,10 +69,10 @@ public T Deserialize(Stream requestStream) ?.SetValue(typedEvent, eventSource.GetString()); if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) - targetType.GetProperty("EventSourceArn").SetValue(typedEvent, eventSourceArn.GetString()); + targetType.GetProperty("EventSourceArn")?.SetValue(typedEvent, eventSourceArn.GetString()); if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) - targetType.GetProperty("BootstrapServers").SetValue(typedEvent, bootstrapServers.GetString()); + targetType.GetProperty("BootstrapServers")?.SetValue(typedEvent, bootstrapServers.GetString()); // Get the schema for Avro deserialization Schema schema = GetAvroSchema(payloadType); @@ -81,8 +82,10 @@ public T Deserialize(Stream requestStream) typeof(string), typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType)) ); - var records = Activator.CreateInstance(dictType); - var dictAddMethod = dictType.GetMethod("Add"); + var records = Activator.CreateInstance(dictType) ?? + throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}"); + var dictAddMethod = dictType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on dictionary type"); if (root.TryGetProperty("records", out var recordsElement)) { @@ -93,14 +96,18 @@ public T Deserialize(Stream requestStream) // Create list of records with correct generic type var listType = typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType)); - var recordsList = Activator.CreateInstance(listType); - var listAddMethod = listType.GetMethod("Add"); + var recordsList = Activator.CreateInstance(listType) ?? + throw new InvalidOperationException($"Failed to create list of type {listType.Name}"); + var listAddMethod = listType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on list type"); foreach (var recordElement in topicPartition.Value.EnumerateArray()) { // Create record instance of correct type var recordType = typeof(ConsumerRecord<>).MakeGenericType(payloadType); var record = Activator.CreateInstance(recordType); + if (record == null) + continue; // Set basic properties SetProperty(recordType, record, "Topic", recordElement, "topic"); @@ -113,8 +120,10 @@ public T Deserialize(Stream requestStream) if (recordElement.TryGetProperty("key", out var keyElement) && keyElement.ValueKind == JsonValueKind.String) { - string base64Key = keyElement.GetString(); - recordType.GetProperty("Key").SetValue(record, base64Key); + string? 
base64Key = keyElement.GetString(); + var keyProperty = recordType.GetProperty("Key"); + if (keyProperty != null) + keyProperty.SetValue(record, base64Key); // Base64 decode the key if (!string.IsNullOrEmpty(base64Key)) @@ -123,7 +132,7 @@ public T Deserialize(Stream requestStream) { byte[] keyBytes = Convert.FromBase64String(base64Key); string decodedKey = Encoding.UTF8.GetString(keyBytes); - recordType.GetProperty("Key").SetValue(record, decodedKey); + keyProperty?.SetValue(record, decodedKey); } catch (Exception) { @@ -136,18 +145,21 @@ public T Deserialize(Stream requestStream) if (recordElement.TryGetProperty("value", out var value) && value.ValueKind == JsonValueKind.String) { - string base64Value = value.GetString(); - // recordType.GetProperty("Value").SetValue(record, base64Value); + string? base64Value = value.GetString(); + var valueProperty = recordType.GetProperty("Value"); // Deserialize Avro data - try + if (base64Value != null && valueProperty != null) { - var deserializedValue = DeserializeAvroValue(base64Value, schema); - recordType.GetProperty("Value").SetValue(record, deserializedValue); - } - catch (Exception ex) - { - throw new Exception($"Failed to deserialize Avro data: {ex.Message}", ex); + try + { + var deserializedValue = DeserializeAvroValue(base64Value, schema); + valueProperty.SetValue(record, deserializedValue); + } + catch (Exception ex) + { + throw new Exception($"Failed to deserialize Avro data: {ex.Message}", ex); + } } } @@ -180,10 +192,7 @@ public T Deserialize(Stream requestStream) var headersProperty = recordType.GetProperty("Headers", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); - if (headersProperty != null) - { - headersProperty.SetValue(record, decodedHeaders); - } + headersProperty?.SetValue(record, decodedHeaders); } // Add to records list @@ -200,7 +209,8 @@ public T Deserialize(Stream requestStream) return (T)typedEvent; } - return JsonSerializer.Deserialize(json, _jsonOptions); + var result = JsonSerializer.Deserialize(json, _jsonOptions); + return result != null ? result : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); } /// @@ -228,7 +238,7 @@ private void SetProperty(Type type, object instance, string propertyName, if (propertyType == typeof(int)) value = jsonValue.GetInt32(); else if (propertyType == typeof(long)) value = jsonValue.GetInt64(); else if (propertyType == typeof(double)) value = jsonValue.GetDouble(); - else if (propertyType == typeof(string)) value = jsonValue.GetString(); + else if (propertyType == typeof(string)) value = jsonValue.GetString()!; else return; property.SetValue(instance, value); @@ -249,7 +259,11 @@ private Schema GetAvroSchema(Type payloadType) if (schemaField == null) throw new InvalidOperationException($"No Avro schema found for type {payloadType.Name}"); - return schemaField.GetValue(null) as Schema; + var schema = schemaField.GetValue(null) as Schema; + if (schema == null) + throw new InvalidOperationException($"Avro schema for type {payloadType.Name} is null"); + + return schema; } /// @@ -264,7 +278,8 @@ private object DeserializeAvroValue(string base64Value, Schema schema) using var stream = new MemoryStream(avroBytes); var decoder = new BinaryDecoder(stream); var reader = new SpecificDatumReader(schema, schema); - return reader.Read(null, decoder); + var result = reader.Read(null!, decoder); + return result ?? 
throw new InvalidOperationException("Failed to deserialize Avro value"); } /// From a1bd2068cd582d6a5062fd9eb7eb29fafcd9c2b9 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Fri, 6 Jun 2025 16:02:16 +0100 Subject: [PATCH 06/35] feat(kafka): refactor ConsumerRecord and ConsumerRecords to support key-value pairs and add Protobuf serialization --- .../AWS.Lambda.Powertools.Kafka.csproj | 1 + .../ConsumerRecord.cs | 5 +- .../ConsumerRecords.cs | 9 +- .../PowertoolsKafkaAvroSerializer.cs | 156 ++++++++++++++---- libraries/src/Directory.Packages.props | 1 + .../AWS.Lambda.Powertools.Kafka.Tests.csproj | 11 ++ .../Lambda/Powertools/Kafka/Tests/AvroKey.cs | 70 ++++++++ .../Lambda/Powertools/Kafka/Tests/Color.cs | 23 +++ .../Avro/AvroKey.avsc | 24 +++ .../Avro/HandlerTests.cs | 120 +++++++++----- .../PowertoolsKafkaAvroSerializerTests.cs | 6 +- .../PowertoolsKafkaProtobufSerializerTests.cs | 24 +++ .../Protobuf/Product.proto | 9 + .../Readme.md | 22 ++- libraries/tests/Directory.Packages.props | 1 + 15 files changed, 396 insertions(+), 86 deletions(-) create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/Color.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroKey.avsc create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Product.proto diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj index 476f85faf..dadef4c35 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj @@ -15,6 +15,7 @@ + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs index ba613bff0..ac8cd80dc 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs @@ -4,6 +4,7 @@ namespace AWS.Lambda.Powertools.Kafka; /// Represents a single record consumed from a Kafka topic. /// /// The type of the record's value. +/// The type of the key value /// /// /// var record = new ConsumerRecord<Customer> @@ -15,7 +16,7 @@ namespace AWS.Lambda.Powertools.Kafka; /// }; /// /// -public class ConsumerRecord +public class ConsumerRecord { /// /// Gets or sets the Kafka topic name from which the record was consumed. @@ -45,7 +46,7 @@ public class ConsumerRecord /// /// Gets the key of the record (often used for partitioning). /// - public string Key { get; internal set; } = null!; + public TK Key { get; internal set; } = default!; /// /// Gets the deserialized value of the record. diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs index 9f43a6a71..c488301d8 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs @@ -6,8 +6,9 @@ namespace AWS.Lambda.Powertools.Kafka; /// Represents a collection of Kafka consumer records that can be enumerated. /// Contains event metadata and records organized by topics. 
 /// 
-/// The type of the record values.
-public class ConsumerRecords<T> : IEnumerable<ConsumerRecord<T>>
+/// The type of the record values from the event.
+/// The type of the record keys from the event.
+public class ConsumerRecords<TK, T> : IEnumerable<ConsumerRecord<TK, T>>
 {
     /// 
     /// Gets the event source (typically "aws:kafka").
@@ -24,13 +25,13 @@ public class ConsumerRecords<T> : IEnumerable<ConsumerRecord<T>>
     /// 
     public string BootstrapServers { get; internal set; } = null!;
 
-    internal Dictionary<string, List<ConsumerRecord<T>>> Records { get; set; } = new();
+    internal Dictionary<string, List<ConsumerRecord<TK, T>>> Records { get; set; } = new();
 
     /// 
     /// Returns an enumerator that iterates through all consumer records across all topics.
     /// 
-    /// An enumerator of ConsumerRecord<T> objects.
-    public IEnumerator<ConsumerRecord<T>> GetEnumerator()
+    /// An enumerator of ConsumerRecord<TK, T> objects.
+    public IEnumerator<ConsumerRecord<TK, T>> GetEnumerator()
     {
         foreach (var topicRecords in Records)
         {
diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs
index 91c4992f7..32b69ed55 100644
--- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs
@@ -52,15 +52,18 @@ public T Deserialize(Stream requestStream)
 
         var targetType = typeof(T);
 
-        if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(ConsumerRecords<>))
+        if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(ConsumerRecords<,>))
         {
-            var payloadType = targetType.GetGenericArguments()[0];
+            var typeArgs = targetType.GetGenericArguments();
+            var keyType = typeArgs[0];
+            var valueType = typeArgs[1];
+
             using var document = JsonDocument.Parse(json);
             var root = document.RootElement;
 
             // Create the correctly typed instance
-            var typedEvent = Activator.CreateInstance(targetType) ??
-                             throw new InvalidOperationException($"Failed to create instance of {targetType.Name}");
+            var typedEvent = Activator.CreateInstance(targetType) ??
+                             throw new InvalidOperationException($"Failed to create instance of {targetType.Name}");
 
             // Set basic properties
             if (root.TryGetProperty("eventSource", out var eventSource))
@@ -74,18 +77,18 @@ public T Deserialize(Stream requestStream)
             if (root.TryGetProperty("bootstrapServers", out var bootstrapServers))
                 targetType.GetProperty("BootstrapServers")?.SetValue(typedEvent, bootstrapServers.GetString());
 
-            // Get the schema for Avro deserialization
-            Schema schema = GetAvroSchema(payloadType);
+            // Get the schema for Avro deserialization (for value)
+            Schema schema = GetAvroSchema(valueType);
 
-            // Create records dictionary with correct generic type
+            // Create records dictionary with correct generic types
             var dictType = typeof(Dictionary<,>).MakeGenericType(
                 typeof(string),
-                typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType))
+                typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType))
             );
-            var records = Activator.CreateInstance(dictType) ??
-                          throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}");
-            var dictAddMethod = dictType.GetMethod("Add") ??
+            var records = Activator.CreateInstance(dictType) ??
+                          throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}");
+            var dictAddMethod = dictType.GetMethod("Add") ??
+                                throw new InvalidOperationException("Add method not found on dictionary type");
 
             if (root.TryGetProperty("records", out var recordsElement))
             {
@@ -93,18 +96,19 @@ public T Deserialize(Stream requestStream)
                 {
                     string topicName = topicPartition.Name;
 
-                    // Create list of records with correct generic type
+                    // Create list of records with correct generic types
                     var listType =
-                        typeof(List<>).MakeGenericType(typeof(ConsumerRecord<>).MakeGenericType(payloadType));
-                    var recordsList = Activator.CreateInstance(listType) ??
-                                      throw new InvalidOperationException($"Failed to create list of type {listType.Name}");
+                        typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType));
+                    var recordsList = Activator.CreateInstance(listType) ??
+                                      throw new InvalidOperationException(
+                                          $"Failed to create list of type {listType.Name}");
+                    var listAddMethod = listType.GetMethod("Add") ??
+                                        throw new InvalidOperationException("Add method not found on list type");
 
                     foreach (var recordElement in topicPartition.Value.EnumerateArray())
                     {
                         // Create record instance of correct type
-                        var recordType = typeof(ConsumerRecord<>).MakeGenericType(payloadType);
+                        var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType);
                         var record = Activator.CreateInstance(recordType);
                         if (record == null)
                             continue;
@@ -116,36 +120,33 @@ public T Deserialize(Stream requestStream)
                         SetProperty(recordType, record, "Timestamp", recordElement, "timestamp");
                         SetProperty(recordType, record, "TimestampType", recordElement, "timestampType");
 
-                        // Handle key - base64 decode if present
+                        // Handle key - base64 decode and convert to the correct type
                         if (recordElement.TryGetProperty("key", out var keyElement) &&
                             keyElement.ValueKind == JsonValueKind.String)
                         {
                             string? base64Key = keyElement.GetString();
-                            var keyProperty = recordType.GetProperty("Key");
-                            if (keyProperty != null)
-                                keyProperty.SetValue(record, base64Key);
-
-                            // Base64 decode the key
                             if (!string.IsNullOrEmpty(base64Key))
                             {
                                 try
                                 {
                                     byte[] keyBytes = Convert.FromBase64String(base64Key);
-                                    string decodedKey = Encoding.UTF8.GetString(keyBytes);
+                                    object? decodedKey = DeserializeKey(keyBytes, keyType);
+
+                                    var keyProperty = recordType.GetProperty("Key");
                                     keyProperty?.SetValue(record, decodedKey);
                                 }
-                                catch (Exception)
+                                catch (Exception)
                                 {
-                                    // If decoding fails, leave it as is
+                                    // If key deserialization fails, leave the key as its default value
                                 }
                             }
                         }
 
                         // Handle Avro value
-                        if (recordElement.TryGetProperty("value", out var value) &&
-                            value.ValueKind == JsonValueKind.String)
+                        if (recordElement.TryGetProperty("value", out var valueElement) &&
+                            valueElement.ValueKind == JsonValueKind.String)
                         {
-                            string? base64Value = value.GetString();
+                            string?
base64Value = valueElement.GetString(); var valueProperty = recordType.GetProperty("Value"); // Deserialize Avro data @@ -163,6 +164,7 @@ public T Deserialize(Stream requestStream) } } + // Process headers if (recordElement.TryGetProperty("headers", out var headersElement) && headersElement.ValueKind == JsonValueKind.Array) { @@ -175,7 +177,6 @@ public T Deserialize(Stream requestStream) string headerKey = header.Name; if (header.Value.ValueKind == JsonValueKind.Array) { - // Convert integer array to byte array byte[] headerBytes = new byte[header.Value.GetArrayLength()]; int i = 0; foreach (var byteVal in header.Value.EnumerateArray()) @@ -183,7 +184,6 @@ public T Deserialize(Stream requestStream) headerBytes[i++] = (byte)byteVal.GetInt32(); } - // Decode as UTF-8 string string headerValue = Encoding.UTF8.GetString(headerBytes); decodedHeaders[headerKey] = headerValue; } @@ -210,7 +210,95 @@ public T Deserialize(Stream requestStream) } var result = JsonSerializer.Deserialize(json, _jsonOptions); - return result != null ? result : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); + return result != null + ? result + : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); + } + + private object? DeserializeKey(byte[] keyBytes, Type keyType) + { + if (keyBytes == null || keyBytes.Length == 0) + return null; + + if (keyType == typeof(int)) + { + // First try to interpret as a string representation and parse + string stringValue = Encoding.UTF8.GetString(keyBytes); + if (int.TryParse(stringValue, out int parsedValue)) + return parsedValue; + + // Fall back to binary representation if parsing fails + if (keyBytes.Length >= 4) + return BitConverter.ToInt32(keyBytes, 0); + else if (keyBytes.Length == 1) + return (int)keyBytes[0]; + + return 0; + } + else if (keyType == typeof(long)) + { + // Try string parsing first + string stringValue = Encoding.UTF8.GetString(keyBytes); + if (long.TryParse(stringValue, out long parsedValue)) + return parsedValue; + + // Fall back to binary + if (keyBytes.Length >= 8) + return BitConverter.ToInt64(keyBytes, 0); + else if (keyBytes.Length >= 4) + return (long)BitConverter.ToInt32(keyBytes, 0); + + return 0L; + } + else if (keyType == typeof(string)) + { + // String conversion is safe regardless of length + return Encoding.UTF8.GetString(keyBytes); + } + else if (keyType == typeof(double)) + { + if (keyBytes.Length >= 8) + return BitConverter.ToDouble(keyBytes, 0); + else + return 0.0; + } + else if (keyType == typeof(bool) && keyBytes.Length >= 1) + { + return keyBytes[0] != 0; + } + else if (keyType == typeof(Guid) && keyBytes.Length >= 16) + { + return new Guid(keyBytes); + } + + // For complex types - try Avro or JSON deserialization + try + { + // Try to get Avro schema for the key type + var schemaField = keyType.GetField("_SCHEMA", + BindingFlags.Public | BindingFlags.Static); + + if (schemaField != null) + { + var schema = schemaField.GetValue(null) as Schema; + if (schema != null) + { + using var stream = new MemoryStream(keyBytes); + var decoder = new BinaryDecoder(stream); + var reader = new SpecificDatumReader(schema, schema); + return reader.Read(null!, decoder); + } + } + + // As a fallback, try JSON deserialization + string jsonStr = Encoding.UTF8.GetString(keyBytes); + return JsonSerializer.Deserialize(jsonStr, keyType, _jsonOptions); + } + catch + { + // If all deserialization attempts fail, return null + return null; + } } /// diff --git 
a/libraries/src/Directory.Packages.props b/libraries/src/Directory.Packages.props index becc44d4f..be5d56855 100644 --- a/libraries/src/Directory.Packages.props +++ b/libraries/src/Directory.Packages.props @@ -13,6 +13,7 @@ + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj index aa28ba64c..c635cb91f 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj @@ -15,6 +15,10 @@ + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + @@ -52,6 +56,13 @@ PreserveNewest + + + PreserveNewest + + + PreserveNewest + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs new file mode 100644 index 000000000..96d09316e --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs @@ -0,0 +1,70 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroKey : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroKey"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""fields"":[{""name"":""id"",""type"":""int""},{""name"":""color"",""type"":{""type"":""enum"",""name"":""Color"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""symbols"":[""UNKNOWN"",""GREEN"",""RED""],""default"":""UNKNOWN""}}]}"); + private int _id; + private AWS.Lambda.Powertools.Kafka.Tests.Color _color = AWS.Lambda.Powertools.Kafka.Tests.Color.UNKNOWN; + public virtual global::Avro.Schema Schema + { + get + { + return AvroKey._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public AWS.Lambda.Powertools.Kafka.Tests.Color color + { + get + { + return this._color; + } + set + { + this._color = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.color; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.color = (AWS.Lambda.Powertools.Kafka.Tests.Color)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/Color.cs 
b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/Color.cs new file mode 100644 index 000000000..963233679 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/Color.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum Color + { + UNKNOWN, + GREEN, + RED, + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroKey.avsc b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroKey.avsc new file mode 100644 index 000000000..cc15c9e72 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroKey.avsc @@ -0,0 +1,24 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroKey", + "fields": [ + { + "name": "id", + "type": "int" + }, + { + "name": "color", + "type": { + "type": "enum", + "name": "Color", + "symbols": [ + "UNKNOWN", + "GREEN", + "RED" + ], + "default": "UNKNOWN" + } + } + ] +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs index b11a2924b..507ca2edb 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -20,7 +20,7 @@ public async Task Handler_ProcessesKafkaEvent_Successfully() using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); // Act - Deserialize and process - var kafkaEvent = serializer.Deserialize>(stream); + var kafkaEvent = serializer.Deserialize>(stream); var response = await Handler(kafkaEvent, mockContext); // Assert @@ -46,8 +46,14 @@ public async Task Handler_ProcessesKafkaEvent_Successfully() Assert.Equal(999.99, product.price); // Verify decoded key and headers - Assert.Equal("42", firstRecord.Key); + Assert.Equal(42, firstRecord.Key); Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); + + var secondRecord = records[1]; + Assert.Equal(43, secondRecord.Key); + + var thirdRecord = records[2]; + Assert.Equal(0, thirdRecord.Key); } private string GetMockKafkaEvent() @@ -62,6 +68,9 @@ private string GetMockKafkaEvent() string smartphoneBase64 = ConvertToAvroBase64(smartphone); string headphonesBase64 = ConvertToAvroBase64(headphones); + string firstRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("42")); // Example key + string secondRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("43")); // Example key for second record + // Create mock Kafka event JSON return @$"{{ ""eventSource"": ""aws:kafka"", @@ -75,7 +84,7 @@ private string GetMockKafkaEvent() ""offset"": 15, ""timestamp"": 1545084650987, ""timestampType"": ""CREATE_TIME"", - ""key"": ""NDI="", + ""key"": ""{firstRecordKey}"", ""value"": ""{laptopBase64}"", ""headers"": [ {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 
108, 117, 101] }} @@ -87,7 +96,7 @@ private string GetMockKafkaEvent() ""offset"": 16, ""timestamp"": 1545084650988, ""timestampType"": ""CREATE_TIME"", - ""key"": ""NDI="", + ""key"": ""{secondRecordKey}"", ""value"": ""{smartphoneBase64}"", ""headers"": [ {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} @@ -123,7 +132,7 @@ private string ConvertToAvroBase64(AvroProduct product) } // Define the test handler method - private async Task Handler(ConsumerRecords records, ILambdaContext context) + private async Task Handler(ConsumerRecords records, ILambdaContext context) { foreach (var record in records) { @@ -135,56 +144,78 @@ private async Task Handler(ConsumerRecords records, ILambda } [Fact] - public async Task Handler_ProcessesMultipleTopics_WithNestedLoops() + public async Task Handler_ProcessesKafkaEvent_WithAvroKey_Successfully() { // Arrange - var kafkaJson = GetMockMultiTopicKafkaEvent(); + var kafkaJson = GetMockKafkaEventWithAvroKeys(); var mockContext = new TestLambdaContext(); var serializer = new PowertoolsKafkaAvroSerializer(); + // Convert JSON string to stream for deserialization using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); - // Act - var kafkaEvent = serializer.Deserialize>(stream); - var response = await HandlerWithNestedLoops(kafkaEvent, mockContext); + // Act - Deserialize and process + var kafkaEvent = serializer.Deserialize>(stream); + var response = await HandlerWithAvroKeys(kafkaEvent, mockContext); // Assert - Assert.Equal("Successfully processed Kafka events from multiple topics", response); - Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Equal("Successfully processed Kafka events", response); - // Check that we have two topics - Assert.Equal(2, kafkaEvent.Records.Count); - Assert.True(kafkaEvent.Records.ContainsKey("mytopic-0")); - Assert.True(kafkaEvent.Records.ContainsKey("anothertopic-0")); + // Verify event structure + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Single(kafkaEvent.Records); - // Verify records count - Assert.Equal(2, kafkaEvent.Records["mytopic-0"].Count); - Assert.Equal(1, kafkaEvent.Records["anothertopic-0"].Count); + // Verify record content + var records = kafkaEvent.Records["mytopic-0"]; + Assert.Equal(3, records.Count); - // Verify first record's content - var firstRecord = kafkaEvent.Records["mytopic-0"][0]; + // Verify first record + var firstRecord = records[0]; Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + + // Verify deserialized Avro key and value Assert.Equal("Laptop", firstRecord.Value.name); + Assert.Equal(999.99, firstRecord.Value.price); + Assert.Equal(1, firstRecord.Key.id); + Assert.Equal(Color.GREEN, firstRecord.Key.color); + + // Verify headers + Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); - // Verify the record in the second topic - var secondTopicRecord = kafkaEvent.Records["anothertopic-0"][0]; - Assert.Equal("anothertopic", secondTopicRecord.Topic); - Assert.Equal("Headphones", secondTopicRecord.Value.name); + var secondRecord = records[1]; + Assert.Equal(2, secondRecord.Key.id); + Assert.Equal(Color.UNKNOWN, secondRecord.Key.color); + + var thirdRecord = records[2]; + Assert.Equal(3, thirdRecord.Key.id); + Assert.Equal(Color.RED, thirdRecord.Key.color); } - private string GetMockMultiTopicKafkaEvent() + private string GetMockKafkaEventWithAvroKeys() { // Create test products var laptop = new AvroProduct { name = "Laptop", price = 999.99 }; 
var smartphone = new AvroProduct { name = "Smartphone", price = 499.99 }; var headphones = new AvroProduct { name = "Headphones", price = 99.99 }; - // Convert to base64-encoded Avro + // Create test keys + var key1 = new AvroKey { id = 1, color = Color.GREEN }; + var key2 = new AvroKey { id = 2 }; + var key3 = new AvroKey { id = 3, color = Color.RED }; + + // Convert values to base64-encoded Avro string laptopBase64 = ConvertToAvroBase64(laptop); string smartphoneBase64 = ConvertToAvroBase64(smartphone); string headphonesBase64 = ConvertToAvroBase64(headphones); - // Create mock Kafka event JSON with multiple topics + // Convert keys to base64-encoded Avro + string key1Base64 = ConvertKeyToAvroBase64(key1); + string key2Base64 = ConvertKeyToAvroBase64(key2); + string key3Base64 = ConvertKeyToAvroBase64(key3); + + // Create mock Kafka event JSON return @$"{{ ""eventSource"": ""aws:kafka"", ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", @@ -197,7 +228,7 @@ private string GetMockMultiTopicKafkaEvent() ""offset"": 15, ""timestamp"": 1545084650987, ""timestampType"": ""CREATE_TIME"", - ""key"": ""NDI="", + ""key"": ""{key1Base64}"", ""value"": ""{laptopBase64}"", ""headers"": [ {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} @@ -209,21 +240,19 @@ private string GetMockMultiTopicKafkaEvent() ""offset"": 16, ""timestamp"": 1545084650988, ""timestampType"": ""CREATE_TIME"", - ""key"": ""NDI="", + ""key"": ""{key2Base64}"", ""value"": ""{smartphoneBase64}"", ""headers"": [ {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} ] - }} - ], - ""anothertopic-0"": [ + }}, {{ - ""topic"": ""anothertopic"", + ""topic"": ""mytopic"", ""partition"": 0, ""offset"": 17, ""timestamp"": 1545084650989, ""timestampType"": ""CREATE_TIME"", - ""key"": null, + ""key"": ""{key3Base64}"", ""value"": ""{headphonesBase64}"", ""headers"": [ {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} @@ -234,14 +263,27 @@ private string GetMockMultiTopicKafkaEvent() }}"; } - private async Task HandlerWithNestedLoops(ConsumerRecords consumerRecords, ILambdaContext context) + private string ConvertKeyToAvroBase64(AvroKey key) { - foreach (var record in consumerRecords) + using var stream = new MemoryStream(); + var encoder = new BinaryEncoder(stream); + var writer = new SpecificDatumWriter(AvroKey._SCHEMA); + + writer.Write(key, encoder); + encoder.Flush(); + + return Convert.ToBase64String(stream.ToArray()); + } + + private async Task HandlerWithAvroKeys(ConsumerRecords records, + ILambdaContext context) + { + foreach (var record in records) { + var key = record.Key.id; var product = record.Value; - context.Logger.LogInformation($"Processing {product.name} at ${product.price} from topic {record.Topic}"); } - return "Successfully processed Kafka events from multiple topics"; + return "Successfully processed Kafka events"; } } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs index 3297ce325..960928321 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -17,7 +17,7 @@ public void Deserialize_KafkaEventWithAvroPayload_DeserializesToCorrectType() using var stream 
= new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); // Act - var result = serializer.Deserialize>(stream); + var result = serializer.Deserialize>(stream); // Assert Assert.NotNull(result); @@ -33,7 +33,7 @@ public void Deserialize_KafkaEventWithAvroPayload_DeserializesToCorrectType() Assert.Equal("mytopic", firstRecord.Topic); Assert.Equal(0, firstRecord.Partition); Assert.Equal(15, firstRecord.Offset); - Assert.Equal("42", firstRecord.Key); + Assert.Equal(42, firstRecord.Key); // Verify deserialized Avro value var product = firstRecord.Value; @@ -56,7 +56,7 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); // Act - var result = serializer.Deserialize>(stream); + var result = serializer.Deserialize>(stream); // Assert - Test enumeration int count = 0; diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs new file mode 100644 index 000000000..e8697be23 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs @@ -0,0 +1,24 @@ +using TestKafka; + +namespace AWS.Lambda.Powertools.Kafka.Tests.Protobuf; + +public class PowertoolsKafkaProtobufSerializerTests +{ + [Fact] + public void DeserializeProtobufFromBase64() + { + // Base64 encoded Protobuf data + string base64EncodedProto = "COkHEgZMYXB0b3AZUrgehes/j0A="; + + // Decode base64 to bytes + byte[] protoBytes = Convert.FromBase64String(base64EncodedProto); + + // Deserialize to ProtobufProduct + var product = ProtobufProduct.Parser.ParseFrom(protoBytes); + + // Verify values + Assert.Equal("Laptop", product.Name); + Assert.Equal(1001, product.Id); + Assert.Equal(999.99, product.Price); + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Product.proto b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Product.proto new file mode 100644 index 000000000..1d4c64e90 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Product.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +option csharp_namespace = "TestKafka"; + +message ProtobufProduct { + int32 id = 1; + string name = 2; + double price = 3; +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md index 317c34a26..4df25b4b8 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md @@ -8,10 +8,24 @@ avrogen -s AvroProduct.avsc ./ ``` ```xml + - - - - + + + + ``` + +# Protobuf + +```xml + + + + PreserveNewest + + + + +``` \ No newline at end of file diff --git a/libraries/tests/Directory.Packages.props b/libraries/tests/Directory.Packages.props index 88751caf4..804b073e2 100644 --- a/libraries/tests/Directory.Packages.props +++ b/libraries/tests/Directory.Packages.props @@ -6,6 +6,7 @@ + From a981c9569f2b9b47f60c1483f3318166b76dacf5 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Fri, 6 Jun 2025 16:28:46 +0100 Subject: [PATCH 07/35] feat(kafka): add Protobuf support with Key.proto and enhance deserialization in PowertoolsKafkaProtobufSerializer --- .../PowertoolsKafkaAvroSerializer.cs | 357 +++--------------- .../PowertoolsKafkaJsonSerializer.cs | 65 ++++ 
.../PowertoolsKafkaProtobufSerializer.cs | 105 ++++++ .../PowertoolsKafkaSerializerBase.cs | 319 ++++++++++++++++ .../AWS.Lambda.Powertools.Kafka.Tests.csproj | 9 +- .../Protobuf/HandlerTests.cs | 300 +++++++++++++++ .../Protobuf/Key.proto | 14 + .../PowertoolsKafkaProtobufSerializerTests.cs | 83 +++- 8 files changed, 932 insertions(+), 320 deletions(-) create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Key.proto diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs index 32b69ed55..4ce245965 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -1,4 +1,3 @@ -using Amazon.Lambda.Core; using Avro; using Avro.IO; using Avro.Specific; @@ -20,7 +19,7 @@ namespace AWS.Lambda.Powertools.Kafka; /// // Your Lambda handler will receive properly deserialized objects /// public class Function /// { -/// public void Handler(ConsumerRecords<Customer> records, ILambdaContext context) +/// public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context) /// { /// foreach (var record in records) /// { @@ -31,307 +30,8 @@ namespace AWS.Lambda.Powertools.Kafka; /// } /// /// -public class PowertoolsKafkaAvroSerializer : ILambdaSerializer +public class PowertoolsKafkaAvroSerializer : PowertoolsKafkaSerializerBase { - private readonly JsonSerializerOptions _jsonOptions = new() - { - PropertyNameCaseInsensitive = true - }; - - /// - /// Deserializes the Lambda input stream into the specified type. - /// Specializes in handling Kafka events with Avro-serialized payloads. - /// - /// The type to deserialize to. For Kafka events, typically ConsumerRecords<TPayload>. - /// The stream containing the serialized Lambda event. - /// The deserialized object of type T. - public T Deserialize(Stream requestStream) - { - using var reader = new StreamReader(requestStream); - var json = reader.ReadToEnd(); - - var targetType = typeof(T); - - if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(ConsumerRecords<,>)) - { - var typeArgs = targetType.GetGenericArguments(); - var keyType = typeArgs[0]; - var valueType = typeArgs[1]; - - using var document = JsonDocument.Parse(json); - var root = document.RootElement; - - // Create the correctly typed instance - var typedEvent = Activator.CreateInstance(targetType) ?? 
- throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); - - // Set basic properties - if (root.TryGetProperty("eventSource", out var eventSource)) - targetType.GetProperty("EventSource", - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) - ?.SetValue(typedEvent, eventSource.GetString()); - - if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) - targetType.GetProperty("EventSourceArn")?.SetValue(typedEvent, eventSourceArn.GetString()); - - if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) - targetType.GetProperty("BootstrapServers")?.SetValue(typedEvent, bootstrapServers.GetString()); - - // Get the schema for Avro deserialization (for value) - Schema schema = GetAvroSchema(valueType); - - // Create records dictionary with correct generic types - var dictType = typeof(Dictionary<,>).MakeGenericType( - typeof(string), - typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)) - ); - var records = Activator.CreateInstance(dictType) ?? - throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}"); - var dictAddMethod = dictType.GetMethod("Add") ?? - throw new InvalidOperationException("Add method not found on dictionary type"); - - if (root.TryGetProperty("records", out var recordsElement)) - { - foreach (var topicPartition in recordsElement.EnumerateObject()) - { - string topicName = topicPartition.Name; - - // Create list of records with correct generic types - var listType = - typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)); - var recordsList = Activator.CreateInstance(listType) ?? - throw new InvalidOperationException( - $"Failed to create list of type {listType.Name}"); - var listAddMethod = listType.GetMethod("Add") ?? - throw new InvalidOperationException("Add method not found on list type"); - - foreach (var recordElement in topicPartition.Value.EnumerateArray()) - { - // Create record instance of correct type - var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType); - var record = Activator.CreateInstance(recordType); - if (record == null) - continue; - - // Set basic properties - SetProperty(recordType, record, "Topic", recordElement, "topic"); - SetProperty(recordType, record, "Partition", recordElement, "partition"); - SetProperty(recordType, record, "Offset", recordElement, "offset"); - SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); - SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); - - // Handle key - base64 decode and convert to the correct type - if (recordElement.TryGetProperty("key", out var keyElement) && - keyElement.ValueKind == JsonValueKind.String) - { - string? base64Key = keyElement.GetString(); - if (!string.IsNullOrEmpty(base64Key)) - { - try - { - byte[] keyBytes = Convert.FromBase64String(base64Key); - object? decodedKey = DeserializeKey(keyBytes, keyType); - - var keyProperty = recordType.GetProperty("Key"); - keyProperty?.SetValue(record, decodedKey); - } - catch (Exception ex) - { - // Log or handle key deserialization failures - } - } - } - - // Handle Avro value - if (recordElement.TryGetProperty("value", out var valueElement) && - valueElement.ValueKind == JsonValueKind.String) - { - string? 
base64Value = valueElement.GetString(); - var valueProperty = recordType.GetProperty("Value"); - - // Deserialize Avro data - if (base64Value != null && valueProperty != null) - { - try - { - var deserializedValue = DeserializeAvroValue(base64Value, schema); - valueProperty.SetValue(record, deserializedValue); - } - catch (Exception ex) - { - throw new Exception($"Failed to deserialize Avro data: {ex.Message}", ex); - } - } - } - - // Process headers - if (recordElement.TryGetProperty("headers", out var headersElement) && - headersElement.ValueKind == JsonValueKind.Array) - { - var decodedHeaders = new Dictionary(); - - foreach (var headerObj in headersElement.EnumerateArray()) - { - foreach (var header in headerObj.EnumerateObject()) - { - string headerKey = header.Name; - if (header.Value.ValueKind == JsonValueKind.Array) - { - byte[] headerBytes = new byte[header.Value.GetArrayLength()]; - int i = 0; - foreach (var byteVal in header.Value.EnumerateArray()) - { - headerBytes[i++] = (byte)byteVal.GetInt32(); - } - - string headerValue = Encoding.UTF8.GetString(headerBytes); - decodedHeaders[headerKey] = headerValue; - } - } - } - - var headersProperty = recordType.GetProperty("Headers", - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); - headersProperty?.SetValue(record, decodedHeaders); - } - - // Add to records list - listAddMethod.Invoke(recordsList, new[] { record }); - } - - // Add topic records to dictionary - dictAddMethod.Invoke(records, new[] { topicName, recordsList }); - } - } - - targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) - ?.SetValue(typedEvent, records); - return (T)typedEvent; - } - - var result = JsonSerializer.Deserialize(json, _jsonOptions); - return result != null - ? result - : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); - } - - private object? 
DeserializeKey(byte[] keyBytes, Type keyType) - { - if (keyBytes == null || keyBytes.Length == 0) - return null; - - if (keyType == typeof(int)) - { - // First try to interpret as a string representation and parse - string stringValue = Encoding.UTF8.GetString(keyBytes); - if (int.TryParse(stringValue, out int parsedValue)) - return parsedValue; - - // Fall back to binary representation if parsing fails - if (keyBytes.Length >= 4) - return BitConverter.ToInt32(keyBytes, 0); - else if (keyBytes.Length == 1) - return (int)keyBytes[0]; - - return 0; - } - else if (keyType == typeof(long)) - { - // Try string parsing first - string stringValue = Encoding.UTF8.GetString(keyBytes); - if (long.TryParse(stringValue, out long parsedValue)) - return parsedValue; - - // Fall back to binary - if (keyBytes.Length >= 8) - return BitConverter.ToInt64(keyBytes, 0); - else if (keyBytes.Length >= 4) - return (long)BitConverter.ToInt32(keyBytes, 0); - - return 0L; - } - else if (keyType == typeof(string)) - { - // String conversion is safe regardless of length - return Encoding.UTF8.GetString(keyBytes); - } - else if (keyType == typeof(double)) - { - if (keyBytes.Length >= 8) - return BitConverter.ToDouble(keyBytes, 0); - else - return 0.0; - } - else if (keyType == typeof(bool) && keyBytes.Length >= 1) - { - return keyBytes[0] != 0; - } - else if (keyType == typeof(Guid) && keyBytes.Length >= 16) - { - return new Guid(keyBytes); - } - - // For complex types - try Avro or JSON deserialization - try - { - // Try to get Avro schema for the key type - var schemaField = keyType.GetField("_SCHEMA", - BindingFlags.Public | BindingFlags.Static); - - if (schemaField != null) - { - var schema = schemaField.GetValue(null) as Schema; - if (schema != null) - { - using var stream = new MemoryStream(keyBytes); - var decoder = new BinaryDecoder(stream); - var reader = new SpecificDatumReader(schema, schema); - return reader.Read(null!, decoder); - } - } - - // As a fallback, try JSON deserialization - string jsonStr = Encoding.UTF8.GetString(keyBytes); - return JsonSerializer.Deserialize(jsonStr, keyType, _jsonOptions); - } - catch - { - // If all deserialization attempts fail, return null - return null; - } - } - - /// - /// Sets a property value on an object instance from a JsonElement. - /// - /// The type of the object. - /// The object instance. - /// The name of the property to set. - /// The JsonElement containing the source data. - /// The property name within the JsonElement. - private void SetProperty(Type type, object instance, string propertyName, - JsonElement element, string jsonPropertyName) - { - if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || - jsonValue.ValueKind == JsonValueKind.Null) - return; - - // Add BindingFlags to find internal properties too - var property = type.GetProperty(propertyName, - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); - if (property == null) return; - var propertyType = property.PropertyType; - - object value; - if (propertyType == typeof(int)) value = jsonValue.GetInt32(); - else if (propertyType == typeof(long)) value = jsonValue.GetInt64(); - else if (propertyType == typeof(double)) value = jsonValue.GetDouble(); - else if (propertyType == typeof(string)) value = jsonValue.GetString()!; - else return; - - property.SetValue(instance, value); - } - /// /// Gets the Avro schema for the specified type. /// The type must have a public static _SCHEMA field defined. 
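+    /// avrogen-generated classes emit this field automatically, e.g.
+    /// public static Schema _SCHEMA = Schema.Parse(...); as in the AvroKey test class.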
@@ -358,6 +58,18 @@ private Schema GetAvroSchema(Type payloadType) /// Deserializes a base64-encoded Avro binary value into an object. /// /// The base64-encoded Avro binary data. + /// The type to deserialize to. + /// The deserialized object. + protected override object DeserializeValue(string base64Value, Type valueType) + { + Schema schema = GetAvroSchema(valueType); + return DeserializeAvroValue(base64Value, schema); + } + + /// + /// Deserializes a base64-encoded Avro binary value into an object using the provided schema. + /// + /// The base64-encoded Avro binary data. /// The Avro schema to use for deserialization. /// The deserialized object. private object DeserializeAvroValue(string base64Value, Schema schema) @@ -371,14 +83,39 @@ private object DeserializeAvroValue(string base64Value, Schema schema) } /// - /// Serializes an object to JSON and writes it to the provided stream. + /// Deserializes complex key types using Avro format. /// - /// The type of object to serialize. - /// The object to serialize. - /// The stream to write the serialized data to. - public void Serialize(T response, Stream responseStream) + /// The key bytes to deserialize. + /// The type to deserialize to. + /// The deserialized key object. + protected override object? DeserializeComplexKey(byte[] keyBytes, Type keyType) { - using var writer = new StreamWriter(responseStream); - writer.Write(JsonSerializer.Serialize(response, _jsonOptions)); + try + { + // Try to get Avro schema for the key type + var schemaField = keyType.GetField("_SCHEMA", + BindingFlags.Public | BindingFlags.Static); + + if (schemaField != null) + { + var schema = schemaField.GetValue(null) as Schema; + if (schema != null) + { + using var stream = new MemoryStream(keyBytes); + var decoder = new BinaryDecoder(stream); + var reader = new SpecificDatumReader(schema, schema); + return reader.Read(null!, decoder); + } + } + + // As a fallback, try JSON deserialization + string jsonStr = Encoding.UTF8.GetString(keyBytes); + return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + } + catch + { + // If all deserialization attempts fail, return null + return null; + } } -} \ No newline at end of file +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs new file mode 100644 index 000000000..904641576 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs @@ -0,0 +1,65 @@ +using System.Text; +using System.Text.Json; + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// A Lambda serializer for Kafka events that handles JSON-formatted data. +/// This serializer deserializes JSON data from Kafka records into strongly-typed objects. +/// +/// +/// +/// [assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] +/// +/// // Your Lambda handler will receive properly deserialized objects +/// public class Function +/// { +/// public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context) +/// { +/// foreach (var record in records) +/// { +/// Customer customer = record.Value; +/// context.Logger.LogInformation($"Processed customer {customer.Name}"); +/// } +/// } +/// } +/// +/// +public class PowertoolsKafkaJsonSerializer : PowertoolsKafkaSerializerBase +{ + /// + /// Deserializes a base64-encoded JSON value into an object. + /// + /// The base64-encoded JSON data. + /// The type to deserialize to. + /// The deserialized object. 
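+    /// For example, the base64 value "eyJuYW1lIjoiTGFwdG9wIn0=" decodes to the UTF-8
+    /// JSON text {"name":"Laptop"} before being deserialized into the target type.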
+ protected override object DeserializeValue(string base64Value, Type valueType) + { + byte[] jsonBytes = Convert.FromBase64String(base64Value); + string jsonString = Encoding.UTF8.GetString(jsonBytes); + + var result = JsonSerializer.Deserialize(jsonString, valueType, JsonOptions); + return result ?? throw new InvalidOperationException($"Failed to deserialize JSON to type {valueType.Name}"); + } + + /// + /// Deserializes complex key types from JSON. + /// + /// The key bytes to deserialize. + /// The type to deserialize to. + /// The deserialized key object. + protected override object? DeserializeComplexKey(byte[] keyBytes, Type keyType) + { + try + { + // Convert bytes to JSON string and deserialize + string jsonStr = Encoding.UTF8.GetString(keyBytes); + return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + } + catch + { + // If deserialization fails, return null + return null; + } + } +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs new file mode 100644 index 000000000..bd15a60df --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs @@ -0,0 +1,105 @@ +using Google.Protobuf; +using System.Reflection; +using System.Text; +using System.Text.Json; + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// A Lambda serializer for Kafka events that handles Protobuf-formatted data. +/// This serializer automatically deserializes the Protobuf binary format from base64-encoded strings +/// in Kafka records and converts them to strongly-typed objects. +/// +/// +/// +/// [assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))] +/// +/// // Your Lambda handler will receive properly deserialized objects +/// public class Function +/// { +/// public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context) +/// { +/// foreach (var record in records) +/// { +/// Customer customer = record.Value; +/// context.Logger.LogInformation($"Processed customer {customer.Name}"); +/// } +/// } +/// } +/// +/// +public class PowertoolsKafkaProtobufSerializer : PowertoolsKafkaSerializerBase +{ + /// + /// Deserializes a base64-encoded Protobuf binary value into an object. + /// + /// The base64-encoded Protobuf binary data. + /// The type to deserialize to. + /// The deserialized object. + protected override object DeserializeValue(string base64Value, Type valueType) + { + byte[] protobufBytes = Convert.FromBase64String(base64Value); + return DeserializeProtobufValue(protobufBytes, valueType); + } + + /// + /// Deserializes Protobuf binary data into an object of the specified type. + /// + /// The Protobuf binary data. + /// The Protobuf message type to deserialize to. + /// The deserialized object. 
+ private object DeserializeProtobufValue(byte[] protobufBytes, Type messageType) + { + // Find the Parser property which is available on all Protobuf generated classes + var parserProperty = messageType.GetProperty("Parser", + BindingFlags.Public | BindingFlags.Static); + + if (parserProperty == null) + throw new InvalidOperationException($"Type {messageType.Name} does not appear to be a Protobuf message type: Parser property not found"); + + var parser = parserProperty.GetValue(null) as MessageParser; + if (parser == null) + throw new InvalidOperationException($"Could not get Parser for Protobuf type {messageType.Name}"); + + // Use the parser to deserialize the message + using var stream = new MemoryStream(protobufBytes); + var message = parser.ParseFrom(stream); + + return message; + } + + /// + /// Deserializes complex key types using Protobuf format. + /// + /// The key bytes to deserialize. + /// The type to deserialize to. + /// The deserialized key object. + protected override object? DeserializeComplexKey(byte[] keyBytes, Type keyType) + { + try + { + // Check if it's a Protobuf message type + var parserProperty = keyType.GetProperty("Parser", + BindingFlags.Public | BindingFlags.Static); + + if (parserProperty != null) + { + var parser = parserProperty.GetValue(null) as MessageParser; + if (parser != null) + { + using var stream = new MemoryStream(keyBytes); + return parser.ParseFrom(stream); + } + } + + // As a fallback, try JSON deserialization + string jsonStr = Encoding.UTF8.GetString(keyBytes); + return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + } + catch + { + // If all deserialization attempts fail, return null + return null; + } + } +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs new file mode 100644 index 000000000..d5c7edf55 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -0,0 +1,319 @@ +using Amazon.Lambda.Core; +using System.Reflection; +using System.Text; +using System.Text.Json; + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// Base class for Kafka event serializers that provides common functionality +/// for deserializing Kafka event structures in Lambda functions. +/// +/// +/// Inherit from this class to implement specific formats like Avro, Protobuf or JSON. +/// +public abstract class PowertoolsKafkaSerializerBase : ILambdaSerializer +{ + protected readonly JsonSerializerOptions JsonOptions = new() + { + PropertyNameCaseInsensitive = true + }; + + /// + /// Deserializes the Lambda input stream into the specified type. + /// Handles Kafka events with various serialization formats. + /// + /// The type to deserialize to. For Kafka events, typically ConsumerRecords<TKey,TValue>. + /// The stream containing the serialized Lambda event. + /// The deserialized object of type T. + public T Deserialize(Stream requestStream) + { + using var reader = new StreamReader(requestStream); + var json = reader.ReadToEnd(); + + var targetType = typeof(T); + + if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(ConsumerRecords<,>)) + { + var typeArgs = targetType.GetGenericArguments(); + var keyType = typeArgs[0]; + var valueType = typeArgs[1]; + + using var document = JsonDocument.Parse(json); + var root = document.RootElement; + + // Create the correctly typed instance + var typedEvent = Activator.CreateInstance(targetType) ?? 
+ throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); + + // Set basic properties + if (root.TryGetProperty("eventSource", out var eventSource)) + targetType.GetProperty("EventSource", + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, eventSource.GetString()); + + if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) + targetType.GetProperty("EventSourceArn")?.SetValue(typedEvent, eventSourceArn.GetString()); + + if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) + targetType.GetProperty("BootstrapServers")?.SetValue(typedEvent, bootstrapServers.GetString()); + + // Create records dictionary with correct generic types + var dictType = typeof(Dictionary<,>).MakeGenericType( + typeof(string), + typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)) + ); + var records = Activator.CreateInstance(dictType) ?? + throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}"); + var dictAddMethod = dictType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on dictionary type"); + + if (root.TryGetProperty("records", out var recordsElement)) + { + foreach (var topicPartition in recordsElement.EnumerateObject()) + { + string topicName = topicPartition.Name; + + // Create list of records with correct generic types + var listType = + typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)); + var recordsList = Activator.CreateInstance(listType) ?? + throw new InvalidOperationException( + $"Failed to create list of type {listType.Name}"); + var listAddMethod = listType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on list type"); + + foreach (var recordElement in topicPartition.Value.EnumerateArray()) + { + // Create record instance of correct type + var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType); + var record = Activator.CreateInstance(recordType); + if (record == null) + continue; + + // Set basic properties + SetProperty(recordType, record, "Topic", recordElement, "topic"); + SetProperty(recordType, record, "Partition", recordElement, "partition"); + SetProperty(recordType, record, "Offset", recordElement, "offset"); + SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); + SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); + + // Handle key - base64 decode and convert to the correct type + if (recordElement.TryGetProperty("key", out var keyElement) && + keyElement.ValueKind == JsonValueKind.String) + { + string? base64Key = keyElement.GetString(); + if (!string.IsNullOrEmpty(base64Key)) + { + try + { + byte[] keyBytes = Convert.FromBase64String(base64Key); + object? decodedKey = DeserializeKey(keyBytes, keyType); + + var keyProperty = recordType.GetProperty("Key"); + keyProperty?.SetValue(record, decodedKey); + } + catch (Exception ex) + { + // Log or handle key deserialization failures + } + } + } + + // Handle value + if (recordElement.TryGetProperty("value", out var valueElement) && + valueElement.ValueKind == JsonValueKind.String) + { + string? 
base64Value = valueElement.GetString(); + var valueProperty = recordType.GetProperty("Value"); + + if (base64Value != null && valueProperty != null) + { + try + { + var deserializedValue = DeserializeValue(base64Value, valueType); + valueProperty.SetValue(record, deserializedValue); + } + catch (Exception ex) + { + throw new Exception($"Failed to deserialize data: {ex.Message}", ex); + } + } + } + + // Process headers + if (recordElement.TryGetProperty("headers", out var headersElement) && + headersElement.ValueKind == JsonValueKind.Array) + { + var decodedHeaders = new Dictionary(); + + foreach (var headerObj in headersElement.EnumerateArray()) + { + foreach (var header in headerObj.EnumerateObject()) + { + string headerKey = header.Name; + if (header.Value.ValueKind == JsonValueKind.Array) + { + byte[] headerBytes = new byte[header.Value.GetArrayLength()]; + int i = 0; + foreach (var byteVal in header.Value.EnumerateArray()) + { + headerBytes[i++] = (byte)byteVal.GetInt32(); + } + + string headerValue = Encoding.UTF8.GetString(headerBytes); + decodedHeaders[headerKey] = headerValue; + } + } + } + + var headersProperty = recordType.GetProperty("Headers", + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + headersProperty?.SetValue(record, decodedHeaders); + } + + // Add to records list + listAddMethod.Invoke(recordsList, new[] { record }); + } + + // Add topic records to dictionary + dictAddMethod.Invoke(records, new[] { topicName, recordsList }); + } + } + + targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, records); + return (T)typedEvent; + } + + var result = JsonSerializer.Deserialize(json, JsonOptions); + return result != null + ? result + : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); + } + + /// + /// Deserializes a key from bytes based on the specified key type. + /// + /// The key bytes to deserialize. + /// The target type for the key. + /// The deserialized key object. + protected object? 
DeserializeKey(byte[] keyBytes, Type keyType) + { + if (keyBytes == null || keyBytes.Length == 0) + return null; + + if (keyType == typeof(int)) + { + // First try to interpret as a string representation and parse + string stringValue = Encoding.UTF8.GetString(keyBytes); + if (int.TryParse(stringValue, out int parsedValue)) + return parsedValue; + + // Fall back to binary representation if parsing fails + if (keyBytes.Length >= 4) + return BitConverter.ToInt32(keyBytes, 0); + else if (keyBytes.Length == 1) + return (int)keyBytes[0]; + + return 0; + } + else if (keyType == typeof(long)) + { + // Try string parsing first + string stringValue = Encoding.UTF8.GetString(keyBytes); + if (long.TryParse(stringValue, out long parsedValue)) + return parsedValue; + + // Fall back to binary + if (keyBytes.Length >= 8) + return BitConverter.ToInt64(keyBytes, 0); + else if (keyBytes.Length >= 4) + return (long)BitConverter.ToInt32(keyBytes, 0); + + return 0L; + } + else if (keyType == typeof(string)) + { + // String conversion is safe regardless of length + return Encoding.UTF8.GetString(keyBytes); + } + else if (keyType == typeof(double)) + { + if (keyBytes.Length >= 8) + return BitConverter.ToDouble(keyBytes, 0); + else + return 0.0; + } + else if (keyType == typeof(bool) && keyBytes.Length >= 1) + { + return keyBytes[0] != 0; + } + else if (keyType == typeof(Guid) && keyBytes.Length >= 16) + { + return new Guid(keyBytes); + } + + // For complex types, try format-specific deserialization + return DeserializeComplexKey(keyBytes, keyType); + } + + /// + /// Sets a property value on an object instance from a JsonElement. + /// + /// The type of the object. + /// The object instance. + /// The name of the property to set. + /// The JsonElement containing the source data. + /// The property name within the JsonElement. + protected void SetProperty(Type type, object instance, string propertyName, + JsonElement element, string jsonPropertyName) + { + if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || + jsonValue.ValueKind == JsonValueKind.Null) + return; + + // Add BindingFlags to find internal properties too + var property = type.GetProperty(propertyName, + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + if (property == null) return; + var propertyType = property.PropertyType; + + object value; + if (propertyType == typeof(int)) value = jsonValue.GetInt32(); + else if (propertyType == typeof(long)) value = jsonValue.GetInt64(); + else if (propertyType == typeof(double)) value = jsonValue.GetDouble(); + else if (propertyType == typeof(string)) value = jsonValue.GetString()!; + else return; + + property.SetValue(instance, value); + } + + /// + /// Serializes an object to JSON and writes it to the provided stream. + /// + /// The type of object to serialize. + /// The object to serialize. + /// The stream to write the serialized data to. + public void Serialize(T response, Stream responseStream) + { + using var writer = new StreamWriter(responseStream); + writer.Write(JsonSerializer.Serialize(response, JsonOptions)); + } + + /// + /// Deserializes a base64-encoded value into an object using the appropriate format. + /// + /// The base64-encoded binary data. + /// The target type to deserialize to. + /// The deserialized object. + protected abstract object DeserializeValue(string base64Value, Type valueType); + + /// + /// Deserializes complex key types using the appropriate format. + /// + /// The key bytes to deserialize. + /// The type to deserialize to. 
+    /// <returns>The deserialized key object.</returns>
+    protected abstract object? DeserializeComplexKey(byte[] keyBytes, Type keyType);
+}
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj
index c635cb91f..5f7374147 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj
@@ -56,9 +56,16 @@
       PreserveNewest
-
+
+
+      Client
+      PreserveNewest
+      MSBuild:Compile
+
+
       PreserveNewest
+
       PreserveNewest
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
new file mode 100644
index 000000000..750d3382e
--- /dev/null
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
@@ -0,0 +1,300 @@
+using System.Text;
+using Amazon.Lambda.Core;
+using Amazon.Lambda.TestUtilities;
+using Google.Protobuf;
+using TestKafka;
+
+namespace AWS.Lambda.Powertools.Kafka.Tests.Protobuf;
+
+public class ProtobufHandlerTests
+{
+    [Fact]
+    public async Task Handler_ProcessesKafkaEvent_Successfully()
+    {
+        // Arrange
+        var kafkaJson = GetMockKafkaEvent();
+        var mockContext = new TestLambdaContext();
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+
+        // Convert JSON string to stream for deserialization
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson));
+
+        // Act - Deserialize and process
+        var kafkaEvent = serializer.Deserialize<ConsumerRecords<int, ProtobufProduct>>(stream);
+        var response = await Handler(kafkaEvent, mockContext);
+
+        // Assert
+        Assert.Equal("Successfully processed Protobuf Kafka events", response);
+
+        // Verify event structure
+        Assert.Equal("aws:kafka", kafkaEvent.EventSource);
+        Assert.Single(kafkaEvent.Records);
+
+        // Verify record content
+        var records = kafkaEvent.Records["mytopic-0"];
+        Assert.Equal(3, records.Count);
+
+        // Verify first record
+        var firstRecord = records[0];
+        Assert.Equal("mytopic", firstRecord.Topic);
+        Assert.Equal(0, firstRecord.Partition);
+        Assert.Equal(15, firstRecord.Offset);
+
+        // Verify deserialized value
+        var product = firstRecord.Value;
+        Assert.Equal("Laptop", product.Name);
+        Assert.Equal(999.99, product.Price);
+
+        // Verify decoded key and headers
+        Assert.Equal(42, firstRecord.Key);
+        Assert.Equal("headerValue", firstRecord.Headers["headerKey"]);
+
+        var secondRecord = records[1];
+        Assert.Equal(43, secondRecord.Key);
+
+        var thirdRecord = records[2];
+        Assert.Equal(0, thirdRecord.Key);
+    }
+
+    [Fact]
+    public async Task Handler_ProcessesKafkaEvent_WithProtobufKey_Successfully()
+    {
+        // Arrange
+        var kafkaJson = GetMockKafkaEventWithProtobufKeys();
+        var mockContext = new TestLambdaContext();
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+
+        // Convert JSON string to stream for deserialization
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson));
+
+        // Act - Deserialize and process
+        var kafkaEvent = serializer.Deserialize<ConsumerRecords<ProtobufKey, ProtobufProduct>>(stream);
+        var response = await HandlerWithProtobufKeys(kafkaEvent, mockContext);
+
+        // Assert
+        Assert.Equal("Successfully processed Protobuf Kafka events with complex keys", response);
+
+        // Verify event structure
+        Assert.Equal("aws:kafka", kafkaEvent.EventSource);
+        Assert.Single(kafkaEvent.Records);
+
+        // Verify record content
+        var records = kafkaEvent.Records["mytopic-0"];
+        Assert.Equal(3, records.Count);
+
+        // Verify first record
+        var firstRecord = records[0];
+        Assert.Equal("mytopic", firstRecord.Topic);
+        Assert.Equal(0, firstRecord.Partition);
+        Assert.Equal(15, firstRecord.Offset);
+
+        // Verify deserialized Protobuf key and value
+        Assert.Equal("Laptop", firstRecord.Value.Name);
+        Assert.Equal(999.99, firstRecord.Value.Price);
+        Assert.Equal(1, firstRecord.Key.Id);
+        Assert.Equal(TestKafka.Color.Green, firstRecord.Key.Color);
+
+        // Verify headers
+        Assert.Equal("headerValue", firstRecord.Headers["headerKey"]);
+
+        var secondRecord = records[1];
+        Assert.Equal(2, secondRecord.Key.Id);
+        Assert.Equal(TestKafka.Color.Unknown, secondRecord.Key.Color);
+
+        var thirdRecord = records[2];
+        Assert.Equal(3, thirdRecord.Key.Id);
+        Assert.Equal(TestKafka.Color.Red, thirdRecord.Key.Color);
+    }
+
+    private string GetMockKafkaEvent()
+    {
+        // For testing, we'll create base64-encoded Protobuf data for our test products
+        var laptop = new ProtobufProduct
+        {
+            Name = "Laptop",
+            Id = 1001,
+            Price = 999.99
+        };
+
+        var smartphone = new ProtobufProduct
+        {
+            Name = "Smartphone",
+            Id = 1002,
+            Price = 499.99
+        };
+
+        var headphones = new ProtobufProduct
+        {
+            Name = "Headphones",
+            Id = 1003,
+            Price = 99.99
+        };
+
+        // Convert to base64-encoded Protobuf
+        string laptopBase64 = Convert.ToBase64String(laptop.ToByteArray());
+        string smartphoneBase64 = Convert.ToBase64String(smartphone.ToByteArray());
+        string headphonesBase64 = Convert.ToBase64String(headphones.ToByteArray());
+
+        string firstRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("42")); // Example key
+        string secondRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("43")); // Example key for second record
+
+        // Create mock Kafka event JSON
+        return @$"{{
+            ""eventSource"": ""aws:kafka"",
+            ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"",
+            ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"",
+            ""records"": {{
+                ""mytopic-0"": [
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 15,
+                        ""timestamp"": 1545084650987,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{firstRecordKey}"",
+                        ""value"": ""{laptopBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }},
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 16,
+                        ""timestamp"": 1545084650988,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{secondRecordKey}"",
+                        ""value"": ""{smartphoneBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }},
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 17,
+                        ""timestamp"": 1545084650989,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": null,
+                        ""value"": ""{headphonesBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }}
+                ]
+            }}
+        }}";
+    }
+
+    private string GetMockKafkaEventWithProtobufKeys()
+    {
+        // Create test products
+        var laptop = new ProtobufProduct
+        {
+            Name = "Laptop",
+            Id = 1001,
+            Price = 999.99
+        };
+
+        var smartphone = new ProtobufProduct
+        {
+            Name = "Smartphone",
+            Id = 1002,
+            Price = 499.99
+        };
+
+        var headphones = new ProtobufProduct
+        {
+            Name = "Headphones",
+            Id = 1003,
+            Price = 99.99
+        };
+
+        // Create test keys
+        var key1 = new ProtobufKey { Id = 1, Color = TestKafka.Color.Green };
+        var key2 = new ProtobufKey { Id = 2 };
+        var key3 = new ProtobufKey { Id = 3, Color = TestKafka.Color.Red };
+
+        // Convert values to base64-encoded Protobuf
+        string laptopBase64 = Convert.ToBase64String(laptop.ToByteArray());
+        string smartphoneBase64 = Convert.ToBase64String(smartphone.ToByteArray());
+        string headphonesBase64 = Convert.ToBase64String(headphones.ToByteArray());
+
+        // Convert keys to base64-encoded Protobuf
+        string key1Base64 = Convert.ToBase64String(key1.ToByteArray());
+        string key2Base64 = Convert.ToBase64String(key2.ToByteArray());
+        string key3Base64 = Convert.ToBase64String(key3.ToByteArray());
+
+        // Create mock Kafka event JSON
+        return @$"{{
+            ""eventSource"": ""aws:kafka"",
+            ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"",
+            ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"",
+            ""records"": {{
+                ""mytopic-0"": [
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 15,
+                        ""timestamp"": 1545084650987,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{key1Base64}"",
+                        ""value"": ""{laptopBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }},
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 16,
+                        ""timestamp"": 1545084650988,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{key2Base64}"",
+                        ""value"": ""{smartphoneBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }},
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 17,
+                        ""timestamp"": 1545084650989,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{key3Base64}"",
+                        ""value"": ""{headphonesBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }}
+                ]
+            }}
+        }}";
+    }
+
+    // Define the test handler method
+    private async Task<string> Handler(ConsumerRecords<int, ProtobufProduct> records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            var product = record.Value;
+            context.Logger.LogInformation($"Processing {product.Name} at ${product.Price}");
+        }
+
+        return "Successfully processed Protobuf Kafka events";
+    }
+
+    private async Task<string> HandlerWithProtobufKeys(ConsumerRecords<ProtobufKey, ProtobufProduct> records,
+        ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            var key = record.Key;
+            var product = record.Value;
+            context.Logger.LogInformation($"Processing key {key.Id} - {product.Name} at ${product.Price}");
+        }
+
+        return "Successfully processed Protobuf Kafka events with complex keys";
+    }
+}
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Key.proto b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Key.proto
new file mode 100644
index 000000000..deedcf5dc
--- /dev/null
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Key.proto
@@ -0,0 +1,14 @@
+syntax = "proto3";
+
+option csharp_namespace = "TestKafka";
+
+message ProtobufKey {
+  int32 id = 1;
+  Color color = 2;
+}
+
+enum Color {
+  UNKNOWN = 0;
+  GREEN = 1;
+  RED = 2;
+}
\ No newline at end of file
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
index e8697be23..eba974d71 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
@@ -1,3 +1,4 @@
+using System.Text;
 using TestKafka;
 
 namespace AWS.Lambda.Powertools.Kafka.Tests.Protobuf;
@@ -5,20 +6,84 @@ namespace AWS.Lambda.Powertools.Kafka.Tests.Protobuf;
 public class PowertoolsKafkaProtobufSerializerTests
 {
     [Fact]
-    public void DeserializeProtobufFromBase64()
+    public void Deserialize_KafkaEventWithProtobufPayload_DeserializesToCorrectType()
     {
-        // Base64 encoded Protobuf data
-        string base64EncodedProto = "COkHEgZMYXB0b3AZUrgehes/j0A=";
+        // Arrange
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+        string kafkaEventJson = File.ReadAllText("Protobuf/kafka-protobuf-event.json");
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
 
-        // Decode base64 to bytes
-        byte[] protoBytes = Convert.FromBase64String(base64EncodedProto);
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<int, ProtobufProduct>>(stream);
 
-        // Deserialize to ProtobufProduct
-        var product = ProtobufProduct.Parser.ParseFrom(protoBytes);
+        // Assert
+        Assert.NotNull(result);
+        Assert.Equal("aws:kafka", result.EventSource);
 
-        // Verify values
+        // Verify records were deserialized
+        Assert.True(result.Records.ContainsKey("mytopic-0"));
+        var records = result.Records["mytopic-0"];
+        Assert.Equal(3, records.Count); // Fixed to expect 3 records instead of 1
+
+        // Verify first record's content
+        var firstRecord = records[0];
+        Assert.Equal("mytopic", firstRecord.Topic);
+        Assert.Equal(0, firstRecord.Partition);
+        Assert.Equal(15, firstRecord.Offset);
+        Assert.Equal(42, firstRecord.Key);
+
+        // Verify deserialized Protobuf value
+        var product = firstRecord.Value;
         Assert.Equal("Laptop", product.Name);
         Assert.Equal(1001, product.Id);
         Assert.Equal(999.99, product.Price);
+
+        // Verify second record
+        var secondRecord = records[1];
+        var smartphone = secondRecord.Value;
+        Assert.Equal("Smartphone", smartphone.Name);
+        Assert.Equal(1002, smartphone.Id);
+        Assert.Equal(599.99, smartphone.Price);
+
+        // Verify third record
+        var thirdRecord = records[2];
+        var headphones = thirdRecord.Value;
+        Assert.Equal("Headphones", headphones.Name);
+        Assert.Equal(1003, headphones.Id);
+        Assert.Equal(149.99, headphones.Price);
+    }
+
+    [Fact]
+    public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+        string kafkaEventJson = File.ReadAllText("Protobuf/kafka-protobuf-event.json");
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<int, ProtobufProduct>>(stream);
+
+        // Assert - Test enumeration
+        int count = 0;
+        var products = new List<string>();
+
+        // Directly iterate over ConsumerRecords
+        foreach (var record in result)
+        {
+            count++;
+            products.Add(record.Value.Name);
+        }
+
+        // Verify correct count and values
+        Assert.Equal(3, count);
+        Assert.Contains("Laptop", products);
+        Assert.Contains("Smartphone", products);
+        Assert.Contains("Headphones", products);
+
+        // Get first record directly through Linq extension
+        var firstRecord = result.First();
+        Assert.Equal("Laptop", firstRecord.Value.Name);
+        Assert.Equal(1001, firstRecord.Value.Id);
     }
-}
\ No newline at end of file
+}
From 14be14bcb5488b9bc0cbb690340cdab3ef74822a Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Sat, 7 Jun 2025 16:10:23 +0100
Subject: [PATCH 08/35] feat(kafka): add unit tests for
 PowertoolsKafkaJsonSerializer to validate JSON deserialization

---
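For reference while reading these tests: the Json/kafka-json-event.json fixture stores keys and values base64-encoded, and the assertions below imply a decoded record of roughly this shape (`cmVjb3JkS2V5` is base64 for `recordKey`; the value placeholder stands for the base64 encoding of the JSON shown):

    {
      "topic": "mytopic",
      "partition": 0,
      "offset": 15,
      "timestampType": "CREATE_TIME",
      "key": "cmVjb3JkS2V5",
      "value": "<base64 of {\"id\": 12345, \"name\": \"product5\", \"price\": 45}>"
    }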
 .../PowertoolsKafkaJsonSerializerTests.cs     | 93 +++++++++++++++++++
 1 file changed, 93 insertions(+)
 create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs

diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs
new file mode 100644
index 000000000..b7d7be67f
--- /dev/null
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs
@@ -0,0 +1,93 @@
+using System.Text;
+
+namespace AWS.Lambda.Powertools.Kafka.Tests.Json;
+
+public record JsonProduct
+{
+    public int Id { get; set; }
+    public string Name { get; set; } = string.Empty;
+    public decimal Price { get; set; }
+}
+
+public class PowertoolsKafkaJsonSerializerTests
+{
+    [Fact]
+    public void Deserialize_KafkaEventWithJsonPayload_DeserializesToCorrectType()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaJsonSerializer();
+        string kafkaEventJson = File.ReadAllText("Json/kafka-json-event.json");
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<string, JsonProduct>>(stream);
+
+        // Assert
+        Assert.NotNull(result);
+        Assert.Equal("aws:kafka", result.EventSource);
+
+        // Verify records were deserialized
+        Assert.True(result.Records.ContainsKey("mytopic-0"));
+        var records = result.Records["mytopic-0"];
+        Assert.Equal(3, records.Count);
+
+        // Verify first record's content
+        var firstRecord = records[0];
+        Assert.Equal("mytopic", firstRecord.Topic);
+        Assert.Equal(0, firstRecord.Partition);
+        Assert.Equal(15, firstRecord.Offset);
+        Assert.Equal("recordKey", firstRecord.Key);
+
+        // Verify deserialized JSON value
+        var product = firstRecord.Value;
+        Assert.Equal("product5", product.Name);
+        Assert.Equal(12345, product.Id);
+        Assert.Equal(45, product.Price);
+
+        // Verify second record
+        var secondRecord = records[1];
+        var p2 = secondRecord.Value;
+        Assert.Equal("product5", p2.Name);
+        Assert.Equal(12345, p2.Id);
+        Assert.Equal(45, p2.Price);
+
+        // Verify third record
+        var thirdRecord = records[2];
+        var p3 = thirdRecord.Value;
+        Assert.Equal("product5", p3.Name);
+        Assert.Equal(12345, p3.Id);
+        Assert.Equal(45, p3.Price);
+    }
+
+    [Fact]
+    public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaJsonSerializer();
+        string kafkaEventJson = File.ReadAllText("Json/kafka-json-event.json");
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<string, JsonProduct>>(stream);
+
+        // Assert - Test enumeration
+        int count = 0;
+        var products = new List<string>();
+
+        // Directly iterate over ConsumerRecords
+        foreach (var record in result)
+        {
+            count++;
+            products.Add(record.Value.Name);
+        }
+
+        // Verify correct count and values
+        Assert.Equal(3, count);
+        Assert.Contains("product5", products);
+
+        // Get first record directly through Linq extension
+        var firstRecord = result.First();
+        Assert.Equal("product5", firstRecord.Value.Name);
+        Assert.Equal(12345, firstRecord.Value.Id);
+    }
+}
From c6e155e38e07396fb6e8db999701bf92a1f16f3f Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Sat, 7 Jun 2025 16:23:40 +0100
Subject: [PATCH 09/35] feat(kafka): add constructors to serializers for
 custom JSON serialization options

---
 .../PowertoolsKafkaAvroSerializer.cs          | 19 ++++++++++++
.../PowertoolsKafkaJsonSerializer.cs | 17 ++++++++++++ .../PowertoolsKafkaProtobufSerializer.cs | 19 ++++++++++++- .../PowertoolsKafkaSerializerBase.cs | 27 ++++++++++++++++--- 4 files changed, 77 insertions(+), 5 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs index 4ce245965..b24937364 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -32,6 +32,23 @@ namespace AWS.Lambda.Powertools.Kafka; /// public class PowertoolsKafkaAvroSerializer : PowertoolsKafkaSerializerBase { + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + public PowertoolsKafkaAvroSerializer() : base() + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. + public PowertoolsKafkaAvroSerializer(JsonSerializerOptions jsonOptions) : base(jsonOptions) + { + } + /// /// Gets the Avro schema for the specified type. /// The type must have a public static _SCHEMA field defined. @@ -118,4 +135,4 @@ private object DeserializeAvroValue(string base64Value, Schema schema) return null; } } -} +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs index 904641576..26c85e848 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs @@ -27,6 +27,23 @@ namespace AWS.Lambda.Powertools.Kafka; /// public class PowertoolsKafkaJsonSerializer : PowertoolsKafkaSerializerBase { + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + public PowertoolsKafkaJsonSerializer() : base() + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. + public PowertoolsKafkaJsonSerializer(JsonSerializerOptions jsonOptions) : base(jsonOptions) + { + } + /// /// Deserializes a base64-encoded JSON value into an object. /// diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs index bd15a60df..ab3702758 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs @@ -30,6 +30,23 @@ namespace AWS.Lambda.Powertools.Kafka; /// public class PowertoolsKafkaProtobufSerializer : PowertoolsKafkaSerializerBase { + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + public PowertoolsKafkaProtobufSerializer() : base() + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. + public PowertoolsKafkaProtobufSerializer(JsonSerializerOptions jsonOptions) : base(jsonOptions) + { + } + /// /// Deserializes a base64-encoded Protobuf binary value into an object. 
/// @@ -102,4 +119,4 @@ private object DeserializeProtobufValue(byte[] protobufBytes, Type messageType) return null; } } -} +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index d5c7edf55..86f180aa7 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -14,10 +14,31 @@ namespace AWS.Lambda.Powertools.Kafka; /// public abstract class PowertoolsKafkaSerializerBase : ILambdaSerializer { - protected readonly JsonSerializerOptions JsonOptions = new() + /// + /// JSON serializer options used for deserialization. + /// + protected readonly JsonSerializerOptions JsonOptions; + + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + protected PowertoolsKafkaSerializerBase() : this(new JsonSerializerOptions { PropertyNameCaseInsensitive = true - }; + }) + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. + protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions) + { + JsonOptions = jsonOptions ?? new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; + } /// /// Deserializes the Lambda input stream into the specified type. @@ -316,4 +337,4 @@ public void Serialize(T response, Stream responseStream) /// The type to deserialize to. /// The deserialized key object. protected abstract object? DeserializeComplexKey(byte[] keyBytes, Type keyType); -} +} \ No newline at end of file From 4dd610bbf576d2128cc72ac093db0212ac81314c Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Sat, 7 Jun 2025 16:46:38 +0100 Subject: [PATCH 10/35] feat(kafka): enhance serializers with AOT-compatible JSON context and dynamic code attributes --- .../PowertoolsKafkaAvroSerializer.cs | 51 +- .../PowertoolsKafkaJsonSerializer.cs | 61 ++- .../PowertoolsKafkaProtobufSerializer.cs | 45 +- .../PowertoolsKafkaSerializerBase.cs | 444 +++++++++++------- 4 files changed, 413 insertions(+), 188 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs index b24937364..78c66c833 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs @@ -1,9 +1,11 @@ -using Avro; -using Avro.IO; -using Avro.Specific; +using System.Diagnostics.CodeAnalysis; using System.Reflection; using System.Text; using System.Text.Json; +using System.Text.Json.Serialization; +using Avro; +using Avro.IO; +using Avro.Specific; namespace AWS.Lambda.Powertools.Kafka; @@ -49,6 +51,15 @@ public PowertoolsKafkaAvroSerializer(JsonSerializerOptions jsonOptions) : base(j { } + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization. + /// + /// JSON serializer context for AOT compatibility. + public PowertoolsKafkaAvroSerializer(JsonSerializerContext serializerContext) : base(serializerContext) + { + } + /// /// Gets the Avro schema for the specified type. /// The type must have a public static _SCHEMA field defined. 
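The JsonSerializerContext constructor added below targets trimmed and Native AOT deployments, where the reflection-based JSON fallback for complex keys cannot be relied on. A minimal sketch of how a consumer might supply a source-generated context (MyKeyType and MyKafkaJsonContext are hypothetical names, not part of this patch):

    // Hypothetical user code: register every type that may hit the JSON
    // fallback path so it is handled by source generation, not reflection.
    [JsonSerializable(typeof(MyKeyType))]
    public partial class MyKafkaJsonContext : JsonSerializerContext { }

    var serializer = new PowertoolsKafkaAvroSerializer(MyKafkaJsonContext.Default);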
@@ -56,7 +67,9 @@ public PowertoolsKafkaAvroSerializer(JsonSerializerOptions jsonOptions) : base(j /// The type to get the Avro schema for. /// The Avro Schema object. /// Thrown if no schema is found for the type. - private Schema GetAvroSchema(Type payloadType) + [RequiresDynamicCode("Avro schema access requires reflection which may be incompatible with AOT.")] + [RequiresUnreferencedCode("Avro schema access requires reflection which may be incompatible with trimming.")] + private Schema GetAvroSchema([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type payloadType) { var schemaField = payloadType.GetField("_SCHEMA", BindingFlags.Public | BindingFlags.Static); @@ -77,9 +90,11 @@ private Schema GetAvroSchema(Type payloadType) /// The base64-encoded Avro binary data. /// The type to deserialize to. /// The deserialized object. - protected override object DeserializeValue(string base64Value, Type valueType) + [RequiresDynamicCode("Avro deserialization requires reflection which may be incompatible with AOT.")] + [RequiresUnreferencedCode("Avro deserialization requires reflection which may be incompatible with trimming.")] + protected override object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) { - Schema schema = GetAvroSchema(valueType); + var schema = GetAvroSchema(valueType); return DeserializeAvroValue(base64Value, schema); } @@ -91,7 +106,7 @@ protected override object DeserializeValue(string base64Value, Type valueType) /// The deserialized object. private object DeserializeAvroValue(string base64Value, Schema schema) { - byte[] avroBytes = Convert.FromBase64String(base64Value); + var avroBytes = Convert.FromBase64String(base64Value); using var stream = new MemoryStream(avroBytes); var decoder = new BinaryDecoder(stream); var reader = new SpecificDatumReader(schema, schema); @@ -105,7 +120,9 @@ private object DeserializeAvroValue(string base64Value, Schema schema) /// The key bytes to deserialize. /// The type to deserialize to. /// The deserialized key object. - protected override object? DeserializeComplexKey(byte[] keyBytes, Type keyType) + [RequiresDynamicCode("Avro and JSON deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Avro and JSON deserialization might require types that cannot be statically analyzed.")] + protected override object? 
DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type keyType) { try { @@ -126,8 +143,22 @@ private object DeserializeAvroValue(string base64Value, Schema schema) } // As a fallback, try JSON deserialization - string jsonStr = Encoding.UTF8.GetString(keyBytes); + var jsonStr = Encoding.UTF8.GetString(keyBytes); + + if (SerializerContext != null) + { + // Try to get type info from context for AOT compatibility + var typeInfo = SerializerContext.GetTypeInfo(keyType); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(jsonStr, typeInfo); + } + } + + // Fallback to regular deserialization + #pragma warning disable IL2026, IL3050 return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + #pragma warning restore IL2026, IL3050 } catch { @@ -135,4 +166,4 @@ private object DeserializeAvroValue(string base64Value, Schema schema) return null; } } -} \ No newline at end of file +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs index 26c85e848..307598b17 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs @@ -1,5 +1,8 @@ +using System.Diagnostics.CodeAnalysis; using System.Text; using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; namespace AWS.Lambda.Powertools.Kafka; @@ -44,19 +47,45 @@ public PowertoolsKafkaJsonSerializer(JsonSerializerOptions jsonOptions) : base(j { } + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization. + /// + /// JSON serializer context for AOT compatibility. + public PowertoolsKafkaJsonSerializer(JsonSerializerContext serializerContext) : base(serializerContext) + { + } + /// /// Deserializes a base64-encoded JSON value into an object. /// /// The base64-encoded JSON data. /// The type to deserialize to. /// The deserialized object. - protected override object DeserializeValue(string base64Value, Type valueType) + [RequiresDynamicCode("JSON deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] + protected override object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) { - byte[] jsonBytes = Convert.FromBase64String(base64Value); - string jsonString = Encoding.UTF8.GetString(jsonBytes); + var jsonBytes = Convert.FromBase64String(base64Value); + var jsonString = Encoding.UTF8.GetString(jsonBytes); + + if (SerializerContext != null) + { + // Try to get type info from context for AOT compatibility + var typeInfo = SerializerContext.GetTypeInfo(valueType); + if (typeInfo != null) + { + var result = JsonSerializer.Deserialize(jsonString, typeInfo); + return result ?? throw new InvalidOperationException($"Failed to deserialize JSON to type {valueType.Name}"); + } + } + + // Fallback to regular deserialization + #pragma warning disable IL2026, IL3050 + var fallbackResult = JsonSerializer.Deserialize(jsonString, valueType, JsonOptions); + #pragma warning restore IL2026, IL3050 - var result = JsonSerializer.Deserialize(jsonString, valueType, JsonOptions); - return result ?? 
throw new InvalidOperationException($"Failed to deserialize JSON to type {valueType.Name}"); + return fallbackResult ?? throw new InvalidOperationException($"Failed to deserialize JSON to type {valueType.Name}"); } /// @@ -65,13 +94,29 @@ protected override object DeserializeValue(string base64Value, Type valueType) /// The key bytes to deserialize. /// The type to deserialize to. /// The deserialized key object. - protected override object? DeserializeComplexKey(byte[] keyBytes, Type keyType) + [RequiresDynamicCode("JSON deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] + protected override object? DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type keyType) { try { - // Convert bytes to JSON string and deserialize - string jsonStr = Encoding.UTF8.GetString(keyBytes); + // Convert bytes to JSON string + var jsonStr = Encoding.UTF8.GetString(keyBytes); + + if (SerializerContext != null) + { + // Try to get type info from context for AOT compatibility + var typeInfo = SerializerContext.GetTypeInfo(keyType); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(jsonStr, typeInfo); + } + } + + // Fallback to regular deserialization + #pragma warning disable IL2026, IL3050 return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + #pragma warning restore IL2026, IL3050 } catch { diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs index ab3702758..042eeec52 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs @@ -1,7 +1,9 @@ -using Google.Protobuf; +using System.Diagnostics.CodeAnalysis; using System.Reflection; using System.Text; using System.Text.Json; +using System.Text.Json.Serialization; +using Google.Protobuf; namespace AWS.Lambda.Powertools.Kafka; @@ -47,15 +49,26 @@ public PowertoolsKafkaProtobufSerializer(JsonSerializerOptions jsonOptions) : ba { } + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization. + /// + /// JSON serializer context for AOT compatibility. + public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext) : base(serializerContext) + { + } + /// /// Deserializes a base64-encoded Protobuf binary value into an object. /// /// The base64-encoded Protobuf binary data. /// The type to deserialize to. /// The deserialized object. - protected override object DeserializeValue(string base64Value, Type valueType) + [RequiresDynamicCode("Protobuf deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Protobuf deserialization might require types that cannot be statically analyzed.")] + protected override object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type valueType) { - byte[] protobufBytes = Convert.FromBase64String(base64Value); + var protobufBytes = Convert.FromBase64String(base64Value); return DeserializeProtobufValue(protobufBytes, valueType); } @@ -65,7 +78,9 @@ protected override object DeserializeValue(string base64Value, Type valueType) /// The Protobuf binary data. 
/// The Protobuf message type to deserialize to. /// The deserialized object. - private object DeserializeProtobufValue(byte[] protobufBytes, Type messageType) + [RequiresDynamicCode("Protobuf deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Protobuf deserialization might require types that cannot be statically analyzed.")] + private object DeserializeProtobufValue(byte[] protobufBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type messageType) { // Find the Parser property which is available on all Protobuf generated classes var parserProperty = messageType.GetProperty("Parser", @@ -91,7 +106,9 @@ private object DeserializeProtobufValue(byte[] protobufBytes, Type messageType) /// The key bytes to deserialize. /// The type to deserialize to. /// The deserialized key object. - protected override object? DeserializeComplexKey(byte[] keyBytes, Type keyType) + [RequiresDynamicCode("Protobuf and JSON deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Protobuf and JSON deserialization might require types that cannot be statically analyzed.")] + protected override object? DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type keyType) { try { @@ -110,8 +127,22 @@ private object DeserializeProtobufValue(byte[] protobufBytes, Type messageType) } // As a fallback, try JSON deserialization - string jsonStr = Encoding.UTF8.GetString(keyBytes); + var jsonStr = Encoding.UTF8.GetString(keyBytes); + + if (SerializerContext != null) + { + // Try to get type info from context for AOT compatibility + var typeInfo = SerializerContext.GetTypeInfo(keyType); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(jsonStr, typeInfo); + } + } + + // Fallback to regular deserialization + #pragma warning disable IL2026, IL3050 return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + #pragma warning restore IL2026, IL3050 } catch { @@ -119,4 +150,4 @@ private object DeserializeProtobufValue(byte[] protobufBytes, Type messageType) return null; } } -} \ No newline at end of file +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index 86f180aa7..d1ec9591f 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -1,7 +1,10 @@ using Amazon.Lambda.Core; +using System.Diagnostics.CodeAnalysis; using System.Reflection; using System.Text; using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; namespace AWS.Lambda.Powertools.Kafka; @@ -19,6 +22,11 @@ public abstract class PowertoolsKafkaSerializerBase : ILambdaSerializer /// protected readonly JsonSerializerOptions JsonOptions; + /// + /// JSON serializer context used for AOT-compatible serialization/deserialization. + /// + protected readonly JsonSerializerContext? SerializerContext; + /// /// Initializes a new instance of the class /// with default JSON serialization options. 
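Since Lambda instantiates the serializer named in [assembly: LambdaSerializer(...)] through its parameterless constructor, custom options or a context would typically reach these new base-class constructors through a small derived type. A sketch under that assumption (MyContext is a hypothetical source-generated context):

    // Hypothetical user code: a parameterless subclass that forwards a
    // source-generated context to the protected base constructor.
    public class MyKafkaSerializer : PowertoolsKafkaJsonSerializer
    {
        public MyKafkaSerializer() : base(MyContext.Default) { }
    }

    [assembly: LambdaSerializer(typeof(MyKafkaSerializer))]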
@@ -26,7 +34,7 @@ public abstract class PowertoolsKafkaSerializerBase : ILambdaSerializer protected PowertoolsKafkaSerializerBase() : this(new JsonSerializerOptions { PropertyNameCaseInsensitive = true - }) + }, null) { } @@ -35,9 +43,29 @@ protected PowertoolsKafkaSerializerBase() : this(new JsonSerializerOptions /// with custom JSON serialization options. /// /// Custom JSON serializer options to use during deserialization. - protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions) + protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions) : this(jsonOptions, null) + { + } + + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization/deserialization. + /// + /// The JSON serializer context for AOT compatibility. + protected PowertoolsKafkaSerializerBase(JsonSerializerContext serializerContext) : this(serializerContext.Options, serializerContext) + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options and an optional serializer context. + /// + /// Custom JSON serializer options to use during deserialization. + /// Optional JSON serializer context for AOT compatibility. + protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions, JsonSerializerContext? serializerContext) { JsonOptions = jsonOptions ?? new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; + SerializerContext = serializerContext; } /// @@ -47,8 +75,20 @@ protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions) /// The type to deserialize to. For Kafka events, typically ConsumerRecords<TKey,TValue>. /// The stream containing the serialized Lambda event. /// The deserialized object of type T. + [RequiresUnreferencedCode("Kafka serializer uses reflection and may be incompatible with trimming. Use an overload that accepts a JsonTypeInfo or JsonSerializerContext for AOT compatibility.")] + [RequiresDynamicCode("Kafka serializer dynamically creates generic types and may be incompatible with NativeAOT. Use an overload that accepts a JsonTypeInfo or JsonSerializerContext for AOT compatibility.")] public T Deserialize(Stream requestStream) { + if (SerializerContext != null && typeof(T) != typeof(ConsumerRecords<,>)) + { + // Fast path for regular JSON types when serializer context is provided + var typeInfo = GetJsonTypeInfo(); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(requestStream, typeInfo) ?? throw new InvalidOperationException(); + } + } + using var reader = new StreamReader(requestStream); var json = reader.ReadToEnd(); @@ -56,161 +96,187 @@ public T Deserialize(Stream requestStream) if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(ConsumerRecords<,>)) { - var typeArgs = targetType.GetGenericArguments(); - var keyType = typeArgs[0]; - var valueType = typeArgs[1]; - - using var document = JsonDocument.Parse(json); - var root = document.RootElement; - - // Create the correctly typed instance - var typedEvent = Activator.CreateInstance(targetType) ?? 
- throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); - - // Set basic properties - if (root.TryGetProperty("eventSource", out var eventSource)) - targetType.GetProperty("EventSource", - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) - ?.SetValue(typedEvent, eventSource.GetString()); - - if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) - targetType.GetProperty("EventSourceArn")?.SetValue(typedEvent, eventSourceArn.GetString()); - - if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) - targetType.GetProperty("BootstrapServers")?.SetValue(typedEvent, bootstrapServers.GetString()); - - // Create records dictionary with correct generic types - var dictType = typeof(Dictionary<,>).MakeGenericType( - typeof(string), - typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)) - ); - var records = Activator.CreateInstance(dictType) ?? - throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}"); - var dictAddMethod = dictType.GetMethod("Add") ?? - throw new InvalidOperationException("Add method not found on dictionary type"); - - if (root.TryGetProperty("records", out var recordsElement)) + return DeserializeConsumerRecords(json); + } + + if (SerializerContext != null) + { + // Try to find type info in context + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) { - foreach (var topicPartition in recordsElement.EnumerateObject()) + return (T)JsonSerializer.Deserialize(json, typeInfo)!; + } + } + + // Fallback to regular deserialization with warning + #pragma warning disable IL2026, IL3050 + var result = JsonSerializer.Deserialize(json, JsonOptions); + #pragma warning restore IL2026, IL3050 + + return result != null + ? result + : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); + } + + /// + /// Deserializes a Kafka ConsumerRecords event from JSON string. + /// + /// The ConsumerRecords type with key and value generics. + /// The JSON string to deserialize. + /// The deserialized ConsumerRecords object. + [RequiresUnreferencedCode("ConsumerRecords deserialization uses reflection and may be incompatible with trimming.")] + [RequiresDynamicCode("ConsumerRecords deserialization dynamically creates generic types and may be incompatible with NativeAOT.")] + private T DeserializeConsumerRecords(string json) + { + var targetType = typeof(T); + var typeArgs = targetType.GetGenericArguments(); + var keyType = typeArgs[0]; + var valueType = typeArgs[1]; + + using var document = JsonDocument.Parse(json); + var root = document.RootElement; + + // Create the correctly typed instance + var typedEvent = Activator.CreateInstance(targetType) ?? 
+ throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); + + // Set basic properties + if (root.TryGetProperty("eventSource", out var eventSource)) + targetType.GetProperty("EventSource", + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, eventSource.GetString()); + + if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) + targetType.GetProperty("EventSourceArn")?.SetValue(typedEvent, eventSourceArn.GetString()); + + if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) + targetType.GetProperty("BootstrapServers")?.SetValue(typedEvent, bootstrapServers.GetString()); + + // Create records dictionary with correct generic types + var dictType = typeof(Dictionary<,>).MakeGenericType( + typeof(string), + typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)) + ); + var records = Activator.CreateInstance(dictType) ?? + throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}"); + var dictAddMethod = dictType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on dictionary type"); + + if (root.TryGetProperty("records", out var recordsElement)) + { + foreach (var topicPartition in recordsElement.EnumerateObject()) + { + var topicName = topicPartition.Name; + + // Create list of records with correct generic types + var listType = + typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)); + var recordsList = Activator.CreateInstance(listType) ?? + throw new InvalidOperationException( + $"Failed to create list of type {listType.Name}"); + var listAddMethod = listType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on list type"); + + foreach (var recordElement in topicPartition.Value.EnumerateArray()) { - string topicName = topicPartition.Name; - - // Create list of records with correct generic types - var listType = - typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)); - var recordsList = Activator.CreateInstance(listType) ?? - throw new InvalidOperationException( - $"Failed to create list of type {listType.Name}"); - var listAddMethod = listType.GetMethod("Add") ?? 
- throw new InvalidOperationException("Add method not found on list type"); - - foreach (var recordElement in topicPartition.Value.EnumerateArray()) + // Create record instance of correct type + var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType); + var record = Activator.CreateInstance(recordType); + if (record == null) + continue; + + // Set basic properties + SetProperty(recordType, record, "Topic", recordElement, "topic"); + SetProperty(recordType, record, "Partition", recordElement, "partition"); + SetProperty(recordType, record, "Offset", recordElement, "offset"); + SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); + SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); + + // Handle key - base64 decode and convert to the correct type + if (recordElement.TryGetProperty("key", out var keyElement) && + keyElement.ValueKind == JsonValueKind.String) { - // Create record instance of correct type - var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType); - var record = Activator.CreateInstance(recordType); - if (record == null) - continue; - - // Set basic properties - SetProperty(recordType, record, "Topic", recordElement, "topic"); - SetProperty(recordType, record, "Partition", recordElement, "partition"); - SetProperty(recordType, record, "Offset", recordElement, "offset"); - SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); - SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); - - // Handle key - base64 decode and convert to the correct type - if (recordElement.TryGetProperty("key", out var keyElement) && - keyElement.ValueKind == JsonValueKind.String) + var base64Key = keyElement.GetString(); + if (!string.IsNullOrEmpty(base64Key)) { - string? base64Key = keyElement.GetString(); - if (!string.IsNullOrEmpty(base64Key)) + try { - try - { - byte[] keyBytes = Convert.FromBase64String(base64Key); - object? decodedKey = DeserializeKey(keyBytes, keyType); + var keyBytes = Convert.FromBase64String(base64Key); + var decodedKey = DeserializeKey(keyBytes, keyType); - var keyProperty = recordType.GetProperty("Key"); - keyProperty?.SetValue(record, decodedKey); - } - catch (Exception ex) - { - // Log or handle key deserialization failures - } + var keyProperty = recordType.GetProperty("Key"); + keyProperty?.SetValue(record, decodedKey); + } + catch (Exception ex) + { + throw new Exception($"Failed to deserialize data: {ex.Message}", ex); } } + } - // Handle value - if (recordElement.TryGetProperty("value", out var valueElement) && - valueElement.ValueKind == JsonValueKind.String) - { - string? 
base64Value = valueElement.GetString(); - var valueProperty = recordType.GetProperty("Value"); + // Handle value + if (recordElement.TryGetProperty("value", out var valueElement) && + valueElement.ValueKind == JsonValueKind.String) + { + var base64Value = valueElement.GetString(); + var valueProperty = recordType.GetProperty("Value"); - if (base64Value != null && valueProperty != null) + if (base64Value != null && valueProperty != null) + { + try { - try - { - var deserializedValue = DeserializeValue(base64Value, valueType); - valueProperty.SetValue(record, deserializedValue); - } - catch (Exception ex) - { - throw new Exception($"Failed to deserialize data: {ex.Message}", ex); - } + var deserializedValue = DeserializeValue(base64Value, valueType); + valueProperty.SetValue(record, deserializedValue); + } + catch (Exception ex) + { + throw new Exception($"Failed to deserialize data: {ex.Message}", ex); } } + } - // Process headers - if (recordElement.TryGetProperty("headers", out var headersElement) && - headersElement.ValueKind == JsonValueKind.Array) - { - var decodedHeaders = new Dictionary(); + // Process headers + if (recordElement.TryGetProperty("headers", out var headersElement) && + headersElement.ValueKind == JsonValueKind.Array) + { + var decodedHeaders = new Dictionary(); - foreach (var headerObj in headersElement.EnumerateArray()) + foreach (var headerObj in headersElement.EnumerateArray()) + { + foreach (var header in headerObj.EnumerateObject()) { - foreach (var header in headerObj.EnumerateObject()) + var headerKey = header.Name; + if (header.Value.ValueKind != JsonValueKind.Array) continue; + var headerBytes = new byte[header.Value.GetArrayLength()]; + var i = 0; + foreach (var byteVal in header.Value.EnumerateArray()) { - string headerKey = header.Name; - if (header.Value.ValueKind == JsonValueKind.Array) - { - byte[] headerBytes = new byte[header.Value.GetArrayLength()]; - int i = 0; - foreach (var byteVal in header.Value.EnumerateArray()) - { - headerBytes[i++] = (byte)byteVal.GetInt32(); - } - - string headerValue = Encoding.UTF8.GetString(headerBytes); - decodedHeaders[headerKey] = headerValue; - } + headerBytes[i++] = (byte)byteVal.GetInt32(); } - } - var headersProperty = recordType.GetProperty("Headers", - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); - headersProperty?.SetValue(record, decodedHeaders); + var headerValue = Encoding.UTF8.GetString(headerBytes); + decodedHeaders[headerKey] = headerValue; + } } - // Add to records list - listAddMethod.Invoke(recordsList, new[] { record }); + var headersProperty = recordType.GetProperty("Headers", + BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + headersProperty?.SetValue(record, decodedHeaders); } - // Add topic records to dictionary - dictAddMethod.Invoke(records, new[] { topicName, recordsList }); + // Add to records list + listAddMethod.Invoke(recordsList, new[] { record }); } - } - targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) - ?.SetValue(typedEvent, records); - return (T)typedEvent; + // Add topic records to dictionary + dictAddMethod.Invoke(records, new[] { topicName, recordsList }); + } } - var result = JsonSerializer.Deserialize(json, JsonOptions); - return result != null - ? 
result - : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); + targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, records); + return (T)typedEvent; } /// @@ -219,58 +285,61 @@ public T Deserialize(Stream requestStream) /// The key bytes to deserialize. /// The target type for the key. /// The deserialized key object. - protected object? DeserializeKey(byte[] keyBytes, Type keyType) + private object? DeserializeKey(byte[] keyBytes, Type keyType) { + // ReSharper disable once ConditionIsAlwaysTrueOrFalseAccordingToNullableAPIContract if (keyBytes == null || keyBytes.Length == 0) return null; if (keyType == typeof(int)) { // First try to interpret as a string representation and parse - string stringValue = Encoding.UTF8.GetString(keyBytes); - if (int.TryParse(stringValue, out int parsedValue)) + var stringValue = Encoding.UTF8.GetString(keyBytes); + if (int.TryParse(stringValue, out var parsedValue)) return parsedValue; - - // Fall back to binary representation if parsing fails - if (keyBytes.Length >= 4) - return BitConverter.ToInt32(keyBytes, 0); - else if (keyBytes.Length == 1) - return (int)keyBytes[0]; - - return 0; + + return keyBytes.Length switch + { + // Fall back to binary representation if parsing fails + >= 4 => BitConverter.ToInt32(keyBytes, 0), + 1 => keyBytes[0], + _ => 0 + }; } - else if (keyType == typeof(long)) + + if (keyType == typeof(long)) { // Try string parsing first - string stringValue = Encoding.UTF8.GetString(keyBytes); - if (long.TryParse(stringValue, out long parsedValue)) + var stringValue = Encoding.UTF8.GetString(keyBytes); + if (long.TryParse(stringValue, out var parsedValue)) return parsedValue; - - // Fall back to binary - if (keyBytes.Length >= 8) - return BitConverter.ToInt64(keyBytes, 0); - else if (keyBytes.Length >= 4) - return (long)BitConverter.ToInt32(keyBytes, 0); - - return 0L; + + return keyBytes.Length switch + { + // Fall back to binary + >= 8 => BitConverter.ToInt64(keyBytes, 0), + >= 4 => BitConverter.ToInt32(keyBytes, 0), + _ => 0L + }; } - else if (keyType == typeof(string)) + + if (keyType == typeof(string)) { // String conversion is safe regardless of length return Encoding.UTF8.GetString(keyBytes); } - else if (keyType == typeof(double)) + + if (keyType == typeof(double)) { - if (keyBytes.Length >= 8) - return BitConverter.ToDouble(keyBytes, 0); - else - return 0.0; + return keyBytes.Length >= 8 ? BitConverter.ToDouble(keyBytes, 0) : 0.0; } - else if (keyType == typeof(bool) && keyBytes.Length >= 1) + + if (keyType == typeof(bool) && keyBytes.Length >= 1) { return keyBytes[0] != 0; } - else if (keyType == typeof(Guid) && keyBytes.Length >= 16) + + if (keyType == typeof(Guid) && keyBytes.Length >= 16) { return new Guid(keyBytes); } @@ -287,7 +356,9 @@ public T Deserialize(Stream requestStream) /// The name of the property to set. /// The JsonElement containing the source data. /// The property name within the JsonElement. 
- protected void SetProperty(Type type, object instance, string propertyName, + [RequiresDynamicCode("Dynamically accesses properties which might be trimmed.")] + [RequiresUnreferencedCode("Dynamically accesses properties which might be trimmed.")] + private void SetProperty([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] Type type, object instance, string propertyName, JsonElement element, string jsonPropertyName) { if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || @@ -316,10 +387,53 @@ protected void SetProperty(Type type, object instance, string propertyName, /// The type of object to serialize. /// The object to serialize. /// The stream to write the serialized data to. + [RequiresDynamicCode("JSON serialization might require types that cannot be statically analyzed and might need runtime code generation.")] + [RequiresUnreferencedCode("JSON serialization might require types that cannot be statically analyzed.")] public void Serialize(T response, Stream responseStream) { + if (SerializerContext != null) + { + var typeInfo = GetJsonTypeInfo(); + if (typeInfo != null) + { + JsonSerializer.Serialize(responseStream, response, typeInfo); + return; + } + + // Try to find by type if generic match didn't work + var typeInfo2 = SerializerContext.GetTypeInfo(typeof(T)); + if (typeInfo2 != null) + { + JsonSerializer.Serialize(responseStream, response, typeInfo2); + return; + } + } + + // Fallback with warning using var writer = new StreamWriter(responseStream); + #pragma warning disable IL2026, IL3050 writer.Write(JsonSerializer.Serialize(response, JsonOptions)); + #pragma warning restore IL2026, IL3050 + } + + /// + /// Tries to get JsonTypeInfo for type T from the SerializerContext. + /// + private JsonTypeInfo? GetJsonTypeInfo() + { + if (SerializerContext == null) + return null; + + // Use reflection to find the right JsonTypeInfo property + foreach (var prop in SerializerContext.GetType().GetProperties()) + { + if (prop.PropertyType == typeof(JsonTypeInfo)) + { + return prop.GetValue(SerializerContext) as JsonTypeInfo; + } + } + + return null; } /// @@ -328,7 +442,9 @@ public void Serialize(T response, Stream responseStream) /// The base64-encoded binary data. /// The target type to deserialize to. /// The deserialized object. - protected abstract object DeserializeValue(string base64Value, Type valueType); + [RequiresDynamicCode("Deserializing values might require runtime code generation depending on format.")] + [RequiresUnreferencedCode("Deserializing values might require types that cannot be statically analyzed.")] + protected abstract object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType); /// /// Deserializes complex key types using the appropriate format. @@ -336,5 +452,7 @@ public void Serialize(T response, Stream responseStream) /// The key bytes to deserialize. /// The type to deserialize to. /// The deserialized key object. - protected abstract object? DeserializeComplexKey(byte[] keyBytes, Type keyType); + [RequiresDynamicCode("Deserializing complex keys might require runtime code generation depending on format.")] + [RequiresUnreferencedCode("Deserializing complex keys might require types that cannot be statically analyzed.")] + protected abstract object? 
DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type keyType); } \ No newline at end of file From 868acff70781720a81086d5d7d7e722d516e6de0 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Mon, 9 Jun 2025 11:32:46 +0100 Subject: [PATCH 11/35] add examples --- examples/Kafka/src/Avro/AvroKey.avsc | 24 ++++++ examples/Kafka/src/Avro/AvroProduct.avsc | 10 +++ .../Lambda/Powertools/Kafka/Tests/AvroKey.cs | 70 +++++++++++++++ .../Powertools/Kafka/Tests/AvroProduct.cs | 86 +++++++++++++++++++ .../Lambda/Powertools/Kafka/Tests/Color.cs | 23 +++++ examples/Kafka/src/Avro/kafka-avro-event.json | 51 +++++++++++ examples/Kafka/src/Function.cs | 24 ++++++ examples/Kafka/src/Json/kafka-json-event.json | 50 +++++++++++ examples/Kafka/src/Kafka.csproj | 69 +++++++++++++++ examples/Kafka/src/Protobuf/Key.proto | 14 +++ examples/Kafka/src/Protobuf/Product.proto | 9 ++ .../src/Protobuf/kafka-protobuf-event.json | 51 +++++++++++ examples/Kafka/src/Readme.md | 73 ++++++++++++++++ .../Kafka/src/aws-lambda-tools-defaults.json | 17 ++++ examples/examples.sln | 12 +++ 15 files changed, 583 insertions(+) create mode 100644 examples/Kafka/src/Avro/AvroKey.avsc create mode 100644 examples/Kafka/src/Avro/AvroProduct.avsc create mode 100644 examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs create mode 100644 examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs create mode 100644 examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs create mode 100644 examples/Kafka/src/Avro/kafka-avro-event.json create mode 100644 examples/Kafka/src/Function.cs create mode 100644 examples/Kafka/src/Json/kafka-json-event.json create mode 100644 examples/Kafka/src/Kafka.csproj create mode 100644 examples/Kafka/src/Protobuf/Key.proto create mode 100644 examples/Kafka/src/Protobuf/Product.proto create mode 100644 examples/Kafka/src/Protobuf/kafka-protobuf-event.json create mode 100644 examples/Kafka/src/Readme.md create mode 100644 examples/Kafka/src/aws-lambda-tools-defaults.json diff --git a/examples/Kafka/src/Avro/AvroKey.avsc b/examples/Kafka/src/Avro/AvroKey.avsc new file mode 100644 index 000000000..cc15c9e72 --- /dev/null +++ b/examples/Kafka/src/Avro/AvroKey.avsc @@ -0,0 +1,24 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroKey", + "fields": [ + { + "name": "id", + "type": "int" + }, + { + "name": "color", + "type": { + "type": "enum", + "name": "Color", + "symbols": [ + "UNKNOWN", + "GREEN", + "RED" + ], + "default": "UNKNOWN" + } + } + ] +} \ No newline at end of file diff --git a/examples/Kafka/src/Avro/AvroProduct.avsc b/examples/Kafka/src/Avro/AvroProduct.avsc new file mode 100644 index 000000000..60b8ed002 --- /dev/null +++ b/examples/Kafka/src/Avro/AvroProduct.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroProduct", + "fields": [ + {"name": "id", "type": "int"}, + {"name": "name", "type": "string"}, + {"name": "price", "type": "double"} + ] +} \ No newline at end of file diff --git a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs new file mode 100644 index 000000000..96d09316e --- /dev/null +++ 
b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs @@ -0,0 +1,70 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroKey : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroKey"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""fields"":[{""name"":""id"",""type"":""int""},{""name"":""color"",""type"":{""type"":""enum"",""name"":""Color"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""symbols"":[""UNKNOWN"",""GREEN"",""RED""],""default"":""UNKNOWN""}}]}"); + private int _id; + private AWS.Lambda.Powertools.Kafka.Tests.Color _color = AWS.Lambda.Powertools.Kafka.Tests.Color.UNKNOWN; + public virtual global::Avro.Schema Schema + { + get + { + return AvroKey._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public AWS.Lambda.Powertools.Kafka.Tests.Color color + { + get + { + return this._color; + } + set + { + this._color = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.color; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.color = (AWS.Lambda.Powertools.Kafka.Tests.Color)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs new file mode 100644 index 000000000..f1c6aa8d4 --- /dev/null +++ b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs @@ -0,0 +1,86 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroProduct : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"AWS.Lambda.Powertools.Kafka.Te" + + 
"sts\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"string\"},{\"name" + + "\":\"price\",\"type\":\"double\"}]}"); + private int _id; + private string _name; + private double _price; + public virtual global::Avro.Schema Schema + { + get + { + return AvroProduct._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public string name + { + get + { + return this._name; + } + set + { + this._name = value; + } + } + public double price + { + get + { + return this._price; + } + set + { + this._price = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.name; + case 2: return this.price; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.name = (System.String)fieldValue; break; + case 2: this.price = (System.Double)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs new file mode 100644 index 000000000..963233679 --- /dev/null +++ b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum Color + { + UNKNOWN, + GREEN, + RED, + } +} diff --git a/examples/Kafka/src/Avro/kafka-avro-event.json b/examples/Kafka/src/Avro/kafka-avro-event.json new file mode 100644 index 000000000..8d6ef2210 --- /dev/null +++ b/examples/Kafka/src/Avro/kafka-avro-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "0g8MTGFwdG9wUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + 
"value": "1g8USGVhZHBob25lc0jhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/examples/Kafka/src/Function.cs b/examples/Kafka/src/Function.cs new file mode 100644 index 000000000..c756feef2 --- /dev/null +++ b/examples/Kafka/src/Function.cs @@ -0,0 +1,24 @@ +using Amazon.Lambda.Core; +using Amazon.Lambda.RuntimeSupport; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Tests; + +// The function handler that will be called for each Lambda event +string Handler(ConsumerRecords records, ILambdaContext context) +{ + foreach (var record in records) + { + var key = record.Key.id; + var product = record.Value; + + context.Logger.LogInformation($"Processing record with key: {key}, Product: {product.name}, Price: {product.price}"); + } + + return "Processed " + records.Count() + " records"; +} + + +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/src/Json/kafka-json-event.json b/examples/Kafka/src/Json/kafka-json-event.json new file mode 100644 index 000000000..d85c40654 --- /dev/null +++ b/examples/Kafka/src/Json/kafka-json-event.json @@ -0,0 +1,50 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "cmVjb3JkS2V5", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": null, + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/examples/Kafka/src/Kafka.csproj b/examples/Kafka/src/Kafka.csproj new file mode 100644 index 000000000..16131f8c3 --- /dev/null +++ b/examples/Kafka/src/Kafka.csproj @@ -0,0 +1,69 @@ + + + Exe + net8.0 + enable + enable + true + Lambda + + true + + true + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + + + + + + + + + + + + + Client + PreserveNewest + MSBuild:Compile + + + + PreserveNewest + + + + + + + + PreserveNewest + + + + PreserveNewest + + + + PreserveNewest + + + + + + + \ No newline at end of file diff --git a/examples/Kafka/src/Protobuf/Key.proto b/examples/Kafka/src/Protobuf/Key.proto new file mode 100644 index 000000000..deedcf5dc --- /dev/null +++ b/examples/Kafka/src/Protobuf/Key.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +option csharp_namespace = "TestKafka"; + 
+message ProtobufKey {
+  int32 id = 1;
+  Color color = 2;
+}
+
+enum Color {
+  UNKNOWN = 0;
+  GREEN = 1;
+  RED = 2;
+}
\ No newline at end of file
diff --git a/examples/Kafka/src/Protobuf/Product.proto b/examples/Kafka/src/Protobuf/Product.proto
new file mode 100644
index 000000000..1d4c64e90
--- /dev/null
+++ b/examples/Kafka/src/Protobuf/Product.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+
+option csharp_namespace = "TestKafka";
+
+message ProtobufProduct {
+  int32 id = 1;
+  string name = 2;
+  double price = 3;
+}
\ No newline at end of file
diff --git a/examples/Kafka/src/Protobuf/kafka-protobuf-event.json b/examples/Kafka/src/Protobuf/kafka-protobuf-event.json
new file mode 100644
index 000000000..b3e0139e3
--- /dev/null
+++ b/examples/Kafka/src/Protobuf/kafka-protobuf-event.json
@@ -0,0 +1,51 @@
+{
+  "eventSource": "aws:kafka",
+  "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4",
+  "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092",
+  "records": {
+    "mytopic-0": [
+      {
+        "topic": "mytopic",
+        "partition": 0,
+        "offset": 15,
+        "timestamp": 1545084650987,
+        "timestampType": "CREATE_TIME",
+        "key": "NDI=",
+        "value": "COkHEgZMYXB0b3AZUrgehes/j0A=",
+        "headers": [
+          {
+            "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+          }
+        ]
+      },
+      {
+        "topic": "mytopic",
+        "partition": 0,
+        "offset": 16,
+        "timestamp": 1545084650988,
+        "timestampType": "CREATE_TIME",
+        "key": "NDI=",
+        "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA",
+        "headers": [
+          {
+            "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+          }
+        ]
+      },
+      {
+        "topic": "mytopic",
+        "partition": 0,
+        "offset": 17,
+        "timestamp": 1545084650989,
+        "timestampType": "CREATE_TIME",
+        "key": null,
+        "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA",
+        "headers": [
+          {
+            "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101]
+          }
+        ]
+      }
+    ]
+  }
+}
diff --git a/examples/Kafka/src/Readme.md b/examples/Kafka/src/Readme.md
new file mode 100644
index 000000000..e43192609
--- /dev/null
+++ b/examples/Kafka/src/Readme.md
@@ -0,0 +1,73 @@
+# Powertools for AWS Lambda - Kafka examples
+
+## Already added to the project
+
+### Avro
+
+```bash
+dotnet tool install --global Apache.Avro.Tools
+
+cd examples/Kafka/src/Avro/
+avrogen -s AvroProduct.avsc ./
+avrogen -s AvroKey.avsc ./
+```
+
+```xml
+
+
+
+
+
+
+
+```
+
+### Protobuf
+
+```xml
+
+
+
+      PreserveNewest
+
+
+
+
+```
+
+## Getting started from the command line
+
+Install the Amazon.Lambda.Tools global tool if it is not already installed:
+
+```
+dotnet tool install -g Amazon.Lambda.Tools
+```
+
+## Edit the aws-lambda-tools-defaults.json file
+
+Update the IAM role to use in the `aws-lambda-tools-defaults.json` file. This file is used by the `dotnet lambda deploy-function` command to deploy the Lambda function.
+
+```
+code aws-lambda-tools-defaults.json
+```
+
+Deploy the function to AWS Lambda:
+
+```
+dotnet lambda deploy-function
+```
+
+## Infra
+
+Connect the function to your Kafka cluster with an event source mapping so that Lambda polls the topic and invokes the function with batches of records. For example, for an Amazon MSK cluster (all values below are placeholders):
+
+```bash
+aws lambda create-event-source-mapping --function-name <function-name> --event-source-arn <msk-cluster-arn> --topics <topic-name> --starting-position LATEST
+```
+
+## Invoke the function
+
+Use the provided test event to invoke the function. You can do this with the AWS CLI or the dotnet CLI.
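+
+With the AWS CLI, a minimal sketch (the function name below comes from `aws-lambda-tools-defaults.json`; with AWS CLI v2, `--cli-binary-format raw-in-base64-out` keeps the JSON payload from being double-encoded):
+
+```bash
+aws lambda invoke --function-name Powertools-Kafka-Example \
+  --cli-binary-format raw-in-base64-out \
+  --payload file://Avro/kafka-avro-event.json response.json
+```
+
+Or with the dotnet Lambda CLI: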
+
+```bash
+dotnet lambda invoke-function --payload file://Avro/kafka-avro-event.json
+```
diff --git a/examples/Kafka/src/aws-lambda-tools-defaults.json b/examples/Kafka/src/aws-lambda-tools-defaults.json
new file mode 100644
index 000000000..969492087
--- /dev/null
+++ b/examples/Kafka/src/aws-lambda-tools-defaults.json
@@ -0,0 +1,17 @@
+{
+  "Information": [
+    "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.",
+    "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.",
+    "dotnet lambda help",
+    "All the command line options for the Lambda command can be specified in this file."
+  ],
+  "profile": "",
+  "region": "",
+  "configuration": "Release",
+  "function-runtime": "dotnet8",
+  "function-memory-size": 512,
+  "function-timeout": 30,
+  "function-handler": "Kafka",
+  "function-name": "Powertools-Kafka-Example",
+  "function-role": "arn:aws:iam::123456789012:role/lambda-role"
+}
\ No newline at end of file
diff --git a/examples/examples.sln b/examples/examples.sln
index 10ec48509..6919941ef 100644
--- a/examples/examples.sln
+++ b/examples/examples.sln
@@ -109,6 +109,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT_Logging", "AOT\AOT_Logg
 EndProject
 Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT_Logging.Tests", "AOT\AOT_Logging\test\AOT_Logging.Tests\AOT_Logging.Tests.csproj", "{FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}"
 EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Kafka", "Kafka", "{71027B81-CA39-498C-9A50-ADDAFA2AC2F5}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kafka", "Kafka\src\Kafka.csproj", "{D9FF982F-7CD7-4652-94BB-B387B7FC034F}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{0D5246BB-02B3-43EE-9EE3-E0E627170C9B}"
+EndProject
 Global
 GlobalSection(SolutionConfigurationPlatforms) = preSolution
 Debug|Any CPU = Debug|Any CPU
@@ -202,6 +208,10 @@ Global
 {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Debug|Any CPU.Build.0 = Debug|Any CPU
 {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.ActiveCfg = Release|Any CPU
 {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.Build.0 = Release|Any CPU
+ {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+ {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Debug|Any CPU.Build.0 = Debug|Any CPU
+ {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Release|Any CPU.ActiveCfg = Release|Any CPU
+ {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Release|Any CPU.Build.0 = Release|Any CPU
 EndGlobalSection
 GlobalSection(NestedProjects) = preSolution
 {0CC66DBC-C1DF-4AF6-8EEB-FFED6C578BF4} = {526F1EF7-5A9C-4BFF-ABAE-75992ACD8F78}
@@ -249,5 +259,7 @@ Global
 {343CF6B9-C006-43F8-924C-BF5BF5B6D051} = {FE1CAA26-87E9-4B71-800E-81D2997A7B53}
 {FC02CF45-DE15-4413-958A-D86808B99146} = {FEE72EAB-494F-403B-A75A-825E713C3D43}
 {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5} = {F3480212-EE7F-46FE-9ED5-24ACAB5B681D}
+ {0D5246BB-02B3-43EE-9EE3-E0E627170C9B} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5}
+ {D9FF982F-7CD7-4652-94BB-B387B7FC034F} = {0D5246BB-02B3-43EE-9EE3-E0E627170C9B}
 EndGlobalSection
 EndGlobal
From 733fbbdaf6502d0cb0ab5c9f30f82a0df48df6ec Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Tue, 17 Jun 2025 17:25:40 +0100
Subject: [PATCH 12/35] refactor to multiple projects, add examples.
add headers decoded and non decoded --- examples/Kafka/Avro/src/Avro.csproj | 42 +++++ examples/Kafka/Avro/src/AvroKey.avsc | 24 +++ examples/Kafka/Avro/src/AvroProduct.avsc | 10 ++ examples/Kafka/Avro/src/Function.cs | 62 +++++++ .../Lambda/Powertools/Kafka/Tests/AvroKey.cs | 70 ++++++++ .../Powertools/Kafka/Tests/AvroProduct.cs | 86 ++++++++++ .../Lambda/Powertools/Kafka/Tests/Color.cs | 23 +++ .../Generated/com/example/AccountStatus.cs | 23 +++ .../Avro/src/Generated/com/example/Address.cs | 115 +++++++++++++ .../Generated/com/example/CustomerProfile.cs | 154 ++++++++++++++++++ .../src/Generated/com/example/EmailAddress.cs | 86 ++++++++++ .../src/Generated/com/example/PhoneNumber.cs | 72 ++++++++ .../src/Generated/com/example/PhoneType.cs | 23 +++ examples/Kafka/Avro/src/Payload.avsc | 46 ++++++ examples/Kafka/Avro/src/Readme.md | 63 +++++++ .../Avro/src/aws-lambda-tools-defaults.json | 17 ++ examples/Kafka/Avro/src/kafka-avro-event.json | 23 +++ examples/Kafka/Json/src/Function.cs | 124 ++++++++++++++ examples/Kafka/Json/src/Json.csproj | 31 ++++ examples/Kafka/Json/src/Readme.md | 63 +++++++ .../Json/src/aws-lambda-tools-defaults.json | 17 ++ examples/Kafka/Json/src/kafka-json-event.json | 23 +++ examples/Kafka/Protobuf/src/Function.cs | 90 ++++++++++ examples/Kafka/Protobuf/src/Key.proto | 14 ++ examples/Kafka/Protobuf/src/Payload.proto | 49 ++++++ examples/Kafka/Protobuf/src/Product.proto | 9 + examples/Kafka/Protobuf/src/Protobuf.csproj | 62 +++++++ examples/Kafka/Protobuf/src/Readme.md | 63 +++++++ .../src/aws-lambda-tools-defaults.json | 17 ++ .../Protobuf/src/kafka-protobuf-event.json | 23 +++ examples/Kafka/src/Function.cs | 73 ++++++++- examples/Kafka/src/Kafka.csproj | 9 +- .../Kafka/src/aws-lambda-tools-defaults.json | 6 +- examples/examples.sln | 21 +++ libraries/AWS.Lambda.Powertools.sln | 45 +++++ .../AWS.Lambda.Powertools.Kafka.Avro.csproj | 21 +++ .../PowertoolsKafkaAvroSerializer.cs | 2 +- .../Readme.md | 1 + .../AWS.Lambda.Powertools.Kafka.Json.csproj | 19 +++ .../PowertoolsKafkaJsonSerializer.cs | 3 +- .../Readme.md | 1 + ...WS.Lambda.Powertools.Kafka.Protobuf.csproj | 24 +++ .../PowertoolsKafkaProtobufSerializer.cs | 2 +- .../Readme.md | 1 + .../AWS.Lambda.Powertools.Kafka.csproj | 2 - .../ConsumerRecord.cs | 2 +- .../HeaderExtensions.cs | 37 +++++ .../PowertoolsKafkaSerializerBase.cs | 124 +++++++++----- .../AWS.Lambda.Powertools.Kafka.Tests.csproj | 4 +- .../Avro/HandlerTests.cs | 7 +- .../PowertoolsKafkaAvroSerializerTests.cs | 1 + .../PowertoolsKafkaJsonSerializerTests.cs | 3 + .../Protobuf/HandlerTests.cs | 5 +- .../PowertoolsKafkaProtobufSerializerTests.cs | 1 + 54 files changed, 1875 insertions(+), 63 deletions(-) create mode 100644 examples/Kafka/Avro/src/Avro.csproj create mode 100644 examples/Kafka/Avro/src/AvroKey.avsc create mode 100644 examples/Kafka/Avro/src/AvroProduct.avsc create mode 100644 examples/Kafka/Avro/src/Function.cs create mode 100644 examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs create mode 100644 examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs create mode 100644 examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs create mode 100644 examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs create mode 100644 examples/Kafka/Avro/src/Generated/com/example/Address.cs create mode 100644 examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs create mode 100644 examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs create mode 
100644 examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs create mode 100644 examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs create mode 100644 examples/Kafka/Avro/src/Payload.avsc create mode 100644 examples/Kafka/Avro/src/Readme.md create mode 100644 examples/Kafka/Avro/src/aws-lambda-tools-defaults.json create mode 100644 examples/Kafka/Avro/src/kafka-avro-event.json create mode 100644 examples/Kafka/Json/src/Function.cs create mode 100644 examples/Kafka/Json/src/Json.csproj create mode 100644 examples/Kafka/Json/src/Readme.md create mode 100644 examples/Kafka/Json/src/aws-lambda-tools-defaults.json create mode 100644 examples/Kafka/Json/src/kafka-json-event.json create mode 100644 examples/Kafka/Protobuf/src/Function.cs create mode 100644 examples/Kafka/Protobuf/src/Key.proto create mode 100644 examples/Kafka/Protobuf/src/Payload.proto create mode 100644 examples/Kafka/Protobuf/src/Product.proto create mode 100644 examples/Kafka/Protobuf/src/Protobuf.csproj create mode 100644 examples/Kafka/Protobuf/src/Readme.md create mode 100644 examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json create mode 100644 examples/Kafka/Protobuf/src/kafka-protobuf-event.json create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka.Avro/AWS.Lambda.Powertools.Kafka.Avro.csproj rename libraries/src/{AWS.Lambda.Powertools.Kafka => AWS.Lambda.Powertools.Kafka.Avro}/PowertoolsKafkaAvroSerializer.cs (99%) create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka.Json/AWS.Lambda.Powertools.Kafka.Json.csproj rename libraries/src/{AWS.Lambda.Powertools.Kafka => AWS.Lambda.Powertools.Kafka.Json}/PowertoolsKafkaJsonSerializer.cs (98%) create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/AWS.Lambda.Powertools.Kafka.Protobuf.csproj rename libraries/src/{AWS.Lambda.Powertools.Kafka => AWS.Lambda.Powertools.Kafka.Protobuf}/PowertoolsKafkaProtobufSerializer.cs (99%) create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs diff --git a/examples/Kafka/Avro/src/Avro.csproj b/examples/Kafka/Avro/src/Avro.csproj new file mode 100644 index 000000000..2781c9c21 --- /dev/null +++ b/examples/Kafka/Avro/src/Avro.csproj @@ -0,0 +1,42 @@ + + + Exe + net8.0 + enable + enable + true + Lambda + + true + + true + + Avro.Example + + + + + + + + + + + + + + + + + + + + + + + + + PreserveNewest + + + \ No newline at end of file diff --git a/examples/Kafka/Avro/src/AvroKey.avsc b/examples/Kafka/Avro/src/AvroKey.avsc new file mode 100644 index 000000000..cc15c9e72 --- /dev/null +++ b/examples/Kafka/Avro/src/AvroKey.avsc @@ -0,0 +1,24 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroKey", + "fields": [ + { + "name": "id", + "type": "int" + }, + { + "name": "color", + "type": { + "type": "enum", + "name": "Color", + "symbols": [ + "UNKNOWN", + "GREEN", + "RED" + ], + "default": "UNKNOWN" + } + } + ] +} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/AvroProduct.avsc b/examples/Kafka/Avro/src/AvroProduct.avsc new file mode 100644 index 000000000..60b8ed002 --- /dev/null +++ b/examples/Kafka/Avro/src/AvroProduct.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroProduct", + "fields": [ + {"name": "id", "type": "int"}, + 
{"name": "name", "type": "string"}, + {"name": "price", "type": "double"} + ] +} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Function.cs b/examples/Kafka/Avro/src/Function.cs new file mode 100644 index 000000000..0225d3861 --- /dev/null +++ b/examples/Kafka/Avro/src/Function.cs @@ -0,0 +1,62 @@ +using System.Diagnostics; +using Amazon.Lambda.Core; +using Amazon.Lambda.RuntimeSupport; +using Amazon.Lambda.Serialization.SystemTextJson; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Avro; +using AWS.Lambda.Powertools.Kafka.Tests; +using AWS.Lambda.Powertools.Logging; +using AWS.Lambda.Powertools.Metrics; +using com.example; + +// string Handler(ConsumerRecords records, ILambdaContext context) +// { +// Metrics.SetNamespace("Avro"); +// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); +// +// foreach (var record in records) +// { +// Logger.LogInformation("Record Key: {@key}", record.Key); +// Logger.LogInformation("Record Value: {@record}", record.Value); +// } +// +// return "Processed " + records.Count() + " records"; +// } +// +// +// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +// new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization +// .Build() +// .RunAsync(); + +var responseStream = new MemoryStream(); +var serializer = new PowertoolsKafkaAvroSerializer(); +Task ToUpperAsync(InvocationRequest invocation) +{ + var stopwatch = Stopwatch.StartNew(); + + var records = serializer.Deserialize>(invocation.InputStream); + + foreach (var record in records) + { + Console.WriteLine("Record UserId: {0}", record.Value.user_id); + } + + stopwatch.Stop(); + + Metrics.PushSingleMetric("AvroDeserialization-1024", + stopwatch.ElapsedMilliseconds, MetricUnit.Milliseconds, "kafka-dotnet", "service", null, + MetricResolution.High); + + Console.WriteLine("Record Count: {0}", records.Count()); + Console.WriteLine("Record UserId: {0}", records.First().Value.user_id); + Console.WriteLine("JsonDeserialization: {0:F2}", stopwatch.ElapsedMilliseconds); + + responseStream.SetLength(0); + responseStream.Position = 0; + + return Task.FromResult(new InvocationResponse(responseStream, false)); +} + +var bootstrap = new LambdaBootstrap(ToUpperAsync); +await bootstrap.RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs new file mode 100644 index 000000000..96d09316e --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs @@ -0,0 +1,70 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroKey : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = 
global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroKey"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""fields"":[{""name"":""id"",""type"":""int""},{""name"":""color"",""type"":{""type"":""enum"",""name"":""Color"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""symbols"":[""UNKNOWN"",""GREEN"",""RED""],""default"":""UNKNOWN""}}]}"); + private int _id; + private AWS.Lambda.Powertools.Kafka.Tests.Color _color = AWS.Lambda.Powertools.Kafka.Tests.Color.UNKNOWN; + public virtual global::Avro.Schema Schema + { + get + { + return AvroKey._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public AWS.Lambda.Powertools.Kafka.Tests.Color color + { + get + { + return this._color; + } + set + { + this._color = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.color; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.color = (AWS.Lambda.Powertools.Kafka.Tests.Color)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs new file mode 100644 index 000000000..f1c6aa8d4 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs @@ -0,0 +1,86 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroProduct : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"AWS.Lambda.Powertools.Kafka.Te" + + "sts\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"string\"},{\"name" + + "\":\"price\",\"type\":\"double\"}]}"); + private int _id; + private string _name; + private double _price; + public virtual global::Avro.Schema Schema + { + get + { + return AvroProduct._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public string name + { + get + { + return this._name; + } + set + { + this._name = value; + } + } + public double price + { + get + { + return this._price; + } + set + { + this._price = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.name; + case 2: return this.price; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + 
switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.name = (System.String)fieldValue; break; + case 2: this.price = (System.Double)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs new file mode 100644 index 000000000..963233679 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum Color + { + UNKNOWN, + GREEN, + RED, + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs b/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs new file mode 100644 index 000000000..c7809f518 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum AccountStatus + { + ACTIVE, + INACTIVE, + SUSPENDED, + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/Address.cs b/examples/Kafka/Avro/src/Generated/com/example/Address.cs new file mode 100644 index 000000000..e2053e0f2 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/Address.cs @@ -0,0 +1,115 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class Address : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"Address\",\"namespace\":\"com.example\",\"fields\":[{\"name\":\"st" + + 
"reet\",\"type\":\"string\"},{\"name\":\"city\",\"type\":\"string\"},{\"name\":\"state\",\"type\":\"s" + + "tring\"},{\"name\":\"country\",\"type\":\"string\"},{\"name\":\"zip_code\",\"type\":\"string\"}]}" + + ""); + private string _street; + private string _city; + private string _state; + private string _country; + private string _zip_code; + public virtual global::Avro.Schema Schema + { + get + { + return Address._SCHEMA; + } + } + public string street + { + get + { + return this._street; + } + set + { + this._street = value; + } + } + public string city + { + get + { + return this._city; + } + set + { + this._city = value; + } + } + public string state + { + get + { + return this._state; + } + set + { + this._state = value; + } + } + public string country + { + get + { + return this._country; + } + set + { + this._country = value; + } + } + public string zip_code + { + get + { + return this._zip_code; + } + set + { + this._zip_code = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.street; + case 1: return this.city; + case 2: return this.state; + case 3: return this.country; + case 4: return this.zip_code; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.street = (System.String)fieldValue; break; + case 1: this.city = (System.String)fieldValue; break; + case 2: this.state = (System.String)fieldValue; break; + case 3: this.country = (System.String)fieldValue; break; + case 4: this.zip_code = (System.String)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs b/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs new file mode 100644 index 000000000..15d62095d --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs @@ -0,0 +1,154 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class CustomerProfile : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = 
global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""CustomerProfile"",""namespace"":""com.example"",""fields"":[{""name"":""user_id"",""type"":""string""},{""name"":""full_name"",""type"":""string""},{""name"":""email"",""type"":{""type"":""record"",""name"":""EmailAddress"",""namespace"":""com.example"",""fields"":[{""name"":""address"",""type"":""string""},{""name"":""verified"",""type"":""boolean""},{""name"":""primary"",""type"":""boolean""}]}},{""name"":""age"",""type"":""int""},{""name"":""address"",""type"":{""type"":""record"",""name"":""Address"",""namespace"":""com.example"",""fields"":[{""name"":""street"",""type"":""string""},{""name"":""city"",""type"":""string""},{""name"":""state"",""type"":""string""},{""name"":""country"",""type"":""string""},{""name"":""zip_code"",""type"":""string""}]}},{""name"":""phone_numbers"",""type"":{""type"":""array"",""items"":{""type"":""record"",""name"":""PhoneNumber"",""namespace"":""com.example"",""fields"":[{""name"":""number"",""type"":""string""},{""name"":""type"",""type"":{""type"":""enum"",""name"":""PhoneType"",""namespace"":""com.example"",""symbols"":[""HOME"",""WORK"",""MOBILE""]}}]}}},{""name"":""preferences"",""type"":{""type"":""map"",""values"":""string""}},{""name"":""account_status"",""type"":{""type"":""enum"",""name"":""AccountStatus"",""namespace"":""com.example"",""symbols"":[""ACTIVE"",""INACTIVE"",""SUSPENDED""]}}]}"); + private string _user_id; + private string _full_name; + private com.example.EmailAddress _email; + private int _age; + private com.example.Address _address; + private IList _phone_numbers; + private IDictionary _preferences; + private com.example.AccountStatus _account_status; + public virtual global::Avro.Schema Schema + { + get + { + return CustomerProfile._SCHEMA; + } + } + public string user_id + { + get + { + return this._user_id; + } + set + { + this._user_id = value; + } + } + public string full_name + { + get + { + return this._full_name; + } + set + { + this._full_name = value; + } + } + public com.example.EmailAddress email + { + get + { + return this._email; + } + set + { + this._email = value; + } + } + public int age + { + get + { + return this._age; + } + set + { + this._age = value; + } + } + public com.example.Address address + { + get + { + return this._address; + } + set + { + this._address = value; + } + } + public IList phone_numbers + { + get + { + return this._phone_numbers; + } + set + { + this._phone_numbers = value; + } + } + public IDictionary preferences + { + get + { + return this._preferences; + } + set + { + this._preferences = value; + } + } + public com.example.AccountStatus account_status + { + get + { + return this._account_status; + } + set + { + this._account_status = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.user_id; + case 1: return this.full_name; + case 2: return this.email; + case 3: return this.age; + case 4: return this.address; + case 5: return this.phone_numbers; + case 6: return this.preferences; + case 7: return this.account_status; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.user_id = (System.String)fieldValue; break; + case 1: this.full_name = (System.String)fieldValue; break; + case 2: this.email = (com.example.EmailAddress)fieldValue; break; + case 3: this.age = (System.Int32)fieldValue; break; + case 4: this.address = 
(com.example.Address)fieldValue; break; + case 5: this.phone_numbers = (IList)fieldValue; break; + case 6: this.preferences = (IDictionary)fieldValue; break; + case 7: this.account_status = (com.example.AccountStatus)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs b/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs new file mode 100644 index 000000000..4a25a6e0b --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs @@ -0,0 +1,86 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class EmailAddress : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"EmailAddress\",\"namespace\":\"com.example\",\"fields\":[{\"name" + + "\":\"address\",\"type\":\"string\"},{\"name\":\"verified\",\"type\":\"boolean\"},{\"name\":\"prima" + + "ry\",\"type\":\"boolean\"}]}"); + private string _address; + private bool _verified; + private bool _primary; + public virtual global::Avro.Schema Schema + { + get + { + return EmailAddress._SCHEMA; + } + } + public string address + { + get + { + return this._address; + } + set + { + this._address = value; + } + } + public bool verified + { + get + { + return this._verified; + } + set + { + this._verified = value; + } + } + public bool primary + { + get + { + return this._primary; + } + set + { + this._primary = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.address; + case 1: return this.verified; + case 2: return this.primary; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.address = (System.String)fieldValue; break; + case 1: this.verified = (System.Boolean)fieldValue; break; + case 2: this.primary = (System.Boolean)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs b/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs new file mode 100644 index 000000000..ea3d2b8ed --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs @@ -0,0 +1,72 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using 
global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class PhoneNumber : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"PhoneNumber\",\"namespace\":\"com.example\",\"fields\":[{\"name\"" + + ":\"number\",\"type\":\"string\"},{\"name\":\"type\",\"type\":{\"type\":\"enum\",\"name\":\"PhoneTyp" + + "e\",\"namespace\":\"com.example\",\"symbols\":[\"HOME\",\"WORK\",\"MOBILE\"]}}]}"); + private string _number; + private com.example.PhoneType _type; + public virtual global::Avro.Schema Schema + { + get + { + return PhoneNumber._SCHEMA; + } + } + public string number + { + get + { + return this._number; + } + set + { + this._number = value; + } + } + public com.example.PhoneType type + { + get + { + return this._type; + } + set + { + this._type = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.number; + case 1: return this.type; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.number = (System.String)fieldValue; break; + case 1: this.type = (com.example.PhoneType)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs b/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs new file mode 100644 index 000000000..f592d8692 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum PhoneType + { + HOME, + WORK, + MOBILE, + } +} diff --git a/examples/Kafka/Avro/src/Payload.avsc b/examples/Kafka/Avro/src/Payload.avsc new file mode 100644 index 000000000..bf8cc090c --- /dev/null +++ b/examples/Kafka/Avro/src/Payload.avsc @@ -0,0 +1,46 @@ +{ + "type": "record", + "name": "CustomerProfile", + "namespace": "com.example", + "fields": [ + {"name": "user_id", "type": "string"}, + {"name": "full_name", "type": "string"}, + {"name": "email", "type": { + "type": "record", + "name": "EmailAddress", + "fields": [ + {"name": "address", "type": "string"}, + {"name": "verified", "type": "boolean"}, + {"name": "primary", "type": "boolean"} + ] + }}, + {"name": "age", "type": "int"}, + {"name": "address", "type": { + "type": "record", + "name": "Address", + "fields": [ + {"name": "street", "type": "string"}, + {"name": "city", "type": "string"}, + {"name": "state", "type": "string"}, + {"name": "country", "type": "string"}, + {"name": "zip_code", "type": "string"} + ] + }}, + {"name": "phone_numbers", "type": { + "type": "array", + "items": { + "type": "record", + 
"name": "PhoneNumber", + "fields": [ + {"name": "number", "type": "string"}, + {"name": "type", "type": {"type": "enum", "name": "PhoneType", "symbols": ["HOME", "WORK", "MOBILE"]}} + ] + } + }}, + {"name": "preferences", "type": { + "type": "map", + "values": "string" + }}, + {"name": "account_status", "type": {"type": "enum", "name": "AccountStatus", "symbols": ["ACTIVE", "INACTIVE", "SUSPENDED"]}} + ] +} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Readme.md b/examples/Kafka/Avro/src/Readme.md new file mode 100644 index 000000000..79ed451ba --- /dev/null +++ b/examples/Kafka/Avro/src/Readme.md @@ -0,0 +1,63 @@ +# AWS Lambda Function Using Top Level Statements + +This starter project consists of: +* Function.cs - file contain C# top level statements that define the function to be called for each event and starts the Lambda runtime client. +* aws-lambda-tools-defaults.json - default argument settings for use with Visual Studio and command line deployment tools for AWS + +You may also have a test project depending on the options selected. + +The generated function handler is a simple method accepting a string argument that returns the uppercase equivalent of the input string. Replace the body of this method, and parameters, to suit your needs. + +## Executable Assembly + +.NET Lambda projects that use C# top level statements like this project must be deployed as an executable assembly instead of a class library. To indicate to Lambda that the .NET function is an executable assembly the +Lambda function handler value is set to the .NET Assembly name. This is different then deploying as a class library where the function handler string includes the assembly, type and method name. + +To deploy as an executable assembly the Lambda runtime client must be started to listen for incoming events to process. To start +the Lambda runtime client add the `Amazon.Lambda.RuntimeSupport` NuGet package and add the following code at the end of the +of the file containing top-level statements to start the runtime. + +```csharp +await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer()) + .Build() + .RunAsync(); +``` + +Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that +should be called for each event. If the handler takes in an input event besides `System.IO.Stream` then +the JSON serializer must also be passed into the `Create` method. + + +## Here are some steps to follow from Visual Studio: + +To deploy your function to AWS Lambda, right click the project in Solution Explorer and select *Publish to AWS Lambda*. + +To view your deployed function open its Function View window by double-clicking the function name shown beneath the AWS Lambda node in the AWS Explorer tree. + +To perform testing against your deployed function use the Test Invoke tab in the opened Function View window. + +To configure event sources for your deployed function, for example to have your function invoked when an object is created in an Amazon S3 bucket, use the Event Sources tab in the opened Function View window. + +To update the runtime configuration of your deployed function use the Configuration tab in the opened Function View window. + +To view execution logs of invocations of your function use the Logs tab in the opened Function View window. 
+ +## Here are some steps to follow to get started from the command line: + +Once you have edited your template and code you can deploy your application using the [Amazon.Lambda.Tools Global Tool](https://github.com/aws/aws-extensions-for-dotnet-cli#aws-lambda-amazonlambdatools) from the command line. + +Install Amazon.Lambda.Tools Global Tools if not already installed. +``` + dotnet tool install -g Amazon.Lambda.Tools +``` + +If already installed check if new version is available. +``` + dotnet tool update -g Amazon.Lambda.Tools +``` + +Deploy function to AWS Lambda +``` + cd "Avro/src/Avro" + dotnet lambda deploy-function +``` \ No newline at end of file diff --git a/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..3db487a1e --- /dev/null +++ b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json @@ -0,0 +1,17 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." + ], + "profile": "", + "region": "", + "configuration": "Release", + "function-runtime": "dotnet8", + "function-memory-size": 1024, + "function-timeout": 30, + "function-handler": "Avro.Example", + "function-name": "dotnet-kafka-avro-1024", + "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" +} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/kafka-avro-event.json b/examples/Kafka/Avro/src/kafka-avro-event.json new file mode 100644 index 000000000..6f5e045e3 --- /dev/null +++ b/examples/Kafka/Avro/src/kafka-avro-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "EnVzZXJfOTc1NBxVc2VyIHVzZXJfOTc1NCh1c2VyXzk3NTRAaWNsb3VkLmNvbQABahg5MzQwIE1haW4gU3QQU2FuIEpvc2UEQ0EGVVNBCjM5NTk2AhgyNDQtNDA3LTg4NzECAAYQdGltZXpvbmUOZW5hYmxlZBBsYW5ndWFnZRBkaXNhYmxlZBpub3RpZmljYXRpb25zCGRhcmsABA==", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Function.cs b/examples/Kafka/Json/src/Function.cs new file mode 100644 index 000000000..cbd6fca03 --- /dev/null +++ b/examples/Kafka/Json/src/Function.cs @@ -0,0 +1,124 @@ +using System.Diagnostics; +using System.Text.Json.Serialization; +using Amazon.Lambda.Core; +using Amazon.Lambda.RuntimeSupport; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Json; +using AWS.Lambda.Powertools.Logging; +using AWS.Lambda.Powertools.Metrics; + +// string Handler(ConsumerRecords records, ILambdaContext context) +// { +// Metrics.SetNamespace("Json"); +// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); +// +// foreach (var record in records) +// { +// 
Logger.LogInformation("Record Key: {@record.Key}", record.Key); +// Logger.LogInformation("Record Value: {@record}", record.Value); +// } +// +// return "Processed " + records.Count() + " records"; +// } +// +// +// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +// new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization +// .Build() +// .RunAsync(); + +var responseStream = new MemoryStream(); +var serializer = new PowertoolsKafkaJsonSerializer(); + +Task ToUpperAsync(InvocationRequest invocation) +{ + var stopwatch = Stopwatch.StartNew(); + + var records = serializer.Deserialize>(invocation.InputStream); + + foreach (var record in records) + { + Console.WriteLine("Record UserId: {0}", record.Value.UserId); + } + + stopwatch.Stop(); + + Metrics.PushSingleMetric("JsonDeserialization-1024", + stopwatch.ElapsedMilliseconds, MetricUnit.Milliseconds, "kafka-dotnet", "service", null, + MetricResolution.High); + + Console.WriteLine("Record Count: {0}", records.Count()); + Console.WriteLine("Record UserId: {0}", records.First().Value.UserId); + Console.WriteLine("JsonDeserialization: {0:F2}", stopwatch.ElapsedMilliseconds); + + responseStream.SetLength(0); + responseStream.Position = 0; + + return Task.FromResult(new InvocationResponse(responseStream, false)); +} + +var bootstrap = new LambdaBootstrap(ToUpperAsync); +await bootstrap.RunAsync(); + + +public record JsonKey +{ + public int Id { get; set; } +} + +public partial class Payload +{ + [JsonPropertyName("user_id")] public string UserId { get; set; } + + [JsonPropertyName("full_name")] public string FullName { get; set; } + + [JsonPropertyName("email")] public Email Email { get; set; } + + [JsonPropertyName("age")] public long Age { get; set; } + + [JsonPropertyName("address")] public Address Address { get; set; } + + [JsonPropertyName("phone_numbers")] public List PhoneNumbers { get; set; } + + [JsonPropertyName("preferences")] public Preferences Preferences { get; set; } + + [JsonPropertyName("account_status")] public string AccountStatus { get; set; } +} + +public partial class Address +{ + [JsonPropertyName("street")] public string Street { get; set; } + + [JsonPropertyName("city")] public string City { get; set; } + + [JsonPropertyName("state")] public string State { get; set; } + + [JsonPropertyName("country")] public string Country { get; set; } + + [JsonPropertyName("zip_code")] public string ZipCode { get; set; } +} + +public partial class Email +{ + [JsonPropertyName("address")] public string Address { get; set; } + + [JsonPropertyName("verified")] public bool Verified { get; set; } + + [JsonPropertyName("primary")] public bool Primary { get; set; } +} + +public partial class PhoneNumber +{ + [JsonPropertyName("number")] public string Number { get; set; } + + [JsonPropertyName("type")] public string Type { get; set; } +} + +public partial class Preferences +{ + [JsonPropertyName("language")] public string Language { get; set; } + + [JsonPropertyName("notifications")] public string Notifications { get; set; } + + [JsonPropertyName("timezone")] public string Timezone { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Json.csproj b/examples/Kafka/Json/src/Json.csproj new file mode 100644 index 000000000..158711ba9 --- /dev/null +++ b/examples/Kafka/Json/src/Json.csproj @@ -0,0 +1,31 @@ + + + Exe + net8.0 + enable + enable + true + Lambda + + true + + true + + + + + + + + + + + + + + + PreserveNewest + + + + \ No newline at end of file 
diff --git a/examples/Kafka/Json/src/Readme.md b/examples/Kafka/Json/src/Readme.md
new file mode 100644
index 000000000..acdcf7df9
--- /dev/null
+++ b/examples/Kafka/Json/src/Readme.md
@@ -0,0 +1,63 @@
+# AWS Lambda Function Using Top Level Statements
+
+This starter project consists of:
+* Function.cs - file containing the C# top-level statements that define the function to be called for each event and start the Lambda runtime client.
+* aws-lambda-tools-defaults.json - default argument settings for use with Visual Studio and command line deployment tools for AWS
+
+You may also have a test project depending on the options selected.
+
+The generated function handler is a simple method accepting a string argument that returns the uppercase equivalent of the input string. Replace the body of this method, and its parameters, to suit your needs.
+
+## Executable Assembly
+
+.NET Lambda projects that use C# top-level statements, like this project, must be deployed as an executable assembly instead of a class library. To indicate to Lambda that the .NET function is an executable assembly, the
+Lambda function handler value is set to the .NET assembly name. This is different than deploying as a class library, where the function handler string includes the assembly, type, and method name.
+
+To deploy as an executable assembly, the Lambda runtime client must be started to listen for incoming events to process. To start
+the Lambda runtime client, add the `Amazon.Lambda.RuntimeSupport` NuGet package and add the following code at the end
+of the file containing top-level statements to start the runtime.
+
+```csharp
+await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer())
+    .Build()
+    .RunAsync();
+```
+
+Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that
+should be called for each event. If the handler takes in an input event besides `System.IO.Stream`, then
+the JSON serializer must also be passed into the `Create` method.
+
+
+## Here are some steps to follow from Visual Studio:
+
+To deploy your function to AWS Lambda, right-click the project in Solution Explorer and select *Publish to AWS Lambda*.
+
+To view your deployed function open its Function View window by double-clicking the function name shown beneath the AWS Lambda node in the AWS Explorer tree.
+
+To perform testing against your deployed function use the Test Invoke tab in the opened Function View window.
+
+To configure event sources for your deployed function, for example to have your function invoked when an object is created in an Amazon S3 bucket, use the Event Sources tab in the opened Function View window.
+
+To update the runtime configuration of your deployed function use the Configuration tab in the opened Function View window.
+
+To view execution logs of invocations of your function use the Logs tab in the opened Function View window.
+
+## Here are some steps to follow to get started from the command line:
+
+Once you have edited your template and code you can deploy your application using the [Amazon.Lambda.Tools Global Tool](https://github.com/aws/aws-extensions-for-dotnet-cli#aws-lambda-amazonlambdatools) from the command line.
+
+Install Amazon.Lambda.Tools Global Tools if not already installed.
+```
+    dotnet tool install -g Amazon.Lambda.Tools
+```
+
+If already installed check if new version is available.
+``` + dotnet tool update -g Amazon.Lambda.Tools +``` + +Deploy function to AWS Lambda +``` + cd "Json/src/Json" + dotnet lambda deploy-function +``` \ No newline at end of file diff --git a/examples/Kafka/Json/src/aws-lambda-tools-defaults.json b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..ceb391444 --- /dev/null +++ b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json @@ -0,0 +1,17 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." + ], + "profile": "", + "region": "", + "configuration": "Release", + "function-runtime": "dotnet8", + "function-memory-size": 1024, + "function-timeout": 30, + "function-handler": "Json", + "function-name": "dotnet-kafka-json-1024", + "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/kafka-json-event.json b/examples/Kafka/Json/src/kafka-json-event.json new file mode 100644 index 000000000..66dc2ab5a --- /dev/null +++ b/examples/Kafka/Json/src/kafka-json-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "eyJwaG9uZV9udW1iZXJzIjpbeyJudW1iZXIiOiIyNDQtNDA3LTg4NzEiLCJ0eXBlIjoiV09SSyJ9XSwicHJlZmVyZW5jZXMiOnsidGltZXpvbmUiOiJlbmFibGVkIiwibGFuZ3VhZ2UiOiJkaXNhYmxlZCIsIm5vdGlmaWNhdGlvbnMiOiJkYXJrIn0sImZ1bGxfbmFtZSI6IlVzZXIgdXNlcl85NzU0IiwiYWRkcmVzcyI6eyJjb3VudHJ5IjoiVVNBIiwiY2l0eSI6IlNhbiBKb3NlIiwic3RyZWV0IjoiOTM0MCBNYWluIFN0Iiwic3RhdGUiOiJDQSIsInppcF9jb2RlIjoiMzk1OTYifSwidXNlcl9pZCI6InVzZXJfOTc1NCIsImFjY291bnRfc3RhdHVzIjoiU1VTUEVOREVEIiwiYWdlIjo1MywiZW1haWwiOnsiYWRkcmVzcyI6InVzZXJfOTc1NEBpY2xvdWQuY29tIiwidmVyaWZpZWQiOmZhbHNlLCJwcmltYXJ5Ijp0cnVlfX0=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Function.cs b/examples/Kafka/Protobuf/src/Function.cs new file mode 100644 index 000000000..6e4abe13c --- /dev/null +++ b/examples/Kafka/Protobuf/src/Function.cs @@ -0,0 +1,90 @@ +using System.Diagnostics; +using Amazon.Lambda.Core; +using Amazon.Lambda.RuntimeSupport; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Protobuf; +using AWS.Lambda.Powertools.Logging; +using AWS.Lambda.Powertools.Metrics; +using Com.Example; +using TestKafka; + +// string Handler(ConsumerRecords records, ILambdaContext context) +// { +// Metrics.SetNamespace("Proto"); +// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); +// +// foreach (var record in records) +// { +// foreach (var header in record.Headers) +// { +// Console.WriteLine($"{header.Key}: {ToDecimalString(header.Value)}"); +// } +// +// foreach (var header 
in record.Headers.DecodedValues()) +// { +// Console.WriteLine($"{header.Key}: {header.Value}"); +// } +// +// Logger.LogInformation("Record Key: {@key}", record.Key); +// Logger.LogInformation("Record Value: {@record}", record.Value); +// } +// +// return "Processed " + records.Count() + " records"; +// } +// +// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +// new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization +// .Build() +// .RunAsync(); + +var responseStream = new MemoryStream(); +var serializer = new PowertoolsKafkaProtobufSerializer(); + +Task ToUpperAsync(InvocationRequest invocation) +{ + var stopwatch = Stopwatch.StartNew(); + + var records = serializer.Deserialize>(invocation.InputStream); + + foreach (var record in records) + { + foreach (var header in record.Headers) + { + Console.WriteLine($"{header.Key}: {ToDecimalString(header.Value)}"); + } + + foreach (var header in record.Headers.DecodedValues()) + { + Console.WriteLine($"{header.Key}: {header.Value}"); + } + + Console.WriteLine("Record UserId: {0}", record.Value); + } + + stopwatch.Stop(); + + Metrics.PushSingleMetric("ProtoDeserialization-512", + stopwatch.ElapsedMilliseconds, MetricUnit.Milliseconds, "kafka-dotnet", "service", null, + MetricResolution.High); + + Console.WriteLine("Record Count: {0}", records.Count()); + Console.WriteLine("JsonDeserialization: {0:F2}", stopwatch.ElapsedMilliseconds); + + responseStream.SetLength(0); + responseStream.Position = 0; + + return Task.FromResult(new InvocationResponse(responseStream, false)); +} + +static string ToDecimalString(byte[] bytes) +{ + if (bytes == null || bytes.Length == 0) + { + return "[]"; + } + + return "[" + string.Join(", ", bytes) + "]"; +} + +var bootstrap = new LambdaBootstrap(ToUpperAsync); +await bootstrap.RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Key.proto b/examples/Kafka/Protobuf/src/Key.proto new file mode 100644 index 000000000..deedcf5dc --- /dev/null +++ b/examples/Kafka/Protobuf/src/Key.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +option csharp_namespace = "TestKafka"; + +message ProtobufKey { + int32 id = 1; + Color color = 2; +} + +enum Color { + UNKNOWN = 0; + GREEN = 1; + RED = 2; +} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Payload.proto b/examples/Kafka/Protobuf/src/Payload.proto new file mode 100644 index 000000000..9c69b1c41 --- /dev/null +++ b/examples/Kafka/Protobuf/src/Payload.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; + +package com.example; + +enum PhoneType { + HOME = 0; + WORK = 1; + MOBILE = 2; +} + +enum AccountStatus { + ACTIVE = 0; + INACTIVE = 1; + SUSPENDED = 2; +} + +// EmailAddress message +message EmailAddress { + string address = 1; + bool verified = 2; + bool primary = 3; +} + +// Address message +message Address { + string street = 1; + string city = 2; + string state = 3; + string country = 4; + string zip_code = 5; +} + +// PhoneNumber message +message PhoneNumber { + string number = 1; + PhoneType type = 2; +} + +// CustomerProfile message +message CustomerProfile { + string user_id = 1; + string full_name = 2; + EmailAddress email = 3; + int32 age = 4; + Address address = 5; + repeated PhoneNumber phone_numbers = 6; + map preferences = 7; + AccountStatus account_status = 8; +} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Product.proto b/examples/Kafka/Protobuf/src/Product.proto new file mode 100644 index 000000000..1d4c64e90 --- /dev/null +++ 
b/examples/Kafka/Protobuf/src/Product.proto
@@ -0,0 +1,9 @@
+syntax = "proto3";
+
+option csharp_namespace = "TestKafka";
+
+message ProtobufProduct {
+  int32 id = 1;
+  string name = 2;
+  double price = 3;
+}
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/Protobuf.csproj b/examples/Kafka/Protobuf/src/Protobuf.csproj
new file mode 100644
index 000000000..7e383bab7
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/Protobuf.csproj
@@ -0,0 +1,62 @@
+ + + Exe + net8.0 + enable + enable + true + Lambda + + true + + true + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + PreserveNewest + + + + + Client + Public + True + True + obj\Debug/net8.0/ + MSBuild:Compile + PreserveNewest + + + Client + Public + True + True + obj\Debug/net8.0/ + MSBuild:Compile + PreserveNewest + + + Client + Public + True + True + obj\Debug/net8.0/ + MSBuild:Compile + PreserveNewest + + + + + +
\ No newline at end of file
diff --git a/examples/Kafka/Protobuf/src/Readme.md b/examples/Kafka/Protobuf/src/Readme.md
new file mode 100644
index 000000000..f9c897654
--- /dev/null
+++ b/examples/Kafka/Protobuf/src/Readme.md
@@ -0,0 +1,63 @@
+# AWS Lambda Function Using Top Level Statements
+
+This starter project consists of:
+* Function.cs - file containing the C# top-level statements that define the function to be called for each event and start the Lambda runtime client.
+* aws-lambda-tools-defaults.json - default argument settings for use with Visual Studio and command line deployment tools for AWS
+
+You may also have a test project depending on the options selected.
+
+The generated function handler is a simple method accepting a string argument that returns the uppercase equivalent of the input string. Replace the body of this method, and its parameters, to suit your needs.
+
+## Executable Assembly
+
+.NET Lambda projects that use C# top-level statements, like this project, must be deployed as an executable assembly instead of a class library. To indicate to Lambda that the .NET function is an executable assembly, the
+Lambda function handler value is set to the .NET assembly name. This is different than deploying as a class library, where the function handler string includes the assembly, type, and method name.
+
+To deploy as an executable assembly, the Lambda runtime client must be started to listen for incoming events to process. To start
+the Lambda runtime client, add the `Amazon.Lambda.RuntimeSupport` NuGet package and add the following code at the end
+of the file containing top-level statements to start the runtime.
+
+```csharp
+await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer())
+    .Build()
+    .RunAsync();
+```
+
+Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that
+should be called for each event. If the handler takes in an input event besides `System.IO.Stream`, then
+the JSON serializer must also be passed into the `Create` method.
+
+
+## Here are some steps to follow from Visual Studio:
+
+To deploy your function to AWS Lambda, right-click the project in Solution Explorer and select *Publish to AWS Lambda*.
+
+To view your deployed function open its Function View window by double-clicking the function name shown beneath the AWS Lambda node in the AWS Explorer tree.
+
+To perform testing against your deployed function use the Test Invoke tab in the opened Function View window.
+ +To configure event sources for your deployed function, for example to have your function invoked when an object is created in an Amazon S3 bucket, use the Event Sources tab in the opened Function View window. + +To update the runtime configuration of your deployed function use the Configuration tab in the opened Function View window. + +To view execution logs of invocations of your function use the Logs tab in the opened Function View window. + +## Here are some steps to follow to get started from the command line: + +Once you have edited your template and code you can deploy your application using the [Amazon.Lambda.Tools Global Tool](https://github.com/aws/aws-extensions-for-dotnet-cli#aws-lambda-amazonlambdatools) from the command line. + +Install Amazon.Lambda.Tools Global Tools if not already installed. +``` + dotnet tool install -g Amazon.Lambda.Tools +``` + +If already installed check if new version is available. +``` + dotnet tool update -g Amazon.Lambda.Tools +``` + +Deploy function to AWS Lambda +``` + cd "Protobuf/src/Protobuf" + dotnet lambda deploy-function +``` \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..64b6fa0f4 --- /dev/null +++ b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json @@ -0,0 +1,17 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." 
+ ], + "profile": "", + "region": "", + "configuration": "Release", + "function-runtime": "dotnet8", + "function-memory-size": 512, + "function-timeout": 30, + "function-handler": "Protobuf", + "function-name": "dotnet-kafka-proto", + "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" +} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/kafka-protobuf-event.json b/examples/Kafka/Protobuf/src/kafka-protobuf-event.json new file mode 100644 index 000000000..6731ceb40 --- /dev/null +++ b/examples/Kafka/Protobuf/src/kafka-protobuf-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "Cgl1c2VyXzk3NTQSDlVzZXIgdXNlcl85NzU0GhgKFHVzZXJfOTc1NEBpY2xvdWQuY29tGAEgNSooCgw5MzQwIE1haW4gU3QSCFNhbiBKb3NlGgJDQSIDVVNBKgUzOTU5NjIQCgwyNDQtNDA3LTg4NzEQAToUCghsYW5ndWFnZRIIZGlzYWJsZWQ6FQoNbm90aWZpY2F0aW9ucxIEZGFyazoTCgh0aW1lem9uZRIHZW5hYmxlZEAC", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/src/Function.cs b/examples/Kafka/src/Function.cs index c756feef2..9f433816a 100644 --- a/examples/Kafka/src/Function.cs +++ b/examples/Kafka/src/Function.cs @@ -1,17 +1,22 @@ using Amazon.Lambda.Core; using Amazon.Lambda.RuntimeSupport; using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Json; +using AWS.Lambda.Powertools.Kafka.Avro; using AWS.Lambda.Powertools.Kafka.Tests; +using AWS.Lambda.Powertools.Logging; +using AWS.Lambda.Powertools.Metrics; +using TestKafka; -// The function handler that will be called for each Lambda event string Handler(ConsumerRecords records, ILambdaContext context) { + Metrics.SetNamespace("Avro"); + Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); + foreach (var record in records) { - var key = record.Key.id; - var product = record.Value; - - context.Logger.LogInformation($"Processing record with key: {key}, Product: {product.name}, Price: {product.price}"); + Logger.LogInformation("Record Key: {@key}", record.Key); + Logger.LogInformation("Record Value: {@record}", record.Value); } return "Processed " + records.Count() + " records"; @@ -21,4 +26,60 @@ string Handler(ConsumerRecords records, ILambdaContext con await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization .Build() - .RunAsync(); \ No newline at end of file + .RunAsync(); + + +// +// string Handler(ConsumerRecords records, ILambdaContext context) +// { +// Metrics.SetNamespace("Proto"); +// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); +// +// foreach (var record in records) +// { +// Logger.LogInformation("Record Key: {@key}", record.Key); +// Logger.LogInformation("Record Value: {@record}", record.Value); +// } +// +// return "Processed " + records.Count() + " records"; +// } +// +// +// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +// new 
PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization +// .Build() +// .RunAsync(); + +// +// string Handler(ConsumerRecords records, ILambdaContext context) +// { +// Metrics.SetNamespace("Json"); +// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); +// +// foreach (var record in records) +// { +// Logger.LogInformation("Record Key: {@record.Key}", record.Key); +// Logger.LogInformation("Record Value: {@record}", record.Value); +// } +// +// return "Processed " + records.Count() + " records"; +// } +// +// +// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +// new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization +// .Build() +// .RunAsync(); +// +// +// public record JsonKey +// { +// public int Id { get; set; } +// } +// +// public record JsonProduct +// { +// public int Id { get; set; } +// public string Name { get; set; } = string.Empty; +// public decimal Price { get; set; } +// } \ No newline at end of file diff --git a/examples/Kafka/src/Kafka.csproj b/examples/Kafka/src/Kafka.csproj index 16131f8c3..b8eac65cd 100644 --- a/examples/Kafka/src/Kafka.csproj +++ b/examples/Kafka/src/Kafka.csproj @@ -12,16 +12,21 @@ true - + + + all runtime; build; native; contentfiles; analyzers; buildtransitive - + + + + diff --git a/examples/Kafka/src/aws-lambda-tools-defaults.json b/examples/Kafka/src/aws-lambda-tools-defaults.json index 969492087..c54ff19c3 100644 --- a/examples/Kafka/src/aws-lambda-tools-defaults.json +++ b/examples/Kafka/src/aws-lambda-tools-defaults.json @@ -6,12 +6,12 @@ "All the command line options for the Lambda command can be specified in this file." ], "profile": "", - "region": "", + "region": "eu-west-3", "configuration": "Release", "function-runtime": "dotnet8", "function-memory-size": 512, "function-timeout": 30, "function-handler": "Kafka", - "function-name": "Powertools-Kafka-Example", - "function-role": "arn:aws:iam::123456789012:role/lambda-role" + "function-name": "dotnet-kafka-json", + "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" } \ No newline at end of file diff --git a/examples/examples.sln b/examples/examples.sln index 6919941ef..9b3bf61bd 100644 --- a/examples/examples.sln +++ b/examples/examples.sln @@ -115,6 +115,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kafka", "Kafka\src\Kafka.cs EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{0D5246BB-02B3-43EE-9EE3-E0E627170C9B}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Json", "Kafka\Json\src\Json.csproj", "{58EC305E-353A-4996-A541-3CF7FC0EDD80}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Protobuf", "Kafka\Protobuf\src\Protobuf.csproj", "{853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro", "Kafka\Avro\src\Avro.csproj", "{B03F22B2-315C-429B-9CC0-C15BE94CBF77}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -212,6 +218,18 @@ Global {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Debug|Any CPU.Build.0 = Debug|Any CPU {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Release|Any CPU.ActiveCfg = Release|Any CPU {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Release|Any CPU.Build.0 = Release|Any CPU + {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{58EC305E-353A-4996-A541-3CF7FC0EDD80}.Release|Any CPU.ActiveCfg = Release|Any CPU + {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Release|Any CPU.Build.0 = Release|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Release|Any CPU.Build.0 = Release|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution {0CC66DBC-C1DF-4AF6-8EEB-FFED6C578BF4} = {526F1EF7-5A9C-4BFF-ABAE-75992ACD8F78} @@ -261,5 +279,8 @@ Global {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5} = {F3480212-EE7F-46FE-9ED5-24ACAB5B681D} {0D5246BB-02B3-43EE-9EE3-E0E627170C9B} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} {D9FF982F-7CD7-4652-94BB-B387B7FC034F} = {0D5246BB-02B3-43EE-9EE3-E0E627170C9B} + {58EC305E-353A-4996-A541-3CF7FC0EDD80} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} + {B03F22B2-315C-429B-9CC0-C15BE94CBF77} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} EndGlobalSection EndGlobal diff --git a/libraries/AWS.Lambda.Powertools.sln b/libraries/AWS.Lambda.Powertools.sln index cc18e136a..325c683e0 100644 --- a/libraries/AWS.Lambda.Powertools.sln +++ b/libraries/AWS.Lambda.Powertools.sln @@ -117,6 +117,12 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Tests", "tests\AWS.Lambda.Powertools.Kafka.Tests\AWS.Lambda.Powertools.Kafka.Tests.csproj", "{FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Avro", "src\AWS.Lambda.Powertools.Kafka.Avro\AWS.Lambda.Powertools.Kafka.Avro.csproj", "{25F0929B-2E04-4ED6-A0ED-5379A0A755B0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Json", "src\AWS.Lambda.Powertools.Kafka.Json\AWS.Lambda.Powertools.Kafka.Json.csproj", "{9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Protobuf", "src\AWS.Lambda.Powertools.Kafka.Protobuf\AWS.Lambda.Powertools.Kafka.Protobuf.csproj", "{B640DB80-C982-407B-A2EC-CD29AC77DDB8}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -646,6 +652,42 @@ Global {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x64.Build.0 = Release|Any CPU {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.ActiveCfg = Release|Any CPU {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.Build.0 = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x64.ActiveCfg = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x64.Build.0 = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x86.ActiveCfg = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x86.Build.0 = Debug|Any CPU + 
{25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|Any CPU.Build.0 = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x64.ActiveCfg = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x64.Build.0 = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x86.ActiveCfg = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x86.Build.0 = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x64.ActiveCfg = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x64.Build.0 = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x86.ActiveCfg = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x86.Build.0 = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|Any CPU.Build.0 = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x64.ActiveCfg = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x64.Build.0 = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x86.ActiveCfg = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x86.Build.0 = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x64.ActiveCfg = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x64.Build.0 = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x86.ActiveCfg = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x86.Build.0 = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|Any CPU.Build.0 = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x64.ActiveCfg = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x64.Build.0 = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x86.ActiveCfg = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution @@ -701,5 +743,8 @@ Global {8A22F22E-D10A-4897-A89A-DC76C267F6BB} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645} = {1CFF5568-8486-475F-81F6-06105C437528} + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {B640DB80-C982-407B-A2EC-CD29AC77DDB8} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} EndGlobalSection EndGlobal diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/AWS.Lambda.Powertools.Kafka.Avro.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/AWS.Lambda.Powertools.Kafka.Avro.csproj new file mode 100644 index 000000000..255e852a6 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/AWS.Lambda.Powertools.Kafka.Avro.csproj @@ -0,0 +1,21 @@ + + + + + + AWS.Lambda.Powertools.Kafka.Avro + Powertools for AWS Lambda (.NET) - Kafka Avro consumer package. 
+ AWS.Lambda.Powertools.Kafka.Avro + AWS.Lambda.Powertools.Kafka.Avro + net8.0 + false + enable + enable + + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs similarity index 99% rename from libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs rename to libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs index 78c66c833..027841f7b 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs @@ -7,7 +7,7 @@ using Avro.IO; using Avro.Specific; -namespace AWS.Lambda.Powertools.Kafka; +namespace AWS.Lambda.Powertools.Kafka.Avro; /// /// A Lambda serializer for Kafka events that handles Avro-formatted data. diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md new file mode 100644 index 000000000..16da5ccb4 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md @@ -0,0 +1 @@ +# Powertools for AWS Lambda (.NET) - Kafka \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/AWS.Lambda.Powertools.Kafka.Json.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/AWS.Lambda.Powertools.Kafka.Json.csproj new file mode 100644 index 000000000..db093159d --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/AWS.Lambda.Powertools.Kafka.Json.csproj @@ -0,0 +1,19 @@ + + + + + AWS.Lambda.Powertools.Kafka.Json + Powertools for AWS Lambda (.NET) - Kafka Json consumer package. + AWS.Lambda.Powertools.Kafka.Json + AWS.Lambda.Powertools.Kafka.Json + net8.0 + false + enable + enable + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs similarity index 98% rename from libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs rename to libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs index 307598b17..73b55799c 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs @@ -2,9 +2,8 @@ using System.Text; using System.Text.Json; using System.Text.Json.Serialization; -using System.Text.Json.Serialization.Metadata; -namespace AWS.Lambda.Powertools.Kafka; +namespace AWS.Lambda.Powertools.Kafka.Json; /// /// A Lambda serializer for Kafka events that handles JSON-formatted data. 
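
The renames above split each format-specific serializer out of the core AWS.Lambda.Powertools.Kafka package into its own package and namespace (AWS.Lambda.Powertools.Kafka.Avro, .Json, .Protobuf). For consumers the API is unchanged; only the package reference and namespace import move. A sketch of the migration for a JSON consumer:

```csharp
// Before the split, the serializer lived in the core namespace:
// using AWS.Lambda.Powertools.Kafka;

// After the split, reference the AWS.Lambda.Powertools.Kafka.Json package
// and import its namespace; the type name and usage stay the same.
using AWS.Lambda.Powertools.Kafka.Json;

var serializer = new PowertoolsKafkaJsonSerializer();
```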
diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md new file mode 100644 index 000000000..16da5ccb4 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md @@ -0,0 +1 @@ +# Powertools for AWS Lambda (.NET) - Kafka \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/AWS.Lambda.Powertools.Kafka.Protobuf.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/AWS.Lambda.Powertools.Kafka.Protobuf.csproj new file mode 100644 index 000000000..ab1c3844f --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/AWS.Lambda.Powertools.Kafka.Protobuf.csproj @@ -0,0 +1,24 @@ + + + + + + AWS.Lambda.Powertools.Kafka.Protobuf + Powertools for AWS Lambda (.NET) - Kafka Protobuf consumer package. + AWS.Lambda.Powertools.Kafka.Protobuf + AWS.Lambda.Powertools.Kafka.Protobuf + net8.0 + false + enable + enable + + + + + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs similarity index 99% rename from libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs rename to libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs index 042eeec52..2787b9bb4 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs @@ -5,7 +5,7 @@ using System.Text.Json.Serialization; using Google.Protobuf; -namespace AWS.Lambda.Powertools.Kafka; +namespace AWS.Lambda.Powertools.Kafka.Protobuf; /// /// A Lambda serializer for Kafka events that handles Protobuf-formatted data. diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md new file mode 100644 index 000000000..16da5ccb4 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md @@ -0,0 +1 @@ +# Powertools for AWS Lambda (.NET) - Kafka \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj index dadef4c35..8461809d4 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj @@ -14,8 +14,6 @@ - - diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs index ac8cd80dc..20b3e1725 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs @@ -56,5 +56,5 @@ public class ConsumerRecord /// /// Gets the headers associated with the record. /// - public Dictionary Headers { get; internal set; } = null!; + public Dictionary Headers { get; internal set; } = null!; } \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs new file mode 100644 index 000000000..8436b3145 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs @@ -0,0 +1,37 @@ +using System.Text; + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// Extension methods for Kafka headers in ConsumerRecord. 
+/// +public static class HeaderExtensions +{ + /// + /// Gets the decoded value of a Kafka header from the ConsumerRecord's Headers dictionary. + /// + /// The header key-value pair from ConsumerRecord.Headers + /// The decoded string value. + public static Dictionary DecodedValues(this Dictionary headers) + { + if (headers == null) + { + return new Dictionary(); + } + + return headers.ToDictionary( + pair => pair.Key, + pair => pair.Value.DecodedValue() + ); + } + + public static string DecodedValue(this byte[]? headerBytes) + { + if (headerBytes == null || headerBytes.Length == 0) + { + return string.Empty; + } + + return Encoding.UTF8.GetString(headerBytes); + } +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index d1ec9591f..dbad3794a 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -21,7 +21,7 @@ public abstract class PowertoolsKafkaSerializerBase : ILambdaSerializer /// JSON serializer options used for deserialization. /// protected readonly JsonSerializerOptions JsonOptions; - + /// /// JSON serializer context used for AOT-compatible serialization/deserialization. /// @@ -31,13 +31,13 @@ public abstract class PowertoolsKafkaSerializerBase : ILambdaSerializer /// Initializes a new instance of the class /// with default JSON serialization options. /// - protected PowertoolsKafkaSerializerBase() : this(new JsonSerializerOptions + protected PowertoolsKafkaSerializerBase() : this(new JsonSerializerOptions { PropertyNameCaseInsensitive = true }, null) { } - + /// /// Initializes a new instance of the class /// with custom JSON serialization options. @@ -52,7 +52,8 @@ protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions) : thi /// with a JSON serializer context for AOT-compatible serialization/deserialization. /// /// The JSON serializer context for AOT compatibility. 
- protected PowertoolsKafkaSerializerBase(JsonSerializerContext serializerContext) : this(serializerContext.Options, serializerContext) + protected PowertoolsKafkaSerializerBase(JsonSerializerContext serializerContext) : this(serializerContext.Options, + serializerContext) { } @@ -237,32 +238,34 @@ private T DeserializeConsumerRecords(string json) } // Process headers - if (recordElement.TryGetProperty("headers", out var headersElement) && + + + if (recordElement.TryGetProperty("headers", out var headersElement) && headersElement.ValueKind == JsonValueKind.Array) { - var decodedHeaders = new Dictionary(); - + var headers = new Dictionary(); + foreach (var headerObj in headersElement.EnumerateArray()) { foreach (var header in headerObj.EnumerateObject()) { var headerKey = header.Name; - if (header.Value.ValueKind != JsonValueKind.Array) continue; - var headerBytes = new byte[header.Value.GetArrayLength()]; - var i = 0; - foreach (var byteVal in header.Value.EnumerateArray()) + if (header.Value.ValueKind == JsonValueKind.Array) { - headerBytes[i++] = (byte)byteVal.GetInt32(); + var headerBytes = new byte[header.Value.GetArrayLength()]; + var i = 0; + foreach (var byteVal in header.Value.EnumerateArray()) + { + headerBytes[i++] = (byte)byteVal.GetInt32(); + } + headers[headerKey] = headerBytes; } - - var headerValue = Encoding.UTF8.GetString(headerBytes); - decodedHeaders[headerKey] = headerValue; } } - + var headersProperty = recordType.GetProperty("Headers", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); - headersProperty?.SetValue(record, decodedHeaders); + headersProperty?.SetValue(record, headers); } // Add to records list @@ -358,7 +361,10 @@ private T DeserializeConsumerRecords(string json) /// The property name within the JsonElement. [RequiresDynamicCode("Dynamically accesses properties which might be trimmed.")] [RequiresUnreferencedCode("Dynamically accesses properties which might be trimmed.")] - private void SetProperty([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] Type type, object instance, string propertyName, + private void SetProperty( + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.NonPublicProperties)] + Type type, object instance, string propertyName, JsonElement element, string jsonPropertyName) { if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || @@ -387,44 +393,79 @@ private void SetProperty([DynamicallyAccessedMembers(DynamicallyAccessedMemberTy /// The type of object to serialize. /// The object to serialize. /// The stream to write the serialized data to. - [RequiresDynamicCode("JSON serialization might require types that cannot be statically analyzed and might need runtime code generation.")] + [RequiresDynamicCode( + "JSON serialization might require types that cannot be statically analyzed and might need runtime code generation.")] [RequiresUnreferencedCode("JSON serialization might require types that cannot be statically analyzed.")] public void Serialize(T response, Stream responseStream) { + if (response == null) + { + // According to ILambdaSerializer contract, if response is null, an empty stream or "null" should be written. + // AWS's default System.Text.Json serializer writes "null". + // Let's ensure the stream is written to, as HandlerWrapper might expect some output. 
@@ -358,7 +361,10 @@ private T DeserializeConsumerRecords<T>(string json)
     /// <param name="jsonPropertyName">The property name within the JsonElement.</param>
     [RequiresDynamicCode("Dynamically accesses properties which might be trimmed.")]
     [RequiresUnreferencedCode("Dynamically accesses properties which might be trimmed.")]
-    private void SetProperty([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.NonPublicProperties)] Type type, object instance, string propertyName,
+    private void SetProperty(
+        [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties |
+                                    DynamicallyAccessedMemberTypes.NonPublicProperties)]
+        Type type, object instance, string propertyName,
         JsonElement element, string jsonPropertyName)
     {
         if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) ||
@@ -387,44 +393,79 @@ private void SetProperty([DynamicallyAccessedMembers(DynamicallyAccessedMemberTy
     /// <typeparam name="T">The type of object to serialize.</typeparam>
     /// <param name="response">The object to serialize.</param>
     /// <param name="responseStream">The stream to write the serialized data to.</param>
-    [RequiresDynamicCode("JSON serialization might require types that cannot be statically analyzed and might need runtime code generation.")]
+    [RequiresDynamicCode(
+        "JSON serialization might require types that cannot be statically analyzed and might need runtime code generation.")]
     [RequiresUnreferencedCode("JSON serialization might require types that cannot be statically analyzed.")]
     public void Serialize<T>(T response, Stream responseStream)
     {
+        if (response == null)
+        {
+            // According to ILambdaSerializer contract, if response is null, an empty stream or "null" should be written.
+            // AWS's default System.Text.Json serializer writes "null".
+            // Let's ensure the stream is written to, as HandlerWrapper might expect some output.
+            if (responseStream.CanWrite)
+            {
+                var nullBytes = Encoding.UTF8.GetBytes("null");
+                responseStream.Write(nullBytes, 0, nullBytes.Length);
+            }
+            return;
+        }
+
         if (SerializerContext != null)
         {
-            var typeInfo = GetJsonTypeInfo<T>();
+            // Attempt to get TypeInfo for the actual type of the response.
+            // This is important if T is object or an interface.
+            JsonTypeInfo? typeInfo = SerializerContext.GetTypeInfo(response.GetType());
+
             if (typeInfo != null)
             {
+                // JsonSerializer.Serialize to a stream does not close it by default.
                 JsonSerializer.Serialize(responseStream, response, typeInfo);
                 return;
             }
-
-            // Try to find by type if generic match didn't work
-            var typeInfo2 = SerializerContext.GetTypeInfo(typeof(T));
-            if (typeInfo2 != null)
+            // Fallback: if specific type info not found, try with typeof(T) from context
+            // This might be useful if T is concrete and response.GetType() is the same.
+            typeInfo = GetJsonTypeInfoFromContext(typeof(T));
+            if (typeInfo != null)
             {
-                JsonSerializer.Serialize(responseStream, response, typeInfo2);
+                // Need to cast typeInfo to non-generic JsonTypeInfo for the Serialize overload
+                JsonSerializer.Serialize(responseStream, response, typeInfo);
                 return;
             }
         }
 
-        // Fallback with warning
-        using var writer = new StreamWriter(responseStream);
-        #pragma warning disable IL2026, IL3050
-        writer.Write(JsonSerializer.Serialize(response, JsonOptions));
-        #pragma warning restore IL2026, IL3050
+        // Fallback to default JsonSerializer with options, ensuring the stream is left open.
+        // StreamWriter by default uses UTF-8 encoding. We specify it explicitly for clarity.
+        // The buffer size -1 can be used for default, or a specific size like 1024.
+        // Crucially, leaveOpen: true prevents the StreamWriter from disposing responseStream.
+        using (var writer = new StreamWriter(responseStream, encoding: Encoding.UTF8, bufferSize: 1024, leaveOpen: true))
+        {
+            string jsonResponse = JsonSerializer.Serialize(response, JsonOptions);
+            writer.Write(jsonResponse);
+            writer.Flush(); // Ensure all data is written to the stream before writer is disposed.
+        }
     }
 
-    /// <summary>
-    /// Tries to get JsonTypeInfo for type T from the SerializerContext.
-    /// </summary>
-    private JsonTypeInfo<T>? GetJsonTypeInfo<T>()
+    // Helper to get non-generic JsonTypeInfo from context based on a Type argument
+    private JsonTypeInfo? GetJsonTypeInfoFromContext(Type type)
+    {
+        if (SerializerContext == null)
+            return null;
+
+        return SerializerContext.GetTypeInfo(type);
+    }
+
+    // Adjusted GetJsonTypeInfo to return non-generic JsonTypeInfo for consistency,
+    // or keep it if it's used elsewhere for JsonTypeInfo<T> specifically.
+    // For Serialize, GetJsonTypeInfoFromContext(typeof(T)) is more direct.
+    private JsonTypeInfo<T>? GetJsonTypeInfo<T>() // This is the original generic helper
     {
         if (SerializerContext == null)
             return null;
-        
+
         // Use reflection to find the right JsonTypeInfo property
+        // This is specific to how a user might structure their JsonSerializerContext.
+        // A more robust way for general types is SerializerContext.GetTypeInfo(typeof(T)).
         foreach (var prop in SerializerContext.GetType().GetProperties())
         {
             if (prop.PropertyType == typeof(JsonTypeInfo<T>))
@@ -432,7 +473,6 @@ public void Serialize<T>(T response, Stream responseStream)
                 return prop.GetValue(SerializerContext) as JsonTypeInfo<T>;
             }
         }
-
         return null;
     }
 
@@ -444,8 +484,11 @@ public void Serialize<T>(T response, Stream responseStream)
     /// <returns>The deserialized object.</returns>
[RequiresDynamicCode("Deserializing values might require runtime code generation depending on format.")] [RequiresUnreferencedCode("Deserializing values might require types that cannot be statically analyzed.")] - protected abstract object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType); - + protected abstract object DeserializeValue(string base64Value, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type valueType); + /// /// Deserializes complex key types using the appropriate format. /// @@ -454,5 +497,8 @@ public void Serialize(T response, Stream responseStream) /// The deserialized key object. [RequiresDynamicCode("Deserializing complex keys might require runtime code generation depending on format.")] [RequiresUnreferencedCode("Deserializing complex keys might require types that cannot be statically analyzed.")] - protected abstract object? DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type keyType); + protected abstract object? DeserializeComplexKey(byte[] keyBytes, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type keyType); } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj index 5f7374147..28e567c01 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj @@ -36,7 +36,9 @@ - + + + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs index 507ca2edb..61ba41e03 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -3,6 +3,7 @@ using Amazon.Lambda.TestUtilities; using Avro.IO; using Avro.Specific; +using AWS.Lambda.Powertools.Kafka.Avro; namespace AWS.Lambda.Powertools.Kafka.Tests; @@ -47,7 +48,7 @@ public async Task Handler_ProcessesKafkaEvent_Successfully() // Verify decoded key and headers Assert.Equal(42, firstRecord.Key); - Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); var secondRecord = records[1]; Assert.Equal(43, secondRecord.Key); @@ -180,9 +181,9 @@ public async Task Handler_ProcessesKafkaEvent_WithAvroKey_Successfully() Assert.Equal(999.99, firstRecord.Value.price); Assert.Equal(1, firstRecord.Key.id); Assert.Equal(Color.GREEN, firstRecord.Key.color); - + // Verify headers - Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); var secondRecord = records[1]; Assert.Equal(2, secondRecord.Key.id); diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs index 960928321..3a4ef6139 100644 --- 
a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -3,6 +3,7 @@ using Avro.Generic; using Avro.IO; using Avro.Specific; +using AWS.Lambda.Powertools.Kafka.Avro; namespace AWS.Lambda.Powertools.Kafka.Tests; diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs index b7d7be67f..b314e9c51 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -1,4 +1,5 @@ using System.Text; +using AWS.Lambda.Powertools.Kafka.Json; namespace AWS.Lambda.Powertools.Kafka.Tests.Json; @@ -90,4 +91,6 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() Assert.Equal("product5", firstRecord.Value.Name); Assert.Equal(12345, firstRecord.Value.Id); } + + } diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs index 750d3382e..9470cb6e2 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs @@ -1,6 +1,7 @@ using System.Text; using Amazon.Lambda.Core; using Amazon.Lambda.TestUtilities; +using AWS.Lambda.Powertools.Kafka.Protobuf; using Google.Protobuf; using TestKafka; @@ -47,7 +48,7 @@ public async Task Handler_ProcessesKafkaEvent_Successfully() // Verify decoded key and headers Assert.Equal(42, firstRecord.Key); - Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); var secondRecord = records[1]; Assert.Equal(43, secondRecord.Key); @@ -95,7 +96,7 @@ public async Task Handler_ProcessesKafkaEvent_WithProtobufKey_Successfully() Assert.Equal(TestKafka.Color.Green, firstRecord.Key.Color); // Verify headers - Assert.Equal("headerValue", firstRecord.Headers["headerKey"]); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); var secondRecord = records[1]; Assert.Equal(2, secondRecord.Key.Id); diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs index eba974d71..0a0a2386f 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs @@ -1,4 +1,5 @@ using System.Text; +using AWS.Lambda.Powertools.Kafka.Protobuf; using TestKafka; namespace AWS.Lambda.Powertools.Kafka.Tests.Protobuf; From ac8f29a1d4bd6987136e0f83803df841d42dd699 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Tue, 17 Jun 2025 19:20:13 +0100 Subject: [PATCH 13/35] enhance primitive deserialization and add tests --- examples/Kafka/Protobuf/src/Function.cs | 1 + .../PowertoolsKafkaAvroSerializer.cs | 3 +- .../PowertoolsKafkaJsonSerializer.cs | 3 +- .../PowertoolsKafkaProtobufSerializer.cs | 3 +- .../HeaderExtensions.cs | 6 +- .../PowertoolsKafkaSerializerBase.cs | 174 ++++++++++-------- .../Avro/HandlerTests.cs | 118 
++++++++++++ .../PowertoolsKafkaAvroSerializerTests.cs | 36 ++++ .../PowertoolsKafkaJsonSerializerTests.cs | 36 +++- .../PowertoolsLambdaKafkaSerializerTests.cs | 12 -- .../PowertoolsKafkaProtobufSerializerTests.cs | 36 ++++ 11 files changed, 339 insertions(+), 89 deletions(-) delete mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs diff --git a/examples/Kafka/Protobuf/src/Function.cs b/examples/Kafka/Protobuf/src/Function.cs index 6e4abe13c..bfb23894e 100644 --- a/examples/Kafka/Protobuf/src/Function.cs +++ b/examples/Kafka/Protobuf/src/Function.cs @@ -48,6 +48,7 @@ Task ToUpperAsync(InvocationRequest invocation) foreach (var record in records) { + Console.WriteLine("Record Key: {0}", record.Key); foreach (var header in record.Headers) { Console.WriteLine($"{header.Key}: {ToDecimalString(header.Value)}"); diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs index 027841f7b..a5cdb573d 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs @@ -92,7 +92,8 @@ private Schema GetAvroSchema([DynamicallyAccessedMembers(DynamicallyAccessedMemb /// The deserialized object. [RequiresDynamicCode("Avro deserialization requires reflection which may be incompatible with AOT.")] [RequiresUnreferencedCode("Avro deserialization requires reflection which may be incompatible with trimming.")] - protected override object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) + protected override object DeserializeComplexValue(string base64Value, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) { var schema = GetAvroSchema(valueType); return DeserializeAvroValue(base64Value, schema); diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs index 73b55799c..18c779fbb 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs @@ -63,7 +63,8 @@ public PowertoolsKafkaJsonSerializer(JsonSerializerContext serializerContext) : /// The deserialized object. 
[RequiresDynamicCode("JSON deserialization might require runtime code generation.")] [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] - protected override object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) + protected override object DeserializeComplexValue(string base64Value, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) { var jsonBytes = Convert.FromBase64String(base64Value); var jsonString = Encoding.UTF8.GetString(jsonBytes); diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs index 2787b9bb4..bce8830a7 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs @@ -66,7 +66,8 @@ public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext /// The deserialized object. [RequiresDynamicCode("Protobuf deserialization might require runtime code generation.")] [RequiresUnreferencedCode("Protobuf deserialization might require types that cannot be statically analyzed.")] - protected override object DeserializeValue(string base64Value, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type valueType) + protected override object DeserializeComplexValue(string base64Value, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) { var protobufBytes = Convert.FromBase64String(base64Value); return DeserializeProtobufValue(protobufBytes, valueType); diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs index 8436b3145..48531fc49 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs @@ -10,7 +10,7 @@ public static class HeaderExtensions /// /// Gets the decoded value of a Kafka header from the ConsumerRecord's Headers dictionary. /// - /// The header key-value pair from ConsumerRecord.Headers + /// The header key-value pair from ConsumerRecord.Headers /// The decoded string value. public static Dictionary DecodedValues(this Dictionary headers) { @@ -25,6 +25,10 @@ public static Dictionary DecodedValues(this Dictionary + /// Decodes a byte array from a Kafka header into a UTF-8 string. + /// Returns an empty string if the byte array is null or empty. + /// public static string DecodedValue(this byte[]? headerBytes) { if (headerBytes == null || headerBytes.Length == 0) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index dbad3794a..fb1d595dc 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -76,8 +76,6 @@ protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions, JsonS /// The type to deserialize to. For Kafka events, typically ConsumerRecords<TKey,TValue>. /// The stream containing the serialized Lambda event. /// The deserialized object of type T. 
diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
index dbad3794a..fb1d595dc 100644
--- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
@@ -76,8 +76,6 @@ protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions, JsonS
     /// <typeparam name="T">The type to deserialize to. For Kafka events, typically ConsumerRecords&lt;TKey,TValue&gt;.</typeparam>
     /// <param name="requestStream">The stream containing the serialized Lambda event.</param>
     /// <returns>The deserialized object of type T.</returns>
-    [RequiresUnreferencedCode("Kafka serializer uses reflection and may be incompatible with trimming. Use an overload that accepts a JsonTypeInfo or JsonSerializerContext for AOT compatibility.")]
-    [RequiresDynamicCode("Kafka serializer dynamically creates generic types and may be incompatible with NativeAOT. Use an overload that accepts a JsonTypeInfo or JsonSerializerContext for AOT compatibility.")]
     public T Deserialize<T>(Stream requestStream)
     {
         if (SerializerContext != null && typeof(T) != typeof(ConsumerRecords<,>))
@@ -294,57 +292,9 @@ private T DeserializeConsumerRecords<T>(string json)
         if (keyBytes == null || keyBytes.Length == 0)
             return null;
 
-        if (keyType == typeof(int))
+        if (IsPrimitiveOrSimpleType(keyType))
         {
-            // First try to interpret as a string representation and parse
-            var stringValue = Encoding.UTF8.GetString(keyBytes);
-            if (int.TryParse(stringValue, out var parsedValue))
-                return parsedValue;
-
-            return keyBytes.Length switch
-            {
-                // Fall back to binary representation if parsing fails
-                >= 4 => BitConverter.ToInt32(keyBytes, 0),
-                1 => keyBytes[0],
-                _ => 0
-            };
-        }
-
-        if (keyType == typeof(long))
-        {
-            // Try string parsing first
-            var stringValue = Encoding.UTF8.GetString(keyBytes);
-            if (long.TryParse(stringValue, out var parsedValue))
-                return parsedValue;
-
-            return keyBytes.Length switch
-            {
-                // Fall back to binary
-                >= 8 => BitConverter.ToInt64(keyBytes, 0),
-                >= 4 => BitConverter.ToInt32(keyBytes, 0),
-                _ => 0L
-            };
-        }
-
-        if (keyType == typeof(string))
-        {
-            // String conversion is safe regardless of length
-            return Encoding.UTF8.GetString(keyBytes);
-        }
-
-        if (keyType == typeof(double))
-        {
-            return keyBytes.Length >= 8 ? BitConverter.ToDouble(keyBytes, 0) : 0.0;
-        }
-
-        if (keyType == typeof(bool) && keyBytes.Length >= 1)
-        {
-            return keyBytes[0] != 0;
-        }
-
-        if (keyType == typeof(Guid) && keyBytes.Length >= 16)
-        {
-            return new Guid(keyBytes);
+            return DeserializePrimitiveValue(keyBytes, keyType);
         }
 
         // For complex types, try format-specific deserialization
@@ -393,9 +343,6 @@ private void SetProperty(
     /// <typeparam name="T">The type of object to serialize.</typeparam>
     /// <param name="response">The object to serialize.</param>
     /// <param name="responseStream">The stream to write the serialized data to.</param>
-    [RequiresDynamicCode(
-        "JSON serialization might require types that cannot be statically analyzed and might need runtime code generation.")]
-    [RequiresUnreferencedCode("JSON serialization might require types that cannot be statically analyzed.")]
     public void Serialize<T>(T response, Stream responseStream)
     {
         if (response == null)
@@ -415,7 +362,7 @@ public void Serialize<T>(T response, Stream responseStream)
         {
             // Attempt to get TypeInfo for the actual type of the response.
             // This is important if T is object or an interface.
-            JsonTypeInfo? typeInfo = SerializerContext.GetTypeInfo(response.GetType());
+            var typeInfo = SerializerContext.GetTypeInfo(response.GetType());
@@ -438,12 +385,12 @@ public void Serialize<T>(T response, Stream responseStream)
         // StreamWriter by default uses UTF-8 encoding. We specify it explicitly for clarity.
         // The buffer size -1 can be used for default, or a specific size like 1024.
         // Crucially, leaveOpen: true prevents the StreamWriter from disposing responseStream.
-        using (var writer = new StreamWriter(responseStream, encoding: Encoding.UTF8, bufferSize: 1024, leaveOpen: true))
-        {
-            string jsonResponse = JsonSerializer.Serialize(response, JsonOptions);
-            writer.Write(jsonResponse);
-            writer.Flush(); // Ensure all data is written to the stream before writer is disposed.
-        }
+        using var writer = new StreamWriter(responseStream, encoding: Encoding.UTF8, bufferSize: 1024, leaveOpen: true);
+#pragma warning disable IL2026, IL3050
+        var jsonResponse = JsonSerializer.Serialize(response, JsonOptions);
+#pragma warning restore IL2026, IL3050
+        writer.Write(jsonResponse);
+        writer.Flush(); // Ensure all data is written to the stream before writer is disposed.
     }
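Editor's note: `leaveOpen: true` is the behavioral point of this hunk; with the default `StreamWriter` constructor, disposing the writer also disposes the underlying stream, which the Lambda runtime still needs. A standalone BCL-only sketch of the difference:

```csharp
using System.Text;

var stream = new MemoryStream();

// leaveOpen: true -> disposing the writer flushes but does not dispose the stream.
using (var writer = new StreamWriter(stream, Encoding.UTF8, bufferSize: 1024, leaveOpen: true))
{
    writer.Write("{\"ok\":true}");
}

// Still usable; with leaveOpen: false this would throw ObjectDisposedException.
stream.Position = 0;
Console.WriteLine(new StreamReader(stream).ReadToEnd()); // {"ok":true}
```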
 
     // Helper to get non-generic JsonTypeInfo from context based on a Type argument
@@ -484,10 +431,26 @@ public void Serialize<T>(T response, Stream responseStream)
     /// <returns>The deserialized object.</returns>
     [RequiresDynamicCode("Deserializing values might require runtime code generation depending on format.")]
     [RequiresUnreferencedCode("Deserializing values might require types that cannot be statically analyzed.")]
-    protected abstract object DeserializeValue(string base64Value,
-        [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties |
-                                    DynamicallyAccessedMemberTypes.PublicFields)]
-        Type valueType);
+    protected virtual object DeserializeValue(string base64Value,
+        [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType)
+    {
+        // Handle primitive types first
+        if (IsPrimitiveOrSimpleType(valueType))
+        {
+            var bytes = Convert.FromBase64String(base64Value);
+            return DeserializePrimitiveValue(bytes, valueType);
+        }
+
+        // For complex types, use format-specific deserialization
+        return DeserializeComplexValue(base64Value, valueType);
+    }
+
+    /// <summary>
+    /// Deserializes complex value types using the appropriate format.
+    /// </summary>
+    protected abstract object DeserializeComplexValue(string base64Value,
+        [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType);
+
 
     /// <summary>
     /// Deserializes complex key types using the appropriate format.
     /// </summary>
     /// <param name="keyBytes">The key bytes to deserialize.</param>
     /// <param name="keyType">The type to deserialize to.</param>
     /// <returns>The deserialized key object.</returns>
-    [RequiresDynamicCode("Deserializing complex keys might require runtime code generation depending on format.")]
-    [RequiresUnreferencedCode("Deserializing complex keys might require types that cannot be statically analyzed.")]
-    protected abstract object? DeserializeComplexKey(byte[] keyBytes,
-        [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties |
-                                    DynamicallyAccessedMemberTypes.PublicFields)]
-        Type keyType);
+    protected abstract object? DeserializeComplexKey(byte[] keyBytes, Type keyType);
+
+    /// <summary>
+    /// Checks if the specified type is a primitive or simple type.
+    /// </summary>
+    private bool IsPrimitiveOrSimpleType(Type type)
+    {
+        return type.IsPrimitive ||
+               type == typeof(string) ||
+               type == typeof(decimal) ||
+               type == typeof(DateTime) ||
+               type == typeof(Guid);
+    }
+
+    /// <summary>
+    /// Deserializes a primitive value from bytes based on the specified type.
+    /// Handles common primitive types like int, long, double, bool, string, and Guid.
+    /// If the bytes are empty or null, returns null.
+    /// If the type is not recognized, attempts to convert from string.
+    /// </summary>
+    private object DeserializePrimitiveValue(byte[] bytes, Type valueType)
+    {
+        if (bytes == null || bytes.Length == 0)
+            return null!;
+
+        if (valueType == typeof(string))
+        {
+            return Encoding.UTF8.GetString(bytes);
+        }
+        else if (valueType == typeof(int))
+        {
+            // First try to parse as string
+            var stringValue = Encoding.UTF8.GetString(bytes);
+            if (int.TryParse(stringValue, out var parsedValue))
+                return parsedValue;
+
+            // Fall back to binary
+            return bytes.Length switch
+            {
+                >= 4 => BitConverter.ToInt32(bytes, 0),
+                1 => bytes[0],
+                _ => 0
+            };
+        }
+        else if (valueType == typeof(long))
+        {
+            var stringValue = Encoding.UTF8.GetString(bytes);
+            if (long.TryParse(stringValue, out var parsedValue))
+                return parsedValue;
+
+            return bytes.Length switch
+            {
+                >= 8 => BitConverter.ToInt64(bytes, 0),
+                >= 4 => BitConverter.ToInt32(bytes, 0),
+                _ => 0L
+            };
+        }
+        else if (valueType == typeof(double))
+        {
+            return bytes.Length >= 8 ? BitConverter.ToDouble(bytes, 0) : 0.0;
+        }
+        else if (valueType == typeof(bool) && bytes.Length >= 1)
+        {
+            return bytes[0] != 0;
+        }
+        else if (valueType == typeof(Guid) && bytes.Length >= 16)
+        {
+            return new Guid(bytes);
+        }
+
+        // For any other type, try to parse as string
+        return Convert.ChangeType(Encoding.UTF8.GetString(bytes), valueType);
+    }
+}
\ No newline at end of file
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs
index 61ba41e03..d56ea34a2 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs
@@ -56,6 +56,54 @@ public async Task Handler_ProcessesKafkaEvent_Successfully()
         var thirdRecord = records[2];
         Assert.Equal(0, thirdRecord.Key);
     }
+
+    [Fact]
+    public async Task Handler_ProcessesKafkaEvent_Primitive_Successfully()
+    {
+        // Arrange
+        var kafkaJson = GetSimpleMockKafkaEvent();
+        var mockContext = new TestLambdaContext();
+        var serializer = new PowertoolsKafkaAvroSerializer();
+
+        // Convert JSON string to stream for deserialization
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson));
+
+        // Act - Deserialize and process
+        var kafkaEvent = serializer.Deserialize<ConsumerRecords<int, string>>(stream);
+        var response = await HandlerSimple(kafkaEvent, mockContext);
+
+        // Assert
+        Assert.Equal("Successfully processed Kafka events", response);
+
+        // Verify event structure
+        Assert.Equal("aws:kafka", kafkaEvent.EventSource);
+        Assert.Single(kafkaEvent.Records);
+
+        // Verify record content
+        var records = kafkaEvent.Records["mytopic-0"];
+        Assert.Equal(3, records.Count);
+
+        // Verify first record
+        var firstRecord = records[0];
+        Assert.Equal("mytopic", firstRecord.Topic);
+        Assert.Equal(0, firstRecord.Partition);
+        Assert.Equal(15, firstRecord.Offset);
+
+        // Verify deserialized value
+        Assert.Equal("Laptop", firstRecord.Value);
+
+        // Verify decoded key and headers
+        Assert.Equal(42, firstRecord.Key);
+        Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue());
+
+        var secondRecord = records[1];
+        Assert.Equal(43, secondRecord.Key);
+        Assert.Equal("Smartphone", secondRecord.Value);
+
+        var thirdRecord = records[2];
+        Assert.Equal(0, thirdRecord.Key);
+        Assert.Null(thirdRecord.Value);
+    }
 
     private string GetMockKafkaEvent()
     {
@@ -119,6 +167,65 @@ private string GetMockKafkaEvent()
         }}
     }}";
     }
+
+    private string GetSimpleMockKafkaEvent()
+    {
+        // For testing, we'll create base64-encoded primitive string values for our test records
+
+        // Convert the plain UTF-8 strings to base64 (these payloads are not Avro-encoded)
+        string laptopBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("Laptop"));
+        string smartphoneBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("Smartphone"));
+
+        string firstRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("42")); // Example key
+        string secondRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("43")); // Example key for second record
+
+        // Create mock Kafka event JSON
+        return @$"{{
+            ""eventSource"": ""aws:kafka"",
+            ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"",
+            ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"",
+            ""records"": {{
+                ""mytopic-0"": [
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 15,
+                        ""timestamp"": 1545084650987,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{firstRecordKey}"",
+                        ""value"": ""{laptopBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }},
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 16,
+                        ""timestamp"": 1545084650988,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{secondRecordKey}"",
+                        ""value"": ""{smartphoneBase64}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }},
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 17,
+                        ""timestamp"": 1545084650989,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": null,
+                        ""value"": null,
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }}
+                ]
+            }}
+        }}";
+    }
 
     private string ConvertToAvroBase64(AvroProduct product)
     {
@@ -143,6 +250,17 @@ private async Task Handler(ConsumerRecords records, IL
 
         return "Successfully processed Kafka events";
     }
+
+    private async Task<string> HandlerSimple(ConsumerRecords<int, string> records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            var product = record.Value;
+            context.Logger.LogInformation($"Processing {product}");
+        }
+
+        return "Successfully processed Kafka events";
+    }
 
     [Fact]
     public async Task Handler_ProcessesKafkaEvent_WithAvroKey_Successfully()
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs
index 3a4ef6139..65c62be1b 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs
@@ -81,4 +81,40 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration()
         Assert.Equal("Laptop", firstRecord.Value.name);
         Assert.Equal(1001, firstRecord.Value.id);
     }
+
+    [Fact]
+    public void Primitive_Deserialization()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaAvroSerializer();
+        string kafkaEventJson = @$"{{
+            ""eventSource"": ""aws:kafka"",
+            ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"",
+            ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"",
+            ""records"": {{
+                ""mytopic-0"": [
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 15,
+                        ""timestamp"": 1545084650987,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("MyKey"))}"",
""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Myvalue"))}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + var firstRecord = result.First(); + Assert.Equal("Myvalue", firstRecord.Value); + Assert.Equal("MyKey", firstRecord.Key); + } } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs index b314e9c51..6283e6e59 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -92,5 +92,39 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() Assert.Equal(12345, firstRecord.Value.Id); } - + [Fact] + public void Primitive_Deserialization() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("MyKey"))}"", + ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Myvalue"))}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + var firstRecord = result.First(); + Assert.Equal("Myvalue", firstRecord.Value); + Assert.Equal("MyKey", firstRecord.Key); + } } diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs deleted file mode 100644 index 17ee9eca8..000000000 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsLambdaKafkaSerializerTests.cs +++ /dev/null @@ -1,12 +0,0 @@ -using Amazon.Lambda.Core; -using Avro; -using Avro.IO; -using Avro.Specific; -using System; -using System.IO; -using System.Text.Json; -using Xunit; - -namespace AWS.Lambda.Powertools.Kafka.Tests -{ -} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs index 0a0a2386f..7727dd0b2 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs @@ -87,4 +87,40 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() Assert.Equal("Laptop", firstRecord.Value.Name); Assert.Equal(1001, firstRecord.Value.Id); } + + [Fact] + public void Primitive_Deserialization() + { + // Arrange + var serializer = 
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
index 0a0a2386f..7727dd0b2 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
@@ -87,4 +87,40 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration()
         Assert.Equal("Laptop", firstRecord.Value.Name);
         Assert.Equal(1001, firstRecord.Value.Id);
     }
+
+    [Fact]
+    public void Primitive_Deserialization()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+        string kafkaEventJson = @$"{{
+            ""eventSource"": ""aws:kafka"",
+            ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"",
+            ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"",
+            ""records"": {{
+                ""mytopic-0"": [
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 15,
+                        ""timestamp"": 1545084650987,
+                        ""timestampType"": ""CREATE_TIME"",
+                        ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("MyKey"))}"",
+                        ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Myvalue"))}"",
+                        ""headers"": [
+                            {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                        ]
+                    }}
+                ]
+            }}
+        }}";
+
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<string, string>>(stream);
+        var firstRecord = result.First();
+        Assert.Equal("Myvalue", firstRecord.Value);
+        Assert.Equal("MyKey", firstRecord.Key);
+    }
 }
From 9912ec802d79ce6ecef22a17102e8cfdeea6c6eb Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Tue, 17 Jun 2025 19:23:03 +0100
Subject: [PATCH 14/35] add versions

---
 version.json | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/version.json b/version.json
index fd3b95021..332a97927 100644
--- a/version.json
+++ b/version.json
@@ -10,6 +10,9 @@
     "Idempotency": "1.3.0",
     "BatchProcessing": "1.2.1",
     "EventHandler": "1.0.0",
-    "EventHandler.Resolvers.BedrockAgentFunction": "1.0.0"
+    "EventHandler.Resolvers.BedrockAgentFunction": "1.0.0",
+    "Kafka.Json": "1.0.0",
+    "Kafka.Avro": "1.0.0",
+    "Kafka.Protobuf": "1.0.0"
   }
 }
From 57f5319f6e965948b5f46a118bde9dcb4e140b9b Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Wed, 18 Jun 2025 08:59:43 +0100
Subject: [PATCH 15/35] refactor examples

---
 examples/Kafka/Avro/src/Avro.csproj           |  2 -
 examples/Kafka/Avro/src/Function.cs           | 58 ++-----
 examples/Kafka/Avro/src/Readme.md             |  9 +-
 .../Avro/src/aws-lambda-tools-defaults.json   |  6 +-
 examples/Kafka/Json/src/Function.cs           | 57 ++----
 examples/Kafka/Json/src/Json.csproj           |  1 -
 examples/Kafka/Json/src/Readme.md             |  9 +-
 .../Json/src/aws-lambda-tools-defaults.json   |  6 +-
 examples/Kafka/Protobuf/src/Function.cs       | 79 ++---------
 examples/Kafka/Protobuf/src/Protobuf.csproj   |  1 -
 examples/Kafka/Protobuf/src/Readme.md         |  9 +-
 .../src/aws-lambda-tools-defaults.json        |  4 +-
 examples/Kafka/src/Avro/AvroKey.avsc          | 24 ------
 examples/Kafka/src/Avro/AvroProduct.avsc      | 10 ---
 .../Lambda/Powertools/Kafka/Tests/AvroKey.cs  | 70 ---------
 .../Powertools/Kafka/Tests/AvroProduct.cs     | 86 -------------
 .../Lambda/Powertools/Kafka/Tests/Color.cs    | 23 -----
 examples/Kafka/src/Avro/kafka-avro-event.json | 51 -----------
 examples/Kafka/src/Function.cs                | 85 ------------
 examples/Kafka/src/Json/kafka-json-event.json | 50 -----
 examples/Kafka/src/Kafka.csproj               | 74 ----------
 examples/Kafka/src/Protobuf/Key.proto         | 14 ---
 examples/Kafka/src/Protobuf/Product.proto     |  9 --
 .../src/Protobuf/kafka-protobuf-event.json    | 51 -----
 examples/Kafka/src/Readme.md                  | 73 ----------
 .../Kafka/src/aws-lambda-tools-defaults.json  | 17 ----
 examples/examples.sln                         | 10 ---
 27 files changed, 46 insertions(+), 842 deletions(-)
 delete mode 100644 examples/Kafka/src/Avro/AvroKey.avsc
 delete mode 100644 examples/Kafka/src/Avro/AvroProduct.avsc
 delete mode 100644
examples/Kafka/src/Avro/AvroProduct.avsc delete mode 100644 examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs delete mode 100644 examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs delete mode 100644 examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs delete mode 100644 examples/Kafka/src/Avro/kafka-avro-event.json delete mode 100644 examples/Kafka/src/Function.cs delete mode 100644 examples/Kafka/src/Json/kafka-json-event.json delete mode 100644 examples/Kafka/src/Kafka.csproj delete mode 100644 examples/Kafka/src/Protobuf/Key.proto delete mode 100644 examples/Kafka/src/Protobuf/Product.proto delete mode 100644 examples/Kafka/src/Protobuf/kafka-protobuf-event.json delete mode 100644 examples/Kafka/src/Readme.md delete mode 100644 examples/Kafka/src/aws-lambda-tools-defaults.json diff --git a/examples/Kafka/Avro/src/Avro.csproj b/examples/Kafka/Avro/src/Avro.csproj index 2781c9c21..f80ce7ece 100644 --- a/examples/Kafka/Avro/src/Avro.csproj +++ b/examples/Kafka/Avro/src/Avro.csproj @@ -19,8 +19,6 @@ - - diff --git a/examples/Kafka/Avro/src/Function.cs b/examples/Kafka/Avro/src/Function.cs index 0225d3861..401b861a5 100644 --- a/examples/Kafka/Avro/src/Function.cs +++ b/examples/Kafka/Avro/src/Function.cs @@ -1,62 +1,22 @@ -using System.Diagnostics; using Amazon.Lambda.Core; using Amazon.Lambda.RuntimeSupport; -using Amazon.Lambda.Serialization.SystemTextJson; using AWS.Lambda.Powertools.Kafka; using AWS.Lambda.Powertools.Kafka.Avro; using AWS.Lambda.Powertools.Kafka.Tests; using AWS.Lambda.Powertools.Logging; -using AWS.Lambda.Powertools.Metrics; -using com.example; -// string Handler(ConsumerRecords records, ILambdaContext context) -// { -// Metrics.SetNamespace("Avro"); -// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); -// -// foreach (var record in records) -// { -// Logger.LogInformation("Record Key: {@key}", record.Key); -// Logger.LogInformation("Record Value: {@record}", record.Value); -// } -// -// return "Processed " + records.Count() + " records"; -// } -// -// -// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, -// new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization -// .Build() -// .RunAsync(); - -var responseStream = new MemoryStream(); -var serializer = new PowertoolsKafkaAvroSerializer(); -Task ToUpperAsync(InvocationRequest invocation) +string Handler(ConsumerRecords records, ILambdaContext context) { - var stopwatch = Stopwatch.StartNew(); - - var records = serializer.Deserialize>(invocation.InputStream); - foreach (var record in records) { - Console.WriteLine("Record UserId: {0}", record.Value.user_id); + Logger.LogInformation("Record Value: {@record}", record.Value); } - - stopwatch.Stop(); - - Metrics.PushSingleMetric("AvroDeserialization-1024", - stopwatch.ElapsedMilliseconds, MetricUnit.Milliseconds, "kafka-dotnet", "service", null, - MetricResolution.High); - - Console.WriteLine("Record Count: {0}", records.Count()); - Console.WriteLine("Record UserId: {0}", records.First().Value.user_id); - Console.WriteLine("JsonDeserialization: {0:F2}", stopwatch.ElapsedMilliseconds); - - responseStream.SetLength(0); - responseStream.Position = 0; - - return Task.FromResult(new InvocationResponse(responseStream, false)); + + return "Processed " + records.Count() + " records"; } -var bootstrap = new LambdaBootstrap(ToUpperAsync); -await bootstrap.RunAsync(); \ No newline at end of file + 
+await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Readme.md b/examples/Kafka/Avro/src/Readme.md index 79ed451ba..b0922efcf 100644 --- a/examples/Kafka/Avro/src/Readme.md +++ b/examples/Kafka/Avro/src/Readme.md @@ -1,4 +1,4 @@ -# AWS Lambda Function Using Top Level Statements +# Powertools Kafka Avro Lambda Function This starter project consists of: * Function.cs - file contain C# top level statements that define the function to be called for each event and starts the Lambda runtime client. @@ -18,9 +18,10 @@ the Lambda runtime client add the `Amazon.Lambda.RuntimeSupport` NuGet package a of the file containing top-level statements to start the runtime. ```csharp -await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer()) - .Build() - .RunAsync(); +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); ``` Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that diff --git a/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json index 3db487a1e..cd93437eb 100644 --- a/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json +++ b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json @@ -9,9 +9,7 @@ "region": "", "configuration": "Release", "function-runtime": "dotnet8", - "function-memory-size": 1024, + "function-memory-size": 512, "function-timeout": 30, - "function-handler": "Avro.Example", - "function-name": "dotnet-kafka-avro-1024", - "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" + "function-handler": "Avro.Example" } \ No newline at end of file diff --git a/examples/Kafka/Json/src/Function.cs b/examples/Kafka/Json/src/Function.cs index cbd6fca03..b99e64ac6 100644 --- a/examples/Kafka/Json/src/Function.cs +++ b/examples/Kafka/Json/src/Function.cs @@ -1,64 +1,25 @@ -using System.Diagnostics; using System.Text.Json.Serialization; using Amazon.Lambda.Core; using Amazon.Lambda.RuntimeSupport; using AWS.Lambda.Powertools.Kafka; using AWS.Lambda.Powertools.Kafka.Json; using AWS.Lambda.Powertools.Logging; -using AWS.Lambda.Powertools.Metrics; - -// string Handler(ConsumerRecords records, ILambdaContext context) -// { -// Metrics.SetNamespace("Json"); -// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); -// -// foreach (var record in records) -// { -// Logger.LogInformation("Record Key: {@record.Key}", record.Key); -// Logger.LogInformation("Record Value: {@record}", record.Value); -// } -// -// return "Processed " + records.Count() + " records"; -// } -// -// -// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, -// new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization -// .Build() -// .RunAsync(); - -var responseStream = new MemoryStream(); -var serializer = new PowertoolsKafkaJsonSerializer(); - -Task ToUpperAsync(InvocationRequest invocation) -{ - var stopwatch = Stopwatch.StartNew(); - var records = serializer.Deserialize>(invocation.InputStream); - +string Handler(ConsumerRecords records, ILambdaContext context) +{ foreach (var record in records) { - Console.WriteLine("Record 
UserId: {0}", record.Value.UserId); + Logger.LogInformation("Record Value: {@record}", record.Value); } - stopwatch.Stop(); - - Metrics.PushSingleMetric("JsonDeserialization-1024", - stopwatch.ElapsedMilliseconds, MetricUnit.Milliseconds, "kafka-dotnet", "service", null, - MetricResolution.High); - - Console.WriteLine("Record Count: {0}", records.Count()); - Console.WriteLine("Record UserId: {0}", records.First().Value.UserId); - Console.WriteLine("JsonDeserialization: {0:F2}", stopwatch.ElapsedMilliseconds); - - responseStream.SetLength(0); - responseStream.Position = 0; - - return Task.FromResult(new InvocationResponse(responseStream, false)); + return "Processed " + records.Count() + " records"; } -var bootstrap = new LambdaBootstrap(ToUpperAsync); -await bootstrap.RunAsync(); + +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); public record JsonKey diff --git a/examples/Kafka/Json/src/Json.csproj b/examples/Kafka/Json/src/Json.csproj index 158711ba9..aba6cde89 100644 --- a/examples/Kafka/Json/src/Json.csproj +++ b/examples/Kafka/Json/src/Json.csproj @@ -16,7 +16,6 @@ - diff --git a/examples/Kafka/Json/src/Readme.md b/examples/Kafka/Json/src/Readme.md index acdcf7df9..6e9251f7b 100644 --- a/examples/Kafka/Json/src/Readme.md +++ b/examples/Kafka/Json/src/Readme.md @@ -1,4 +1,4 @@ -# AWS Lambda Function Using Top Level Statements +# Powertools Kafka JSON Lambda Function This starter project consists of: * Function.cs - file contain C# top level statements that define the function to be called for each event and starts the Lambda runtime client. @@ -18,9 +18,10 @@ the Lambda runtime client add the `Amazon.Lambda.RuntimeSupport` NuGet package a of the file containing top-level statements to start the runtime. 
```csharp -await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer()) - .Build() - .RunAsync(); +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); ``` Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that diff --git a/examples/Kafka/Json/src/aws-lambda-tools-defaults.json b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json index ceb391444..fb3240903 100644 --- a/examples/Kafka/Json/src/aws-lambda-tools-defaults.json +++ b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json @@ -9,9 +9,7 @@ "region": "", "configuration": "Release", "function-runtime": "dotnet8", - "function-memory-size": 1024, + "function-memory-size": 512, "function-timeout": 30, - "function-handler": "Json", - "function-name": "dotnet-kafka-json-1024", - "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" + "function-handler": "Json" } \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Function.cs b/examples/Kafka/Protobuf/src/Function.cs index bfb23894e..c63c81000 100644 --- a/examples/Kafka/Protobuf/src/Function.cs +++ b/examples/Kafka/Protobuf/src/Function.cs @@ -1,91 +1,28 @@ -using System.Diagnostics; using Amazon.Lambda.Core; using Amazon.Lambda.RuntimeSupport; using AWS.Lambda.Powertools.Kafka; using AWS.Lambda.Powertools.Kafka.Protobuf; using AWS.Lambda.Powertools.Logging; -using AWS.Lambda.Powertools.Metrics; -using Com.Example; using TestKafka; -// string Handler(ConsumerRecords records, ILambdaContext context) -// { -// Metrics.SetNamespace("Proto"); -// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); -// -// foreach (var record in records) -// { -// foreach (var header in record.Headers) -// { -// Console.WriteLine($"{header.Key}: {ToDecimalString(header.Value)}"); -// } -// -// foreach (var header in record.Headers.DecodedValues()) -// { -// Console.WriteLine($"{header.Key}: {header.Value}"); -// } -// -// Logger.LogInformation("Record Key: {@key}", record.Key); -// Logger.LogInformation("Record Value: {@record}", record.Value); -// } -// -// return "Processed " + records.Count() + " records"; -// } -// -// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, -// new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization -// .Build() -// .RunAsync(); - -var responseStream = new MemoryStream(); -var serializer = new PowertoolsKafkaProtobufSerializer(); - -Task ToUpperAsync(InvocationRequest invocation) +string Handler(ConsumerRecords records, ILambdaContext context) { - var stopwatch = Stopwatch.StartNew(); - - var records = serializer.Deserialize>(invocation.InputStream); - foreach (var record in records) { - Console.WriteLine("Record Key: {0}", record.Key); - foreach (var header in record.Headers) - { - Console.WriteLine($"{header.Key}: {ToDecimalString(header.Value)}"); - } - foreach (var header in record.Headers.DecodedValues()) { Console.WriteLine($"{header.Key}: {header.Value}"); } - Console.WriteLine("Record UserId: {0}", record.Value); + Logger.LogInformation("Record Key: {@key}", record.Key); + Logger.LogInformation("Record Value: {@record}", record.Value); } - stopwatch.Stop(); - - Metrics.PushSingleMetric("ProtoDeserialization-512", - stopwatch.ElapsedMilliseconds, MetricUnit.Milliseconds, "kafka-dotnet", "service", null, - 
MetricResolution.High); - - Console.WriteLine("Record Count: {0}", records.Count()); - Console.WriteLine("JsonDeserialization: {0:F2}", stopwatch.ElapsedMilliseconds); - - responseStream.SetLength(0); - responseStream.Position = 0; - - return Task.FromResult(new InvocationResponse(responseStream, false)); + return "Processed " + records.Count() + " records"; } -static string ToDecimalString(byte[] bytes) -{ - if (bytes == null || bytes.Length == 0) - { - return "[]"; - } - - return "[" + string.Join(", ", bytes) + "]"; -} +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); -var bootstrap = new LambdaBootstrap(ToUpperAsync); -await bootstrap.RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Protobuf.csproj b/examples/Kafka/Protobuf/src/Protobuf.csproj index 7e383bab7..3623b1b40 100644 --- a/examples/Kafka/Protobuf/src/Protobuf.csproj +++ b/examples/Kafka/Protobuf/src/Protobuf.csproj @@ -16,7 +16,6 @@ - all runtime; build; native; contentfiles; analyzers; buildtransitive diff --git a/examples/Kafka/Protobuf/src/Readme.md b/examples/Kafka/Protobuf/src/Readme.md index f9c897654..ca3020a34 100644 --- a/examples/Kafka/Protobuf/src/Readme.md +++ b/examples/Kafka/Protobuf/src/Readme.md @@ -1,4 +1,4 @@ -# AWS Lambda Function Using Top Level Statements +# PowerTools Kafka Protobuf Lambda Function This starter project consists of: * Function.cs - file contain C# top level statements that define the function to be called for each event and starts the Lambda runtime client. @@ -18,9 +18,10 @@ the Lambda runtime client add the `Amazon.Lambda.RuntimeSupport` NuGet package a of the file containing top-level statements to start the runtime. 
```csharp -await LambdaBootstrapBuilder.Create(handler, new DefaultLambdaJsonSerializer()) - .Build() - .RunAsync(); +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); ``` Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that diff --git a/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json index 64b6fa0f4..1a1c5de1d 100644 --- a/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json +++ b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json @@ -11,7 +11,5 @@ "function-runtime": "dotnet8", "function-memory-size": 512, "function-timeout": 30, - "function-handler": "Protobuf", - "function-name": "dotnet-kafka-proto", - "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" + "function-handler": "Protobuf" } \ No newline at end of file diff --git a/examples/Kafka/src/Avro/AvroKey.avsc b/examples/Kafka/src/Avro/AvroKey.avsc deleted file mode 100644 index cc15c9e72..000000000 --- a/examples/Kafka/src/Avro/AvroKey.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "namespace": "AWS.Lambda.Powertools.Kafka.Tests", - "type": "record", - "name": "AvroKey", - "fields": [ - { - "name": "id", - "type": "int" - }, - { - "name": "color", - "type": { - "type": "enum", - "name": "Color", - "symbols": [ - "UNKNOWN", - "GREEN", - "RED" - ], - "default": "UNKNOWN" - } - } - ] -} \ No newline at end of file diff --git a/examples/Kafka/src/Avro/AvroProduct.avsc b/examples/Kafka/src/Avro/AvroProduct.avsc deleted file mode 100644 index 60b8ed002..000000000 --- a/examples/Kafka/src/Avro/AvroProduct.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "namespace": "AWS.Lambda.Powertools.Kafka.Tests", - "type": "record", - "name": "AvroProduct", - "fields": [ - {"name": "id", "type": "int"}, - {"name": "name", "type": "string"}, - {"name": "price", "type": "double"} - ] -} \ No newline at end of file diff --git a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs deleted file mode 100644 index 96d09316e..000000000 --- a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs +++ /dev/null @@ -1,70 +0,0 @@ -// ------------------------------------------------------------------------------ -// -// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e -// Changes to this file may cause incorrect behavior and will be lost if code -// is regenerated -// -// ------------------------------------------------------------------------------ -namespace AWS.Lambda.Powertools.Kafka.Tests -{ - using System; - using System.Collections.Generic; - using System.Text; - using global::Avro; - using global::Avro.Specific; - - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] - public partial class AvroKey : global::Avro.Specific.ISpecificRecord - { - public static global::Avro.Schema _SCHEMA = 
global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroKey"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""fields"":[{""name"":""id"",""type"":""int""},{""name"":""color"",""type"":{""type"":""enum"",""name"":""Color"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""symbols"":[""UNKNOWN"",""GREEN"",""RED""],""default"":""UNKNOWN""}}]}"); - private int _id; - private AWS.Lambda.Powertools.Kafka.Tests.Color _color = AWS.Lambda.Powertools.Kafka.Tests.Color.UNKNOWN; - public virtual global::Avro.Schema Schema - { - get - { - return AvroKey._SCHEMA; - } - } - public int id - { - get - { - return this._id; - } - set - { - this._id = value; - } - } - public AWS.Lambda.Powertools.Kafka.Tests.Color color - { - get - { - return this._color; - } - set - { - this._color = value; - } - } - public virtual object Get(int fieldPos) - { - switch (fieldPos) - { - case 0: return this.id; - case 1: return this.color; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); - }; - } - public virtual void Put(int fieldPos, object fieldValue) - { - switch (fieldPos) - { - case 0: this.id = (System.Int32)fieldValue; break; - case 1: this.color = (AWS.Lambda.Powertools.Kafka.Tests.Color)fieldValue; break; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); - }; - } - } -} diff --git a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs deleted file mode 100644 index f1c6aa8d4..000000000 --- a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs +++ /dev/null @@ -1,86 +0,0 @@ -// ------------------------------------------------------------------------------ -// -// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e -// Changes to this file may cause incorrect behavior and will be lost if code -// is regenerated -// -// ------------------------------------------------------------------------------ -namespace AWS.Lambda.Powertools.Kafka.Tests -{ - using System; - using System.Collections.Generic; - using System.Text; - using global::Avro; - using global::Avro.Specific; - - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] - public partial class AvroProduct : global::Avro.Specific.ISpecificRecord - { - public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"AWS.Lambda.Powertools.Kafka.Te" + - "sts\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"string\"},{\"name" + - "\":\"price\",\"type\":\"double\"}]}"); - private int _id; - private string _name; - private double _price; - public virtual global::Avro.Schema Schema - { - get - { - return AvroProduct._SCHEMA; - } - } - public int id - { - get - { - return this._id; - } - set - { - this._id = value; - } - } - public string name - { - get - { - return this._name; - } - set - { - this._name = value; - } - } - public double price - { - get - { - return this._price; - } - set - { - this._price = value; - } - } - public virtual object Get(int fieldPos) - { - switch (fieldPos) - { - case 0: return this.id; - case 1: return this.name; - case 2: return this.price; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); - }; - } - public virtual void Put(int fieldPos, object fieldValue) - { - 
switch (fieldPos) - { - case 0: this.id = (System.Int32)fieldValue; break; - case 1: this.name = (System.String)fieldValue; break; - case 2: this.price = (System.Double)fieldValue; break; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); - }; - } - } -} diff --git a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs b/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs deleted file mode 100644 index 963233679..000000000 --- a/examples/Kafka/src/Avro/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs +++ /dev/null @@ -1,23 +0,0 @@ -// ------------------------------------------------------------------------------ -// -// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e -// Changes to this file may cause incorrect behavior and will be lost if code -// is regenerated -// -// ------------------------------------------------------------------------------ -namespace AWS.Lambda.Powertools.Kafka.Tests -{ - using System; - using System.Collections.Generic; - using System.Text; - using global::Avro; - using global::Avro.Specific; - - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] - public enum Color - { - UNKNOWN, - GREEN, - RED, - } -} diff --git a/examples/Kafka/src/Avro/kafka-avro-event.json b/examples/Kafka/src/Avro/kafka-avro-event.json deleted file mode 100644 index 8d6ef2210..000000000 --- a/examples/Kafka/src/Avro/kafka-avro-event.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "eventSource": "aws:kafka", - "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", - "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "records": { - "mytopic-0": [ - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "0g8MTGFwdG9wUrgehes/j0A=", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 16, - "timestamp": 1545084650988, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 17, - "timestamp": 1545084650989, - "timestampType": "CREATE_TIME", - "key": null, - "value": "1g8USGVhZHBob25lc0jhehSuv2JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - } - ] - } -} diff --git a/examples/Kafka/src/Function.cs b/examples/Kafka/src/Function.cs deleted file mode 100644 index 9f433816a..000000000 --- a/examples/Kafka/src/Function.cs +++ /dev/null @@ -1,85 +0,0 @@ -using Amazon.Lambda.Core; -using Amazon.Lambda.RuntimeSupport; -using AWS.Lambda.Powertools.Kafka; -using AWS.Lambda.Powertools.Kafka.Json; -using AWS.Lambda.Powertools.Kafka.Avro; -using AWS.Lambda.Powertools.Kafka.Tests; -using AWS.Lambda.Powertools.Logging; -using AWS.Lambda.Powertools.Metrics; -using TestKafka; - -string Handler(ConsumerRecords records, ILambdaContext context) -{ - Metrics.SetNamespace("Avro"); - Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); - - foreach (var record in records) - { - Logger.LogInformation("Record 
Key: {@key}", record.Key); - Logger.LogInformation("Record Value: {@record}", record.Value); - } - - return "Processed " + records.Count() + " records"; -} - - -await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, - new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization - .Build() - .RunAsync(); - - -// -// string Handler(ConsumerRecords records, ILambdaContext context) -// { -// Metrics.SetNamespace("Proto"); -// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); -// -// foreach (var record in records) -// { -// Logger.LogInformation("Record Key: {@key}", record.Key); -// Logger.LogInformation("Record Value: {@record}", record.Value); -// } -// -// return "Processed " + records.Count() + " records"; -// } -// -// -// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, -// new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization -// .Build() -// .RunAsync(); - -// -// string Handler(ConsumerRecords records, ILambdaContext context) -// { -// Metrics.SetNamespace("Json"); -// Metrics.AddMetric("NumberOfRequests", 1, MetricUnit.Count, MetricResolution.High); -// -// foreach (var record in records) -// { -// Logger.LogInformation("Record Key: {@record.Key}", record.Key); -// Logger.LogInformation("Record Value: {@record}", record.Value); -// } -// -// return "Processed " + records.Count() + " records"; -// } -// -// -// await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, -// new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization -// .Build() -// .RunAsync(); -// -// -// public record JsonKey -// { -// public int Id { get; set; } -// } -// -// public record JsonProduct -// { -// public int Id { get; set; } -// public string Name { get; set; } = string.Empty; -// public decimal Price { get; set; } -// } \ No newline at end of file diff --git a/examples/Kafka/src/Json/kafka-json-event.json b/examples/Kafka/src/Json/kafka-json-event.json deleted file mode 100644 index d85c40654..000000000 --- a/examples/Kafka/src/Json/kafka-json-event.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "eventSource": "aws:kafka", - "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", - "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "records": { - "mytopic-0": [ - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": "cmVjb3JkS2V5", - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": null, - "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - } - ] - } -} 
diff --git a/examples/Kafka/src/Kafka.csproj b/examples/Kafka/src/Kafka.csproj deleted file mode 100644 index b8eac65cd..000000000 --- a/examples/Kafka/src/Kafka.csproj +++ /dev/null @@ -1,74 +0,0 @@ - - - Exe - net8.0 - enable - enable - true - Lambda - - true - - true - - - - - - - - - all - runtime; build; native; contentfiles; analyzers; buildtransitive - - - - - - - - - - - - - - - - - - - - - - - Client - PreserveNewest - MSBuild:Compile - - - - PreserveNewest - - - - - - - - PreserveNewest - - - - PreserveNewest - - - - PreserveNewest - - - - - - - \ No newline at end of file diff --git a/examples/Kafka/src/Protobuf/Key.proto b/examples/Kafka/src/Protobuf/Key.proto deleted file mode 100644 index deedcf5dc..000000000 --- a/examples/Kafka/src/Protobuf/Key.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; - -option csharp_namespace = "TestKafka"; - -message ProtobufKey { - int32 id = 1; - Color color = 2; -} - -enum Color { - UNKNOWN = 0; - GREEN = 1; - RED = 2; -} \ No newline at end of file diff --git a/examples/Kafka/src/Protobuf/Product.proto b/examples/Kafka/src/Protobuf/Product.proto deleted file mode 100644 index 1d4c64e90..000000000 --- a/examples/Kafka/src/Protobuf/Product.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -option csharp_namespace = "TestKafka"; - -message ProtobufProduct { - int32 id = 1; - string name = 2; - double price = 3; -} \ No newline at end of file diff --git a/examples/Kafka/src/Protobuf/kafka-protobuf-event.json b/examples/Kafka/src/Protobuf/kafka-protobuf-event.json deleted file mode 100644 index b3e0139e3..000000000 --- a/examples/Kafka/src/Protobuf/kafka-protobuf-event.json +++ /dev/null @@ -1,51 +0,0 @@ -{ - "eventSource": "aws:kafka", - "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", - "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", - "records": { - "mytopic-0": [ - { - "topic": "mytopic", - "partition": 0, - "offset": 15, - "timestamp": 1545084650987, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 16, - "timestamp": 1545084650988, - "timestampType": "CREATE_TIME", - "key": "NDI=", - "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - }, - { - "topic": "mytopic", - "partition": 0, - "offset": 17, - "timestamp": 1545084650989, - "timestampType": "CREATE_TIME", - "key": null, - "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA", - "headers": [ - { - "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] - } - ] - } - ] - } -} diff --git a/examples/Kafka/src/Readme.md b/examples/Kafka/src/Readme.md deleted file mode 100644 index e43192609..000000000 --- a/examples/Kafka/src/Readme.md +++ /dev/null @@ -1,73 +0,0 @@ -# Powertools for AWS Lambda - Kafka examples - - -## Already added to the project - -# Avro - -```bash -dotnet tool install --global Apache.Avro.Tools - -cd tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/ -avrogen -s AvroProduct.avsc ./ -``` - -```xml - - - - - - - -``` - -# Protobuf - -```xml - - - - PreserveNewest - - - - -``` - -## Here are some steps to follow to get started from the command line: - - -Install Amazon.Lambda.Tools Global Tools if not already 
installed. -``` - dotnet tool install -g Amazon.Lambda.Tools -``` - -## Edit the aws-lambda-tools-defaults.json file - -Update the role to use in the `aws-lambda-tools-defaults.json` file. This file is used by the `dotnet lambda deploy-function` command to deploy the Lambda function. - -``` - code aws-lambda-tools-defaults.json -``` - -Deploy function to AWS Lambda -``` - dotnet lambda deploy-function -``` - -## Infra - -Make sure the Lambda function adds permissions for bedrock to invoke it. You can do this by running the following command: - -```bash -aws lambda add-permission --function-name --principal bedrock.amazonaws.com --statement-id --action lambda:InvokeFunction -``` - -## Invoke the function - -Use the provided test event to invoke the function. You can do this with the AWS CLI or the dotnet CLI. - -```bash -dotnet lambda invoke-function --payload file://Avro/kafka-event.json -``` diff --git a/examples/Kafka/src/aws-lambda-tools-defaults.json b/examples/Kafka/src/aws-lambda-tools-defaults.json deleted file mode 100644 index c54ff19c3..000000000 --- a/examples/Kafka/src/aws-lambda-tools-defaults.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "Information": [ - "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", - "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", - "dotnet lambda help", - "All the command line options for the Lambda command can be specified in this file." - ], - "profile": "", - "region": "eu-west-3", - "configuration": "Release", - "function-runtime": "dotnet8", - "function-memory-size": 512, - "function-timeout": 30, - "function-handler": "Kafka", - "function-name": "dotnet-kafka-json", - "function-role": "arn:aws:iam::992382490249:role/dotnet-kafka-test-role" -} \ No newline at end of file diff --git a/examples/examples.sln b/examples/examples.sln index 9b3bf61bd..c2770319e 100644 --- a/examples/examples.sln +++ b/examples/examples.sln @@ -111,10 +111,6 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT_Logging.Tests", "AOT\AO EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Kafka", "Kafka", "{71027B81-CA39-498C-9A50-ADDAFA2AC2F5}" EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Kafka", "Kafka\src\Kafka.csproj", "{D9FF982F-7CD7-4652-94BB-B387B7FC034F}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{0D5246BB-02B3-43EE-9EE3-E0E627170C9B}" -EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Json", "Kafka\Json\src\Json.csproj", "{58EC305E-353A-4996-A541-3CF7FC0EDD80}" EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Protobuf", "Kafka\Protobuf\src\Protobuf.csproj", "{853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}" @@ -214,10 +210,6 @@ Global {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Debug|Any CPU.Build.0 = Debug|Any CPU {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.ActiveCfg = Release|Any CPU {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.Build.0 = Release|Any CPU - {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D9FF982F-7CD7-4652-94BB-B387B7FC034F}.Release|Any CPU.Build.0 = Release|Any CPU {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 
{58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.Build.0 = Debug|Any CPU {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Release|Any CPU.ActiveCfg = Release|Any CPU @@ -277,8 +269,6 @@ Global {343CF6B9-C006-43F8-924C-BF5BF5B6D051} = {FE1CAA26-87E9-4B71-800E-81D2997A7B53} {FC02CF45-DE15-4413-958A-D86808B99146} = {FEE72EAB-494F-403B-A75A-825E713C3D43} {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5} = {F3480212-EE7F-46FE-9ED5-24ACAB5B681D} - {0D5246BB-02B3-43EE-9EE3-E0E627170C9B} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} - {D9FF982F-7CD7-4652-94BB-B387B7FC034F} = {0D5246BB-02B3-43EE-9EE3-E0E627170C9B} {58EC305E-353A-4996-A541-3CF7FC0EDD80} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} {B03F22B2-315C-429B-9CC0-C15BE94CBF77} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} From 40d3282c0d06240b7170f1d41521cec3e9c9955f Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 12:00:53 +0100 Subject: [PATCH 16/35] examples update --- examples/Kafka/Avro/src/Avro.csproj | 7 +- examples/Kafka/Avro/src/AvroKey.avsc | 24 --- examples/Kafka/Avro/src/AvroProduct.avsc | 10 -- .../{Payload.avsc => CustomerProfile.avsc} | 0 examples/Kafka/Avro/src/Function.cs | 7 +- .../Lambda/Powertools/Kafka/Tests/AvroKey.cs | 70 -------- .../Powertools/Kafka/Tests/AvroProduct.cs | 86 ---------- .../Lambda/Powertools/Kafka/Tests/Color.cs | 23 --- examples/Kafka/Avro/src/Readme.md | 146 ++++++++++++----- examples/Kafka/Avro/src/template.yaml | 27 ++++ examples/Kafka/Json/src/Function.cs | 72 +-------- examples/Kafka/Json/src/Models/Address.cs | 16 ++ .../Kafka/Json/src/Models/CustomerProfile.cs | 22 +++ examples/Kafka/Json/src/Models/Email.cs | 12 ++ examples/Kafka/Json/src/Models/PhoneNumber.cs | 10 ++ examples/Kafka/Json/src/Models/Preferences.cs | 12 ++ examples/Kafka/Json/src/Readme.md | 129 ++++++++++----- examples/Kafka/Json/src/template.yaml | 27 ++++ .../{Payload.proto => CustomerProfile.proto} | 0 examples/Kafka/Protobuf/src/Function.cs | 6 +- examples/Kafka/Protobuf/src/Key.proto | 14 -- examples/Kafka/Protobuf/src/Product.proto | 9 -- examples/Kafka/Protobuf/src/Protobuf.csproj | 20 +-- examples/Kafka/Protobuf/src/Readme.md | 149 +++++++++++++----- examples/Kafka/Protobuf/src/template.yaml | 27 ++++ 25 files changed, 468 insertions(+), 457 deletions(-) delete mode 100644 examples/Kafka/Avro/src/AvroKey.avsc delete mode 100644 examples/Kafka/Avro/src/AvroProduct.avsc rename examples/Kafka/Avro/src/{Payload.avsc => CustomerProfile.avsc} (100%) delete mode 100644 examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs delete mode 100644 examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs delete mode 100644 examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs create mode 100644 examples/Kafka/Avro/src/template.yaml create mode 100644 examples/Kafka/Json/src/Models/Address.cs create mode 100644 examples/Kafka/Json/src/Models/CustomerProfile.cs create mode 100644 examples/Kafka/Json/src/Models/Email.cs create mode 100644 examples/Kafka/Json/src/Models/PhoneNumber.cs create mode 100644 examples/Kafka/Json/src/Models/Preferences.cs create mode 100644 examples/Kafka/Json/src/template.yaml rename examples/Kafka/Protobuf/src/{Payload.proto => CustomerProfile.proto} (100%) delete mode 100644 examples/Kafka/Protobuf/src/Key.proto delete mode 100644 examples/Kafka/Protobuf/src/Product.proto create mode 100644 
examples/Kafka/Protobuf/src/template.yaml diff --git a/examples/Kafka/Avro/src/Avro.csproj b/examples/Kafka/Avro/src/Avro.csproj index f80ce7ece..05314f2fb 100644 --- a/examples/Kafka/Avro/src/Avro.csproj +++ b/examples/Kafka/Avro/src/Avro.csproj @@ -24,12 +24,7 @@ - - - - - - + diff --git a/examples/Kafka/Avro/src/AvroKey.avsc b/examples/Kafka/Avro/src/AvroKey.avsc deleted file mode 100644 index cc15c9e72..000000000 --- a/examples/Kafka/Avro/src/AvroKey.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "namespace": "AWS.Lambda.Powertools.Kafka.Tests", - "type": "record", - "name": "AvroKey", - "fields": [ - { - "name": "id", - "type": "int" - }, - { - "name": "color", - "type": { - "type": "enum", - "name": "Color", - "symbols": [ - "UNKNOWN", - "GREEN", - "RED" - ], - "default": "UNKNOWN" - } - } - ] -} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/AvroProduct.avsc b/examples/Kafka/Avro/src/AvroProduct.avsc deleted file mode 100644 index 60b8ed002..000000000 --- a/examples/Kafka/Avro/src/AvroProduct.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "namespace": "AWS.Lambda.Powertools.Kafka.Tests", - "type": "record", - "name": "AvroProduct", - "fields": [ - {"name": "id", "type": "int"}, - {"name": "name", "type": "string"}, - {"name": "price", "type": "double"} - ] -} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Payload.avsc b/examples/Kafka/Avro/src/CustomerProfile.avsc similarity index 100% rename from examples/Kafka/Avro/src/Payload.avsc rename to examples/Kafka/Avro/src/CustomerProfile.avsc diff --git a/examples/Kafka/Avro/src/Function.cs b/examples/Kafka/Avro/src/Function.cs index 401b861a5..6ca9ebdb5 100644 --- a/examples/Kafka/Avro/src/Function.cs +++ b/examples/Kafka/Avro/src/Function.cs @@ -2,10 +2,10 @@ using Amazon.Lambda.RuntimeSupport; using AWS.Lambda.Powertools.Kafka; using AWS.Lambda.Powertools.Kafka.Avro; -using AWS.Lambda.Powertools.Kafka.Tests; using AWS.Lambda.Powertools.Logging; +using com.example; -string Handler(ConsumerRecords records, ILambdaContext context) +string Handler(ConsumerRecords records, ILambdaContext context) { foreach (var record in records) { @@ -15,8 +15,7 @@ string Handler(ConsumerRecords records, ILambdaContext con return "Processed " + records.Count() + " records"; } - -await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization .Build() .RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs deleted file mode 100644 index 96d09316e..000000000 --- a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs +++ /dev/null @@ -1,70 +0,0 @@ -// ------------------------------------------------------------------------------ -// -// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e -// Changes to this file may cause incorrect behavior and will be lost if code -// is regenerated -// -// ------------------------------------------------------------------------------ -namespace AWS.Lambda.Powertools.Kafka.Tests -{ - using System; - using System.Collections.Generic; - using System.Text; - using global::Avro; - using global::Avro.Specific; - - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", 
"1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] - public partial class AvroKey : global::Avro.Specific.ISpecificRecord - { - public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroKey"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""fields"":[{""name"":""id"",""type"":""int""},{""name"":""color"",""type"":{""type"":""enum"",""name"":""Color"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""symbols"":[""UNKNOWN"",""GREEN"",""RED""],""default"":""UNKNOWN""}}]}"); - private int _id; - private AWS.Lambda.Powertools.Kafka.Tests.Color _color = AWS.Lambda.Powertools.Kafka.Tests.Color.UNKNOWN; - public virtual global::Avro.Schema Schema - { - get - { - return AvroKey._SCHEMA; - } - } - public int id - { - get - { - return this._id; - } - set - { - this._id = value; - } - } - public AWS.Lambda.Powertools.Kafka.Tests.Color color - { - get - { - return this._color; - } - set - { - this._color = value; - } - } - public virtual object Get(int fieldPos) - { - switch (fieldPos) - { - case 0: return this.id; - case 1: return this.color; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); - }; - } - public virtual void Put(int fieldPos, object fieldValue) - { - switch (fieldPos) - { - case 0: this.id = (System.Int32)fieldValue; break; - case 1: this.color = (AWS.Lambda.Powertools.Kafka.Tests.Color)fieldValue; break; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); - }; - } - } -} diff --git a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs deleted file mode 100644 index f1c6aa8d4..000000000 --- a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs +++ /dev/null @@ -1,86 +0,0 @@ -// ------------------------------------------------------------------------------ -// -// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e -// Changes to this file may cause incorrect behavior and will be lost if code -// is regenerated -// -// ------------------------------------------------------------------------------ -namespace AWS.Lambda.Powertools.Kafka.Tests -{ - using System; - using System.Collections.Generic; - using System.Text; - using global::Avro; - using global::Avro.Specific; - - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] - public partial class AvroProduct : global::Avro.Specific.ISpecificRecord - { - public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"AWS.Lambda.Powertools.Kafka.Te" + - "sts\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"string\"},{\"name" + - "\":\"price\",\"type\":\"double\"}]}"); - private int _id; - private string _name; - private double _price; - public virtual global::Avro.Schema Schema - { - get - { - return AvroProduct._SCHEMA; - } - } - public int id - { - get - { - return this._id; - } - set - { - this._id = value; - } - } - public string name - { - get - { - return this._name; - } - set - { - this._name = value; - } - } - public double price - { - get - { - return this._price; - } - set - { - this._price = value; - } - } - public virtual object Get(int fieldPos) - { - switch (fieldPos) - { - case 0: return this.id; - case 1: return this.name; - case 2: return 
this.price; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); - }; - } - public virtual void Put(int fieldPos, object fieldValue) - { - switch (fieldPos) - { - case 0: this.id = (System.Int32)fieldValue; break; - case 1: this.name = (System.String)fieldValue; break; - case 2: this.price = (System.Double)fieldValue; break; - default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); - }; - } - } -} diff --git a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs b/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs deleted file mode 100644 index 963233679..000000000 --- a/examples/Kafka/Avro/src/Generated/AWS/Lambda/Powertools/Kafka/Tests/Color.cs +++ /dev/null @@ -1,23 +0,0 @@ -// ------------------------------------------------------------------------------ -// -// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e -// Changes to this file may cause incorrect behavior and will be lost if code -// is regenerated -// -// ------------------------------------------------------------------------------ -namespace AWS.Lambda.Powertools.Kafka.Tests -{ - using System; - using System.Collections.Generic; - using System.Text; - using global::Avro; - using global::Avro.Specific; - - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] - public enum Color - { - UNKNOWN, - GREEN, - RED, - } -} diff --git a/examples/Kafka/Avro/src/Readme.md b/examples/Kafka/Avro/src/Readme.md index b0922efcf..e09861f8f 100644 --- a/examples/Kafka/Avro/src/Readme.md +++ b/examples/Kafka/Avro/src/Readme.md @@ -1,64 +1,130 @@ -# Powertools Kafka Avro Lambda Function +# AWS Powertools for AWS Lambda .NET - Kafka Avro Example -This starter project consists of: -* Function.cs - file contain C# top level statements that define the function to be called for each event and starts the Lambda runtime client. -* aws-lambda-tools-defaults.json - default argument settings for use with Visual Studio and command line deployment tools for AWS +This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Apache Kafka) to process events from Kafka topics. -You may also have a test project depending on the options selected. +## Overview -The generated function handler is a simple method accepting a string argument that returns the uppercase equivalent of the input string. Replace the body of this method, and parameters, to suit your needs. +This example showcases a Lambda function that consumes messages from Kafka topics using the Avro serialization format. -## Executable Assembly +It uses the `AWS.Lambda.Powertools.Kafka.Avro` NuGet package to easily deserialize and process Kafka records. -.NET Lambda projects that use C# top level statements like this project must be deployed as an executable assembly instead of a class library. To indicate to Lambda that the .NET function is an executable assembly the -Lambda function handler value is set to the .NET Assembly name. This is different then deploying as a class library where the function handler string includes the assembly, type and method name. +## Project Structure -To deploy as an executable assembly the Lambda runtime client must be started to listen for incoming events to process.
To start -the Lambda runtime client add the `Amazon.Lambda.RuntimeSupport` NuGet package and add the following code at the end of the -of the file containing top-level statements to start the runtime. - -```csharp -await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, - new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization - .Build() - .RunAsync(); +```bash +examples/Kafka/Avro/src/ +├── Function.cs # Entry point for the Lambda function +├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment +├── template.yaml # AWS SAM template for deploying the function +└── kafka-avro-event.json # Sample Avro event to test the function ``` -Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that -should be called for each event. If the handler takes in an input event besides `System.IO.Stream` then -the JSON serializer must also be passed into the `Create` method. +## Prerequisites +- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later) +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) +- [AWS CLI](https://aws.amazon.com/cli/) +- An AWS account with appropriate permissions +- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from +- [AWS.Lambda.Powertools.Kafka.Avro](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Avro/) NuGet package installed in your project +- [Avro Tools](https://www.nuget.org/packages/Apache.Avro.Tools/) codegen tool to generate C# classes from the Avro schema -## Here are some steps to follow from Visual Studio: +## Installation -To deploy your function to AWS Lambda, right click the project in Solution Explorer and select *Publish to AWS Lambda*. +1. Clone the repository: -To view your deployed function open its Function View window by double-clicking the function name shown beneath the AWS Lambda node in the AWS Explorer tree. + ```bash + git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git + ``` -To perform testing against your deployed function use the Test Invoke tab in the opened Function View window. +2. Navigate to the project directory: -To configure event sources for your deployed function, for example to have your function invoked when an object is created in an Amazon S3 bucket, use the Event Sources tab in the opened Function View window. + ```bash + cd powertools-lambda-dotnet/examples/Kafka/Avro/src + ``` -To update the runtime configuration of your deployed function use the Configuration tab in the opened Function View window. +3. Build the project: -To view execution logs of invocations of your function use the Logs tab in the opened Function View window. + ```bash + dotnet build + ``` +4. Install the Avro Tools globally to generate C# classes from the Avro schema: -## Here are some steps to follow to get started from the command line: + ```bash + dotnet tool install --global Apache.Avro.Tools + ``` -Once you have edited your template and code you can deploy your application using the [Amazon.Lambda.Tools Global Tool](https://github.com/aws/aws-extensions-for-dotnet-cli#aws-lambda-amazonlambdatools) from the command line. +## Deployment -Install Amazon.Lambda.Tools Global Tools if not already installed. 
-``` - dotnet tool install -g Amazon.Lambda.Tools -``` +Deploy the application using the AWS SAM CLI: -If already installed check if new version is available. +```bash +sam build +sam deploy --guided ``` - dotnet tool update -g Amazon.Lambda.Tools + +Follow the prompts to configure your deployment. + +## Avro Format +Avro is a binary serialization format that provides a compact and efficient way to serialize structured data. It uses schemas to define the structure of the data, which allows for robust data evolution. + +In this example we provide a schema called `CustomerProfile.avsc`. The schema is used to serialize and deserialize the data in the Kafka messages. + +The C# classes are generated from the `CustomerProfile.avsc` schema file using the `avrogen` tool installed above: + +```bash +avrogen -s CustomerProfile.avsc . +``` + +## Usage Examples + +Once deployed, you can test the Lambda function by sending a sample Avro event to the configured Kafka topic. +You can use the `kafka-avro-event.json` file as a sample event to test the function. + +### Testing + +You can test the function locally using the AWS SAM CLI (Requires Docker to be installed): + +```bash +sam local invoke AvroDeserializationFunction --event kafka-avro-event.json ``` + +This command simulates an invocation of the Lambda function with the provided event data. + +## How It Works + +1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source. +2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format. +3. **Processing**: Each record is processed within the handler function. + +## Event Deserialization + +Pass the `PowertoolsKafkaAvroSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable Avro deserialization of Kafka records: + +```csharp +await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler, + new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); + ``` + +## Configuration + +The SAM template (`template.yaml`) defines one Lambda function: + +- **AvroDeserializationFunction**: Handles Avro-formatted Kafka messages + +## Customization + +To customize the examples: + +1. Modify the schema definitions to match your data structures +2. 
Update the handler logic to process the records according to your requirements + +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) +- [Apache Avro Documentation](https://avro.apache.org/docs/) \ No newline at end of file diff --git a/examples/Kafka/Avro/src/template.yaml b/examples/Kafka/Avro/src/template.yaml new file mode 100644 index 000000000..a08325be2 --- /dev/null +++ b/examples/Kafka/Avro/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + AvroDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: Avro.Example + Architectures: + - x86_64 + Tracing: Active + Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file diff --git a/examples/Kafka/Json/src/Function.cs b/examples/Kafka/Json/src/Function.cs index b99e64ac6..53a8cb638 100644 --- a/examples/Kafka/Json/src/Function.cs +++ b/examples/Kafka/Json/src/Function.cs @@ -1,11 +1,11 @@ -using System.Text.Json.Serialization; using Amazon.Lambda.Core; using Amazon.Lambda.RuntimeSupport; using AWS.Lambda.Powertools.Kafka; using AWS.Lambda.Powertools.Kafka.Json; using AWS.Lambda.Powertools.Logging; +using Json.Models; -string Handler(ConsumerRecords records, ILambdaContext context) +string Handler(ConsumerRecords records, ILambdaContext context) { foreach (var record in records) { @@ -15,71 +15,7 @@ string Handler(ConsumerRecords records, ILambdaContext context return "Processed " + records.Count() + " records"; } - -await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization .Build() - .RunAsync(); - - -public record JsonKey -{ - public int Id { get; set; } -} - -public partial class Payload -{ - [JsonPropertyName("user_id")] public string UserId { get; set; } - - [JsonPropertyName("full_name")] public string FullName { get; set; } - - [JsonPropertyName("email")] public Email Email { get; set; } - - [JsonPropertyName("age")] public long Age { get; set; } - - [JsonPropertyName("address")] public Address Address { get; set; } - - [JsonPropertyName("phone_numbers")] public List PhoneNumbers { get; set; } - - [JsonPropertyName("preferences")] public Preferences Preferences { get; set; } - - [JsonPropertyName("account_status")] public string AccountStatus { get; set; } -} - -public partial class Address -{ - [JsonPropertyName("street")] public string Street { get; set; } - - [JsonPropertyName("city")] public string City { get; set; } - - [JsonPropertyName("state")] public string State { get; set; } - - [JsonPropertyName("country")] public string Country { get; set; } - - [JsonPropertyName("zip_code")] public string ZipCode { get; set; } -} - 
-public partial class Email -{ - [JsonPropertyName("address")] public string Address { get; set; } - - [JsonPropertyName("verified")] public bool Verified { get; set; } - - [JsonPropertyName("primary")] public bool Primary { get; set; } -} - -public partial class PhoneNumber -{ - [JsonPropertyName("number")] public string Number { get; set; } - - [JsonPropertyName("type")] public string Type { get; set; } -} - -public partial class Preferences -{ - [JsonPropertyName("language")] public string Language { get; set; } - - [JsonPropertyName("notifications")] public string Notifications { get; set; } - - [JsonPropertyName("timezone")] public string Timezone { get; set; } -} \ No newline at end of file + .RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/Address.cs b/examples/Kafka/Json/src/Models/Address.cs new file mode 100644 index 000000000..a011b3cee --- /dev/null +++ b/examples/Kafka/Json/src/Models/Address.cs @@ -0,0 +1,16 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class Address +{ + [JsonPropertyName("street")] public string Street { get; set; } + + [JsonPropertyName("city")] public string City { get; set; } + + [JsonPropertyName("state")] public string State { get; set; } + + [JsonPropertyName("country")] public string Country { get; set; } + + [JsonPropertyName("zip_code")] public string ZipCode { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/CustomerProfile.cs b/examples/Kafka/Json/src/Models/CustomerProfile.cs new file mode 100644 index 000000000..1e7ab62b6 --- /dev/null +++ b/examples/Kafka/Json/src/Models/CustomerProfile.cs @@ -0,0 +1,22 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class CustomerProfile +{ + [JsonPropertyName("user_id")] public string UserId { get; set; } + + [JsonPropertyName("full_name")] public string FullName { get; set; } + + [JsonPropertyName("email")] public Email Email { get; set; } + + [JsonPropertyName("age")] public long Age { get; set; } + + [JsonPropertyName("address")] public Address Address { get; set; } + + [JsonPropertyName("phone_numbers")] public List PhoneNumbers { get; set; } + + [JsonPropertyName("preferences")] public Preferences Preferences { get; set; } + + [JsonPropertyName("account_status")] public string AccountStatus { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/Email.cs b/examples/Kafka/Json/src/Models/Email.cs new file mode 100644 index 000000000..045118baf --- /dev/null +++ b/examples/Kafka/Json/src/Models/Email.cs @@ -0,0 +1,12 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class Email +{ + [JsonPropertyName("address")] public string Address { get; set; } + + [JsonPropertyName("verified")] public bool Verified { get; set; } + + [JsonPropertyName("primary")] public bool Primary { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/PhoneNumber.cs b/examples/Kafka/Json/src/Models/PhoneNumber.cs new file mode 100644 index 000000000..7681265d1 --- /dev/null +++ b/examples/Kafka/Json/src/Models/PhoneNumber.cs @@ -0,0 +1,10 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class PhoneNumber +{ + [JsonPropertyName("number")] public string Number { get; set; } + + [JsonPropertyName("type")] public string Type { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/Preferences.cs 
b/examples/Kafka/Json/src/Models/Preferences.cs new file mode 100644 index 000000000..5dd84aa99 --- /dev/null +++ b/examples/Kafka/Json/src/Models/Preferences.cs @@ -0,0 +1,12 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class Preferences +{ + [JsonPropertyName("language")] public string Language { get; set; } + + [JsonPropertyName("notifications")] public string Notifications { get; set; } + + [JsonPropertyName("timezone")] public string Timezone { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Readme.md b/examples/Kafka/Json/src/Readme.md index 6e9251f7b..4315f2da7 100644 --- a/examples/Kafka/Json/src/Readme.md +++ b/examples/Kafka/Json/src/Readme.md @@ -1,64 +1,111 @@ -# Powertools Kafka JSON Lambda Function +# AWS Powertools for AWS Lambda .NET - Kafka JSON Example -This starter project consists of: -* Function.cs - file contain C# top level statements that define the function to be called for each event and starts the Lambda runtime client. -* aws-lambda-tools-defaults.json - default argument settings for use with Visual Studio and command line deployment tools for AWS +This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Apache Kafka) to process events from Kafka topics. -You may also have a test project depending on the options selected. +## Overview -The generated function handler is a simple method accepting a string argument that returns the uppercase equivalent of the input string. Replace the body of this method, and parameters, to suit your needs. +This example showcases a Lambda function that consumes messages from Kafka topics using the JSON serialization format. -## Executable Assembly +It uses the `AWS.Lambda.Powertools.Kafka.Json` NuGet package to easily deserialize and process Kafka records. -.NET Lambda projects that use C# top level statements like this project must be deployed as an executable assembly instead of a class library. To indicate to Lambda that the .NET function is an executable assembly the -Lambda function handler value is set to the .NET Assembly name. This is different then deploying as a class library where the function handler string includes the assembly, type and method name. +## Project Structure -To deploy as an executable assembly the Lambda runtime client must be started to listen for incoming events to process. To start -the Lambda runtime client add the `Amazon.Lambda.RuntimeSupport` NuGet package and add the following code at the end of the -of the file containing top-level statements to start the runtime. - -```csharp -await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, - new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization - .Build() - .RunAsync(); +```bash +examples/Kafka/Json/src/ +├── Function.cs # Entry point for the Lambda function +├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment +├── template.yaml # AWS SAM template for deploying the function +└── kafka-json-event.json # Sample JSON event to test the function ``` -Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that -should be called for each event. If the handler takes in an input event besides `System.IO.Stream` then -the JSON serializer must also be passed into the `Create` method. 
+## Prerequisites +- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later) +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) +- [AWS CLI](https://aws.amazon.com/cli/) +- An AWS account with appropriate permissions +- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from +- [AWS.Lambda.Powertools.Kafka.Json](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Json/) NuGet package installed in your project -## Here are some steps to follow from Visual Studio: +## Installation -To deploy your function to AWS Lambda, right click the project in Solution Explorer and select *Publish to AWS Lambda*. +1. Clone the repository: -To view your deployed function open its Function View window by double-clicking the function name shown beneath the AWS Lambda node in the AWS Explorer tree. + ```bash + git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git + ``` -To perform testing against your deployed function use the Test Invoke tab in the opened Function View window. +2. Navigate to the project directory: -To configure event sources for your deployed function, for example to have your function invoked when an object is created in an Amazon S3 bucket, use the Event Sources tab in the opened Function View window. + ```bash + cd powertools-lambda-dotnet/examples/Kafka/Json/src + ``` -To update the runtime configuration of your deployed function use the Configuration tab in the opened Function View window. +3. Build the project: -To view execution logs of invocations of your function use the Logs tab in the opened Function View window. + ```bash + dotnet build + ``` -## Here are some steps to follow to get started from the command line: +## Deployment -Once you have edited your template and code you can deploy your application using the [Amazon.Lambda.Tools Global Tool](https://github.com/aws/aws-extensions-for-dotnet-cli#aws-lambda-amazonlambdatools) from the command line. +Deploy the application using the AWS SAM CLI: -Install Amazon.Lambda.Tools Global Tools if not already installed. -``` - dotnet tool install -g Amazon.Lambda.Tools +```bash +sam build +sam deploy --guided ``` -If already installed check if new version is available. -``` - dotnet tool update -g Amazon.Lambda.Tools -``` +Follow the prompts to configure your deployment. -Deploy function to AWS Lambda + +## Usage Examples + +Once deployed, you can test the Lambda function by sending a sample Json event to the configured Kafka topic. +You can use the `kafka-json-event.json` file as a sample event to test the function. + +### Testing + +You can test the function locally using the AWS SAM CLI (Requires Docker to be installed): + +```bash +sam local invoke JsonDeserializationFunction --event kafka-json-event.json ``` - cd "Json/src/Json" - dotnet lambda deploy-function -``` \ No newline at end of file + +This command simulates an invocation of the Lambda function with the provided event data. + +## How It Works + +1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source. +2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format. +3. **Processing**: Each record is processed within the handler function. 
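+As a side note on testing: the `value` fields in `kafka-json-event.json` are base64-encoded UTF-8 JSON. A minimal sketch of how such a test value can be produced (illustrative only — the anonymous object below mirrors the small product-style payload in the sample event, not the `CustomerProfile` model):
+
+```csharp
+using System.Text.Json;
+
+var payload = new { id = 12345, name = "product5", price = 45 };
+
+// Serialize to UTF-8 JSON bytes, then base64-encode them — this is the
+// format Lambda delivers in the Kafka event's "value" field.
+var base64Value = Convert.ToBase64String(JsonSerializer.SerializeToUtf8Bytes(payload));
+Console.WriteLine(base64Value);
+```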
+ +## Event Deserialization + +Pass the `PowertoolsKafkaJsonSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable JSON deserialization of Kafka records: + +```csharp +await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler, + new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaJsonSerializer for JSON deserialization + .Build() + .RunAsync(); + ``` + +## Configuration + +The SAM template (`template.yaml`) defines one Lambda function: + +- **JsonDeserializationFunction**: Handles JSON-formatted Kafka messages + +## Customization + +To customize the examples: + +1. Modify the schema definitions to match your data structures +2. Update the handler logic to process the records according to your requirements + +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) \ No newline at end of file diff --git a/examples/Kafka/Json/src/template.yaml b/examples/Kafka/Json/src/template.yaml new file mode 100644 index 000000000..dd4bfb9ff --- /dev/null +++ b/examples/Kafka/Json/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + JsonDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: Json + Architectures: + - x86_64 + Tracing: Active + Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Payload.proto b/examples/Kafka/Protobuf/src/CustomerProfile.proto similarity index 100% rename from examples/Kafka/Protobuf/src/Payload.proto rename to examples/Kafka/Protobuf/src/CustomerProfile.proto diff --git a/examples/Kafka/Protobuf/src/Function.cs b/examples/Kafka/Protobuf/src/Function.cs index c63c81000..7a03c5498 100644 --- a/examples/Kafka/Protobuf/src/Function.cs +++ b/examples/Kafka/Protobuf/src/Function.cs @@ -3,9 +3,9 @@ using AWS.Lambda.Powertools.Kafka; using AWS.Lambda.Powertools.Kafka.Protobuf; using AWS.Lambda.Powertools.Logging; -using TestKafka; +using Com.Example; -string Handler(ConsumerRecords records, ILambdaContext context) +string Handler(ConsumerRecords<string, CustomerProfile> records, ILambdaContext context) { foreach (var record in records) { @@ -21,7 +21,7 @@ string Handler(ConsumerRecords records, ILambdaCon text) return "Processed " + records.Count() + " records"; } -await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, +await LambdaBootstrapBuilder.Create((Func<ConsumerRecords<string, CustomerProfile>, ILambdaContext, string>?)Handler, new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf deserialization .Build() .RunAsync(); diff --git a/examples/Kafka/Protobuf/src/Key.proto b/examples/Kafka/Protobuf/src/Key.proto deleted file mode 100644 index deedcf5dc..000000000 --- a/examples/Kafka/Protobuf/src/Key.proto +++ /dev/null @@ -1,14 +0,0 @@ -syntax = "proto3"; - -option csharp_namespace = "TestKafka"; - 
-message ProtobufKey { - int32 id = 1; - Color color = 2; -} - -enum Color { - UNKNOWN = 0; - GREEN = 1; - RED = 2; -} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Product.proto b/examples/Kafka/Protobuf/src/Product.proto deleted file mode 100644 index 1d4c64e90..000000000 --- a/examples/Kafka/Protobuf/src/Product.proto +++ /dev/null @@ -1,9 +0,0 @@ -syntax = "proto3"; - -option csharp_namespace = "TestKafka"; - -message ProtobufProduct { - int32 id = 1; - string name = 2; - double price = 3; -} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Protobuf.csproj b/examples/Kafka/Protobuf/src/Protobuf.csproj index 3623b1b40..275fa84ec 100644 --- a/examples/Kafka/Protobuf/src/Protobuf.csproj +++ b/examples/Kafka/Protobuf/src/Protobuf.csproj @@ -27,25 +27,7 @@ - - Client - Public - True - True - obj\Debug/net8.0/ - MSBuild:Compile - PreserveNewest - - - Client - Public - True - True - obj\Debug/net8.0/ - MSBuild:Compile - PreserveNewest - - + Client Public True diff --git a/examples/Kafka/Protobuf/src/Readme.md b/examples/Kafka/Protobuf/src/Readme.md index ca3020a34..c949b7319 100644 --- a/examples/Kafka/Protobuf/src/Readme.md +++ b/examples/Kafka/Protobuf/src/Readme.md @@ -1,64 +1,133 @@ -# PowerTools Kafka Protobuf Lambda Function +# AWS Powertools for AWS Lambda .NET - Kafka Protobuf Example -This starter project consists of: -* Function.cs - file contain C# top level statements that define the function to be called for each event and starts the Lambda runtime client. -* aws-lambda-tools-defaults.json - default argument settings for use with Visual Studio and command line deployment tools for AWS +This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Apache Kafka) to process events from Kafka topics. -You may also have a test project depending on the options selected. +## Overview -The generated function handler is a simple method accepting a string argument that returns the uppercase equivalent of the input string. Replace the body of this method, and parameters, to suit your needs. +This example showcases a Lambda function that consumes messages from Kafka topics using the Protocol Buffers serialization format. -## Executable Assembly +It uses the `AWS.Lambda.Powertools.Kafka.Protobuf` NuGet package to easily deserialize and process Kafka records. -.NET Lambda projects that use C# top level statements like this project must be deployed as an executable assembly instead of a class library. To indicate to Lambda that the .NET function is an executable assembly the -Lambda function handler value is set to the .NET Assembly name. This is different then deploying as a class library where the function handler string includes the assembly, type and method name. +## Project Structure -To deploy as an executable assembly the Lambda runtime client must be started to listen for incoming events to process. To start -the Lambda runtime client add the `Amazon.Lambda.RuntimeSupport` NuGet package and add the following code at the end of the -of the file containing top-level statements to start the runtime. 
- -```csharp -await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, - new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization - .Build() - .RunAsync(); +```bash +examples/Kafka/Protobuf/src/ +├── Function.cs # Entry point for the Lambda function +├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment +├── template.yaml # AWS SAM template for deploying the function +├── CustomerProfile.proto # Protocol Buffers definition file for the data structure used in the Kafka messages +└── kafka-protobuf-event.json # Sample Protocol Buffers event to test the function ``` -Pass into the Lambda runtime client a function handler as either an `Action<>` or `Func<>` for the code that -should be called for each event. If the handler takes in an input event besides `System.IO.Stream` then -the JSON serializer must also be passed into the `Create` method. +## Prerequisites +- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later) +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) +- [AWS CLI](https://aws.amazon.com/cli/) +- An AWS account with appropriate permissions +- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from +- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) NuGet package installed in your project -## Here are some steps to follow from Visual Studio: +## Installation -To deploy your function to AWS Lambda, right click the project in Solution Explorer and select *Publish to AWS Lambda*. +1. Clone the repository: -To view your deployed function open its Function View window by double-clicking the function name shown beneath the AWS Lambda node in the AWS Explorer tree. + ```bash + git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git + ``` -To perform testing against your deployed function use the Test Invoke tab in the opened Function View window. +2. Navigate to the project directory: -To configure event sources for your deployed function, for example to have your function invoked when an object is created in an Amazon S3 bucket, use the Event Sources tab in the opened Function View window. + ```bash + cd powertools-lambda-dotnet/examples/Kafka/Protobuf/src + ``` -To update the runtime configuration of your deployed function use the Configuration tab in the opened Function View window. +3. Build the project: -To view execution logs of invocations of your function use the Logs tab in the opened Function View window. + ```bash + dotnet build + ``` -## Here are some steps to follow to get started from the command line: +## Deployment -Once you have edited your template and code you can deploy your application using the [Amazon.Lambda.Tools Global Tool](https://github.com/aws/aws-extensions-for-dotnet-cli#aws-lambda-amazonlambdatools) from the command line. +Deploy the application using the AWS SAM CLI: -Install Amazon.Lambda.Tools Global Tools if not already installed. -``` - dotnet tool install -g Amazon.Lambda.Tools +```bash +sam build +sam deploy --guided ``` -If already installed check if new version is available. -``` - dotnet tool update -g Amazon.Lambda.Tools +Follow the prompts to configure your deployment. + +## Protocol Buffers Format + +The Protobuf example handles messages serialized with Protocol Buffers. 
The schema is defined in a `.proto` file (which you must create), and the C# code is generated from that schema.
+
+This requires the `Grpc.Tools` package, which generates the C# classes used to deserialize the messages.
+
+You also need to update the `.csproj` file to include the `.proto` files:
+
+```xml
+
+    Client
+    Public
+    True
+    True
+    obj\Debug/net8.0/
+    MSBuild:Compile
+    PreserveNewest
+
```
+
+## Usage Examples
+
+Once deployed, you can test the Lambda function by sending a sample Protocol Buffers event to the configured Kafka topic.
+You can use the `kafka-protobuf-event.json` file as a sample event to test the function (see the payload sketch after the Customization section for one way to craft your own).
+
+### Testing
+
+You can test the function locally using the AWS SAM CLI (requires Docker to be installed):
+
+```bash
+sam local invoke ProtobufDeserializationFunction --event kafka-protobuf-event.json
```
+
+This command simulates an invocation of the Lambda function with the provided event data.
+
+## How It Works
+
+1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source.
+2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format.
+3. **Processing**: Each record is processed within the handler function.
+
+## Event Deserialization
+
+Pass the `PowertoolsKafkaProtobufSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable JSON deserialization of Kafka records:
+
+```csharp
+await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler,
+        new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization
+    .Build()
+    .RunAsync();
+ ```
+
+## Configuration
+
+The SAM template (`template.yaml`) defines one Lambda function:
+
+- **ProtobufDeserializationFunction**: Handles Protobuf-formatted Kafka messages
+
+## Customization
+
+To customize the examples:
+
+1. Modify the schema definitions to match your data structures
+2. Update the handler logic to process the records according to your requirements
+3. Ensure you have the proper `.proto` files and that they are included in your project for Protocol Buffers serialization/deserialization.
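
## Crafting a Test Payload

The base64-encoded `value` on each record is just a Protobuf-encoded message. Below is a minimal sketch of producing one, assuming the `CustomerProfile` class that `Grpc.Tools` generates from `CustomerProfile.proto` (the `com.example` package maps to a `Com.Example` C# namespace); the field values are illustrative only:

```csharp
using Google.Protobuf; // MessageExtensions.ToByteArray
using Com.Example;     // assumed: generated from CustomerProfile.proto

// Build a sample message matching the schema.
var profile = new CustomerProfile
{
    UserId = "user_9754",
    FullName = "User user_9754",
    Age = 35
};

// Serialize to the Protobuf wire format and base64-encode it,
// which is how the Lambda event source presents record values.
byte[] wireBytes = profile.ToByteArray();
string base64Value = Convert.ToBase64String(wireBytes);

Console.WriteLine(base64Value); // paste into the "value" field of the test event
```

The record `key` works the same way; for plain string keys it is simply the UTF-8 bytes, base64-encoded.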
+ +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) +- [Protocol Buffers Documentation](https://developers.google.com/protocol-buffers) \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/template.yaml b/examples/Kafka/Protobuf/src/template.yaml new file mode 100644 index 000000000..b8f7df6a5 --- /dev/null +++ b/examples/Kafka/Protobuf/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + ProtobufDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: Protobuf + Architectures: + - x86_64 + Tracing: Active + Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file From 5a59f86a84a16a5274d383fc1490cdc127245ec4 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 12:38:42 +0100 Subject: [PATCH 17/35] new example with class library --- examples/Kafka/Avro/src/Readme.md | 1 + .../src/CustomerProfile.proto | 49 +++++++ .../Kafka/JsonClassLibrary/src/Function.cs | 29 ++++ .../src/ProtoBufClassLibrary.csproj | 42 ++++++ examples/Kafka/JsonClassLibrary/src/Readme.md | 130 ++++++++++++++++++ .../src/aws-lambda-tools-defaults.json | 16 +++ .../src/kafka-protobuf-event.json | 23 ++++ .../Kafka/JsonClassLibrary/src/template.yaml | 27 ++++ examples/Kafka/Protobuf/src/Readme.md | 2 +- examples/examples.sln | 7 + 10 files changed, 325 insertions(+), 1 deletion(-) create mode 100644 examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto create mode 100644 examples/Kafka/JsonClassLibrary/src/Function.cs create mode 100644 examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj create mode 100644 examples/Kafka/JsonClassLibrary/src/Readme.md create mode 100644 examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json create mode 100644 examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json create mode 100644 examples/Kafka/JsonClassLibrary/src/template.yaml diff --git a/examples/Kafka/Avro/src/Readme.md b/examples/Kafka/Avro/src/Readme.md index e09861f8f..23e64e8e2 100644 --- a/examples/Kafka/Avro/src/Readme.md +++ b/examples/Kafka/Avro/src/Readme.md @@ -15,6 +15,7 @@ examples/Kafka/Avro/src/ ├── Function.cs # Entry point for the Lambda function ├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment ├── template.yaml # AWS SAM template for deploying the function +├── CustomerProfile.avsc # Avro schema definition file for the data structure used in the Kafka messages └── kafka-avro-event.json # Sample Avro event to test the function ``` diff --git a/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto b/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto new file mode 100644 index 000000000..9c69b1c41 --- /dev/null +++ 
b/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto
@@ -0,0 +1,49 @@
+syntax = "proto3";
+
+package com.example;
+
+enum PhoneType {
+  HOME = 0;
+  WORK = 1;
+  MOBILE = 2;
+}
+
+enum AccountStatus {
+  ACTIVE = 0;
+  INACTIVE = 1;
+  SUSPENDED = 2;
+}
+
+// EmailAddress message
+message EmailAddress {
+  string address = 1;
+  bool verified = 2;
+  bool primary = 3;
+}
+
+// Address message
+message Address {
+  string street = 1;
+  string city = 2;
+  string state = 3;
+  string country = 4;
+  string zip_code = 5;
+}
+
+// PhoneNumber message
+message PhoneNumber {
+  string number = 1;
+  PhoneType type = 2;
+}
+
+// CustomerProfile message
+message CustomerProfile {
+  string user_id = 1;
+  string full_name = 2;
+  EmailAddress email = 3;
+  int32 age = 4;
+  Address address = 5;
+  repeated PhoneNumber phone_numbers = 6;
+  map<string, string> preferences = 7;
+  AccountStatus account_status = 8;
+}
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/Function.cs b/examples/Kafka/JsonClassLibrary/src/Function.cs
new file mode 100644
index 000000000..2d6ad229b
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/Function.cs
@@ -0,0 +1,29 @@
+using Amazon.Lambda.Core;
+using AWS.Lambda.Powertools.Kafka;
+using AWS.Lambda.Powertools.Kafka.Protobuf;
+using AWS.Lambda.Powertools.Logging;
+using Com.Example;
+
+// Assembly attribute to enable the Lambda function's JSON input to be converted into a .NET class.
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+
+namespace ProtoBufClassLibrary;
+
+public class Function
+{
+    ///
+    /// A simple function that takes a string and does a ToUpper
+    ///
+    /// The event for the Lambda function handler to process.
+    /// The ILambdaContext that provides methods for logging and describing the Lambda environment.
+    ///
+    public string FunctionHandler(ConsumerRecords records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            Logger.LogInformation("Record Value: {@record}", record.Value);
+        }
+
+        return "Processed " + records.Count() + " records";
+    }
+}
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj b/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj
new file mode 100644
index 000000000..a28e1a2f8
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj
@@ -0,0 +1,42 @@
+
+
+    net8.0
+    enable
+    enable
+    true
+    Lambda
+
+    true
+
+    true
+
+
+
+
+
+    all
+    runtime; build; native; contentfiles; analyzers; buildtransitive
+
+
+
+
+
+
+
+    PreserveNewest
+
+
+
+
+    Client
+    Public
+    True
+    True
+    obj/Debug/net8.0/
+    MSBuild:Compile
+    PreserveNewest
+
+
+
\ No newline at end of file
diff --git a/examples/Kafka/JsonClassLibrary/src/Readme.md b/examples/Kafka/JsonClassLibrary/src/Readme.md
new file mode 100644
index 000000000..ae7e610f4
--- /dev/null
+++ b/examples/Kafka/JsonClassLibrary/src/Readme.md
@@ -0,0 +1,130 @@
+# AWS Powertools for AWS Lambda .NET - Kafka Protobuf Class Library Example
+
+This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics.
+
+## Overview
+
+This example showcases a Lambda function, deployed as a class library, that consumes messages from Kafka topics using the Protocol Buffers serialization format.
+
+It uses the `AWS.Lambda.Powertools.Kafka.Protobuf` NuGet package to easily deserialize and process Kafka records.
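
Because the handler lives in a class library (rather than an executable with top-level statements), you can also exercise it directly in a unit test, with no Kafka cluster or `sam local invoke` needed. A minimal sketch, assuming `ConsumerRecords<string, CustomerProfile>` as the generic parameters (the sample event carries plain string keys), the generated `Com.Example.CustomerProfile` type, the `Amazon.Lambda.TestUtilities` package, and a publicly settable `Headers` dictionary:

```csharp
using Amazon.Lambda.TestUtilities; // TestLambdaContext
using AWS.Lambda.Powertools.Kafka;
using Com.Example;                 // assumed: generated from CustomerProfile.proto
using ProtoBufClassLibrary;

// Build an already-deserialized event, mirroring what the
// PowertoolsKafkaProtobufSerializer would hand to the function.
var records = new ConsumerRecords<string, CustomerProfile>
{
    Records = new Dictionary<string, List<ConsumerRecord<string, CustomerProfile>>>
    {
        ["customer-topic-0"] = new()
        {
            new()
            {
                Topic = "customer-topic",
                Partition = 0,
                Offset = 15,
                Key = "user_9754",
                Value = new CustomerProfile { UserId = "user_9754", FullName = "User user_9754" },
                Headers = new Dictionary<string, byte[]>() // empty, in case the handler reads headers
            }
        }
    }
};

var result = new Function().FunctionHandler(records, new TestLambdaContext());
Console.WriteLine(result); // "Processed 1 records"
```

This mirrors the pattern used in the library's own handler tests.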
+
+## Project Structure
+
+```bash
+examples/Kafka/JsonClassLibrary/src/
+├── Function.cs # Entry point for the Lambda function
+├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment
+├── template.yaml # AWS SAM template for deploying the function
+├── CustomerProfile.proto # Protocol Buffers definition file for the data structure used in the Kafka messages
+└── kafka-protobuf-event.json # Sample Protocol Buffers event to test the function
+```
+
+## Prerequisites
+
+- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later)
+- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html)
+- [AWS CLI](https://aws.amazon.com/cli/)
+- An AWS account with appropriate permissions
+- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from
+- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) NuGet package installed in your project
+
+## Installation
+
+1. Clone the repository:
+
+   ```bash
+   git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git
+   ```
+
+2. Navigate to the project directory:
+
+   ```bash
+   cd powertools-lambda-dotnet/examples/Kafka/JsonClassLibrary/src
+   ```
+
+3. Build the project:
+
+   ```bash
+   dotnet build
+   ```
+
+## Deployment
+
+Deploy the application using the AWS SAM CLI:
+
+```bash
+sam build
+sam deploy --guided
+```
+
+Follow the prompts to configure your deployment.
+
+## Protocol Buffers Format
+
+The Protobuf example handles messages serialized with Protocol Buffers. The schema is defined in a `.proto` file (which you must create), and the C# code is generated from that schema.
+
+This requires the `Grpc.Tools` package, which generates the C# classes used to deserialize the messages.
+
+You also need to update the `.csproj` file to include the `.proto` files:
+
+```xml
+
+    Client
+    Public
+    True
+    True
+    obj\Debug/net8.0/
+    MSBuild:Compile
+    PreserveNewest
+
+```
+
+## Usage Examples
+
+Once deployed, you can test the Lambda function by sending a sample Protocol Buffers event to the configured Kafka topic.
+You can use the `kafka-protobuf-event.json` file as a sample event to test the function.
+
+### Testing
+
+You can test the function locally using the AWS SAM CLI (requires Docker to be installed):
+
+```bash
+sam local invoke ProtobufClassLibraryDeserializationFunction --event kafka-protobuf-event.json
+```
+
+This command simulates an invocation of the Lambda function with the provided event data.
+
+## How It Works
+
+1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source.
+2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format.
+3. **Processing**: Each record is processed within the handler function.
+
+## Event Deserialization
+
+Register the `PowertoolsKafkaProtobufSerializer` through the assembly-level `LambdaSerializer` attribute:
+
+```csharp
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+ ```
+
+## Configuration
+
+The SAM template (`template.yaml`) defines one Lambda function:
+
+- **ProtobufClassLibraryDeserializationFunction**: Handles Protobuf-formatted Kafka messages
+
+## Customization
+
+To customize the examples:
+
+1. Modify the schema definitions to match your data structures
+2. Update the handler logic to process the records according to your requirements
+3. 
Ensure you have the proper `.proto` files and that they are included in your project for Protocol Buffers serialization/deserialization. + +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) +- [Protocol Buffers Documentation](https://developers.google.com/protocol-buffers) \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json b/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..d4ec43f14 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json @@ -0,0 +1,16 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." + ], + "profile": "", + "region": "", + "configuration": "Release", + "function-architecture": "x86_64", + "function-runtime": "dotnet8", + "function-memory-size": 512, + "function-timeout": 30, + "function-handler": "ProtoBufClassLibrary::ProtoBufClassLibrary.Function::FunctionHandler" +} \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json b/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json new file mode 100644 index 000000000..6731ceb40 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "Cgl1c2VyXzk3NTQSDlVzZXIgdXNlcl85NzU0GhgKFHVzZXJfOTc1NEBpY2xvdWQuY29tGAEgNSooCgw5MzQwIE1haW4gU3QSCFNhbiBKb3NlGgJDQSIDVVNBKgUzOTU5NjIQCgwyNDQtNDA3LTg4NzEQAToUCghsYW5ndWFnZRIIZGlzYWJsZWQ6FQoNbm90aWZpY2F0aW9ucxIEZGFyazoTCgh0aW1lem9uZRIHZW5hYmxlZEAC", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/template.yaml b/examples/Kafka/JsonClassLibrary/src/template.yaml new file mode 100644 index 000000000..0df5feaa2 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + ProtobufClassLibraryDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: ProtoBufClassLibrary::ProtoBufClassLibrary.Function::FunctionHandler + Architectures: + - x86_64 + Tracing: Active + Environment: 
# Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Readme.md b/examples/Kafka/Protobuf/src/Readme.md index c949b7319..886bbffa1 100644 --- a/examples/Kafka/Protobuf/src/Readme.md +++ b/examples/Kafka/Protobuf/src/Readme.md @@ -102,7 +102,7 @@ This command simulates an invocation of the Lambda function with the provided ev ## Event Deserialization -Pass the `PowertoolsKafkaProtobufSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable JSON deserialization of Kafka records: +Pass the `PowertoolsKafkaProtobufSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable Protobuf deserialization of Kafka records: ```csharp await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, diff --git a/examples/examples.sln b/examples/examples.sln index c2770319e..6b9fa877a 100644 --- a/examples/examples.sln +++ b/examples/examples.sln @@ -117,6 +117,8 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Protobuf", "Kafka\Protobuf\ EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro", "Kafka\Avro\src\Avro.csproj", "{B03F22B2-315C-429B-9CC0-C15BE94CBF77}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProtoBufClassLibrary", "Kafka\JsonClassLibrary\src\ProtoBufClassLibrary.csproj", "{B6B3136D-B739-4917-AD3D-30F19FE12D3F}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -222,6 +224,10 @@ Global {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Debug|Any CPU.Build.0 = Debug|Any CPU {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.ActiveCfg = Release|Any CPU {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.Build.0 = Release|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution {0CC66DBC-C1DF-4AF6-8EEB-FFED6C578BF4} = {526F1EF7-5A9C-4BFF-ABAE-75992ACD8F78} @@ -272,5 +278,6 @@ Global {58EC305E-353A-4996-A541-3CF7FC0EDD80} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} {B03F22B2-315C-429B-9CC0-C15BE94CBF77} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} + {B6B3136D-B739-4917-AD3D-30F19FE12D3F} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} EndGlobalSection EndGlobal From e9fcbf29982f2085fd7986432fde8b5422d95ec8 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 15:20:04 +0100 Subject: [PATCH 18/35] improve tests and examples --- .../Kafka/JsonClassLibrary/src/Function.cs | 11 +++- .../Protobuf/HandlerTests.cs | 61 +++++++++++++++++++ 2 files changed, 71 insertions(+), 1 deletion(-) diff --git a/examples/Kafka/JsonClassLibrary/src/Function.cs b/examples/Kafka/JsonClassLibrary/src/Function.cs index 2d6ad229b..60a2dbeaf 100644 --- a/examples/Kafka/JsonClassLibrary/src/Function.cs +++ b/examples/Kafka/JsonClassLibrary/src/Function.cs @@ -21,7 +21,16 @@ public string FunctionHandler(ConsumerRecords records, { 
foreach (var record in records)
     {
-        Logger.LogInformation("Record Value: {@record}", record.Value);
+        Logger.LogInformation("Processing message from topic: {topic}", record.Topic);
+        Logger.LogInformation("Partition: {partition}, Offset: {offset}", record.Partition, record.Offset);
+        Logger.LogInformation("Produced at: {timestamp}", record.Timestamp);
+
+        foreach (var header in record.Headers.DecodedValues())
+        {
+            Logger.LogInformation($"{header.Key}: {header.Value}");
+        }
+
+        Logger.LogInformation("Processing profile for: {fullName}", record.Value.FullName);
     }
 
     return "Processed " + records.Count() + " records";
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
index 9470cb6e2..69234ba36 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
@@ -298,4 +298,65 @@ private async Task HandlerWithProtobufKeys(ConsumerRecords records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            var product = record.Value;
+            context.Logger.LogInformation($"Processing {product.Name} at ${product.Price}");
+        }
+
+        return "Successfully processed Protobuf Kafka events";
+    }
+    // Simulate the handler execution
+    var mockLogger = new TestLambdaLogger();
+    var mockContext = new TestLambdaContext
+    {
+        Logger = mockLogger
+    };
+
+    var records = new ConsumerRecords
+    {
+        Records = new Dictionary>>
+        {
+            { "mytopic-0", new List>
+                {
+                    new()
+                    {
+                        Topic = "mytopic",
+                        Partition = 0,
+                        Offset = 15,
+                        Key = 42,
+                        Value = new ProtobufProduct { Name = "Test Product", Id = 1, Price = 99.99 }
+                    }
+                }
+            }
+        }
+    };
+
+    // Call the handler
+    var result = Handler(records, mockContext);
+
+    // Assert the result
+    Assert.Equal("Successfully processed Protobuf Kafka events", result);
+
+    // Verify the context logger output
+    Assert.Contains("Processing Test Product at $99.99", mockLogger.Buffer.ToString());
+
+    // Verify the records were processed
+    Assert.Single(records.Records);
+    Assert.Contains("mytopic-0", records.Records.Keys);
+    Assert.Single(records.Records["mytopic-0"]);
+    Assert.Equal("mytopic", records.Records["mytopic-0"][0].Topic);
+    Assert.Equal(0, records.Records["mytopic-0"][0].Partition);
+    Assert.Equal(15, records.Records["mytopic-0"][0].Offset);
+    Assert.Equal(42, records.Records["mytopic-0"][0].Key);
+    Assert.Equal("Test Product", records.Records["mytopic-0"][0].Value.Name);
+    Assert.Equal(1, records.Records["mytopic-0"][0].Value.Id);
+    Assert.Equal(99.99, records.Records["mytopic-0"][0].Value.Price);
+    }
 }
From 75f4eb748740fc87e98416132a780814fb8639df Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Wed, 18 Jun 2025 16:28:33 +0100
Subject: [PATCH 19/35] first sonar fix

---
 .../PowertoolsKafkaSerializerBase.cs          | 335 ++++++++++--------
 1 file changed, 194 insertions(+), 141 deletions(-)

diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
index fb1d595dc..1cea46aad 100644
--- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
@@ -1,6 +1,7 @@
 using Amazon.Lambda.Core;
 using System.Diagnostics.CodeAnalysis;
 using System.Reflection;
+using System.Runtime.Serialization;
 using System.Text;
 using System.Text.Json;
using System.Text.Json.Serialization; @@ -109,13 +110,16 @@ public T Deserialize(Stream requestStream) } // Fallback to regular deserialization with warning - #pragma warning disable IL2026, IL3050 +#pragma warning disable IL2026, IL3050 var result = JsonSerializer.Deserialize(json, JsonOptions); - #pragma warning restore IL2026, IL3050 - - return result != null - ? result - : throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); +#pragma warning restore IL2026, IL3050 + + if (!EqualityComparer.Default.Equals(result, default(T))) + { + return result!; + } + + throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); } /// @@ -125,7 +129,8 @@ public T Deserialize(Stream requestStream) /// The JSON string to deserialize. /// The deserialized ConsumerRecords object. [RequiresUnreferencedCode("ConsumerRecords deserialization uses reflection and may be incompatible with trimming.")] - [RequiresDynamicCode("ConsumerRecords deserialization dynamically creates generic types and may be incompatible with NativeAOT.")] + [RequiresDynamicCode( + "ConsumerRecords deserialization dynamically creates generic types and may be incompatible with NativeAOT.")] private T DeserializeConsumerRecords(string json) { var targetType = typeof(T); @@ -136,23 +141,43 @@ private T DeserializeConsumerRecords(string json) using var document = JsonDocument.Parse(json); var root = document.RootElement; - // Create the correctly typed instance - var typedEvent = Activator.CreateInstance(targetType) ?? - throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); + // Create the typed instance and set basic properties + var typedEvent = CreateConsumerRecordsInstance(targetType); + SetBasicProperties(root, typedEvent, targetType); - // Set basic properties + // Create and populate records dictionary + if (root.TryGetProperty("records", out var recordsElement)) + { + var records = CreateRecordsDictionary(recordsElement, keyType, valueType); + targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, records); + } + + return (T)typedEvent; + } + + private object CreateConsumerRecordsInstance(Type targetType) + { + return Activator.CreateInstance(targetType) ?? 
+ throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); + } + + private void SetBasicProperties(JsonElement root, object instance, Type targetType) + { if (root.TryGetProperty("eventSource", out var eventSource)) - targetType.GetProperty("EventSource", - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) - ?.SetValue(typedEvent, eventSource.GetString()); + targetType.GetProperty("EventSource", BindingFlags.Public | BindingFlags.Instance) + ?.SetValue(instance, eventSource.GetString()); if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) - targetType.GetProperty("EventSourceArn")?.SetValue(typedEvent, eventSourceArn.GetString()); + targetType.GetProperty("EventSourceArn")?.SetValue(instance, eventSourceArn.GetString()); if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) - targetType.GetProperty("BootstrapServers")?.SetValue(typedEvent, bootstrapServers.GetString()); + targetType.GetProperty("BootstrapServers")?.SetValue(instance, bootstrapServers.GetString()); + } - // Create records dictionary with correct generic types + private object CreateRecordsDictionary(JsonElement recordsElement, Type keyType, Type valueType) + { + // Create dictionary with correct generic types var dictType = typeof(Dictionary<,>).MakeGenericType( typeof(string), typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)) @@ -162,122 +187,144 @@ private T DeserializeConsumerRecords(string json) var dictAddMethod = dictType.GetMethod("Add") ?? throw new InvalidOperationException("Add method not found on dictionary type"); - if (root.TryGetProperty("records", out var recordsElement)) + // Process each topic partition + foreach (var topicPartition in recordsElement.EnumerateObject()) + { + var topicName = topicPartition.Name; + var recordsList = ProcessTopicPartition(topicPartition.Value, keyType, valueType); + dictAddMethod.Invoke(records, new[] { topicName, recordsList }); + } + + return records; + } + + private object ProcessTopicPartition(JsonElement partitionData, Type keyType, Type valueType) + { + // Create list type with correct generics + var listType = typeof(List<>).MakeGenericType( + typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)); + var recordsList = Activator.CreateInstance(listType) ?? + throw new InvalidOperationException($"Failed to create list of type {listType.Name}"); + var listAddMethod = listType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on list type"); + + // Process each record + foreach (var recordElement in partitionData.EnumerateArray()) + { + var record = CreateAndPopulateRecord(recordElement, keyType, valueType); + if (record != null) + { + listAddMethod.Invoke(recordsList, new[] { record }); + } + } + + return recordsList; + } + + private object? 
CreateAndPopulateRecord(JsonElement recordElement, Type keyType, Type valueType) + { + // Create record instance + var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType); + var record = Activator.CreateInstance(recordType); + if (record == null) + return null; + + // Set basic properties + SetProperty(recordType, record, "Topic", recordElement, "topic"); + SetProperty(recordType, record, "Partition", recordElement, "partition"); + SetProperty(recordType, record, "Offset", recordElement, "offset"); + SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); + SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); + + // Process key + ProcessKey(recordElement, record, recordType, keyType); + + // Process value + ProcessValue(recordElement, record, recordType, valueType); + + // Process headers + ProcessHeaders(recordElement, record, recordType); + + return record; + } + + private void ProcessKey(JsonElement recordElement, object record, Type recordType, Type keyType) + { + if (recordElement.TryGetProperty("key", out var keyElement) && keyElement.ValueKind == JsonValueKind.String) { - foreach (var topicPartition in recordsElement.EnumerateObject()) + var base64Key = keyElement.GetString(); + if (!string.IsNullOrEmpty(base64Key)) { - var topicName = topicPartition.Name; - - // Create list of records with correct generic types - var listType = - typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)); - var recordsList = Activator.CreateInstance(listType) ?? - throw new InvalidOperationException( - $"Failed to create list of type {listType.Name}"); - var listAddMethod = listType.GetMethod("Add") ?? - throw new InvalidOperationException("Add method not found on list type"); - - foreach (var recordElement in topicPartition.Value.EnumerateArray()) + try { - // Create record instance of correct type - var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType); - var record = Activator.CreateInstance(recordType); - if (record == null) - continue; - - // Set basic properties - SetProperty(recordType, record, "Topic", recordElement, "topic"); - SetProperty(recordType, record, "Partition", recordElement, "partition"); - SetProperty(recordType, record, "Offset", recordElement, "offset"); - SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); - SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); - - // Handle key - base64 decode and convert to the correct type - if (recordElement.TryGetProperty("key", out var keyElement) && - keyElement.ValueKind == JsonValueKind.String) - { - var base64Key = keyElement.GetString(); - if (!string.IsNullOrEmpty(base64Key)) - { - try - { - var keyBytes = Convert.FromBase64String(base64Key); - var decodedKey = DeserializeKey(keyBytes, keyType); - - var keyProperty = recordType.GetProperty("Key"); - keyProperty?.SetValue(record, decodedKey); - } - catch (Exception ex) - { - throw new Exception($"Failed to deserialize data: {ex.Message}", ex); - } - } - } + var keyBytes = Convert.FromBase64String(base64Key); + var decodedKey = DeserializeKey(keyBytes, keyType); + recordType.GetProperty("Key")?.SetValue(record, decodedKey); + } + catch (Exception ex) + { + throw new SerializationException($"Failed to deserialize key data: {ex.Message}", ex); + } + } + } + } - // Handle value - if (recordElement.TryGetProperty("value", out var valueElement) && - valueElement.ValueKind == JsonValueKind.String) - { - var 
base64Value = valueElement.GetString(); - var valueProperty = recordType.GetProperty("Value"); - - if (base64Value != null && valueProperty != null) - { - try - { - var deserializedValue = DeserializeValue(base64Value, valueType); - valueProperty.SetValue(record, deserializedValue); - } - catch (Exception ex) - { - throw new Exception($"Failed to deserialize data: {ex.Message}", ex); - } - } - } + private void ProcessValue(JsonElement recordElement, object record, Type recordType, Type valueType) + { + if (recordElement.TryGetProperty("value", out var valueElement) && valueElement.ValueKind == JsonValueKind.String) + { + var base64Value = valueElement.GetString(); + var valueProperty = recordType.GetProperty("Value"); + + if (base64Value != null && valueProperty != null) + { + try + { + var deserializedValue = DeserializeValue(base64Value, valueType); + valueProperty.SetValue(record, deserializedValue); + } + catch (Exception ex) + { + throw new SerializationException($"Failed to deserialize value data: {ex.Message}", ex); + } + } + } + } - // Process headers - + private void ProcessHeaders(JsonElement recordElement, object record, Type recordType) + { + if (recordElement.TryGetProperty("headers", out var headersElement) && + headersElement.ValueKind == JsonValueKind.Array) + { + var headers = new Dictionary(); - if (recordElement.TryGetProperty("headers", out var headersElement) && - headersElement.ValueKind == JsonValueKind.Array) + foreach (var headerObj in headersElement.EnumerateArray()) + { + foreach (var header in headerObj.EnumerateObject()) + { + if (header.Value.ValueKind == JsonValueKind.Array) { - var headers = new Dictionary(); - - foreach (var headerObj in headersElement.EnumerateArray()) - { - foreach (var header in headerObj.EnumerateObject()) - { - var headerKey = header.Name; - if (header.Value.ValueKind == JsonValueKind.Array) - { - var headerBytes = new byte[header.Value.GetArrayLength()]; - var i = 0; - foreach (var byteVal in header.Value.EnumerateArray()) - { - headerBytes[i++] = (byte)byteVal.GetInt32(); - } - headers[headerKey] = headerBytes; - } - } - } - - var headersProperty = recordType.GetProperty("Headers", - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); - headersProperty?.SetValue(record, headers); + headers[header.Name] = ExtractHeaderBytes(header.Value); } - - // Add to records list - listAddMethod.Invoke(recordsList, new[] { record }); } - - // Add topic records to dictionary - dictAddMethod.Invoke(records, new[] { topicName, recordsList }); } + + var headersProperty = recordType.GetProperty("Headers", + BindingFlags.Public | BindingFlags.Instance); + headersProperty?.SetValue(record, headers); } + } - targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) - ?.SetValue(typedEvent, records); - return (T)typedEvent; + private byte[] ExtractHeaderBytes(JsonElement headerArray) + { + var headerBytes = new byte[headerArray.GetArrayLength()]; + var i = 0; + foreach (var byteVal in headerArray.EnumerateArray()) + { + headerBytes[i++] = (byte)byteVal.GetInt32(); + } + + return headerBytes; } /// @@ -312,8 +359,7 @@ private T DeserializeConsumerRecords(string json) [RequiresDynamicCode("Dynamically accesses properties which might be trimmed.")] [RequiresUnreferencedCode("Dynamically accesses properties which might be trimmed.")] private void SetProperty( - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | - DynamicallyAccessedMemberTypes.NonPublicProperties)] + 
[DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type type, object instance, string propertyName, JsonElement element, string jsonPropertyName) { @@ -323,7 +369,7 @@ private void SetProperty( // Add BindingFlags to find internal properties too var property = type.GetProperty(propertyName, - BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance); + BindingFlags.Public | BindingFlags.Instance); if (property == null) return; var propertyType = property.PropertyType; @@ -345,7 +391,7 @@ private void SetProperty( /// The stream to write the serialized data to. public void Serialize(T response, Stream responseStream) { - if (response == null) + if (EqualityComparer.Default.Equals(response, default(T))) { // According to ILambdaSerializer contract, if response is null, an empty stream or "null" should be written. // AWS's default System.Text.Json serializer writes "null". @@ -355,14 +401,15 @@ public void Serialize(T response, Stream responseStream) var nullBytes = Encoding.UTF8.GetBytes("null"); responseStream.Write(nullBytes, 0, nullBytes.Length); } + return; } - + if (SerializerContext != null) { // Attempt to get TypeInfo for the actual type of the response. // This is important if T is object or an interface. - var typeInfo = SerializerContext.GetTypeInfo(response.GetType()); + var typeInfo = SerializerContext.GetTypeInfo(response.GetType()); if (typeInfo != null) { @@ -370,12 +417,13 @@ public void Serialize(T response, Stream responseStream) JsonSerializer.Serialize(responseStream, response, typeInfo); return; } + // Fallback: if specific type info not found, try with typeof(T) from context // This might be useful if T is concrete and response.GetType() is the same. typeInfo = GetJsonTypeInfoFromContext(typeof(T)); if (typeInfo != null) { - // Need to cast typeInfo to non-generic JsonTypeInfo for the Serialize overload + // Need to cast typeInfo to non-generic JsonTypeInfo for the Serialize overload JsonSerializer.Serialize(responseStream, response, typeInfo); return; } @@ -398,7 +446,7 @@ public void Serialize(T response, Stream responseStream) { if (SerializerContext == null) return null; - + return SerializerContext.GetTypeInfo(type); } @@ -420,6 +468,7 @@ public void Serialize(T response, Stream responseStream) return prop.GetValue(SerializerContext) as JsonTypeInfo; } } + return null; } @@ -431,8 +480,10 @@ public void Serialize(T response, Stream responseStream) /// The deserialized object. 
[RequiresDynamicCode("Deserializing values might require runtime code generation depending on format.")] [RequiresUnreferencedCode("Deserializing values might require types that cannot be statically analyzed.")] - protected virtual object DeserializeValue(string base64Value, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) + protected virtual object DeserializeValue(string base64Value, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type valueType) { // Handle primitive types first if (IsPrimitiveOrSimpleType(valueType)) @@ -440,16 +491,18 @@ protected virtual object DeserializeValue(string base64Value, var bytes = Convert.FromBase64String(base64Value); return DeserializePrimitiveValue(bytes, valueType); } - + // For complex types, use format-specific deserialization return DeserializeComplexValue(base64Value, valueType); } - + /// /// Deserializes complex value types using the appropriate format. /// - protected abstract object DeserializeComplexValue(string base64Value, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type valueType); + protected abstract object DeserializeComplexValue(string base64Value, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type valueType); /// @@ -459,7 +512,7 @@ protected abstract object DeserializeComplexValue(string base64Value, /// The type to deserialize to. /// The deserialized key object. protected abstract object? DeserializeComplexKey(byte[] keyBytes, Type keyType); - + /// /// Checks if the specified type is a primitive or simple type. /// @@ -471,7 +524,7 @@ private bool IsPrimitiveOrSimpleType(Type type) type == typeof(DateTime) || type == typeof(Guid); } - + /// /// Deserializes a primitive value from bytes based on the specified type. /// Handles common primitive types like int, long, double, bool, string, and Guid. @@ -482,7 +535,7 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) { if (bytes == null! 
|| bytes.Length == 0) return null!; - + if (valueType == typeof(string)) { return Encoding.UTF8.GetString(bytes); @@ -493,7 +546,7 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) var stringValue = Encoding.UTF8.GetString(bytes); if (int.TryParse(stringValue, out var parsedValue)) return parsedValue; - + // Fall back to binary return bytes.Length switch { @@ -507,7 +560,7 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) var stringValue = Encoding.UTF8.GetString(bytes); if (long.TryParse(stringValue, out var parsedValue)) return parsedValue; - + return bytes.Length switch { >= 8 => BitConverter.ToInt64(bytes, 0), @@ -527,7 +580,7 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) { return new Guid(bytes); } - + // For any other type, try to parse as string return Convert.ChangeType(Encoding.UTF8.GetString(bytes), valueType); } From 0ecf254a5921cb52cc9c3a3752465d51493bca70 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:42:46 +0100 Subject: [PATCH 20/35] sonar fix 2 --- examples/Kafka/Json/src/Function.cs | 2 +- .../Kafka/JsonClassLibrary/src/Function.cs | 6 ------ examples/Kafka/Protobuf/src/Function.cs | 8 +------- .../PowertoolsKafkaSerializerBase.cs | 19 ++++++++++++------- 4 files changed, 14 insertions(+), 21 deletions(-) diff --git a/examples/Kafka/Json/src/Function.cs b/examples/Kafka/Json/src/Function.cs index 53a8cb638..d7d96bfca 100644 --- a/examples/Kafka/Json/src/Function.cs +++ b/examples/Kafka/Json/src/Function.cs @@ -16,6 +16,6 @@ string Handler(ConsumerRecords records, ILambdaContext } await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, - new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaJsonSerializer for Json serialization .Build() .RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/Function.cs b/examples/Kafka/JsonClassLibrary/src/Function.cs index 60a2dbeaf..98795029e 100644 --- a/examples/Kafka/JsonClassLibrary/src/Function.cs +++ b/examples/Kafka/JsonClassLibrary/src/Function.cs @@ -11,12 +11,6 @@ namespace ProtoBufClassLibrary; public class Function { - /// - /// A simple function that takes a string and does a ToUpper - /// - /// The event for the Lambda function handler to process. - /// The ILambdaContext that provides methods for logging and describing the Lambda environment. 
- /// public string FunctionHandler(ConsumerRecords records, ILambdaContext context) { foreach (var record in records) diff --git a/examples/Kafka/Protobuf/src/Function.cs b/examples/Kafka/Protobuf/src/Function.cs index 7a03c5498..446328696 100644 --- a/examples/Kafka/Protobuf/src/Function.cs +++ b/examples/Kafka/Protobuf/src/Function.cs @@ -9,12 +9,6 @@ string Handler(ConsumerRecords records, ILambdaContext { foreach (var record in records) { - foreach (var header in record.Headers.DecodedValues()) - { - Console.WriteLine($"{header.Key}: {header.Value}"); - } - - Logger.LogInformation("Record Key: {@key}", record.Key); Logger.LogInformation("Record Value: {@record}", record.Value); } @@ -22,7 +16,7 @@ string Handler(ConsumerRecords records, ILambdaContext } await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, - new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization .Build() .RunAsync(); diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index 1cea46aad..f74a7b8ab 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -142,7 +142,7 @@ private T DeserializeConsumerRecords(string json) var root = document.RootElement; // Create the typed instance and set basic properties - var typedEvent = CreateConsumerRecordsInstance(targetType); + var typedEvent = CreateConsumerRecordsInstance(targetType); SetBasicProperties(root, typedEvent, targetType); // Create and populate records dictionary @@ -156,7 +156,7 @@ private T DeserializeConsumerRecords(string json) return (T)typedEvent; } - private object CreateConsumerRecordsInstance(Type targetType) + private object CreateConsumerRecordsInstance(Type targetType) { return Activator.CreateInstance(targetType) ?? throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); @@ -540,7 +540,8 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) { return Encoding.UTF8.GetString(bytes); } - else if (valueType == typeof(int)) + + if (valueType == typeof(int)) { // First try to parse as string var stringValue = Encoding.UTF8.GetString(bytes); @@ -555,7 +556,8 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) _ => 0 }; } - else if (valueType == typeof(long)) + + if (valueType == typeof(long)) { var stringValue = Encoding.UTF8.GetString(bytes); if (long.TryParse(stringValue, out var parsedValue)) @@ -568,15 +570,18 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) _ => 0L }; } - else if (valueType == typeof(double)) + + if (valueType == typeof(double)) { return bytes.Length >= 8 ? 
BitConverter.ToDouble(bytes, 0) : 0.0; } - else if (valueType == typeof(bool) && bytes.Length >= 1) + + if (valueType == typeof(bool)) { return bytes[0] != 0; } - else if (valueType == typeof(Guid) && bytes.Length >= 16) + + if (valueType == typeof(Guid) && bytes.Length >= 16) { return new Guid(bytes); } From 5e12bca9bae2b85804c1afa31a3128ce8c627d55 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:47:27 +0100 Subject: [PATCH 21/35] fix examples build: add avro tools --- .github/workflows/examples-tests.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/examples-tests.yml b/.github/workflows/examples-tests.yml index 83522084d..77b322e18 100644 --- a/.github/workflows/examples-tests.yml +++ b/.github/workflows/examples-tests.yml @@ -33,6 +33,9 @@ jobs: - name: Install dependencies run: dotnet restore + - name: Install global tools + run: dotnet tool install --global Apache.Avro.Tools + - name: Build run: dotnet build --configuration Release --no-restore /tl From 5eaffd30befd638f85e66eef2c7c5095337ac6d2 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 16:59:03 +0100 Subject: [PATCH 22/35] add base tests --- .../PowertoolsKafkaSerializerBaseTests.cs | 344 ++++++++++++++++++ 1 file changed, 344 insertions(+) create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs new file mode 100644 index 000000000..1b3ad2295 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs @@ -0,0 +1,344 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Runtime.Serialization; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Xunit; + +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + /// + /// Additional tests for PowertoolsKafkaSerializerBase + /// + public class PowertoolsKafkaSerializerBaseTests + { + /// + /// Simple serializer implementation for testing base class + /// + private class TestKafkaSerializer : PowertoolsKafkaSerializerBase + { + public TestKafkaSerializer() : base() + { + } + + public TestKafkaSerializer(JsonSerializerOptions options) : base(options) + { + } + + public TestKafkaSerializer(JsonSerializerContext context) : base(context) + { + } + + public TestKafkaSerializer(JsonSerializerOptions options, JsonSerializerContext context) + : base(options, context) + { + } + + protected override object? 
DeserializeComplexKey(byte[] keyBytes, Type keyType) + { + return JsonSerializer.Deserialize(keyBytes, keyType); + } + + protected override object DeserializeComplexValue(string base64Value, Type valueType) + { + var bytes = Convert.FromBase64String(base64Value); + return JsonSerializer.Deserialize(bytes, valueType); + } + + // Implement our own version that mimics the private method's behavior + public object TestDeserializePrimitiveValue(byte[] bytes, Type valueType) + { + if (bytes == null || bytes.Length == 0) + return null!; + + if (valueType == typeof(string)) + { + return Encoding.UTF8.GetString(bytes); + } + + if (valueType == typeof(int)) + { + var stringValue = Encoding.UTF8.GetString(bytes); + if (int.TryParse(stringValue, out var parsedValue)) + return parsedValue; + + return bytes.Length switch + { + >= 4 => BitConverter.ToInt32(bytes, 0), + 1 => bytes[0], + _ => 0 + }; + } + + if (valueType == typeof(long)) + { + var stringValue = Encoding.UTF8.GetString(bytes); + if (long.TryParse(stringValue, out var parsedValue)) + return parsedValue; + + return bytes.Length switch + { + >= 8 => BitConverter.ToInt64(bytes, 0), + >= 4 => BitConverter.ToInt32(bytes, 0), + _ => 0L + }; + } + + if (valueType == typeof(double)) + { + return bytes.Length >= 8 ? BitConverter.ToDouble(bytes, 0) : 0.0; + } + + if (valueType == typeof(bool)) + { + return bytes[0] != 0; + } + + if (valueType == typeof(Guid) && bytes.Length >= 16) + { + return new Guid(bytes); + } + + return Convert.ChangeType(Encoding.UTF8.GetString(bytes), valueType); + } + } + + [Fact] + public void Deserialize_BooleanValues_HandlesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: "dHJ1ZQ==", // "true" in base64 + valueValue: "AQ==" // byte[1] = {1} in base64 + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var firstRecord = result.First(); + Assert.Equal("true", firstRecord.Key); + Assert.True(firstRecord.Value); + } + + [Fact] + public void Deserialize_NumericValues_HandlesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: "NDI=", // "42" in base64 + valueValue: "MTIzNA==" // "1234" in base64 + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var firstRecord = result.First(); + Assert.Equal(42, firstRecord.Key); + Assert.Equal(1234, firstRecord.Value); + } + + [Fact] + public void Deserialize_GuidValues_HandlesCorrectly() + { + // Arrange + var guid = Guid.NewGuid(); + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(guid.ToByteArray()), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes(guid.ToString())) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var firstRecord = result.First(); + Assert.Equal(guid, firstRecord.Key); + Assert.Equal(guid.ToString(), firstRecord.Value); + } + + [Fact] + public void Deserialize_InvalidJson_ThrowsException() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string invalidJson = "{ this is not valid json }"; + using var stream = new 
MemoryStream(Encoding.UTF8.GetBytes(invalidJson)); + + // Act & Assert + Assert.ThrowsAny(() => + serializer.Deserialize>(stream)); + } + + [Fact] + public void Deserialize_MalformedBase64_ThrowsException() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: "not-base64!", + valueValue: "valid-base64==" + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act & Assert + var ex = Assert.Throws(() => + serializer.Deserialize>(stream)); + + Assert.Contains("Failed to deserialize key data", ex.Message); + } + + [Fact] + public void Serialize_ValidObject_WritesToStream() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var testObject = new { Name = "Test", Value = 42 }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(testObject, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Name\":\"Test\"", result); + Assert.Contains("\"Value\":42", result); + } + + [Fact] + public void Serialize_NullObject_WritesNullToStream() + { + // Arrange + var serializer = new TestKafkaSerializer(); + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(null, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Equal("null", result); + } + + [Fact] + public void DeserializePrimitiveValue_EmptyBytes_ReturnsNull() + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act + var result = serializer.TestDeserializePrimitiveValue(Array.Empty(), typeof(string)); + + // Assert + Assert.Null(result); + } + + [Fact] + public void DeserializePrimitiveValue_LongValue_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var longBytes = BitConverter.GetBytes(long.MaxValue); + + // Act + var result = serializer.TestDeserializePrimitiveValue(longBytes, typeof(long)); + + // Assert + Assert.Equal(long.MaxValue, result); + } + + [Fact] + public void DeserializePrimitiveValue_DoubleValue_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var doubleBytes = BitConverter.GetBytes(3.14159); + + // Act + var result = serializer.TestDeserializePrimitiveValue(doubleBytes, typeof(double)); + + // Assert + Assert.Equal(3.14159, result); + } + + [Fact] + public void ProcessHeaders_MultipleHeaders_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("key"))}"", + ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("value"))}"", + ""headers"": [ + {{ ""header1"": [104, 101, 108, 108, 111] }}, + {{ ""header2"": [119, 111, 114, 108, 100] }} + ] + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal(2, record.Headers.Count); + Assert.Equal("hello", Encoding.ASCII.GetString(record.Headers["header1"])); + Assert.Equal("world", Encoding.ASCII.GetString(record.Headers["header2"])); + } + + // Helper method to create Kafka event JSON with specified key and value + 
private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + } +} \ No newline at end of file From 0c169d1c93ff0c8ee56287ccac23139aea0ba60c Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 17:25:51 +0100 Subject: [PATCH 23/35] enhance tests: add deserialization tests with serializer context and complex keys --- .../PowertoolsKafkaAvroSerializerTests.cs | 164 ++++++++++++++---- .../PowertoolsKafkaSerializerBaseTests.cs | 117 +++++++++++++ .../PowertoolsKafkaProtobufSerializerTests.cs | 159 +++++++++++++---- 3 files changed, 373 insertions(+), 67 deletions(-) diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs index 65c62be1b..ebf8a9b6b 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -1,4 +1,6 @@ using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; using Avro; using Avro.Generic; using Avro.IO; @@ -16,38 +18,38 @@ public void Deserialize_KafkaEventWithAvroPayload_DeserializesToCorrectType() var serializer = new PowertoolsKafkaAvroSerializer(); string kafkaEventJson = File.ReadAllText("Avro/kafka-avro-event.json"); using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + // Act var result = serializer.Deserialize>(stream); - + // Assert Assert.NotNull(result); Assert.Equal("aws:kafka", result.EventSource); - + // Verify records were deserialized Assert.True(result.Records.ContainsKey("mytopic-0")); var records = result.Records["mytopic-0"]; Assert.Equal(3, records.Count); - + // Verify first record's content var firstRecord = records[0]; Assert.Equal("mytopic", firstRecord.Topic); Assert.Equal(0, firstRecord.Partition); Assert.Equal(15, firstRecord.Offset); Assert.Equal(42, firstRecord.Key); - + // Verify deserialized Avro value var product = firstRecord.Value; Assert.Equal("Laptop", product.name); Assert.Equal(1001, product.id); Assert.Equal(999.99000000000001, product.price); - + // Verify second record var secondRecord = records[1]; var smartphone = secondRecord.Value; Assert.Equal("Smartphone", smartphone.name); } - + [Fact] public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() { @@ -55,66 +57,158 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() var serializer = new PowertoolsKafkaAvroSerializer(); string kafkaEventJson = File.ReadAllText("Avro/kafka-avro-event.json"); using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + // Act var result = serializer.Deserialize>(stream); - + // Assert - Test enumeration int count = 0; var products = new List(); - + // Directly iterate over ConsumerRecords foreach (var record in result) { count++; 
products.Add(record.Value.name); } - + // Verify correct count and values Assert.Equal(3, count); Assert.Contains("Laptop", products); Assert.Contains("Smartphone", products); Assert.Equal(3, products.Count); - + // Get first record directly through Linq extension var firstRecord = result.First(); Assert.Equal("Laptop", firstRecord.Value.name); Assert.Equal(1001, firstRecord.Value.id); } - + [Fact] public void Primitive_Deserialization() { // Arrange var serializer = new PowertoolsKafkaAvroSerializer(); - string kafkaEventJson = @$"{{ - ""eventSource"": ""aws:kafka"", - ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", - ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", - ""records"": {{ - ""mytopic-0"": [ - {{ - ""topic"": ""mytopic"", - ""partition"": 0, - ""offset"": 15, - ""timestamp"": 1545084650987, - ""timestampType"": ""CREATE_TIME"", - ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("MyKey"))}"", - ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Myvalue"))}"", - ""headers"": [ - {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} - ] - }} - ] - }} - }}"; + string kafkaEventJson = + CreateKafkaEvent(Convert.ToBase64String("MyKey"u8.ToArray()), + Convert.ToBase64String("Myvalue"u8.ToArray())); using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + // Act var result = serializer.Deserialize>(stream); var firstRecord = result.First(); Assert.Equal("Myvalue", firstRecord.Value); Assert.Equal("MyKey", firstRecord.Key); } + + [Fact] + public void DeserializeComplexKey_WithoutAvroSchema_FallsBackToJson() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + var complexObject = new { Name = "Test", Id = 123 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + // Use Dictionary as key type since it doesn't have an Avro schema + var result = serializer.Deserialize, string>>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("Test", record.Key["Name"].ToString()); + Assert.Equal(123, int.Parse(record.Key["Id"].ToString())); + } + + [Fact] + public void DeserializeComplexKey_WithSerializerContext_UsesContext() + { + // Arrange + // Create custom context + var options = new JsonSerializerOptions(); + var context = new TestAvroSerializerContext(options); + var serializer = new PowertoolsKafkaAvroSerializer(context); + + // Create test data with the registered type + var testModel = new TestModel { Name = "TestFromContext", Value = 456 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("TestFromContext", record.Key.Name); + Assert.Equal(456, record.Key.Value); + } + + [Fact] + 
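// The next test pins the last-resort behaviour of key deserialization: bytes that are neither valid Avro nor valid JSON + // come back as a null key instead of throwing, so one malformed record does not fail the whole batch. The assumed order + // of attempts, sketched with hypothetical helpers: TryAvroKey(data) ?? TryJsonKey(data) ?? null. +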
public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsNull() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + // Invalid JSON and not Avro binary + byte[] invalidBytes = { 0xDE, 0xAD, 0xBE, 0xEF }; + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(invalidBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + // This shouldn't throw but return a record with null key + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Null(record.Key); + } + + private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } +} + +[JsonSerializable(typeof(TestModel))] +public partial class TestAvroSerializerContext : JsonSerializerContext +{ } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs index 1b3ad2295..76f8f9158 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs @@ -340,5 +340,122 @@ private string CreateKafkaEvent(string keyValue, string valueValue) }} }}"; } + + [Fact] + public void Deserialize_WithSerializerContext_UsesContextForRegisteredTypes() + { + // Arrange + var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; + var context = new TestSerializerContext(options); + // Use only options for constructor, but we'll make the context available for the model deserialization + var serializer = new TestKafkaSerializer(options); + + var testModel = new TestModel { Name = "Test", Value = 123 }; + var modelJson = JsonSerializer.Serialize(testModel, context.TestModel); + var base64Value = Convert.ToBase64String(Encoding.UTF8.GetBytes(modelJson)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: base64Value + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Equal("Test", record.Value.Name); + Assert.Equal(123, record.Value.Value); + } + + [Fact] + public void Serialize_WithSerializerContext_UsesContextForRegisteredTypes() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + var testModel = new TestModel { Name = "Test", Value = 123 }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(testModel, 
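+ // Serialize writes the response object to Lambda's output stream. With a JsonSerializerContext supplied, the + // source-generated JsonTypeInfo for TestModel is expected to be used rather than reflection-based System.Text.Json, + // the behaviour that keeps this path viable under trimming and Native AOT.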
responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Name\":\"Test\"", result); + Assert.Contains("\"Value\":123", result); + } + + [Fact] + public void Deserialize_WithSerializerContext_FallsBackWhenTypeNotRegistered() + { + // Arrange + var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + // Using a non-registered type (Dictionary instead of TestModel) + var dictionary = new Dictionary { ["Key"] = 42 }; + var dictJson = JsonSerializer.Serialize(dictionary); + var base64Value = Convert.ToBase64String(Encoding.UTF8.GetBytes(dictJson)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: base64Value + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>>(stream); + + // Assert + Assert.NotNull(result); + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Single(record.Value); + Assert.Equal(42, record.Value["Key"]); + } + + [Fact] + public void Serialize_NonRegisteredType_FallsBackToRegularSerialization() + { + // Arrange + var options = new JsonSerializerOptions(); + // Use serializer WITHOUT context to test the fallback path + var serializer = new TestKafkaSerializer(options); + + // Using a non-registered type + var nonRegisteredType = new { Id = Guid.NewGuid(), Message = "Not in context" }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(nonRegisteredType, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Id\":", result); + Assert.Contains("\"Message\":\"Not in context\"", result); + } + } + + [JsonSerializable(typeof(TestModel))] + [JsonSerializable(typeof(ConsumerRecords))] + public partial class TestSerializerContext : JsonSerializerContext + { + } + + public class TestModel + { + public string Name { get; set; } + public int Value { get; set; } } } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs index 7727dd0b2..fe2b441aa 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs @@ -1,4 +1,6 @@ using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; using AWS.Lambda.Powertools.Kafka.Protobuf; using TestKafka; @@ -38,14 +40,14 @@ public void Deserialize_KafkaEventWithProtobufPayload_DeserializesToCorrectType( Assert.Equal("Laptop", product.Name); Assert.Equal(1001, product.Id); Assert.Equal(999.99, product.Price); - + // Verify second record var secondRecord = records[1]; var smartphone = secondRecord.Value; Assert.Equal("Smartphone", smartphone.Name); Assert.Equal(1002, smartphone.Id); Assert.Equal(599.99, smartphone.Price); - + // Verify third record var thirdRecord = records[2]; var headphones = thirdRecord.Value; @@ -53,7 +55,7 @@ public void Deserialize_KafkaEventWithProtobufPayload_DeserializesToCorrectType( Assert.Equal(1003, headphones.Id); 
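+ // These expectations mirror Protobuf/kafka-protobuf-event.json, where each record value is a base64-encoded + // ProtobufProduct message (Name/Id/Price fields) rather than JSON text, so a wire-level change to that fixture + // would surface here first.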
Assert.Equal(149.99, headphones.Price); } - + [Fact] public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() { @@ -61,66 +63,159 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() var serializer = new PowertoolsKafkaProtobufSerializer(); string kafkaEventJson = File.ReadAllText("Protobuf/kafka-protobuf-event.json"); using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + // Act var result = serializer.Deserialize>(stream); - + // Assert - Test enumeration int count = 0; var products = new List(); - + // Directly iterate over ConsumerRecords foreach (var record in result) { count++; products.Add(record.Value.Name); } - + // Verify correct count and values Assert.Equal(3, count); Assert.Contains("Laptop", products); Assert.Contains("Smartphone", products); Assert.Contains("Headphones", products); - + // Get first record directly through Linq extension var firstRecord = result.First(); Assert.Equal("Laptop", firstRecord.Value.Name); Assert.Equal(1001, firstRecord.Value.Id); } - + [Fact] public void Primitive_Deserialization() { // Arrange var serializer = new PowertoolsKafkaProtobufSerializer(); - string kafkaEventJson = @$"{{ - ""eventSource"": ""aws:kafka"", - ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", - ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", - ""records"": {{ - ""mytopic-0"": [ - {{ - ""topic"": ""mytopic"", - ""partition"": 0, - ""offset"": 15, - ""timestamp"": 1545084650987, - ""timestampType"": ""CREATE_TIME"", - ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("MyKey"))}"", - ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Myvalue"))}"", - ""headers"": [ - {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} - ] - }} - ] - }} - }}"; - + string kafkaEventJson = + CreateKafkaEvent(Convert.ToBase64String("MyKey"u8.ToArray()), + Convert.ToBase64String("Myvalue"u8.ToArray())); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + // Act var result = serializer.Deserialize>(stream); var firstRecord = result.First(); Assert.Equal("Myvalue", firstRecord.Value); Assert.Equal("MyKey", firstRecord.Key); } + + [Fact] + public void DeserializeComplexKey_WithoutProtobufParser_FallsBackToJson() + { + // Arrange + var serializer = new PowertoolsKafkaProtobufSerializer(); + var complexObject = new { Name = "Test", Id = 123 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + // Use Dictionary as key type since it doesn't have a Protobuf parser + var result = serializer.Deserialize, string>>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("Test", record.Key["Name"].ToString()); + Assert.Equal(123, int.Parse(record.Key["Id"].ToString())); + } + + [Fact] + public void DeserializeComplexKey_WithSerializerContext_UsesContext() + { + // Arrange + // Create custom context + var options = new JsonSerializerOptions(); + var context = new TestProtobufSerializerContext(options); + var serializer = new PowertoolsKafkaProtobufSerializer(context); + + // Create 
test data with the registered type + var testModel = new TestModel { Name = "TestFromContext", Value = 456 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("TestFromContext", record.Key.Name); + Assert.Equal(456, record.Key.Value); + } + + [Fact] + public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsNull() + { + // Arrange + var serializer = new PowertoolsKafkaProtobufSerializer(); + // Invalid JSON and not Protobuf binary + byte[] invalidBytes = { 0xDE, 0xAD, 0xBE, 0xEF }; + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(invalidBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + // This shouldn't throw but return a record with null key + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Null(record.Key); + } + + private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } } + +[JsonSerializable(typeof(TestModel))] +public partial class TestProtobufSerializerContext : JsonSerializerContext +{ +} \ No newline at end of file From 146837f5f84d8e8eb5431c2f432cd5175213ff84 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 18:43:30 +0100 Subject: [PATCH 24/35] enhance tests: add header decoding and complex key/value deserialization tests for Kafka JSON serializer --- .../HeaderExtensionsTests.cs | 89 ++++++++ .../PowertoolsKafkaJsonSerializerTests.cs | 191 +++++++++++++++--- 2 files changed, 248 insertions(+), 32 deletions(-) create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs new file mode 100644 index 000000000..4b76f2051 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs @@ -0,0 +1,89 @@ +using System.Text; +using Xunit; + +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + public class HeaderExtensionsTests + { + [Fact] + public void DecodedValues_WithValidHeaders_DecodesCorrectly() + { + // Arrange + var headers = new Dictionary + { + { "header1", Encoding.UTF8.GetBytes("value1") }, + { "header2", Encoding.UTF8.GetBytes("value2") } + }; + + // Act + var decoded = headers.DecodedValues(); + + // Assert + Assert.Equal(2, decoded.Count); + Assert.Equal("value1", 
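+            // DecodedValues() is expected to UTF-8-decode each byte[] header value into a Dictionary<string, string>, + // e.g. { "header1", Encoding.UTF8.GetBytes("value1") } comes back as { "header1", "value1" }.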
decoded["header1"]); + Assert.Equal("value2", decoded["header2"]); + } + + [Fact] + public void DecodedValues_WithEmptyDictionary_ReturnsEmptyDictionary() + { + // Arrange + var headers = new Dictionary(); + + // Act + var decoded = headers.DecodedValues(); + + // Assert + Assert.Empty(decoded); + } + + [Fact] + public void DecodedValues_WithNullDictionary_ReturnsEmptyDictionary() + { + // Arrange + Dictionary headers = null; + + // Act + var decoded = headers.DecodedValues(); + + // Assert + Assert.Empty(decoded); + } + + [Fact] + public void DecodedValue_WithValidBytes_DecodesCorrectly() + { + // Arrange + var bytes = Encoding.UTF8.GetBytes("test-value"); + + // Act + var decoded = bytes.DecodedValue(); + + // Assert + Assert.Equal("test-value", decoded); + } + + [Fact] + public void DecodedValue_WithEmptyBytes_ReturnsEmptyString() + { + // Arrange + var bytes = Array.Empty(); + + // Act + var decoded = bytes.DecodedValue(); + + // Assert + Assert.Equal("", decoded); + } + + [Fact] + public void DecodedValue_WithNullBytes_ReturnsEmptyString() + { + // Act + var decoded = ((byte[])null).DecodedValue(); + + // Assert + Assert.Equal("", decoded); + } + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs index 6283e6e59..a1c9b3368 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -1,5 +1,8 @@ using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; using AWS.Lambda.Powertools.Kafka.Json; +using AWS.Lambda.Powertools.Kafka.Tests; namespace AWS.Lambda.Powertools.Kafka.Tests.Json; @@ -44,14 +47,14 @@ public void Deserialize_KafkaEventWithJsonPayload_DeserializesToCorrectType() Assert.Equal("product5", product.Name); Assert.Equal(12345, product.Id); Assert.Equal(45, product.Price); - + // Verify second record var secondRecord = records[1]; var p2 = secondRecord.Value; Assert.Equal("product5", p2.Name); Assert.Equal(12345, p2.Id); Assert.Equal(45, p2.Price); - + // Verify third record var thirdRecord = records[2]; var p3 = thirdRecord.Value; @@ -59,7 +62,7 @@ public void Deserialize_KafkaEventWithJsonPayload_DeserializesToCorrectType() Assert.Equal(12345, p3.Id); Assert.Equal(45, p3.Price); } - + [Fact] public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() { @@ -67,64 +70,188 @@ public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() var serializer = new PowertoolsKafkaJsonSerializer(); string kafkaEventJson = File.ReadAllText("Json/kafka-json-event.json"); using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + // Act var result = serializer.Deserialize>(stream); - + // Assert - Test enumeration int count = 0; var products = new List(); - + // Directly iterate over ConsumerRecords foreach (var record in result) { count++; products.Add(record.Value.Name); } - + // Verify correct count and values Assert.Equal(3, count); Assert.Contains("product5", products); - + // Get first record directly through Linq extension var firstRecord = result.First(); Assert.Equal("product5", firstRecord.Value.Name); Assert.Equal(12345, firstRecord.Value.Id); } - + [Fact] public void Primitive_Deserialization() { // Arrange var serializer = new PowertoolsKafkaJsonSerializer(); - string kafkaEventJson = @$"{{ - 
""eventSource"": ""aws:kafka"", - ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", - ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", - ""records"": {{ - ""mytopic-0"": [ - {{ - ""topic"": ""mytopic"", - ""partition"": 0, - ""offset"": 15, - ""timestamp"": 1545084650987, - ""timestampType"": ""CREATE_TIME"", - ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("MyKey"))}"", - ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Myvalue"))}"", - ""headers"": [ - {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} - ] - }} - ] - }} - }}"; - + string kafkaEventJson = + CreateKafkaEvent(Convert.ToBase64String("MyKey"u8.ToArray()), + Convert.ToBase64String("Myvalue"u8.ToArray())); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + // Act var result = serializer.Deserialize>(stream); var firstRecord = result.First(); Assert.Equal("Myvalue", firstRecord.Value); Assert.Equal("MyKey", firstRecord.Key); } + + [Fact] + public void DeserializeComplexKey_StandardJsonDeserialization_Works() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + var complexObject = new { Name = "Test", Id = 123 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize, string>>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("Test", record.Key["Name"].ToString()); + Assert.Equal(123, int.Parse(record.Key["Id"].ToString())); + } + + [Fact] + public void DeserializeComplexKey_WithSerializerContext_UsesContext() + { + // Arrange + // Create custom context + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + var serializer = new PowertoolsKafkaJsonSerializer(context); + + // Create test data with the registered type + var testModel = new TestModel { Name = "TestFromContext", Value = 456 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("TestFromContext", record.Key.Name); + Assert.Equal(456, record.Key.Value); + } + + [Fact] + public void DeserializeComplexKey_WhenDeserializationFails_ReturnsNull() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + // Invalid JSON + byte[] invalidBytes = { 0xDE, 0xAD, 0xBE, 0xEF }; + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(invalidBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + // This shouldn't throw but return a record with null key + var result = serializer.Deserialize>(stream); + + // Assert + var 
record = result.First(); + Assert.Null(record.Key); + } + + [Fact] + public void DeserializeComplexValue_WithSerializerContext_UsesContext() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + var serializer = new PowertoolsKafkaJsonSerializer(context); + + // Create test data with the registered type + var testModel = new TestModel { Name = "ValueFromContext", Value = 789 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(jsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.NotNull(record.Value); + Assert.Equal("ValueFromContext", record.Value.Name); + Assert.Equal(789, record.Value.Value); + } + + /// + /// Helper method to create Kafka event JSON with specified key and value in base64 format + /// + private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } } + +[JsonSerializable(typeof(TestModel))] +public partial class TestJsonSerializerContext : JsonSerializerContext +{ +} \ No newline at end of file From 0b26a1a9aa8f12ffa21cc09638f7978c2202b5e5 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 18:55:17 +0100 Subject: [PATCH 25/35] enhance tests: add deserialization and serialization tests for non-ConsumerRecord types and primitive values --- .../PowertoolsKafkaSerializerBaseTests.cs | 130 ++++++++++++++++++ 1 file changed, 130 insertions(+) diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs index 76f8f9158..5bafd1cd6 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs @@ -445,10 +445,140 @@ public void Serialize_NonRegisteredType_FallsBackToRegularSerialization() Assert.Contains("\"Id\":", result); Assert.Contains("\"Message\":\"Not in context\"", result); } + + [Fact] + public void Deserialize_NonConsumerRecordWithSerializerContext_UsesTypeInfo() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + var testModel = new TestModel { Name = "DirectDeserialization", Value = 42 }; + var json = JsonSerializer.Serialize(testModel); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(result); + 
Assert.Equal("DirectDeserialization", result.Name); + Assert.Equal(42, result.Value); + } + + [Fact] + public void Deserialize_NonConsumerRecordWithoutTypeInfo_UsesRegularDeserialize() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + // Dictionary is not registered in TestSerializerContext + var dict = new Dictionary { ["test"] = 123 }; + var json = JsonSerializer.Serialize(dict); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal(123, result["test"]); + } + + [Fact] + public void Deserialize_NonConsumerRecordFailed_ThrowsException() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var invalidJson = "{ invalid json"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(invalidJson)); + + // Act & Assert + // With invalid JSON input, JsonSerializer throws JsonException directly + var ex = Assert.Throws(() => + serializer.Deserialize(stream)); + + // Check that we're getting a JSON parsing error + Assert.Contains("invalid", ex.Message.ToLower()); + } + + [Theory] + [InlineData(new byte[] { 42 }, 42)] // Single byte + [InlineData(new byte[] { 0x2A, 0x00, 0x00, 0x00 }, 42)] // Four bytes + public void DeserializePrimitiveValue_IntWithDifferentByteFormats_DeserializesCorrectly(byte[] bytes, + int expected) + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act + var result = serializer.TestDeserializePrimitiveValue(bytes, typeof(int)); + + // Assert + Assert.Equal(expected, result); + } + + [Theory] + [InlineData(new byte[] { 0x2A, 0x00, 0x00, 0x00 }, 42L)] // Four bytes as int + [InlineData(new byte[] { 0x2A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, 42L)] // Eight bytes as long + public void DeserializePrimitiveValue_LongWithDifferentByteFormats_DeserializesCorrectly(byte[] bytes, + long expected) + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act + var result = serializer.TestDeserializePrimitiveValue(bytes, typeof(long)); + + // Assert + Assert.Equal(expected, result); + } + + [Fact] + public void DeserializePrimitiveValue_DoubleWithShortBytes_ReturnsZero() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var shortBytes = new byte[] { 0x00, 0x00, 0x00, 0x00 }; // Less than 8 bytes + + // Act + var result = serializer.TestDeserializePrimitiveValue(shortBytes, typeof(double)); + + // Assert + Assert.Equal(0.0, result); + } + + [Fact] + public void Serialize_WithTypeInfoFromContext_WritesToStream() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + var testModel = new TestModel { Name = "ContextSerialization", Value = 555 }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(testModel, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Name\":\"ContextSerialization\"", result); + Assert.Contains("\"Value\":555", result); + } } [JsonSerializable(typeof(TestModel))] [JsonSerializable(typeof(ConsumerRecords))] + [JsonSerializable(typeof(Dictionary))] public partial class TestSerializerContext : JsonSerializerContext { } From b0387c1c232fcc1c40f936c38a80949c5540b18a Mon Sep 17 00:00:00 2001 From: 
Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Wed, 18 Jun 2025 19:19:34 +0100 Subject: [PATCH 26/35] add license headers to all source and test files; add KafkaHandlerFunctionalTests covering key deserialization scenarios --- .../PowertoolsKafkaAvroSerializer.cs | 15 + .../PowertoolsKafkaJsonSerializer.cs | 15 + .../PowertoolsKafkaProtobufSerializer.cs | 15 + .../ConsumerRecord.cs | 15 + .../ConsumerRecords.cs | 15 + .../HeaderExtensions.cs | 15 + .../PowertoolsKafkaSerializerBase.cs | 15 + .../Avro/HandlerTests.cs | 15 + .../PowertoolsKafkaAvroSerializerTests.cs | 19 +- .../HeaderExtensionsTests.cs | 16 +- .../PowertoolsKafkaJsonSerializerTests.cs | 16 +- .../KafkaHandlerFunctionalTests.cs | 604 ++++++++++++++++++ .../PowertoolsKafkaSerializerBaseTests.cs | 19 +- .../Protobuf/HandlerTests.cs | 15 + .../PowertoolsKafkaProtobufSerializerTests.cs | 15 + 15 files changed, 814 insertions(+), 10 deletions(-) create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs index a5cdb573d..44bb5d833 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Diagnostics.CodeAnalysis; using System.Reflection; using System.Text; diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs index 18c779fbb..95fc2e354 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License.
+ */ + using System.Diagnostics.CodeAnalysis; using System.Text; using System.Text.Json; diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs index bce8830a7..b0149fdd4 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Diagnostics.CodeAnalysis; using System.Reflection; using System.Text; diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs index 20b3e1725..6384cd46c 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + namespace AWS.Lambda.Powertools.Kafka; /// diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs index c488301d8..972ae7cd7 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Collections; namespace AWS.Lambda.Powertools.Kafka; diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs index 48531fc49..892cf9516 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License").
+ * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Text; namespace AWS.Lambda.Powertools.Kafka; diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index f74a7b8ab..d1e74fc96 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using Amazon.Lambda.Core; using System.Diagnostics.CodeAnalysis; using System.Reflection; diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs index d56ea34a2..6627dea6e 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -1,3 +1,18 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Text; using Amazon.Lambda.Core; using Amazon.Lambda.TestUtilities; diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs index ebf8a9b6b..532a19ca0 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -1,10 +1,21 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied.
See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Text; using System.Text.Json; using System.Text.Json.Serialization; -using Avro; -using Avro.Generic; -using Avro.IO; -using Avro.Specific; using AWS.Lambda.Powertools.Kafka.Avro; namespace AWS.Lambda.Powertools.Kafka.Tests; diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs index 4b76f2051..574f79a30 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs @@ -1,5 +1,19 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Text; -using Xunit; namespace AWS.Lambda.Powertools.Kafka.Tests { diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs index a1c9b3368..41cd395c0 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -1,8 +1,22 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + using System.Text; using System.Text.Json; using System.Text.Json.Serialization; using AWS.Lambda.Powertools.Kafka.Json; -using AWS.Lambda.Powertools.Kafka.Tests; namespace AWS.Lambda.Powertools.Kafka.Tests.Json; diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs new file mode 100644 index 000000000..97327e76c --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs @@ -0,0 +1,604 @@ +/* + * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License.
+ */ + +/* + These tests cover the key use cases you requested: + + 1. Basic Functionality: + Processing single records + Processing multiple records + Accessing record metadata + + 2. Data Formats: + JSON deserialization + Avro deserialization + Protobuf deserialization + Raw/default deserialization + + 3. Key Processing: + Processing various key formats (string, int, complex objects) + Handling null keys + + 4.Error Handling: + Invalid JSON data + Missing schemas with fallback mechanisms + + 5.Headers & Metadata: + Accessing and parsing record headers + */ + +using System.Text; +using Amazon.Lambda.Core; +using Amazon.Lambda.TestUtilities; +using AWS.Lambda.Powertools.Kafka.Json; +using AWS.Lambda.Powertools.Kafka.Avro; +using AWS.Lambda.Powertools.Kafka.Protobuf; +using TestKafka; + +namespace AWS.Lambda.Powertools.Kafka.Tests; + +public class KafkaHandlerFunctionalTests +{ + #region JSON Serializer Tests + + [Fact] + public void Given_SingleJsonRecord_When_ProcessedWithHandler_Then_SuccessfullyDeserializedAndProcessed() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Processing {record.Value.Name} at ${record.Value.Price}"); + } + return "Successfully processed JSON Kafka events"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create a single record + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "mytopic", + Partition = 0, + Offset = 15, + Timestamp = 1645084650987, + TimestampType = "CREATE_TIME", + Key = "product-123", + Value = new JsonProduct { Name = "Laptop", Price = 999.99m, Id = 123 }, + Headers = new Dictionary + { + { "source", Encoding.UTF8.GetBytes("online-store") } + } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed JSON Kafka events", result); + Assert.Contains("Processing Laptop at $999.99", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_MultipleJsonRecords_When_ProcessedWithHandler_Then_AllRecordsProcessed() + { + // Given + int processedCount = 0; + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Processing {record.Value.Name}"); + processedCount++; + } + return $"Processed {processedCount} records"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create multiple records + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() { Topic = "mytopic", Value = new JsonProduct { Name = "Laptop" } }, + new() { Topic = "mytopic", Value = new JsonProduct { Name = "Phone" } }, + new() { Topic = "mytopic", Value = new JsonProduct { Name = "Tablet" } } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Processed 3 records", result); + Assert.Contains("Processing Laptop", mockLogger.Buffer.ToString()); + Assert.Contains("Processing Phone", mockLogger.Buffer.ToString()); + Assert.Contains("Processing Tablet", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_JsonRecordWithMetadata_When_ProcessedWithHandler_Then_MetadataIsAccessible() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + var record = 
records.First(); + context.Logger.LogInformation($"Topic: {record.Topic}, Partition: {record.Partition}, Offset: {record.Offset}, Time: {record.Timestamp}"); + return "Metadata accessed"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "sales-data", + Partition = 3, + Offset = 42, + Timestamp = 1645084650987, + TimestampType = "CREATE_TIME", + Value = new JsonProduct { Name = "Metadata Test" } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Metadata accessed", result); + Assert.Contains("Topic: sales-data, Partition: 3, Offset: 42", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_JsonStreamInput_When_DeserializedWithJsonSerializer_Then_CorrectlyDeserializes() + { + // Given + var serializer = new PowertoolsKafkaJsonSerializer(); + string json = @"{ + ""eventSource"": ""aws:kafka"", + ""records"": { + ""mytopic-0"": [ + { + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""key"": """ + Convert.ToBase64String(Encoding.UTF8.GetBytes("key1")) + @""", + ""value"": """ + Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"Name\":\"JSON Test\",\"Price\":199.99,\"Id\":456}")) + @""" + } + ] + } + }"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // When + var result = serializer.Deserialize>(stream); + + // Then + Assert.Equal("aws:kafka", result.EventSource); + Assert.Single(result.Records); + var record = result.First(); + Assert.Equal("key1", record.Key); + Assert.Equal("JSON Test", record.Value.Name); + Assert.Equal(199.99m, record.Value.Price); + Assert.Equal(456, record.Value.Id); + } + + [Fact] + public void Given_InvalidJsonData_When_DeserializedWithJsonSerializer_Then_ThrowsSerializationException() + { + // Given + var serializer = new PowertoolsKafkaJsonSerializer(); + string json = @"{ + ""eventSource"": ""aws:kafka"", + ""records"": { + ""mytopic-0"": [ + { + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""key"": """ + Convert.ToBase64String(Encoding.UTF8.GetBytes("key1")) + @""", + ""value"": """ + Convert.ToBase64String(Encoding.UTF8.GetBytes("{invalid-json}")) + @""" + } + ] + } + }"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act & Assert + var exception = Assert.Throws(() => + serializer.Deserialize>(stream)); + + // Verify the exception message contains information about the JSON parsing error + Assert.Contains("invalid start of a property name", exception.Message); + } + + [Fact] + public void Given_JsonRecordWithHeaders_When_ProcessedWithHandler_Then_HeadersAreAccessible() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + var record = records.First(); + var source = record.Headers["source"].DecodedValue(); + var contentType = record.Headers["content-type"].DecodedValue(); + context.Logger.LogInformation($"Headers: source={source}, content-type={contentType}"); + return "Headers processed"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Value = new JsonProduct { Name = "Header Test" }, + Headers = new Dictionary + { + { "source", 
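+ // Kafka headers travel as raw bytes; DecodedValue(), used in the handler above, is the single-header counterpart + // of DecodedValues() and should turn record.Headers["source"] back into the UTF-8 string "web-app" here.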
Encoding.UTF8.GetBytes("web-app") }, + { "content-type", Encoding.UTF8.GetBytes("application/json") } + } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Headers processed", result); + Assert.Contains("Headers: source=web-app, content-type=application/json", mockLogger.Buffer.ToString()); + } + + #endregion + + #region Avro Serializer Tests + + [Fact] + public void Given_SingleAvroRecord_When_ProcessedWithHandler_Then_SuccessfullyDeserializedAndProcessed() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Processing {record.Value.name} at ${record.Value.price}"); + } + return "Successfully processed Avro Kafka events"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create a single record + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "mytopic", + Partition = 0, + Offset = 15, + Key = "avro-key", + Value = new AvroProduct { name = "Camera", price = 349.95 } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed Avro Kafka events", result); + Assert.Contains("Processing Camera at $349.95", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_ComplexAvroKey_When_ProcessedWithHandler_Then_KeyIsCorrectlyDeserialized() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + var record = records.First(); + context.Logger.LogInformation($"Processing product with key ID: {record.Key.id}, color: {record.Key.color}"); + return "Successfully processed complex keys"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Key = new AvroKey { id = 42, color = Color.GREEN }, + Value = new AvroProduct { name = "Green Item", price = 49.99 } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed complex keys", result); + Assert.Contains("Processing product with key ID: 42, color: GREEN", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_ThrowsSerializationException() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + + // Create data that looks like Avro but without schema + byte[] invalidAvroData = { 0x01, 0x02, 0x03, 0x04 }; // Just some random bytes + string base64Data = Convert.ToBase64String(invalidAvroData); + + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("test-key"))}"", + ""value"": ""{base64Data}"" + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act & Assert + Assert.Throws(() => + serializer.Deserialize>(stream)); + } + + #endregion + + #region Protobuf Serializer Tests + + [Fact] + public void Given_SingleProtobufRecord_When_ProcessedWithHandler_Then_SuccessfullyDeserializedAndProcessed() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + 
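+ // A Protobuf-typed handler is shaped exactly like the JSON and Avro handlers above; only the value type changes + // (ProtobufProduct, presumably generated from a .proto schema into the TestKafka namespace) along with the + // serializer registered for the function.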
foreach (var record in records) + { + context.Logger.LogInformation($"Processing {record.Value.Name} at ${record.Value.Price}"); + } + return "Successfully processed Protobuf Kafka events"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create a single record + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "mytopic", + Partition = 0, + Offset = 15, + Key = 42, + Value = new ProtobufProduct { Name = "Smart Watch", Id = 789, Price = 249.99 } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed Protobuf Kafka events", result); + Assert.Contains("Processing Smart Watch at $249.99", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_NullKeyOrValue_When_ProcessedWithHandler_Then_HandlesNullsCorrectly() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + string keyInfo = record.Key.HasValue ? record.Key.Value.ToString() : "null"; + string valueInfo = record.Value != null ? record.Value.Name : "null"; + context.Logger.LogInformation($"Key: {keyInfo}, Value: {valueInfo}"); + } + return "Processed records with nulls"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() { Key = 1, Value = new ProtobufProduct { Name = "Valid Product" } }, + new() { Key = null, Value = new ProtobufProduct { Name = "No Key" } }, + new() { Key = 3, Value = null } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Processed records with nulls", result); + Assert.Contains("Key: 1, Value: Valid Product", mockLogger.Buffer.ToString()); + Assert.Contains("Key: null, Value: No Key", mockLogger.Buffer.ToString()); + Assert.Contains("Key: 3, Value: null", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_MissingProtobufParser_When_DeserializedWithProtobufSerializer_Then_FallsBackToJson() + { + // Arrange + var serializer = new PowertoolsKafkaProtobufSerializer(); + + // Create regular JSON instead of Protobuf binary + string jsonData = "{\"Name\":\"Fallback Test\",\"Id\":789,\"Price\":59.99}"; + string base64Json = Convert.ToBase64String(Encoding.UTF8.GetBytes(jsonData)); + + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("42"))}"", + ""value"": ""{base64Json}"" + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act & Assert + Assert.Throws(() => + serializer.Deserialize>(stream)); + } + + #endregion + + #region Raw/Default Deserialization Tests + + [Fact] + public void Given_RawUtf8Data_When_ProcessedWithDefaultHandler_Then_DeserializesToStrings() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Key: {record.Key}, Value: {record.Value}"); + } + return "Processed raw data"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create Kafka event with raw 
base64-encoded strings
+        string kafkaEventJson = @$"{{
+            ""eventSource"": ""aws:kafka"",
+            ""records"": {{
+                ""mytopic-0"": [
+                    {{
+                        ""topic"": ""mytopic"",
+                        ""partition"": 0,
+                        ""offset"": 15,
+                        ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("simple-key"))}"",
+                        ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Simple UTF-8 text value"))}"",
+                        ""headers"": [
+                            {{ ""content-type"": [{(int)'t'}, {(int)'e'}, {(int)'x'}, {(int)'t'}] }}
+                        ]
+                    }}
+                ]
+            }}
+        }}";
+
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Use the default serializer which handles base64 → UTF-8 conversion
+        var serializer = new PowertoolsKafkaJsonSerializer();
+        var records = serializer.Deserialize<ConsumerRecords<string, string>>(stream);
+
+        // When
+        var result = Handler(records, mockContext);
+
+        // Then
+        Assert.Equal("Processed raw data", result);
+        Assert.Contains("Key: simple-key, Value: Simple UTF-8 text value", mockLogger.Buffer.ToString());
+    }
+
+    #endregion
+}
+
+// Model classes for testing
+public class JsonProduct
+{
+    public string Name { get; set; }
+    public decimal Price { get; set; }
+    public int Id { get; set; }
+}
\ No newline at end of file
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs
index 5bafd1cd6..2edd33a9a 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs
@@ -1,11 +1,22 @@
-using System;
-using System.Collections.Generic;
-using System.IO;
+/*
+ * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License").
+ * You may not use this file except in compliance with the License.
+ * A copy of the License is located at
+ *
+ * http://aws.amazon.com/apache2.0
+ *
+ * or in the "license" file accompanying this file. This file is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied. See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
 using System.Runtime.Serialization;
 using System.Text;
 using System.Text.Json;
 using System.Text.Json.Serialization;
-using Xunit;
 
 namespace AWS.Lambda.Powertools.Kafka.Tests
 {
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
index 69234ba36..33271a938 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs
@@ -1,3 +1,18 @@
+/*
+ * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License").
+ * You may not use this file except in compliance with the License.
+ * A copy of the License is located at
+ *
+ * http://aws.amazon.com/apache2.0
+ *
+ * or in the "license" file accompanying this file. This file is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied. See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
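// The tests below follow the same Given/When/Then shape as the Avro handler
// tests: an inline handler delegate is run against hand-built ConsumerRecords,
// with Amazon.Lambda.TestUtilities supplying the mock context so log output can
// be asserted. Illustrative sketch (hypothetical snippet, not in this patch):
//
//   var mockLogger = new TestLambdaLogger();
//   var mockContext = new TestLambdaContext { Logger = mockLogger };
//   // ... invoke the handler under test ...
//   Assert.Contains("expected log line", mockLogger.Buffer.ToString());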
 using System.Text;
 using Amazon.Lambda.Core;
 using Amazon.Lambda.TestUtilities;
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
index fe2b441aa..368914c73 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs
@@ -1,3 +1,18 @@
+/*
+ * Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License").
+ * You may not use this file except in compliance with the License.
+ * A copy of the License is located at
+ *
+ * http://aws.amazon.com/apache2.0
+ *
+ * or in the "license" file accompanying this file. This file is distributed
+ * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
+ * express or implied. See the License for the specific language governing
+ * permissions and limitations under the License.
+ */
+
 using System.Text;
 using System.Text.Json;
 using System.Text.Json.Serialization;

From 06cd719f2ca7d80f38bb0d8b60c7540b5630a5ab Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Wed, 18 Jun 2025 19:31:06 +0100
Subject: [PATCH 27/35] adding avrogen to codeql build

---
 .github/workflows/codeql-analysis.yml | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index ba06d4429..206a028f8 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -39,6 +39,9 @@ jobs:
       with:
         languages: ${{ matrix.language }}
 
+    - name: Install global tools
+      run: dotnet tool install --global Apache.Avro.Tools
+
     # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild

From 407964ecb1062fab79677660944857d6be5b2a2a Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Wed, 18 Jun 2025 23:41:31 +0100
Subject: [PATCH 28/35] feat: add schema metadata support for key and value in
 ConsumerRecord; update deserialization logic and tests

---
 .../ConsumerRecord.cs                         | 10 ++++
 .../PowertoolsKafkaSerializerBase.cs          | 26 +++++++++
 .../SchemaMetadata.cs                         | 17 ++++++
 .../PowertoolsKafkaSerializerBaseTests.cs     | 58 ++++++++++++++++++-
 4 files changed, 110 insertions(+), 1 deletion(-)
 create mode 100644 libraries/src/AWS.Lambda.Powertools.Kafka/SchemaMetadata.cs

diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs
index 6384cd46c..61fe9b743 100644
--- a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs
@@ -72,4 +72,14 @@ public class ConsumerRecord<TKey, TValue>
     /// Gets the headers associated with the record.
     /// </summary>
     public Dictionary<string, byte[]> Headers { get; internal set; } = null!;
+
+    /// <summary>
+    /// Gets the schema metadata for the record's value.
+    /// </summary>
+    public SchemaMetadata ValueSchemaMetadata { get; internal set; } = null!;
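// Illustrative sketch (hypothetical handler code, not in this patch): the new
// metadata lets a handler branch on the wire format announced for each record.
//
//   foreach (var record in records)
//   {
//       if (record.ValueSchemaMetadata?.DataFormat == "AVRO")
//       {
//           context.Logger.LogInformation(
//               $"Value uses Avro schema {record.ValueSchemaMetadata.SchemaId}");
//       }
//   }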
+    /// <summary>
+    /// Gets the schema metadata for the record's key.
+    /// </summary>
+    public SchemaMetadata KeySchemaMetadata { get; internal set; } = null!;
 }
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
index d1e74fc96..6a6ba5704 100644
--- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
+++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs
@@ -259,9 +259,35 @@ private object ProcessTopicPartition(JsonElement partitionData, Type keyType, Ty
 
         // Process headers
         ProcessHeaders(recordElement, record, recordType);
+
+        // Process schema metadata for both key and value
+        ProcessSchemaMetadata(recordElement, record, recordType, "keySchemaMetadata", "KeySchemaMetadata");
+        ProcessSchemaMetadata(recordElement, record, recordType, "valueSchemaMetadata", "ValueSchemaMetadata");
+
         return record;
     }
+
+    private void ProcessSchemaMetadata(JsonElement recordElement, object record, Type recordType,
+        string jsonPropertyName, string recordPropertyName)
+    {
+        if (recordElement.TryGetProperty(jsonPropertyName, out var metadataElement))
+        {
+            var schemaMetadata = new SchemaMetadata();
+
+            if (metadataElement.TryGetProperty("dataFormat", out var dataFormatElement))
+            {
+                schemaMetadata.DataFormat = dataFormatElement.GetString() ?? string.Empty;
+            }
+
+            if (metadataElement.TryGetProperty("schemaId", out var schemaIdElement))
+            {
+                schemaMetadata.SchemaId = schemaIdElement.GetString() ?? string.Empty;
+            }
+
+            recordType.GetProperty(recordPropertyName)?.SetValue(record, schemaMetadata);
+        }
+    }
 
     private void ProcessKey(JsonElement recordElement, object record, Type recordType, Type keyType)
     {
diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/SchemaMetadata.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/SchemaMetadata.cs
new file mode 100644
index 000000000..4f2c9828f
--- /dev/null
+++ b/libraries/src/AWS.Lambda.Powertools.Kafka/SchemaMetadata.cs
@@ -0,0 +1,17 @@
+namespace AWS.Lambda.Powertools.Kafka;
+
+/// <summary>
+/// Represents metadata about the schema used for serializing the record's value or key.
+/// </summary>
+public class SchemaMetadata
+{
+    /// <summary>
+    /// Gets the format of the data (e.g., "JSON", "AVRO", "Protobuf").
+    /// </summary>
+    public string DataFormat { get; internal set; } = null!;
+
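// For reference, both fields are populated by ProcessSchemaMetadata (added to
// PowertoolsKafkaSerializerBase above) from per-record JSON such as the test
// fixture used later in this patch:
//
//   "valueSchemaMetadata": { "dataFormat": "AVRO", "schemaId": "value-schema-002" }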
+    /// <summary>
+    /// Gets the schema ID associated with the record's value or key.
+    /// </summary>
+    public string SchemaId { get; internal set; } = null!;
+}
\ No newline at end of file
diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs
index 2edd33a9a..bbb82068e 100644
--- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs
+++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs
@@ -514,7 +514,7 @@ public void Deserialize_NonConsumerRecordFailed_ThrowsException()
             // With invalid JSON input, JsonSerializer throws JsonException directly
             var ex = Assert.Throws<JsonException>(() => 
                 serializer.Deserialize(stream));
-            
+
             // Check that we're getting a JSON parsing error
             Assert.Contains("invalid", ex.Message.ToLower());
         }
@@ -585,6 +585,62 @@ public void Serialize_WithTypeInfoFromContext_WritesToStream()
             Assert.Contains("\"Name\":\"ContextSerialization\"", result);
             Assert.Contains("\"Value\":555", result);
         }
+
+        [Fact]
+        public void Deserialize_WithSchemaMetadata_PopulatesSchemaMetadataProperties()
+        {
+            // Arrange
+            var serializer = new TestKafkaSerializer();
+
+            string kafkaEventJson = @$"{{
+                ""eventSource"": ""aws:kafka"",
+                ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"",
+                ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"",
+                ""records"": {{
+                    ""mytopic-0"": [
+                        {{
+                            ""topic"": ""mytopic"",
+                            ""partition"": 0,
+                            ""offset"": 15,
+                            ""timestamp"": 1645084650987,
+                            ""timestampType"": ""CREATE_TIME"",
+                            ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey"))}"",
+                            ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("testValue"))}"",
+                            ""headers"": [
+                                {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }}
+                            ],
+                            ""keySchemaMetadata"": {{
+                                ""dataFormat"": ""JSON"",
+                                ""schemaId"": ""key-schema-001""
+                            }},
+                            ""valueSchemaMetadata"": {{
+                                ""dataFormat"": ""AVRO"",
+                                ""schemaId"": ""value-schema-002""
+                            }}
+                        }}
+                    ]
+                }}
+            }}";
+
+            using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+            // Act
+            var result = serializer.Deserialize<ConsumerRecords<string, string>>(stream);
+
+            // Assert
+            Assert.NotNull(result);
+            var record = result.First();
+
+            // Assert key schema metadata
+            Assert.NotNull(record.KeySchemaMetadata);
+            Assert.Equal("JSON", record.KeySchemaMetadata.DataFormat);
+            Assert.Equal("key-schema-001", record.KeySchemaMetadata.SchemaId);
+
+            // Assert value schema metadata
+            Assert.NotNull(record.ValueSchemaMetadata);
+            Assert.Equal("AVRO", record.ValueSchemaMetadata.DataFormat);
+            Assert.Equal("value-schema-002", record.ValueSchemaMetadata.SchemaId);
+        }
     }
 
 [JsonSerializable(typeof(TestModel))]

From 5976ed6364bbc6236aeaf8a45090fbe078b18bfd Mon Sep 17 00:00:00 2001
From: Henrique Graca <999396+hjgraca@users.noreply.github.com>
Date: Thu, 19 Jun 2025 10:40:16 +0100
Subject: [PATCH 29/35] refactor: update deserialization methods to use
 format-specific logic; improve null handling and test cases

---
 .../PowertoolsKafkaAvroSerializer.cs          | 58 ++++-------------
 .../PowertoolsKafkaJsonSerializer.cs          | 61 +++++-----------
 .../PowertoolsKafkaProtobufSerializer.cs      | 69 ++++---------------
 .../PowertoolsKafkaSerializerBase.cs          | 59 +++++++++++-----
 .../KafkaHandlerFunctionalTests.cs            | 32 +++++----
 .../PowertoolsKafkaSerializerBaseTests.cs     | 13 +---
 6 files changed, 108 insertions(+), 184 deletions(-)

diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs 
b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs index 44bb5d833..d0d232e7e 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs @@ -100,50 +100,22 @@ private Schema GetAvroSchema([DynamicallyAccessedMembers(DynamicallyAccessedMemb } /// - /// Deserializes a base64-encoded Avro binary value into an object. + /// Deserializes binary data using Avro format or falls back to JSON. /// - /// The base64-encoded Avro binary data. - /// The type to deserialize to. + /// The binary data to deserialize. + /// The type to deserialize to. + /// Whether this data represents a key (true) or a value (false). /// The deserialized object. - [RequiresDynamicCode("Avro deserialization requires reflection which may be incompatible with AOT.")] - [RequiresUnreferencedCode("Avro deserialization requires reflection which may be incompatible with trimming.")] - protected override object DeserializeComplexValue(string base64Value, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) - { - var schema = GetAvroSchema(valueType); - return DeserializeAvroValue(base64Value, schema); - } - - /// - /// Deserializes a base64-encoded Avro binary value into an object using the provided schema. - /// - /// The base64-encoded Avro binary data. - /// The Avro schema to use for deserialization. - /// The deserialized object. - private object DeserializeAvroValue(string base64Value, Schema schema) - { - var avroBytes = Convert.FromBase64String(base64Value); - using var stream = new MemoryStream(avroBytes); - var decoder = new BinaryDecoder(stream); - var reader = new SpecificDatumReader(schema, schema); - var result = reader.Read(null!, decoder); - return result ?? throw new InvalidOperationException("Failed to deserialize Avro value"); - } - - /// - /// Deserializes complex key types using Avro format. - /// - /// The key bytes to deserialize. - /// The type to deserialize to. - /// The deserialized key object. [RequiresDynamicCode("Avro and JSON deserialization might require runtime code generation.")] [RequiresUnreferencedCode("Avro and JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type keyType) + protected override object? 
DeserializeFormatSpecific(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) { try { - // Try to get Avro schema for the key type - var schemaField = keyType.GetField("_SCHEMA", + // Try to get Avro schema for the type + var schemaField = targetType.GetField("_SCHEMA", BindingFlags.Public | BindingFlags.Static); if (schemaField != null) @@ -151,7 +123,7 @@ private object DeserializeAvroValue(string base64Value, Schema schema) var schema = schemaField.GetValue(null) as Schema; if (schema != null) { - using var stream = new MemoryStream(keyBytes); + using var stream = new MemoryStream(data); var decoder = new BinaryDecoder(stream); var reader = new SpecificDatumReader(schema, schema); return reader.Read(null!, decoder); @@ -159,12 +131,12 @@ private object DeserializeAvroValue(string base64Value, Schema schema) } // As a fallback, try JSON deserialization - var jsonStr = Encoding.UTF8.GetString(keyBytes); + var jsonStr = Encoding.UTF8.GetString(data); if (SerializerContext != null) { // Try to get type info from context for AOT compatibility - var typeInfo = SerializerContext.GetTypeInfo(keyType); + var typeInfo = SerializerContext.GetTypeInfo(targetType); if (typeInfo != null) { return JsonSerializer.Deserialize(jsonStr, typeInfo); @@ -173,13 +145,13 @@ private object DeserializeAvroValue(string base64Value, Schema schema) // Fallback to regular deserialization #pragma warning disable IL2026, IL3050 - return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); #pragma warning restore IL2026, IL3050 } catch { - // If all deserialization attempts fail, return null - return null; + // If all deserialization attempts fail, return null or default + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; } } } diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs index 95fc2e354..9fefc8589 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs @@ -71,72 +71,47 @@ public PowertoolsKafkaJsonSerializer(JsonSerializerContext serializerContext) : } /// - /// Deserializes a base64-encoded JSON value into an object. + /// Deserializes binary data using JSON format. /// - /// The base64-encoded JSON data. - /// The type to deserialize to. + /// The binary data to deserialize. + /// The type to deserialize to. + /// Whether this data represents a key (true) or a value (false). /// The deserialized object. [RequiresDynamicCode("JSON deserialization might require runtime code generation.")] [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] - protected override object DeserializeComplexValue(string base64Value, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) - { - var jsonBytes = Convert.FromBase64String(base64Value); - var jsonString = Encoding.UTF8.GetString(jsonBytes); - - if (SerializerContext != null) - { - // Try to get type info from context for AOT compatibility - var typeInfo = SerializerContext.GetTypeInfo(valueType); - if (typeInfo != null) - { - var result = JsonSerializer.Deserialize(jsonString, typeInfo); - return result ?? 
throw new InvalidOperationException($"Failed to deserialize JSON to type {valueType.Name}"); - } - } - - // Fallback to regular deserialization - #pragma warning disable IL2026, IL3050 - var fallbackResult = JsonSerializer.Deserialize(jsonString, valueType, JsonOptions); - #pragma warning restore IL2026, IL3050 - - return fallbackResult ?? throw new InvalidOperationException($"Failed to deserialize JSON to type {valueType.Name}"); - } - - /// - /// Deserializes complex key types from JSON. - /// - /// The key bytes to deserialize. - /// The type to deserialize to. - /// The deserialized key object. - [RequiresDynamicCode("JSON deserialization might require runtime code generation.")] - [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type keyType) + protected override object? DeserializeFormatSpecific(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) { try { // Convert bytes to JSON string - var jsonStr = Encoding.UTF8.GetString(keyBytes); + var jsonStr = Encoding.UTF8.GetString(data); if (SerializerContext != null) { // Try to get type info from context for AOT compatibility - var typeInfo = SerializerContext.GetTypeInfo(keyType); + var typeInfo = SerializerContext.GetTypeInfo(targetType); if (typeInfo != null) { - return JsonSerializer.Deserialize(jsonStr, typeInfo); + var result = JsonSerializer.Deserialize(jsonStr, typeInfo); + if (result != null) + { + return result; + } } } // Fallback to regular deserialization #pragma warning disable IL2026, IL3050 - return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); #pragma warning restore IL2026, IL3050 } catch { - // If deserialization fails, return null - return null; + // If deserialization fails, return null or default + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; } } } diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs index b0149fdd4..cf52c660a 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs @@ -74,62 +74,23 @@ public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext } /// - /// Deserializes a base64-encoded Protobuf binary value into an object. + /// Deserializes binary data using Protobuf format or falls back to JSON. /// - /// The base64-encoded Protobuf binary data. - /// The type to deserialize to. + /// The binary data to deserialize. + /// The type to deserialize to. + /// Whether this data represents a key (true) or a value (false). /// The deserialized object. 
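// Calling convention (from PowertoolsKafkaSerializerBase in this same patch):
// the base class base64-decodes the record field and then delegates here, e.g.
//
//   var data = Convert.FromBase64String(base64Value);
//   return DeserializeFormatSpecific(data, valueType, isKey: false);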
- [RequiresDynamicCode("Protobuf deserialization might require runtime code generation.")] - [RequiresUnreferencedCode("Protobuf deserialization might require types that cannot be statically analyzed.")] - protected override object DeserializeComplexValue(string base64Value, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type valueType) - { - var protobufBytes = Convert.FromBase64String(base64Value); - return DeserializeProtobufValue(protobufBytes, valueType); - } - - /// - /// Deserializes Protobuf binary data into an object of the specified type. - /// - /// The Protobuf binary data. - /// The Protobuf message type to deserialize to. - /// The deserialized object. - [RequiresDynamicCode("Protobuf deserialization might require runtime code generation.")] - [RequiresUnreferencedCode("Protobuf deserialization might require types that cannot be statically analyzed.")] - private object DeserializeProtobufValue(byte[] protobufBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type messageType) - { - // Find the Parser property which is available on all Protobuf generated classes - var parserProperty = messageType.GetProperty("Parser", - BindingFlags.Public | BindingFlags.Static); - - if (parserProperty == null) - throw new InvalidOperationException($"Type {messageType.Name} does not appear to be a Protobuf message type: Parser property not found"); - - var parser = parserProperty.GetValue(null) as MessageParser; - if (parser == null) - throw new InvalidOperationException($"Could not get Parser for Protobuf type {messageType.Name}"); - - // Use the parser to deserialize the message - using var stream = new MemoryStream(protobufBytes); - var message = parser.ParseFrom(stream); - - return message; - } - - /// - /// Deserializes complex key types using Protobuf format. - /// - /// The key bytes to deserialize. - /// The type to deserialize to. - /// The deserialized key object. [RequiresDynamicCode("Protobuf and JSON deserialization might require runtime code generation.")] [RequiresUnreferencedCode("Protobuf and JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeComplexKey(byte[] keyBytes, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] Type keyType) + protected override object? 
DeserializeFormatSpecific(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) { try { // Check if it's a Protobuf message type - var parserProperty = keyType.GetProperty("Parser", + var parserProperty = targetType.GetProperty("Parser", BindingFlags.Public | BindingFlags.Static); if (parserProperty != null) @@ -137,18 +98,18 @@ private object DeserializeProtobufValue(byte[] protobufBytes, [DynamicallyAccess var parser = parserProperty.GetValue(null) as MessageParser; if (parser != null) { - using var stream = new MemoryStream(keyBytes); + using var stream = new MemoryStream(data); return parser.ParseFrom(stream); } } // As a fallback, try JSON deserialization - var jsonStr = Encoding.UTF8.GetString(keyBytes); + var jsonStr = Encoding.UTF8.GetString(data); if (SerializerContext != null) { // Try to get type info from context for AOT compatibility - var typeInfo = SerializerContext.GetTypeInfo(keyType); + var typeInfo = SerializerContext.GetTypeInfo(targetType); if (typeInfo != null) { return JsonSerializer.Deserialize(jsonStr, typeInfo); @@ -157,13 +118,13 @@ private object DeserializeProtobufValue(byte[] protobufBytes, [DynamicallyAccess // Fallback to regular deserialization #pragma warning disable IL2026, IL3050 - return JsonSerializer.Deserialize(jsonStr, keyType, JsonOptions); + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); #pragma warning restore IL2026, IL3050 } catch { - // If all deserialization attempts fail, return null - return null; + // If all deserialization attempts fail, return null or default + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; } } } diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index 6a6ba5704..92ac50e6f 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -385,8 +385,8 @@ private byte[] ExtractHeaderBytes(JsonElement headerArray) return DeserializePrimitiveValue(keyBytes, keyType); } - // For complex types, try format-specific deserialization - return DeserializeComplexKey(keyBytes, keyType); + // For complex types, use format-specific deserialization + return DeserializeFormatSpecific(keyBytes, keyType, isKey: true); } /// @@ -533,26 +533,48 @@ protected virtual object DeserializeValue(string base64Value, return DeserializePrimitiveValue(bytes, valueType); } - // For complex types, use format-specific deserialization - return DeserializeComplexValue(base64Value, valueType); + // For complex types, decode base64 and use format-specific deserialization + var data = Convert.FromBase64String(base64Value); + return DeserializeFormatSpecific(data, valueType, isKey: false); } /// - /// Deserializes complex value types using the appropriate format. + /// Deserializes binary data into an object using the format-specific implementation. + /// This method must be overridden by derived classes to implement format-specific deserialization. /// - protected abstract object DeserializeComplexValue(string base64Value, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + /// The binary data to deserialize. + /// The target type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. 
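// Dispatch and failure contract for this virtual method: primitive keys and
// values are decoded by DeserializePrimitiveValue and never reach the
// format-specific path, and when a complex payload cannot be parsed the
// implementations return a default instead of throwing, e.g.
//
//   return targetType.IsValueType ? Activator.CreateInstance(targetType) : null;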
+ [RequiresDynamicCode("Format-specific deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Format-specific deserialization might require types that cannot be statically analyzed.")] + protected virtual object? DeserializeFormatSpecific(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] - Type valueType); - - - /// - /// Deserializes complex key types using the appropriate format. - /// - /// The key bytes to deserialize. - /// The type to deserialize to. - /// The deserialized key object. - protected abstract object? DeserializeComplexKey(byte[] keyBytes, Type keyType); + Type targetType, bool isKey) + { + try + { + // Default implementation tries JSON + var jsonStr = Encoding.UTF8.GetString(data); + + if (SerializerContext != null) + { + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(jsonStr, typeInfo); + } + } + + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); + } + catch + { + // If deserialization fails, return null or default value + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; + } + } /// /// Checks if the specified type is a primitive or simple type. @@ -630,4 +652,5 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) // For any other type, try to parse as string return Convert.ChangeType(Encoding.UTF8.GetString(bytes), valueType); } -} \ No newline at end of file +} + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs index 97327e76c..6f192691d 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs @@ -40,6 +40,7 @@ Accessing and parsing record headers */ using System.Text; +using System.Text.Json; using Amazon.Lambda.Core; using Amazon.Lambda.TestUtilities; using AWS.Lambda.Powertools.Kafka.Json; @@ -225,7 +226,7 @@ public void Given_JsonStreamInput_When_DeserializedWithJsonSerializer_Then_Corre } [Fact] - public void Given_InvalidJsonData_When_DeserializedWithJsonSerializer_Then_ThrowsSerializationException() + public void Given_InvalidJsonData_When_DeserializedWithJsonSerializer_Then_Returns_Null() { // Given var serializer = new PowertoolsKafkaJsonSerializer(); @@ -246,13 +247,12 @@ public void Given_InvalidJsonData_When_DeserializedWithJsonSerializer_Then_Throw }"; using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); - + var output = serializer.Deserialize>(stream); + // Act & Assert - var exception = Assert.Throws(() => - serializer.Deserialize>(stream)); - - // Verify the exception message contains information about the JSON parsing error - Assert.Contains("invalid start of a property name", exception.Message); + Assert.Single(output.Records); + Assert.Equal("key1", output.Records.First().Value[0].Key); + Assert.Null(output.Records.First().Value[0].Value); } [Fact] @@ -386,7 +386,7 @@ string Handler(ConsumerRecords records, ILambdaContext con } [Fact] - public void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_ThrowsSerializationException() + public void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_ReturnsNull() { // Arrange var serializer = new PowertoolsKafkaAvroSerializer(); @@ -411,10 +411,12 @@ public 
void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_Thr }}"; using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - + var output = serializer.Deserialize>(stream); + // Act & Assert - Assert.Throws(() => - serializer.Deserialize>(stream)); + Assert.Single(output.Records); + Assert.Equal("test-key", output.Records.First().Value[0].Key); + Assert.Null(output.Records.First().Value[0].Value); } #endregion @@ -533,10 +535,12 @@ public void Given_MissingProtobufParser_When_DeserializedWithProtobufSerializer_ }}"; using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + var output = serializer.Deserialize>(stream); // Act & Assert - Assert.Throws(() => - serializer.Deserialize>(stream)); + Assert.Single(output.Records); + Assert.Equal("42", output.Records.First().Value[0].Key); + Assert.Equal(jsonData, JsonSerializer.Serialize(output.Records.First().Value[0].Value)); } #endregion @@ -599,6 +603,6 @@ string Handler(ConsumerRecords records, ILambdaContext context) public class JsonProduct { public string Name { get; set; } - public decimal Price { get; set; } public int Id { get; set; } + public decimal Price { get; set; } } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs index bbb82068e..2c6e12fc4 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs @@ -46,18 +46,7 @@ public TestKafkaSerializer(JsonSerializerOptions options, JsonSerializerContext : base(options, context) { } - - protected override object? DeserializeComplexKey(byte[] keyBytes, Type keyType) - { - return JsonSerializer.Deserialize(keyBytes, keyType); - } - - protected override object DeserializeComplexValue(string base64Value, Type valueType) - { - var bytes = Convert.FromBase64String(base64Value); - return JsonSerializer.Deserialize(bytes, valueType); - } - + // Implement our own version that mimics the private method's behavior public object TestDeserializePrimitiveValue(byte[] bytes, Type valueType) { From 37e5ba3c77be80b8a4b6262244f1c98d2155c048 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Thu, 19 Jun 2025 11:18:27 +0100 Subject: [PATCH 30/35] feat: enhance Protobuf deserialization to support Confluent Schema Registry formats; add tests for various message index scenarios --- .../PowertoolsKafkaProtobufSerializer.cs | 165 +++++++++++++++--- .../AWS.Lambda.Powertools.Kafka.Tests.csproj | 4 + .../PowertoolsKafkaProtobufSerializerTests.cs | 76 +++++++- .../kafka-protobuf-confluent-event.json | 52 ++++++ 4 files changed, 264 insertions(+), 33 deletions(-) create mode 100644 libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-confluent-event.json diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs index cf52c660a..84f8c27e1 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs @@ -13,6 +13,7 @@ * permissions and limitations under the License. 
*/ +using System.Collections.Concurrent; using System.Diagnostics.CodeAnalysis; using System.Reflection; using System.Text; @@ -20,6 +21,7 @@ using System.Text.Json.Serialization; using Google.Protobuf; + namespace AWS.Lambda.Powertools.Kafka.Protobuf; /// @@ -47,6 +49,9 @@ namespace AWS.Lambda.Powertools.Kafka.Protobuf; /// public class PowertoolsKafkaProtobufSerializer : PowertoolsKafkaSerializerBase { + // Cache for Protobuf parsers to improve performance + private static readonly ConcurrentDictionary _parserCache = new(); + /// /// Initializes a new instance of the class /// with default JSON serialization options. @@ -54,7 +59,7 @@ public class PowertoolsKafkaProtobufSerializer : PowertoolsKafkaSerializerBase public PowertoolsKafkaProtobufSerializer() : base() { } - + /// /// Initializes a new instance of the class /// with custom JSON serialization options. @@ -63,7 +68,7 @@ public PowertoolsKafkaProtobufSerializer() : base() public PowertoolsKafkaProtobufSerializer(JsonSerializerOptions jsonOptions) : base(jsonOptions) { } - + /// /// Initializes a new instance of the class /// with a JSON serializer context for AOT-compatible serialization. @@ -72,59 +77,161 @@ public PowertoolsKafkaProtobufSerializer(JsonSerializerOptions jsonOptions) : ba public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext) : base(serializerContext) { } - + /// /// Deserializes binary data using Protobuf format or falls back to JSON. + /// Handles both standard protobuf serialization and Confluent Schema Registry serialization. /// /// The binary data to deserialize. /// The type to deserialize to. /// Whether this data represents a key (true) or a value (false). /// The deserialized object. [RequiresDynamicCode("Protobuf and JSON deserialization might require runtime code generation.")] - [RequiresUnreferencedCode("Protobuf and JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeFormatSpecific(byte[] data, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + [RequiresUnreferencedCode( + "Protobuf and JSON deserialization might require types that cannot be statically analyzed.")] + protected override object? 
DeserializeFormatSpecific(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type targetType, bool isKey) { try { // Check if it's a Protobuf message type - var parserProperty = targetType.GetProperty("Parser", - BindingFlags.Public | BindingFlags.Static); - - if (parserProperty != null) + if (typeof(IMessage).IsAssignableFrom(targetType)) { - var parser = parserProperty.GetValue(null) as MessageParser; + // Get the parser from cache or create a new one + var parser = GetProtobufParser(targetType); if (parser != null) { - using var stream = new MemoryStream(data); - return parser.ParseFrom(stream); + try + { + // First, try standard protobuf deserialization + return parser.ParseFrom(data); + } + catch + { + try + { + // If standard deserialization fails, try message index handling + var result = DeserializeWithMessageIndex(data, parser); + if (result != null) + { + return result; + } + } + catch + { + // Continue to JSON fallback if message index handling fails + } + } } } - // As a fallback, try JSON deserialization + // If not a Protobuf message or parser not found, fall back to JSON var jsonStr = Encoding.UTF8.GetString(data); - - if (SerializerContext != null) + + if (SerializerContext == null) return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); + + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(jsonStr, typeInfo); + } + + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); + } + catch (Exception ex) + { + // If all deserialization attempts fail, throw with more helpful message + throw new InvalidOperationException("Unsupported type for Protobuf deserialization: " + targetType.Name + ". " + + "Protobuf deserialization requires a type of com.google.protobuf.Message. " + + "Consider using an alternative Deserializer.", ex); + } + } + + /// + /// Gets a Protobuf parser for the specified type, using a cache for better performance. + /// + /// The Protobuf message type. + /// A MessageParser for the specified type, or null if not found. + private MessageParser? GetProtobufParser(Type messageType) + { + return _parserCache.GetOrAdd(messageType, type => + { + try { - // Try to get type info from context for AOT compatibility - var typeInfo = SerializerContext.GetTypeInfo(targetType); - if (typeInfo != null) + var parserProperty = type.GetProperty("Parser", + BindingFlags.Public | BindingFlags.Static); + + if (parserProperty == null) + { + return null!; + } + + var parser = parserProperty.GetValue(null) as MessageParser; + if (parser == null) { - return JsonSerializer.Deserialize(jsonStr, typeInfo); + return null!; } + + return parser; + } + catch (Exception ex) + { + return null!; + } + }); + } + + /// + /// Deserializes Protobuf data that may include a Confluent Schema Registry message index. + /// Handles both the simple case (single 0) and complex case (length-prefixed array of indexes). + /// + /// The binary data to deserialize. + /// The Protobuf message parser. + /// The deserialized Protobuf message or throws an exception if parsing fails. 
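// Wire-format sketch of the two Confluent framings handled below; the byte
// values match the test vectors in kafka-protobuf-confluent-event.json:
//
//   0x00 <protobuf bytes>             // simple case: a single 0 varint
//   0x02 0x01 0x00 <protobuf bytes>   // length-prefixed index array [1, 0]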
+ private IMessage DeserializeWithMessageIndex(byte[] data, MessageParser parser) + { + using var inputStream = new MemoryStream(data); + using var codedInput = new CodedInputStream(inputStream); + + try + { + // Read the first varint - this could be either a simple 0 or the length of message index array + var firstValue = codedInput.ReadUInt32(); + + if (firstValue == 0) + { + // Simple case: Single 0 byte means first message type + return parser.ParseFrom(codedInput); + } + else + { + // Complex case: firstValue is the length of the message index array + // Skip each message index value + for (int i = 0; i < firstValue; i++) + { + codedInput.ReadUInt32(); + } + + // Now the remaining data should be the actual protobuf message + return parser.ParseFrom(codedInput); } - - // Fallback to regular deserialization - #pragma warning disable IL2026, IL3050 - return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); - #pragma warning restore IL2026, IL3050 } - catch + catch (Exception ex) { - // If all deserialization attempts fail, return null or default - return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; + // If reading message index fails, try another approach with the remaining data + try + { + // Reset stream position and try again with the whole data + inputStream.Position = 0; + return parser.ParseFrom(inputStream); + } + catch + { + // If that also fails, throw the original exception + throw new InvalidOperationException("Failed to parse protobuf data with or without message index", ex); + } } } -} +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj index 28e567c01..455134b24 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj @@ -54,6 +54,10 @@ PreserveNewest + + + PreserveNewest + PreserveNewest diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs index 368914c73..8d89340fc 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs @@ -13,6 +13,7 @@ * permissions and limitations under the License. 
*/ +using System.Runtime.Serialization; using System.Text; using System.Text.Json; using System.Text.Json.Serialization; @@ -181,7 +182,7 @@ public void DeserializeComplexKey_WithSerializerContext_UsesContext() } [Fact] - public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsNull() + public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsException() { // Arrange var serializer = new PowertoolsKafkaProtobufSerializer(); @@ -196,12 +197,79 @@ public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsNull( using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); // Act - // This shouldn't throw but return a record with null key - var result = serializer.Deserialize>(stream); + var message = Assert.Throws(() => serializer.Deserialize>(stream)); + Assert.Equal("Failed to deserialize key data: Unsupported type for Protobuf deserialization: TestModel. Protobuf deserialization requires a type of com.google.protobuf.Message. Consider using an alternative Deserializer.", message.Message); + } + + [Fact] + public void Deserialize_ConfluentMessageIndexFormats_AllFormatsDeserializeCorrectly() + { + // Arrange + var serializer = new PowertoolsKafkaProtobufSerializer(); + string kafkaEventJson = File.ReadAllText("Protobuf/kafka-protobuf-confluent-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("aws:kafka", result.EventSource); + + // Verify records + Assert.True(result.Records.ContainsKey("mytopic-0")); + var records = result.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify all records have been deserialized correctly (all should have the same content) + foreach (var record in records) + { + Assert.Equal("Laptop", record.Value.Name); + Assert.Equal(1001, record.Value.Id); + Assert.Equal(999.99, record.Value.Price); + } + } + + [Theory] + [InlineData("COkHEgZMYXB0b3AZUrgehes/j0A=", "Standard Protobuf")] // Standard protobuf + [InlineData("AAjpBxIGTGFwdG9wGVK4HoXrP49A", "Single Index")] // Confluent with single 0 index + [InlineData("AgEACOkHEgZMYXB0b3AZUrgehes/j0A=", "Complex Index")] // Confluent with index array [1, 0] + public void Deserialize_SpecificConfluentFormats_EachFormatDeserializesCorrectly(string base64Value, string testCase) + { + // Arrange + var serializer = new PowertoolsKafkaProtobufSerializer(); + string kafkaEventJson = CreateKafkaEvent("NDI=", base64Value); // Key is 42 in base64 + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); // Assert var record = result.First(); - Assert.Null(record.Key); + Assert.NotNull(record); + Assert.Equal(42, record.Key); // Key should be 42 + + // Value should be the same regardless of message index format + Assert.Equal("Laptop", record.Value.Name); + Assert.Equal(1001, record.Value.Id); + Assert.Equal(999.99, record.Value.Price); + } + + [Fact] + public void Deserialize_MessageIndexWithCorruptData_HandlesError() + { + // Arrange - Create invalid message index data (starts with 5 but doesn't have 5 entries) + byte[] invalidData = [5, 1, 2]; // Claims to have 5 entries but only has 2 + string kafkaEventJson = CreateKafkaEvent("NDI=", Convert.ToBase64String(invalidData)); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + var serializer = new PowertoolsKafkaProtobufSerializer(); + + // 
Act & Assert + var ex = Assert.Throws(() => + serializer.Deserialize>(stream)); + + // Verify the exception message contains useful information + Assert.Contains("Failed to deserialize value data:", ex.Message); } private string CreateKafkaEvent(string keyValue, string valueValue) diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-confluent-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-confluent-event.json new file mode 100644 index 000000000..d76b109d3 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-confluent-event.json @@ -0,0 +1,52 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "AAjpBxIGTGFwdG9wGVK4HoXrP49A", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "AgEACOkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} + From adda37be83687ca4e2677d2a8117362a6caba393 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Thu, 19 Jun 2025 12:19:16 +0100 Subject: [PATCH 31/35] refactor: reorganize test namespaces and improve deserialization error handling; update tests for missing schema scenarios --- .../PowertoolsKafkaAvroSerializer.cs | 63 ++--- .../PowertoolsKafkaJsonSerializer.cs | 72 +++-- .../PowertoolsKafkaProtobufSerializer.cs | 70 +++-- .../PowertoolsKafkaSerializerBase.cs | 78 ++++-- .../Avro/HandlerTests.cs | 2 +- .../PowertoolsKafkaAvroSerializerTests.cs | 77 +----- .../KafkaHandlerFunctionalTests.cs | 47 +--- .../PowertoolsKafkaSerializerBaseTests.cs | 251 ++++++++++++------ .../PowertoolsKafkaProtobufSerializerTests.cs | 65 +---- 9 files changed, 343 insertions(+), 382 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs index d0d232e7e..4bf3ea7cb 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs @@ -15,7 +15,6 @@ using System.Diagnostics.CodeAnalysis; using System.Reflection; -using System.Text; using System.Text.Json; using System.Text.Json.Serialization; using Avro; @@ -84,74 +83,52 @@ public PowertoolsKafkaAvroSerializer(JsonSerializerContext serializerContext) : /// Thrown if no schema is found for the type. 
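// For context: Apache.Avro.Tools (the avrogen tool added to the CodeQL workflow
// earlier in this PR) generates classes carrying the static field this lookup
// expects; roughly (assumed shape of generated code, not in this patch):
//
//   public partial class AvroProduct : ISpecificRecord
//   {
//       public static Schema _SCHEMA = Schema.Parse(@"{""type"":""record"", ...}");
//   }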
[RequiresDynamicCode("Avro schema access requires reflection which may be incompatible with AOT.")] [RequiresUnreferencedCode("Avro schema access requires reflection which may be incompatible with trimming.")] - private Schema GetAvroSchema([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type payloadType) + private Schema? GetAvroSchema([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type payloadType) { var schemaField = payloadType.GetField("_SCHEMA", BindingFlags.Public | BindingFlags.Static); if (schemaField == null) - throw new InvalidOperationException($"No Avro schema found for type {payloadType.Name}"); + return null; - var schema = schemaField.GetValue(null) as Schema; - if (schema == null) - throw new InvalidOperationException($"Avro schema for type {payloadType.Name} is null"); - - return schema; + return schemaField.GetValue(null) as Schema; } /// - /// Deserializes binary data using Avro format or falls back to JSON. + /// Deserializes complex (non-primitive) types using Avro format. /// /// The binary data to deserialize. /// The type to deserialize to. /// Whether this data represents a key (true) or a value (false). /// The deserialized object. - [RequiresDynamicCode("Avro and JSON deserialization might require runtime code generation.")] - [RequiresUnreferencedCode("Avro and JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeFormatSpecific(byte[] data, + [RequiresDynamicCode("Avro deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Avro deserialization might require types that cannot be statically analyzed.")] + protected override object? DeserializeComplexTypeFormat(byte[] data, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type targetType, bool isKey) { try { // Try to get Avro schema for the type - var schemaField = targetType.GetField("_SCHEMA", - BindingFlags.Public | BindingFlags.Static); + var schema = GetAvroSchema(targetType); - if (schemaField != null) - { - var schema = schemaField.GetValue(null) as Schema; - if (schema != null) - { - using var stream = new MemoryStream(data); - var decoder = new BinaryDecoder(stream); - var reader = new SpecificDatumReader(schema, schema); - return reader.Read(null!, decoder); - } - } - - // As a fallback, try JSON deserialization - var jsonStr = Encoding.UTF8.GetString(data); - - if (SerializerContext != null) + if (schema != null) { - // Try to get type info from context for AOT compatibility - var typeInfo = SerializerContext.GetTypeInfo(targetType); - if (typeInfo != null) - { - return JsonSerializer.Deserialize(jsonStr, typeInfo); - } + using var stream = new MemoryStream(data); + var decoder = new BinaryDecoder(stream); + var reader = new SpecificDatumReader(schema, schema); + return reader.Read(null!, decoder); } - // Fallback to regular deserialization - #pragma warning disable IL2026, IL3050 - return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); - #pragma warning restore IL2026, IL3050 + // If no Avro schema was found, throw an exception + throw new InvalidOperationException($"Unsupported type for Avro deserialization: {targetType.Name}. " + + "Avro deserialization requires a type with a static _SCHEMA field. " + + "Consider using an alternative Deserializer."); } - catch + catch (Exception ex) { - // If all deserialization attempts fail, return null or default - return targetType.IsValueType ? 
Activator.CreateInstance(targetType) : null; + // Preserve the error message while wrapping in SerializationException for consistent error handling + throw new System.Runtime.Serialization.SerializationException($"Failed to deserialize {(isKey ? "key" : "value")} data: {ex.Message}", ex); } } } diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs index 9fefc8589..61dd8d649 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs @@ -22,26 +22,9 @@ namespace AWS.Lambda.Powertools.Kafka.Json; /// /// A Lambda serializer for Kafka events that handles JSON-formatted data. -/// This serializer deserializes JSON data from Kafka records into strongly-typed objects. +/// This serializer automatically deserializes the JSON format from base64-encoded strings +/// in Kafka records and converts them to strongly-typed objects. /// -/// -/// -/// [assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] -/// -/// // Your Lambda handler will receive properly deserialized objects -/// public class Function -/// { -/// public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context) -/// { -/// foreach (var record in records) -/// { -/// Customer customer = record.Value; -/// context.Logger.LogInformation($"Processed customer {customer.Name}"); -/// } -/// } -/// } -/// -/// public class PowertoolsKafkaJsonSerializer : PowertoolsKafkaSerializerBase { /// @@ -83,6 +66,57 @@ public PowertoolsKafkaJsonSerializer(JsonSerializerContext serializerContext) : [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type targetType, bool isKey) + { + try + { + // Handle primitive types directly + if (IsPrimitiveOrSimpleType(targetType)) + { + return DeserializePrimitiveValue(data, targetType); + } + + // Convert bytes to JSON string + var jsonStr = Encoding.UTF8.GetString(data); + + if (SerializerContext != null) + { + // Try to get type info from context for AOT compatibility + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) + { + var result = JsonSerializer.Deserialize(jsonStr, typeInfo); + if (result != null) + { + return result; + } + } + } + + // Fallback to regular deserialization + #pragma warning disable IL2026, IL3050 + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); + #pragma warning restore IL2026, IL3050 + } + catch + { + // If deserialization fails, return null or default + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; + } + } + + /// + /// Deserializes complex (non-primitive) types using JSON format. + /// + /// The binary data to deserialize. + /// The type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. + [RequiresDynamicCode("JSON deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] + protected override object? 
DeserializeComplexTypeFormat(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) { try { diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs index 84f8c27e1..c7d9fb7ef 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs @@ -16,7 +16,6 @@ using System.Collections.Concurrent; using System.Diagnostics.CodeAnalysis; using System.Reflection; -using System.Text; using System.Text.Json; using System.Text.Json.Serialization; using Google.Protobuf; @@ -79,17 +78,17 @@ public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext } /// - /// Deserializes binary data using Protobuf format or falls back to JSON. + /// Deserializes complex (non-primitive) types using Protobuf format. /// Handles both standard protobuf serialization and Confluent Schema Registry serialization. /// /// The binary data to deserialize. /// The type to deserialize to. /// Whether this data represents a key (true) or a value (false). /// The deserialized object. - [RequiresDynamicCode("Protobuf and JSON deserialization might require runtime code generation.")] + [RequiresDynamicCode("Protobuf deserialization might require runtime code generation.")] [RequiresUnreferencedCode( - "Protobuf and JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeFormatSpecific(byte[] data, + "Protobuf deserialization might require types that cannot be statically analyzed.")] + protected override object? DeserializeComplexTypeFormat(byte[] data, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type targetType, bool isKey) @@ -99,53 +98,46 @@ public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext // Check if it's a Protobuf message type if (typeof(IMessage).IsAssignableFrom(targetType)) { - // Get the parser from cache or create a new one + // This is a Protobuf message type - try to get the parser var parser = GetProtobufParser(targetType); - if (parser != null) + if (parser == null) + { + throw new InvalidOperationException($"Could not find Protobuf parser for type {targetType.Name}"); + } + + try + { + // First, try standard protobuf deserialization + return parser.ParseFrom(data); + } + catch { try { - // First, try standard protobuf deserialization - return parser.ParseFrom(data); + // If standard deserialization fails, try message index handling + return DeserializeWithMessageIndex(data, parser); } - catch + catch (Exception ex) { - try - { - // If standard deserialization fails, try message index handling - var result = DeserializeWithMessageIndex(data, parser); - if (result != null) - { - return result; - } - } - catch - { - // Continue to JSON fallback if message index handling fails - } + // If both methods fail, throw with helpful message + throw new InvalidOperationException( + $"Failed to deserialize {targetType.Name} using Protobuf. 
" + + "The data may not be in a valid Protobuf format.", ex); } } } - - // If not a Protobuf message or parser not found, fall back to JSON - var jsonStr = Encoding.UTF8.GetString(data); - - if (SerializerContext == null) return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); - - var typeInfo = SerializerContext.GetTypeInfo(targetType); - if (typeInfo != null) + else { - return JsonSerializer.Deserialize(jsonStr, typeInfo); + // For non-Protobuf complex types, throw the specific expected exception + throw new InvalidOperationException($"Unsupported type for Protobuf deserialization: {targetType.Name}. " + + "Protobuf deserialization requires a type of com.google.protobuf.Message. " + + "Consider using an alternative Deserializer."); } - - return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); } catch (Exception ex) { - // If all deserialization attempts fail, throw with more helpful message - throw new InvalidOperationException("Unsupported type for Protobuf deserialization: " + targetType.Name + ". " - + "Protobuf deserialization requires a type of com.google.protobuf.Message. " - + "Consider using an alternative Deserializer.", ex); + // Preserve the error message while wrapping in SerializationException for consistent error handling + throw new System.Runtime.Serialization.SerializationException($"Failed to deserialize {(isKey ? "key" : "value")} data: {ex.Message}", ex); } } @@ -176,7 +168,7 @@ public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext return parser; } - catch (Exception ex) + catch { return null!; } diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index 92ac50e6f..8e9820968 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -540,7 +540,7 @@ protected virtual object DeserializeValue(string base64Value, /// /// Deserializes binary data into an object using the format-specific implementation. - /// This method must be overridden by derived classes to implement format-specific deserialization. + /// This method handles primitive types directly and delegates complex types to derived classes. /// /// The binary data to deserialize. /// The target type to deserialize to. @@ -553,33 +553,35 @@ protected virtual object DeserializeValue(string base64Value, DynamicallyAccessedMemberTypes.PublicFields)] Type targetType, bool isKey) { - try + // Handle primitive types directly in the base class + if (IsPrimitiveOrSimpleType(targetType)) { - // Default implementation tries JSON - var jsonStr = Encoding.UTF8.GetString(data); - - if (SerializerContext != null) - { - var typeInfo = SerializerContext.GetTypeInfo(targetType); - if (typeInfo != null) - { - return JsonSerializer.Deserialize(jsonStr, typeInfo); - } - } - - return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); - } - catch - { - // If deserialization fails, return null or default value - return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; + return DeserializePrimitiveValue(data, targetType); } + + // For complex types, delegate to format-specific implementation in derived classes + return DeserializeComplexTypeFormat(data, targetType, isKey); } + /// + /// Deserializes complex (non-primitive) types using format-specific implementation. 
+ /// Each derived class must implement this method to handle its specific format. + /// + /// The binary data to deserialize. + /// The target type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. + [RequiresDynamicCode("Format-specific deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Format-specific deserialization might require types that cannot be statically analyzed.")] + protected abstract object? DeserializeComplexTypeFormat(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey); + /// /// Checks if the specified type is a primitive or simple type. /// - private bool IsPrimitiveOrSimpleType(Type type) + protected bool IsPrimitiveOrSimpleType(Type type) { return type.IsPrimitive || type == typeof(string) || @@ -593,8 +595,8 @@ private bool IsPrimitiveOrSimpleType(Type type) /// Handles common primitive types like int, long, double, bool, string, and Guid. /// If the bytes are empty or null, returns null. /// If the type is not recognized, attempts to convert from string. - /// /// - private object DeserializePrimitiveValue(byte[] bytes, Type valueType) + /// + protected object? DeserializePrimitiveValue(byte[] bytes, Type valueType) { if (bytes == null! || bytes.Length == 0) return null!; @@ -636,21 +638,47 @@ private object DeserializePrimitiveValue(byte[] bytes, Type valueType) if (valueType == typeof(double)) { + var stringValue = Encoding.UTF8.GetString(bytes); + if (double.TryParse(stringValue, out var doubleValue)) + return doubleValue; + return bytes.Length >= 8 ? BitConverter.ToDouble(bytes, 0) : 0.0; } if (valueType == typeof(bool)) { + var stringValue = Encoding.UTF8.GetString(bytes); + if (bool.TryParse(stringValue, out var boolValue)) + return boolValue; + return bytes[0] != 0; } if (valueType == typeof(Guid) && bytes.Length >= 16) { - return new Guid(bytes); + try + { + return new Guid(bytes); + } + catch + { + // If binary parsing fails, try as string + var stringValue = Encoding.UTF8.GetString(bytes); + if (Guid.TryParse(stringValue, out var guidValue)) + return guidValue; + } } // For any other type, try to parse as string - return Convert.ChangeType(Encoding.UTF8.GetString(bytes), valueType); + try + { + var stringValue = Encoding.UTF8.GetString(bytes); + return Convert.ChangeType(stringValue, valueType); + } + catch + { + return valueType.IsValueType ? 
Activator.CreateInstance(valueType) : null; + } } } diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs index 6627dea6e..34ff74bf2 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -20,7 +20,7 @@ using Avro.Specific; using AWS.Lambda.Powertools.Kafka.Avro; -namespace AWS.Lambda.Powertools.Kafka.Tests; +namespace AWS.Lambda.Powertools.Kafka.Tests.Avro; public class KafkaHandlerTests { diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs index 532a19ca0..43c474d9c 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -13,12 +13,11 @@ * permissions and limitations under the License. */ +using System.Runtime.Serialization; using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; using AWS.Lambda.Powertools.Kafka.Avro; -namespace AWS.Lambda.Powertools.Kafka.Tests; +namespace AWS.Lambda.Powertools.Kafka.Tests.Avro; public class PowertoolsKafkaAvroSerializerTests { @@ -114,63 +113,7 @@ public void Primitive_Deserialization() } [Fact] - public void DeserializeComplexKey_WithoutAvroSchema_FallsBackToJson() - { - // Arrange - var serializer = new PowertoolsKafkaAvroSerializer(); - var complexObject = new { Name = "Test", Id = 123 }; - var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); - - string kafkaEventJson = CreateKafkaEvent( - keyValue: Convert.ToBase64String(jsonBytes), - valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) - ); - - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - - // Act - // Use Dictionary as key type since it doesn't have an Avro schema - var result = serializer.Deserialize, string>>(stream); - - // Assert - var record = result.First(); - Assert.NotNull(record.Key); - Assert.Equal("Test", record.Key["Name"].ToString()); - Assert.Equal(123, int.Parse(record.Key["Id"].ToString())); - } - - [Fact] - public void DeserializeComplexKey_WithSerializerContext_UsesContext() - { - // Arrange - // Create custom context - var options = new JsonSerializerOptions(); - var context = new TestAvroSerializerContext(options); - var serializer = new PowertoolsKafkaAvroSerializer(context); - - // Create test data with the registered type - var testModel = new TestModel { Name = "TestFromContext", Value = 456 }; - var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); - - string kafkaEventJson = CreateKafkaEvent( - keyValue: Convert.ToBase64String(jsonBytes), - valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) - ); - - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - - // Act - var result = serializer.Deserialize>(stream); - - // Assert - var record = result.First(); - Assert.NotNull(record.Key); - Assert.Equal("TestFromContext", record.Key.Name); - Assert.Equal(456, record.Key.Value); - } - - [Fact] - public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsNull() + public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsException() { // Arrange var serializer = new 
PowertoolsKafkaAvroSerializer(); @@ -184,13 +127,8 @@ public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsNull( using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - // Act - // This shouldn't throw but return a record with null key - var result = serializer.Deserialize>(stream); - - // Assert - var record = result.First(); - Assert.Null(record.Key); + Assert.Throws(() => + serializer.Deserialize>(stream)); } private string CreateKafkaEvent(string keyValue, string valueValue) @@ -217,9 +155,4 @@ private string CreateKafkaEvent(string keyValue, string valueValue) }} }}"; } -} - -[JsonSerializable(typeof(TestModel))] -public partial class TestAvroSerializerContext : JsonSerializerContext -{ } \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs index 6f192691d..d41bfb18a 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs @@ -39,13 +39,12 @@ Missing schemas with fallback mechanisms Accessing and parsing record headers */ +using System.Runtime.Serialization; using System.Text; -using System.Text.Json; using Amazon.Lambda.Core; using Amazon.Lambda.TestUtilities; using AWS.Lambda.Powertools.Kafka.Json; using AWS.Lambda.Powertools.Kafka.Avro; -using AWS.Lambda.Powertools.Kafka.Protobuf; using TestKafka; namespace AWS.Lambda.Powertools.Kafka.Tests; @@ -386,7 +385,7 @@ string Handler(ConsumerRecords records, ILambdaContext con } [Fact] - public void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_ReturnsNull() + public void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_ReturnsException() { // Arrange var serializer = new PowertoolsKafkaAvroSerializer(); @@ -411,12 +410,8 @@ public void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_Ret }}"; using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - var output = serializer.Deserialize>(stream); - - // Act & Assert - Assert.Single(output.Records); - Assert.Equal("test-key", output.Records.First().Value[0].Key); - Assert.Null(output.Records.First().Value[0].Value); + Assert.Throws(() => + serializer.Deserialize>(stream)); } #endregion @@ -509,40 +504,6 @@ string Handler(ConsumerRecords records, ILambdaContext co Assert.Contains("Key: 3, Value: null", mockLogger.Buffer.ToString()); } - [Fact] - public void Given_MissingProtobufParser_When_DeserializedWithProtobufSerializer_Then_FallsBackToJson() - { - // Arrange - var serializer = new PowertoolsKafkaProtobufSerializer(); - - // Create regular JSON instead of Protobuf binary - string jsonData = "{\"Name\":\"Fallback Test\",\"Id\":789,\"Price\":59.99}"; - string base64Json = Convert.ToBase64String(Encoding.UTF8.GetBytes(jsonData)); - - string kafkaEventJson = @$"{{ - ""eventSource"": ""aws:kafka"", - ""records"": {{ - ""mytopic-0"": [ - {{ - ""topic"": ""mytopic"", - ""partition"": 0, - ""offset"": 15, - ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("42"))}"", - ""value"": ""{base64Json}"" - }} - ] - }} - }}"; - - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - var output = serializer.Deserialize>(stream); - - // Act & Assert - Assert.Single(output.Records); - Assert.Equal("42", output.Records.First().Value[0].Key); - Assert.Equal(jsonData, 
JsonSerializer.Serialize(output.Records.First().Value[0].Value)); - } - #endregion #region Raw/Default Deserialization Tests diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs index 2c6e12fc4..ff2512f33 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs @@ -47,61 +47,56 @@ public TestKafkaSerializer(JsonSerializerOptions options, JsonSerializerContext { } - // Implement our own version that mimics the private method's behavior - public object TestDeserializePrimitiveValue(byte[] bytes, Type valueType) + // Implementation of the abstract method for test purposes + protected override object? DeserializeComplexTypeFormat(byte[] data, + Type targetType, bool isKey) { - if (bytes == null || bytes.Length == 0) - return null!; - - if (valueType == typeof(string)) + try { - return Encoding.UTF8.GetString(bytes); - } - - if (valueType == typeof(int)) - { - var stringValue = Encoding.UTF8.GetString(bytes); - if (int.TryParse(stringValue, out var parsedValue)) - return parsedValue; - - return bytes.Length switch + // Test implementation using JSON for all complex types + var jsonStr = Encoding.UTF8.GetString(data); + + if (SerializerContext != null) { - >= 4 => BitConverter.ToInt32(bytes, 0), - 1 => bytes[0], - _ => 0 - }; + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(jsonStr, typeInfo); + } + } + + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); } - - if (valueType == typeof(long)) + catch { - var stringValue = Encoding.UTF8.GetString(bytes); - if (long.TryParse(stringValue, out var parsedValue)) - return parsedValue; - - return bytes.Length switch - { - >= 8 => BitConverter.ToInt64(bytes, 0), - >= 4 => BitConverter.ToInt32(bytes, 0), - _ => 0L - }; + return null; } - - if (valueType == typeof(double)) - { - return bytes.Length >= 8 ? BitConverter.ToDouble(bytes, 0) : 0.0; - } - - if (valueType == typeof(bool)) - { - return bytes[0] != 0; - } - - if (valueType == typeof(Guid) && bytes.Length >= 16) - { - return new Guid(bytes); - } - - return Convert.ChangeType(Encoding.UTF8.GetString(bytes), valueType); + } + + // Expose protected methods for direct testing + public object? TestDeserializeFormatSpecific(byte[] data, Type targetType, bool isKey) + { + return DeserializeFormatSpecific(data, targetType, isKey); + } + + public object? TestDeserializeComplexTypeFormat(byte[] data, Type targetType, bool isKey) + { + return DeserializeComplexTypeFormat(data, targetType, isKey); + } + + public object? 
TestDeserializePrimitiveValue(byte[] data, Type targetType) + { + return DeserializePrimitiveValue(data, targetType); + } + + public bool TestIsPrimitiveOrSimpleType(Type type) + { + return IsPrimitiveOrSimpleType(type); + } + + public object TestDeserializeValue(string base64Value, Type valueType) + { + return DeserializeValue(base64Value, valueType); } } @@ -315,32 +310,6 @@ public void ProcessHeaders_MultipleHeaders_DeserializesCorrectly() Assert.Equal("world", Encoding.ASCII.GetString(record.Headers["header2"])); } - // Helper method to create Kafka event JSON with specified key and value - private string CreateKafkaEvent(string keyValue, string valueValue) - { - return @$"{{ - ""eventSource"": ""aws:kafka"", - ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", - ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", - ""records"": {{ - ""mytopic-0"": [ - {{ - ""topic"": ""mytopic"", - ""partition"": 0, - ""offset"": 15, - ""timestamp"": 1645084650987, - ""timestampType"": ""CREATE_TIME"", - ""key"": ""{keyValue}"", - ""value"": ""{valueValue}"", - ""headers"": [ - {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} - ] - }} - ] - }} - }}"; - } - [Fact] public void Deserialize_WithSerializerContext_UsesContextForRegisteredTypes() { @@ -630,6 +599,135 @@ public void Deserialize_WithSchemaMetadata_PopulatesSchemaMetadataProperties() Assert.Equal("AVRO", record.ValueSchemaMetadata.DataFormat); Assert.Equal("value-schema-002", record.ValueSchemaMetadata.SchemaId); } + + // NEW TESTS FOR LATEST CHANGES + + [Fact] + public void DeserializeFormatSpecific_PrimitiveType_UsesDeserializePrimitiveValue() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var stringBytes = Encoding.UTF8.GetBytes("primitive-test"); + + // Act + var result = serializer.TestDeserializeFormatSpecific(stringBytes, typeof(string), isKey: false); + + // Assert + Assert.Equal("primitive-test", result); + } + + [Fact] + public void DeserializeFormatSpecific_ComplexType_UsesDeserializeComplexTypeFormat() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var complexObject = new TestModel { Name = "complex-test", Value = 42 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + // Act + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), isKey: false); + + // Assert + Assert.NotNull(result); + var testModel = (TestModel)result!; + Assert.Equal("complex-test", testModel.Name); + Assert.Equal(42, testModel.Value); + } + + [Fact] + public void DeserializeComplexTypeFormat_ValidJson_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var complexObject = new TestModel { Name = "direct-test", Value = 123 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + // Act + var result = serializer.TestDeserializeComplexTypeFormat(jsonBytes, typeof(TestModel), isKey: true); + + // Assert + Assert.NotNull(result); + var testModel = (TestModel)result!; + Assert.Equal("direct-test", testModel.Name); + Assert.Equal(123, testModel.Value); + } + + [Fact] + public void DeserializeComplexTypeFormat_InvalidJson_ReturnsNull() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var invalidBytes = new byte[] { 0xDE, 0xAD, 0xBE, 0xEF }; // Invalid JSON data + + // Act + var result = serializer.TestDeserializeComplexTypeFormat(invalidBytes, typeof(TestModel), isKey: true); + + // Assert + 
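+        // The test serializer's catch block turns the JsonException from the malformed bytes into a null result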
Assert.Null(result); + } + + [Fact] + public void DeserializeValue_Base64String_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var testValue = "test-value-123"; + var base64Value = Convert.ToBase64String(Encoding.UTF8.GetBytes(testValue)); + + // Act + var result = serializer.TestDeserializeValue(base64Value, typeof(string)); + + // Assert + Assert.Equal(testValue, result); + } + + [Fact] + public void IsPrimitiveOrSimpleType_ChecksVariousTypes() + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act & Assert + // Primitive types + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(int))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(long))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(bool))); + + // Simple types + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(string))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(Guid))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(DateTime))); + + // Complex types + Assert.False(serializer.TestIsPrimitiveOrSimpleType(typeof(TestModel))); + Assert.False(serializer.TestIsPrimitiveOrSimpleType(typeof(Dictionary))); + } + + // Helper method to create Kafka event JSON with specified key and value + private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } } [JsonSerializable(typeof(TestModel))] @@ -644,4 +742,5 @@ public class TestModel public string Name { get; set; } public int Value { get; set; } } -} \ No newline at end of file +} + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs index 8d89340fc..8d2abd951 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs @@ -15,8 +15,6 @@ using System.Runtime.Serialization; using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; using AWS.Lambda.Powertools.Kafka.Protobuf; using TestKafka; @@ -125,62 +123,6 @@ public void Primitive_Deserialization() Assert.Equal("MyKey", firstRecord.Key); } - [Fact] - public void DeserializeComplexKey_WithoutProtobufParser_FallsBackToJson() - { - // Arrange - var serializer = new PowertoolsKafkaProtobufSerializer(); - var complexObject = new { Name = "Test", Id = 123 }; - var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); - - string kafkaEventJson = CreateKafkaEvent( - keyValue: Convert.ToBase64String(jsonBytes), - valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) - ); - - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - - // Act - // Use Dictionary as key type since it doesn't have a Protobuf parser - var result = 
serializer.Deserialize, string>>(stream); - - // Assert - var record = result.First(); - Assert.NotNull(record.Key); - Assert.Equal("Test", record.Key["Name"].ToString()); - Assert.Equal(123, int.Parse(record.Key["Id"].ToString())); - } - - [Fact] - public void DeserializeComplexKey_WithSerializerContext_UsesContext() - { - // Arrange - // Create custom context - var options = new JsonSerializerOptions(); - var context = new TestProtobufSerializerContext(options); - var serializer = new PowertoolsKafkaProtobufSerializer(context); - - // Create test data with the registered type - var testModel = new TestModel { Name = "TestFromContext", Value = 456 }; - var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); - - string kafkaEventJson = CreateKafkaEvent( - keyValue: Convert.ToBase64String(jsonBytes), - valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) - ); - - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - - // Act - var result = serializer.Deserialize>(stream); - - // Assert - var record = result.First(); - Assert.NotNull(record.Key); - Assert.Equal("TestFromContext", record.Key.Name); - Assert.Equal(456, record.Key.Value); - } - [Fact] public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsException() { @@ -198,7 +140,7 @@ public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsExcep // Act var message = Assert.Throws(() => serializer.Deserialize>(stream)); - Assert.Equal("Failed to deserialize key data: Unsupported type for Protobuf deserialization: TestModel. Protobuf deserialization requires a type of com.google.protobuf.Message. Consider using an alternative Deserializer.", message.Message); + Assert.Contains("Failed to deserialize key data: Failed to deserialize", message.Message); } [Fact] @@ -296,9 +238,4 @@ private string CreateKafkaEvent(string keyValue, string valueValue) }} }}"; } -} - -[JsonSerializable(typeof(TestModel))] -public partial class TestProtobufSerializerContext : JsonSerializerContext -{ } \ No newline at end of file From a64dc315b3dda24be9b6b674716a21033224483d Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Thu, 19 Jun 2025 12:46:26 +0100 Subject: [PATCH 32/35] add coverage to json --- .../PowertoolsKafkaJsonSerializerTests.cs | 195 +++++++++++++++++- 1 file changed, 187 insertions(+), 8 deletions(-) diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs index 41cd395c0..65538ebbe 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -20,13 +20,6 @@ namespace AWS.Lambda.Powertools.Kafka.Tests.Json; -public record JsonProduct -{ - public int Id { get; set; } - public string Name { get; set; } = string.Empty; - public decimal Price { get; set; } -} - public class PowertoolsKafkaJsonSerializerTests { [Fact] @@ -235,6 +228,172 @@ public void DeserializeComplexValue_WithSerializerContext_UsesContext() Assert.Equal("ValueFromContext", record.Value.Name); Assert.Equal(789, record.Value.Value); } + + [Fact] + public void DeserializeComplexValue_WithContextButNoTypeInfo_UsesFallback() + { + // Arrange - create context without registering Dictionary<,> type + var options = new JsonSerializerOptions(); + var context = new 
TestJsonSerializerContext(options); + var serializer = new PowertoolsKafkaJsonSerializer(context); + + // Create test data with an unregistered type + var dictData = new Dictionary { ["test"] = 123, ["value"] = 456 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(dictData)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(jsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act - should use fallback deserialization + var result = serializer.Deserialize>>(stream); + + // Assert + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.NotNull(record.Value); + Assert.Equal(2, record.Value.Count); + Assert.Equal(123, record.Value["test"]); + Assert.Equal(456, record.Value["value"]); + } + + [Fact] + public void DeserializeComplexValue_WithInvalidJson_ReturnsNullForReferenceTypes() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + byte[] invalidJsonBytes = Encoding.UTF8.GetBytes("{ this is not valid json }"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(invalidJsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert - value should be null because it's a reference type + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Null(record.Value); + } + + [Fact] + public void DeserializeComplexValue_WithInvalidJson_ReturnsDefaultForValueTypes() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + byte[] invalidJsonBytes = Encoding.UTF8.GetBytes("{ bad json"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(invalidJsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert - value should be default because it's a value type + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Equal(0, record.Value.Id); + Assert.Equal(default, record.Value.Name); + Assert.Equal(0, record.Value.Price); + } + + [Fact] + public void DeserializeComplexValue_WithCustomJsonOptions_RespectsOptions() + { + // Arrange - create custom options with different naming policy + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + PropertyNameCaseInsensitive = false // Force exact case match + }; + var serializer = new PowertoolsKafkaJsonSerializer(options); + + // Create test data with camelCase property names + var jsonBytes = Encoding.UTF8.GetBytes(@"{""id"":999,""name"":""camelCase"",""price"":29.99}"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(jsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal(999, record.Value.Id); + Assert.Equal("camelCase", record.Value.Name); + Assert.Equal(29.99m, record.Value.Price); + } + + [Fact] + public void 
DeserializeComplexValue_WithEmptyData_ReturnsNullOrDefault() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + // Empty JSON data + byte[] emptyBytes = Array.Empty(); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(emptyBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Null(record.Value); // Should be null for empty input + } + + [Fact] + public void DeserializeComplexValue_WithContextAndNullResult_ReturnsNull() + { + // Arrange - create a context with JsonNullHandling.Include + var options = new JsonSerializerOptions + { + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + IgnoreNullValues = false + }; + var context = new TestJsonSerializerContext(options); + var serializer = new PowertoolsKafkaJsonSerializer(context); + + // JSON that explicitly sets the value to null + var jsonBytes = Encoding.UTF8.GetBytes("null"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(jsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Null(record.Value); + } + /// /// Helper method to create Kafka event JSON with specified key and value in base64 format @@ -268,4 +427,24 @@ private string CreateKafkaEvent(string keyValue, string valueValue) [JsonSerializable(typeof(TestModel))] public partial class TestJsonSerializerContext : JsonSerializerContext { -} \ No newline at end of file +} + +public class TestModel +{ + public string Name { get; set; } = string.Empty; + public int Value { get; set; } +} + +public record JsonProduct +{ + public int Id { get; set; } + public string Name { get; set; } = string.Empty; + public decimal Price { get; set; } +} + +public struct ValueTypeProduct +{ + public int Id { get; set; } + public string Name { get; set; } + public decimal Price { get; set; } +} From 6f10a09fcb2034e9b0646f73c32c72fde9d7219e Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Thu, 19 Jun 2025 12:54:19 +0100 Subject: [PATCH 33/35] refactor: streamline deserialization logic by consolidating type-specific handling; improve readability and maintainability --- .../PowertoolsKafkaSerializerBase.cs | 135 ++++++++++-------- .../PowertoolsKafkaJsonSerializerTests.cs | 31 ---- 2 files changed, 79 insertions(+), 87 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs index 8e9820968..c00b34f16 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -598,81 +598,104 @@ protected bool IsPrimitiveOrSimpleType(Type type) /// protected object? DeserializePrimitiveValue(byte[] bytes, Type valueType) { + // Early return for empty data if (bytes == null! 
|| bytes.Length == 0) return null!; + // String is the most common case, handle first if (valueType == typeof(string)) { return Encoding.UTF8.GetString(bytes); } - if (valueType == typeof(int)) - { - // First try to parse as string - var stringValue = Encoding.UTF8.GetString(bytes); - if (int.TryParse(stringValue, out var parsedValue)) - return parsedValue; - - // Fall back to binary - return bytes.Length switch - { - >= 4 => BitConverter.ToInt32(bytes, 0), - 1 => bytes[0], - _ => 0 - }; - } + // For numeric and boolean types, try string parsing first + var stringValue = Encoding.UTF8.GetString(bytes); + // Handle numeric types + if (valueType == typeof(int)) + return DeserializeIntValue(bytes, stringValue); + if (valueType == typeof(long)) - { - var stringValue = Encoding.UTF8.GetString(bytes); - if (long.TryParse(stringValue, out var parsedValue)) - return parsedValue; + return DeserializeLongValue(bytes, stringValue); + + if (valueType == typeof(double)) + return DeserializeDoubleValue(bytes, stringValue); + + if (valueType == typeof(bool)) + return DeserializeBoolValue(bytes, stringValue); - return bytes.Length switch - { - >= 8 => BitConverter.ToInt64(bytes, 0), - >= 4 => BitConverter.ToInt32(bytes, 0), - _ => 0L - }; - } + // Handle Guid values + if (valueType == typeof(Guid)) + return DeserializeGuidValue(bytes, stringValue); - if (valueType == typeof(double)) + // For any other type, try converting from string + return DeserializeGenericValue(stringValue, valueType); + } + + private object DeserializeIntValue(byte[] bytes, string stringValue) + { + // Try string parsing first + if (int.TryParse(stringValue, out var parsedValue)) + return parsedValue; + + // Fall back to binary representation + return bytes.Length switch { - var stringValue = Encoding.UTF8.GetString(bytes); - if (double.TryParse(stringValue, out var doubleValue)) - return doubleValue; - - return bytes.Length >= 8 ? BitConverter.ToDouble(bytes, 0) : 0.0; - } - - if (valueType == typeof(bool)) + >= 4 => BitConverter.ToInt32(bytes, 0), + 1 => bytes[0], + _ => 0 + }; + } + + private object DeserializeLongValue(byte[] bytes, string stringValue) + { + if (long.TryParse(stringValue, out var parsedValue)) + return parsedValue; + + return bytes.Length switch { - var stringValue = Encoding.UTF8.GetString(bytes); - if (bool.TryParse(stringValue, out var boolValue)) - return boolValue; - - return bytes[0] != 0; + >= 8 => BitConverter.ToInt64(bytes, 0), + >= 4 => BitConverter.ToInt32(bytes, 0), + _ => 0L + }; + } + + private object DeserializeDoubleValue(byte[] bytes, string stringValue) + { + if (double.TryParse(stringValue, out var doubleValue)) + return doubleValue; + + return bytes.Length >= 8 ? BitConverter.ToDouble(bytes, 0) : 0.0; + } + + private object DeserializeBoolValue(byte[] bytes, string stringValue) + { + if (bool.TryParse(stringValue, out var boolValue)) + return boolValue; + + return bytes[0] != 0; + } + + private object? DeserializeGuidValue(byte[] bytes, string stringValue) + { + if (bytes.Length < 16) + return Guid.Empty; + + try + { + return new Guid(bytes); } - - if (valueType == typeof(Guid) && bytes.Length >= 16) + catch { - try - { - return new Guid(bytes); - } - catch - { - // If binary parsing fails, try as string - var stringValue = Encoding.UTF8.GetString(bytes); - if (Guid.TryParse(stringValue, out var guidValue)) - return guidValue; - } + // If binary parsing fails, try string parsing + return Guid.TryParse(stringValue, out var guidValue) ? 
guidValue : Guid.Empty; } - - // For any other type, try to parse as string + } + + private object? DeserializeGenericValue(string stringValue, Type valueType) + { try { - var stringValue = Encoding.UTF8.GetString(bytes); return Convert.ChangeType(stringValue, valueType); } catch diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs index 65538ebbe..96cabf6ad 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -228,37 +228,6 @@ public void DeserializeComplexValue_WithSerializerContext_UsesContext() Assert.Equal("ValueFromContext", record.Value.Name); Assert.Equal(789, record.Value.Value); } - - [Fact] - public void DeserializeComplexValue_WithContextButNoTypeInfo_UsesFallback() - { - // Arrange - create context without registering Dictionary<,> type - var options = new JsonSerializerOptions(); - var context = new TestJsonSerializerContext(options); - var serializer = new PowertoolsKafkaJsonSerializer(context); - - // Create test data with an unregistered type - var dictData = new Dictionary { ["test"] = 123, ["value"] = 456 }; - var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(dictData)); - - string kafkaEventJson = CreateKafkaEvent( - keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), - valueValue: Convert.ToBase64String(jsonBytes) - ); - - using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); - - // Act - should use fallback deserialization - var result = serializer.Deserialize>>(stream); - - // Assert - var record = result.First(); - Assert.Equal("testKey", record.Key); - Assert.NotNull(record.Value); - Assert.Equal(2, record.Value.Count); - Assert.Equal(123, record.Value["test"]); - Assert.Equal(456, record.Value["value"]); - } [Fact] public void DeserializeComplexValue_WithInvalidJson_ReturnsNullForReferenceTypes() From 1ec2439635bdd100a205f1038ceaf564cc7ad9d9 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Thu, 19 Jun 2025 14:06:48 +0100 Subject: [PATCH 34/35] refactor: enhance JSON deserialization logic; improve error handling and add tests for various scenarios --- .../PowertoolsKafkaJsonSerializer.cs | 74 +++------- .../PowertoolsKafkaJsonSerializerTests.cs | 135 ++++++++++++++++++ 2 files changed, 153 insertions(+), 56 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs index 61dd8d649..f70ac6a9d 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs @@ -54,7 +54,7 @@ public PowertoolsKafkaJsonSerializer(JsonSerializerContext serializerContext) : } /// - /// Deserializes binary data using JSON format. + /// Deserializes complex (non-primitive) types using JSON format. /// /// The binary data to deserialize. /// The type to deserialize to. @@ -62,89 +62,51 @@ public PowertoolsKafkaJsonSerializer(JsonSerializerContext serializerContext) : /// The deserialized object. 
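+    /// Returns null for reference types (or a default instance for value types) when the payload is empty or cannot be parsed as JSON.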
[RequiresDynamicCode("JSON deserialization might require runtime code generation.")] [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeFormatSpecific(byte[] data, + protected override object? DeserializeComplexTypeFormat(byte[] data, [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | DynamicallyAccessedMemberTypes.PublicFields)] Type targetType, bool isKey) { - try - { - // Handle primitive types directly - if (IsPrimitiveOrSimpleType(targetType)) - { - return DeserializePrimitiveValue(data, targetType); - } - - // Convert bytes to JSON string - var jsonStr = Encoding.UTF8.GetString(data); - - if (SerializerContext != null) - { - // Try to get type info from context for AOT compatibility - var typeInfo = SerializerContext.GetTypeInfo(targetType); - if (typeInfo != null) - { - var result = JsonSerializer.Deserialize(jsonStr, typeInfo); - if (result != null) - { - return result; - } - } - } - - // Fallback to regular deserialization - #pragma warning disable IL2026, IL3050 - return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); - #pragma warning restore IL2026, IL3050 - } - catch + if (data == null || data.Length == 0) { - // If deserialization fails, return null or default return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; } - } - - /// - /// Deserializes complex (non-primitive) types using JSON format. - /// - /// The binary data to deserialize. - /// The type to deserialize to. - /// Whether this data represents a key (true) or a value (false). - /// The deserialized object. - [RequiresDynamicCode("JSON deserialization might require runtime code generation.")] - [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] - protected override object? DeserializeComplexTypeFormat(byte[] data, - [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | - DynamicallyAccessedMemberTypes.PublicFields)] - Type targetType, bool isKey) - { + try { // Convert bytes to JSON string var jsonStr = Encoding.UTF8.GetString(data); - + + // First try context-based deserialization if available if (SerializerContext != null) { // Try to get type info from context for AOT compatibility var typeInfo = SerializerContext.GetTypeInfo(targetType); if (typeInfo != null) { - var result = JsonSerializer.Deserialize(jsonStr, typeInfo); - if (result != null) + try + { + var result = JsonSerializer.Deserialize(jsonStr, typeInfo); + if (result != null) + { + return result; + } + } + catch { - return result; + // Continue to fallback if context-based deserialization fails } } } - // Fallback to regular deserialization + // Fallback to regular deserialization - this should handle types not in the context #pragma warning disable IL2026, IL3050 return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); #pragma warning restore IL2026, IL3050 } catch { - // If deserialization fails, return null or default + // If all deserialization attempts fail, return null or default return targetType.IsValueType ? 
Activator.CreateInstance(targetType) : null; } } diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs index 96cabf6ad..a40ee8efd 100644 --- a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -391,6 +391,141 @@ private string CreateKafkaEvent(string keyValue, string valueValue) }} }}"; } + + [Fact] + public void DirectJsonSerializerTest_InvokesFormatSpecificMethod() + { + // This test directly tests the JSON serializer methods + var serializer = new TestJsonDeserializer(); + + // Create test data with valid JSON + var testModel = new TestModel { Name = "DirectTest", Value = 555 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + // Act + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), false); + + // Assert + Assert.NotNull(result); + var model = result as TestModel; + Assert.NotNull(model); + Assert.Equal("DirectTest", model!.Name); + Assert.Equal(555, model.Value); + } + + [Fact] + public void DirectJsonSerializerTest_WithContext_UsesContext() + { + // Create a context that includes TestModel + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + + // Create the serializer with context + var serializer = new TestJsonDeserializer(context); + + // Create test data with valid JSON + var testModel = new TestModel { Name = "ContextTest", Value = 999 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + // Act - directly test the protected method + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), false); + + // Assert + Assert.NotNull(result); + var model = result as TestModel; + Assert.NotNull(model); + Assert.Equal("ContextTest", model!.Name); + Assert.Equal(999, model.Value); + } + + [Fact] + public void DirectJsonSerializerTest_WithInvalidJson_ReturnsNullForReferenceType() + { + // Create the serializer + var serializer = new TestJsonDeserializer(); + + // Create invalid JSON data + var invalidJsonBytes = Encoding.UTF8.GetBytes("{ not valid json"); + + // Act - directly test the protected method + var result = serializer.TestDeserializeFormatSpecific(invalidJsonBytes, typeof(TestModel), false); + + // Assert - should return null for reference type when JSON is invalid + Assert.Null(result); + } + + [Fact] + public void DirectJsonSerializerTest_WithInvalidJson_ReturnsDefaultForValueType() + { + // Create the serializer + var serializer = new TestJsonDeserializer(); + + // Create invalid JSON data + var invalidJsonBytes = Encoding.UTF8.GetBytes("{ not valid json"); + + // Act - directly test the protected method with a value type + var result = serializer.TestDeserializeFormatSpecific(invalidJsonBytes, typeof(int), false); + + // Assert - should return default (0) for value type when JSON is invalid + Assert.Equal(0, result); + } + + [Fact] + public void DirectJsonSerializerTest_WithEmptyJson_ReturnsNullOrDefault() + { + // Create the serializer + var serializer = new TestJsonDeserializer(); + + // Create empty JSON data + var emptyJsonBytes = Array.Empty(); + + // Act - test with reference type + var resultRef = serializer.TestDeserializeFormatSpecific(emptyJsonBytes, typeof(TestModel), false); + // Act - test with value 
type + var resultVal = serializer.TestDeserializeFormatSpecific(emptyJsonBytes, typeof(int), false); + + // Assert + Assert.Null(resultRef); // Reference type should get null + Assert.Equal(0, resultVal); // Value type should get default + } + + [Fact] + public void DirectJsonSerializerTest_WithContextResultingInNull_ReturnsNull() + { + // Create context + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + + // Create serializer with context + var serializer = new TestJsonDeserializer(context); + + // Create JSON that is "null" + var jsonBytes = Encoding.UTF8.GetBytes("null"); + + // Act - even with context, null JSON should return null + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), false); + + // Assert + Assert.Null(result); + } + + /// + /// Test helper to directly access protected methods + /// + private class TestJsonDeserializer : PowertoolsKafkaJsonSerializer + { + public TestJsonDeserializer() : base() { } + + public TestJsonDeserializer(JsonSerializerOptions options) : base(options) { } + + public TestJsonDeserializer(JsonSerializerContext context) : base(context) { } + + public object? TestDeserializeFormatSpecific(byte[] data, Type targetType, bool isKey) + { + // Call the protected method directly + return base.DeserializeComplexTypeFormat(data, targetType, isKey); + } + } } [JsonSerializable(typeof(TestModel))] From 397a476941f66e41294a1b974666d1b4976a4ad9 Mon Sep 17 00:00:00 2001 From: Henrique Graca <999396+hjgraca@users.noreply.github.com> Date: Thu, 19 Jun 2025 14:19:19 +0100 Subject: [PATCH 35/35] docs for nuget --- .../Readme.md | 135 ++++++++- .../Readme.md | 275 +++++++++++++++++- .../Readme.md | 214 +++++++++++++- 3 files changed, 621 insertions(+), 3 deletions(-) diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md index 16da5ccb4..942f526cf 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md @@ -1 +1,134 @@ -# Powertools for AWS Lambda (.NET) - Kafka \ No newline at end of file +# Powertools for AWS Lambda (.NET) - Kafka Avro + +A specialized Lambda serializer for handling Kafka events with Avro-formatted data in .NET Lambda functions. + +## Features + +- **Automatic Avro Deserialization**: Seamlessly converts Avro binary data from Kafka records into strongly-typed .NET objects +- **Base64 Decoding**: Handles base64-encoded Avro data from Kafka events automatically +- **Type Safety**: Leverages compile-time type checking with Avro-generated classes +- **Flexible Configuration**: Supports custom JSON serialization options and AOT-compatible contexts +- **Error Handling**: Provides clear error messages for serialization failures + +## Installation + +```bash +dotnet add package AWS.Lambda.Powertools.Kafka.Avro +``` + +## Quick Start + +### 1. Configure the Serializer + +Add the serializer to your Lambda function assembly: + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))] +``` + +### 2. 
Define Your Avro Model
+
+Ensure your Avro-generated classes have the required `_SCHEMA` field:
+
+```csharp
+public partial class Customer : ISpecificRecord
+{
+    public static Schema _SCHEMA = Schema.Parse(@"{
+        ""type"": ""record"",
+        ""name"": ""Customer"",
+        ""fields"": [
+            {""name"": ""id"", ""type"": ""string""},
+            {""name"": ""name"", ""type"": ""string""},
+            {""name"": ""age"", ""type"": ""int""}
+        ]
+    }");
+
+    public string Id { get; set; }
+    public string Name { get; set; }
+    public int Age { get; set; }
+}
+```
+
+### 3. Create Your Lambda Handler
+
+```csharp
+public class Function
+{
+    public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            Customer customer = record.Value; // Automatically deserialized from Avro
+            context.Logger.LogInformation($"Processing customer: {customer.Name}, Age: {customer.Age}");
+        }
+    }
+}
+```
+
+## Advanced Configuration
+
+### Custom JSON Options
+
+```csharp
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))]
+
+// In your startup or configuration
+var jsonOptions = new JsonSerializerOptions
+{
+    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+    WriteIndented = true
+};
+
+var serializer = new PowertoolsKafkaAvroSerializer(jsonOptions);
+```
+
+### AOT-Compatible Serialization
+
+```csharp
+[JsonSerializable(typeof(ConsumerRecords<string, Customer>))]
+public partial class MyJsonContext : JsonSerializerContext { }
+
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))]
+
+// Configure with AOT context
+var serializer = new PowertoolsKafkaAvroSerializer(MyJsonContext.Default);
+```
+
+## Requirements
+
+- **.NET 6.0+**: This library targets .NET 6.0 and later versions
+- **Avro.NET**: Requires the Apache Avro library for .NET
+- **Avro Schema**: Your data classes must include a public static `_SCHEMA` field
+- **AWS Lambda**: Designed specifically for AWS Lambda runtime environments
+
+## Error Handling
+
+The serializer provides detailed error messages for common issues:
+
+```csharp
+// Missing _SCHEMA field
+InvalidOperationException: "Unsupported type for Avro deserialization: MyClass.
+Avro deserialization requires a type with a static _SCHEMA field."
+
+// Deserialization failures
+SerializationException: "Failed to deserialize value data: [specific error details]"
+```
+
+## Compatibility Notes
+
+- **Reflection Requirements**: Uses reflection to access Avro schemas, which may impact AOT compilation
+- **Trimming**: May require additional configuration for self-contained deployments with trimming enabled
+- **Performance**: Optimized for typical Lambda cold start and execution patterns
+
+## Related Packages
+
+- [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages/AWS.Lambda.Powertools.Logging/) - Structured logging
+- [AWS.Lambda.Powertools.Tracing](https://www.nuget.org/packages/AWS.Lambda.Powertools.Tracing/) - Distributed tracing
+
+## Documentation
+
+For more detailed documentation and examples, visit the [official documentation](https://docs.powertools.aws.dev/lambda/dotnet/).
+
+## License
+
+This library is licensed under the Apache License 2.0.
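+
+## Appendix: A Defensive Processing Sketch
+
+As a complement to the Quick Start, the following is a minimal sketch of defensive record processing. It is illustrative only: it assumes the `Customer` class shown above, `ProcessCustomer` is a hypothetical placeholder for your own business logic, and the schema metadata properties (`ValueSchemaMetadata.DataFormat`, `ValueSchemaMetadata.SchemaId`) are those exercised by the library's test suite.
+
+```csharp
+public class Function
+{
+    public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            // Schema registry metadata travels with the record when present
+            if (record.ValueSchemaMetadata != null)
+            {
+                context.Logger.LogInformation(
+                    $"Value schema: {record.ValueSchemaMetadata.DataFormat} ({record.ValueSchemaMetadata.SchemaId})");
+            }
+
+            // Empty payloads (e.g. tombstones) may surface as null values
+            if (record.Value == null)
+            {
+                continue;
+            }
+
+            ProcessCustomer(record.Value); // hypothetical downstream processing
+        }
+    }
+
+    private void ProcessCustomer(Customer customer)
+    {
+        // Business logic goes here
+    }
+}
+```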
\ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md index 16da5ccb4..b8b6df378 100644 --- a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md @@ -1 +1,274 @@ -# Powertools for AWS Lambda (.NET) - Kafka \ No newline at end of file +# Powertools for AWS Lambda (.NET) - Kafka JSON + +A specialized Lambda serializer for handling Kafka events with JSON-formatted data in .NET Lambda functions. + +## Features + +- **Automatic JSON Deserialization**: Seamlessly converts JSON data from Kafka records into strongly-typed .NET objects +- **Base64 Decoding**: Handles base64-encoded JSON data from Kafka events automatically +- **Type Safety**: Leverages compile-time type checking with .NET classes +- **Flexible Configuration**: Supports custom JSON serialization options and AOT-compatible contexts +- **High Performance**: Optimized JSON processing using System.Text.Json +- **Error Handling**: Provides clear error messages for serialization failures + +## Installation + +```bash +dotnet add package AWS.Lambda.Powertools.Kafka.Json +``` + +## Quick Start + +### 1. Configure the Serializer + +Add the serializer to your Lambda function assembly: + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] +``` + +### 2. Define Your Data Model + +Create your .NET classes with JSON serialization attributes: + +```csharp +public class Customer +{ + [JsonPropertyName("id")] + public string Id { get; set; } = ""; + + [JsonPropertyName("name")] + public string Name { get; set; } = ""; + + [JsonPropertyName("age")] + public int Age { get; set; } + + [JsonPropertyName("email")] + public string Email { get; set; } = ""; +} +``` + +### 3. 
Create Your Lambda Handler + +```csharp +public class Function +{ + public void Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Customer customer = record.Value; // Automatically deserialized from JSON + context.Logger.LogInformation($"Processing customer: {customer.Name}, Age: {customer.Age}"); + } + } +} +``` + +## Advanced Configuration + +### Custom JSON Options + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] + +// In your startup or configuration +var jsonOptions = new JsonSerializerOptions +{ + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + PropertyNameCaseInsensitive = true, + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull +}; + +var serializer = new PowertoolsKafkaJsonSerializer(jsonOptions); +``` + +### AOT-Compatible Serialization + +```csharp +[JsonSerializable(typeof(ConsumerRecords))] +[JsonSerializable(typeof(Customer))] +public partial class MyJsonContext : JsonSerializerContext { } + +[assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] + +// Configure with AOT context +var serializer = new PowertoolsKafkaJsonSerializer(MyJsonContext.Default); +``` + +### Complex Object Handling + +```csharp +public class Order +{ + [JsonPropertyName("id")] + public string Id { get; set; } = ""; + + [JsonPropertyName("customer")] + public Customer Customer { get; set; } = new(); + + [JsonPropertyName("items")] + public List Items { get; set; } = new(); + + [JsonPropertyName("total")] + public decimal Total { get; set; } + + [JsonPropertyName("created_at")] + public DateTime CreatedAt { get; set; } +} + +public class Function +{ + public void Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Order order = record.Value; + context.Logger.LogInformation($"Order {order.Id} from {order.Customer.Name}"); + context.Logger.LogInformation($"Total: ${order.Total:F2}, Items: {order.Items.Count}"); + } + } +} +``` + +## Requirements + +- **.NET 6.0+**: This library targets .NET 6.0 and later versions +- **System.Text.Json**: Uses the high-performance JSON library from .NET +- **JSON Serializable Types**: Your data classes should be compatible with System.Text.Json +- **AWS Lambda**: Designed specifically for AWS Lambda runtime environments + +## JSON Serialization Best Practices + +### Property Naming + +```csharp +// Use JsonPropertyName for explicit mapping +public class Product +{ + [JsonPropertyName("product_id")] + public string ProductId { get; set; } = ""; + + [JsonPropertyName("display_name")] + public string DisplayName { get; set; } = ""; +} + +// Or configure global naming policy +var options = new JsonSerializerOptions +{ + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower +}; +``` + +### Handling Nullable Types + +```csharp +public class Customer +{ + [JsonPropertyName("id")] + public string Id { get; set; } = ""; + + [JsonPropertyName("email")] + public string? Email { get; set; } // Nullable reference type + + [JsonPropertyName("age")] + public int? 
+
+## Requirements
+
+- **.NET 6.0+**: This library targets .NET 6.0 and later versions
+- **System.Text.Json**: Uses the high-performance JSON library from .NET
+- **JSON Serializable Types**: Your data classes should be compatible with System.Text.Json
+- **AWS Lambda**: Designed specifically for AWS Lambda runtime environments
+
+## JSON Serialization Best Practices
+
+### Property Naming
+
+```csharp
+// Use JsonPropertyName for explicit mapping
+public class Product
+{
+    [JsonPropertyName("product_id")]
+    public string ProductId { get; set; } = "";
+
+    [JsonPropertyName("display_name")]
+    public string DisplayName { get; set; } = "";
+}
+
+// Or configure a global naming policy
+var options = new JsonSerializerOptions
+{
+    PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower
+};
+```
+
+### Handling Nullable Types
+
+```csharp
+public class Customer
+{
+    [JsonPropertyName("id")]
+    public string Id { get; set; } = "";
+
+    [JsonPropertyName("email")]
+    public string? Email { get; set; } // Nullable reference type
+
+    [JsonPropertyName("age")]
+    public int? Age { get; set; } // Nullable value type
+}
+```
+
+### Custom Converters
+
+```csharp
+public class DateTimeConverter : JsonConverter<DateTime>
+{
+    public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
+    {
+        return DateTime.Parse(reader.GetString()!);
+    }
+
+    public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options)
+    {
+        writer.WriteStringValue(value.ToString("yyyy-MM-ddTHH:mm:ssZ"));
+    }
+}
+
+// Register the converter
+var options = new JsonSerializerOptions();
+options.Converters.Add(new DateTimeConverter());
+```
+
+## Error Handling
+
+The serializer provides detailed error messages for common issues:
+
+```csharp
+// JSON parsing errors
+JsonException: "The JSON value could not be converted to [Type]. Path: [path] | LineNumber: [line] | BytePositionInLine: [position]."
+
+// Type conversion errors
+SerializationException: "Failed to deserialize value data: [specific error details]"
+```
+
+## Performance Optimization
+
+### Source Generation (AOT)
+
+```csharp
+[JsonSerializable(typeof(Customer))]
+[JsonSerializable(typeof(Order))]
+[JsonSerializable(typeof(ConsumerRecords<string, Customer>))]
+[JsonSerializable(typeof(ConsumerRecords<string, Order>))]
+[JsonSourceGenerationOptions(
+    PropertyNamingPolicy = JsonKnownNamingPolicy.CamelCase,
+    WriteIndented = false,
+    DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)]
+public partial class AppJsonContext : JsonSerializerContext { }
+```
+
+### Memory Optimization
+
+```csharp
+// Configure for minimal memory allocation
+var options = new JsonSerializerOptions
+{
+    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+    DefaultBufferSize = 4096, // Adjust based on typical message size
+    MaxDepth = 32 // Prevent deep recursion
+};
+```
+
+## Compatibility Notes
+
+- **AOT Support**: Full support for Native AOT when using source generation
+- **Trimming**: Compatible with IL trimming when properly configured
+- **Performance**: Optimized for high-throughput Lambda scenarios
+- **Memory Usage**: Efficient memory allocation patterns for serverless environments
+
+## Migration from Newtonsoft.Json
+
+If migrating from Newtonsoft.Json, consider these differences:
+
+```csharp
+// Newtonsoft.Json attribute
+[JsonProperty("customer_name")]
+public string CustomerName { get; set; }
+
+// System.Text.Json equivalent
+[JsonPropertyName("customer_name")]
+public string CustomerName { get; set; }
+```
+
+## Related Packages
+
+- [AWS.Lambda.Powertools.Kafka.Avro](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Avro/) - Avro serialization
+- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) - Protobuf serialization
+- [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages/AWS.Lambda.Powertools.Logging/) - Structured logging
+- [AWS.Lambda.Powertools.Tracing](https://www.nuget.org/packages/AWS.Lambda.Powertools.Tracing/) - Distributed tracing
+
+## Documentation
+
+For more detailed documentation and examples, visit the [official documentation](https://docs.powertools.aws.dev/lambda/dotnet/).
+
+## License
+
+This library is licensed under the Apache License 2.0.
\ No newline at end of file
diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md
index 16da5ccb4..2d10be09c 100644
--- a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md
+++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md
@@ -1 +1,213 @@
-# Powertools for AWS Lambda (.NET) - Kafka
\ No newline at end of file
+# Powertools for AWS Lambda (.NET) - Kafka Protobuf
+
+A specialized Lambda serializer for handling Kafka events with Protocol Buffers (Protobuf) formatted data in .NET Lambda functions.
+
+## Features
+
+- **Automatic Protobuf Deserialization**: Seamlessly converts Protobuf binary data from Kafka records into strongly-typed .NET objects
+- **Base64 Decoding**: Handles base64-encoded Protobuf data from Kafka events automatically
+- **Type Safety**: Leverages compile-time type checking with Protobuf-generated classes
+- **Flexible Configuration**: Supports custom JSON serialization options and AOT-compatible contexts
+- **Performance Optimized**: Efficient binary serialization format for high-throughput scenarios
+- **Error Handling**: Provides clear error messages for serialization failures
+
+## Installation
+
+```bash
+dotnet add package AWS.Lambda.Powertools.Kafka.Protobuf
+```
+
+## Quick Start
+
+### 1. Configure the Serializer
+
+Add the serializer to your Lambda function assembly:
+
+```csharp
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+```
+
+### 2. Define Your Protobuf Model
+
+Create your `.proto` file and generate C# classes:
+
+```protobuf
+syntax = "proto3";
+
+message Customer {
+  string id = 1;
+  string name = 2;
+  int32 age = 3;
+  string email = 4;
+}
+```
+
+The generated C# class will implement `IMessage<Customer>`:
+
+```csharp
+public partial class Customer : IMessage<Customer>
+{
+    public string Id { get; set; } = "";
+    public string Name { get; set; } = "";
+    public int Age { get; set; }
+    public string Email { get; set; } = "";
+
+    // Generated Protobuf methods...
+}
+```
+
+### 3. Create Your Lambda Handler
+
+```csharp
+public class Function
+{
+    public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            Customer customer = record.Value; // Automatically deserialized from Protobuf
+            context.Logger.LogInformation($"Processing customer: {customer.Name}, Age: {customer.Age}");
+        }
+    }
+}
+```
+
+## Advanced Configuration
+
+### Custom JSON Options
+
+```csharp
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+
+// In your startup or configuration
+var jsonOptions = new JsonSerializerOptions
+{
+    PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
+    WriteIndented = true
+};
+
+var serializer = new PowertoolsKafkaProtobufSerializer(jsonOptions);
+```
+
+### AOT-Compatible Serialization
+
+```csharp
+[JsonSerializable(typeof(ConsumerRecords<string, Customer>))]
+public partial class MyJsonContext : JsonSerializerContext { }
+
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]
+
+// Configure with AOT context
+var serializer = new PowertoolsKafkaProtobufSerializer(MyJsonContext.Default);
+```
+
+### Complex Message Types
+
+```csharp
+// Nested message example
+public class Function
+{
+    public void Handler(ConsumerRecords<string, Order> records, ILambdaContext context)
+    {
+        foreach (var record in records)
+        {
+            Order order = record.Value;
+            context.Logger.LogInformation($"Order {order.Id} from {order.Customer.Name}");
+
+            foreach (var item in order.Items)
+            {
+                context.Logger.LogInformation($"  Item: {item.Name}, Qty: {item.Quantity}");
+            }
+        }
+    }
+}
+```
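+
+The `Order` message used above is not shipped with this package; a nested schema along these lines (illustrative only, defined alongside the `Customer` message above) would generate the types the handler expects:
+
+```protobuf
+// Hypothetical nested messages backing the handler example above.
+message OrderItem {
+  string name = 1;
+  int32 quantity = 2;
+}
+
+message Order {
+  string id = 1;
+  Customer customer = 2;
+  repeated OrderItem items = 3;
+}
+```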
+
+## Requirements
+
+- **.NET 6.0+**: This library targets .NET 6.0 and later versions
+- **Google.Protobuf**: Requires the Google Protocol Buffers library for .NET
+- **Protobuf Compiler**: Use `protoc` to generate C# classes from `.proto` files
+- **IMessage Implementation**: Your data classes must implement `IMessage<T>`
+- **AWS Lambda**: Designed specifically for AWS Lambda runtime environments
+
+## Protobuf Code Generation
+
+### Using protoc directly
+
+```bash
+protoc --csharp_out=. customer.proto
+```
+
+### Using MSBuild integration
+
+Add to your `.csproj` (for example, using the `Grpc.Tools` package):
+
+```xml
+<ItemGroup>
+  <!-- Grpc.Tools provides protoc and MSBuild targets for code generation -->
+  <PackageReference Include="Grpc.Tools" Version="2.*" PrivateAssets="all" />
+  <!-- GrpcServices="None" generates message classes only, no service stubs -->
+  <Protobuf Include="customer.proto" GrpcServices="None" />
+</ItemGroup>
+```
+
+## Error Handling
+
+The serializer provides detailed error messages for common issues:
+
+```csharp
+// Missing IMessage implementation
+InvalidOperationException: "Unsupported type for Protobuf deserialization: MyClass.
+Protobuf deserialization requires a type that implements IMessage."
+
+// Deserialization failures
+SerializationException: "Failed to deserialize value data: [specific error details]"
+```
+
+## Performance Benefits
+
+Protocol Buffers offer several advantages for high-throughput Lambda functions:
+
+- **Compact Binary Format**: Smaller message sizes compared to JSON
+- **Fast Serialization**: Optimized binary encoding/decoding
+- **Schema Evolution**: Forward and backward compatibility
+- **Strong Typing**: Compile-time validation of message structure
+
+## Schema Evolution
+
+Protobuf supports schema evolution while maintaining compatibility:
+
+```protobuf
+// Version 1
+message Customer {
+  string id = 1;
+  string name = 2;
+}
+
+// Version 2 - Added optional fields
+message Customer {
+  string id = 1;
+  string name = 2;
+  int32 age = 3;    // New optional field
+  string email = 4; // Another new field
+}
+```
+
+## Compatibility Notes
+
+- **Reflection Requirements**: Uses reflection to instantiate Protobuf types, which may impact AOT compilation
+- **Trimming**: May require additional configuration for self-contained deployments with trimming enabled
+- **Performance**: Optimized for high-throughput scenarios and Lambda execution patterns
+- **Schema Registry**: Compatible with Confluent Schema Registry for centralized schema management
+
+## Related Packages
+
+- [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages/AWS.Lambda.Powertools.Logging/) - Structured logging
+- [AWS.Lambda.Powertools.Tracing](https://www.nuget.org/packages/AWS.Lambda.Powertools.Tracing/) - Distributed tracing
+- [Google.Protobuf](https://www.nuget.org/packages/Google.Protobuf/) - Protocol Buffers runtime library
+
+## Documentation
+
+For more detailed documentation and examples, visit the [official documentation](https://docs.powertools.aws.dev/lambda/dotnet/).
+
+## License
+
+This library is licensed under the Apache License 2.0.
\ No newline at end of file