diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index ba06d4429..206a028f8 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -39,6 +39,9 @@ jobs: with: languages: ${{ matrix.language }} + - name: Install global tools + run: dotnet tool install --global Apache.Avro.Tools + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild diff --git a/.github/workflows/examples-tests.yml b/.github/workflows/examples-tests.yml index 83522084d..77b322e18 100644 --- a/.github/workflows/examples-tests.yml +++ b/.github/workflows/examples-tests.yml @@ -33,6 +33,9 @@ jobs: - name: Install dependencies run: dotnet restore + - name: Install global tools + run: dotnet tool install --global Apache.Avro.Tools + - name: Build run: dotnet build --configuration Release --no-restore /tl diff --git a/examples/Kafka/Avro/src/Avro.csproj b/examples/Kafka/Avro/src/Avro.csproj new file mode 100644 index 000000000..05314f2fb --- /dev/null +++ b/examples/Kafka/Avro/src/Avro.csproj @@ -0,0 +1,35 @@ + + + Exe + net8.0 + enable + enable + true + Lambda + + true + + true + + Avro.Example + + + + + + + + + + + + + + + + + + PreserveNewest + + + \ No newline at end of file diff --git a/examples/Kafka/Avro/src/CustomerProfile.avsc b/examples/Kafka/Avro/src/CustomerProfile.avsc new file mode 100644 index 000000000..bf8cc090c --- /dev/null +++ b/examples/Kafka/Avro/src/CustomerProfile.avsc @@ -0,0 +1,46 @@ +{ + "type": "record", + "name": "CustomerProfile", + "namespace": "com.example", + "fields": [ + {"name": "user_id", "type": "string"}, + {"name": "full_name", "type": "string"}, + {"name": "email", "type": { + "type": "record", + "name": "EmailAddress", + "fields": [ + {"name": "address", "type": "string"}, + {"name": "verified", "type": "boolean"}, + {"name": "primary", "type": "boolean"} + ] + }}, + {"name": "age", "type": "int"}, + {"name": "address", "type": { + "type": "record", + "name": "Address", + "fields": [ + {"name": "street", "type": "string"}, + {"name": "city", "type": "string"}, + {"name": "state", "type": "string"}, + {"name": "country", "type": "string"}, + {"name": "zip_code", "type": "string"} + ] + }}, + {"name": "phone_numbers", "type": { + "type": "array", + "items": { + "type": "record", + "name": "PhoneNumber", + "fields": [ + {"name": "number", "type": "string"}, + {"name": "type", "type": {"type": "enum", "name": "PhoneType", "symbols": ["HOME", "WORK", "MOBILE"]}} + ] + } + }}, + {"name": "preferences", "type": { + "type": "map", + "values": "string" + }}, + {"name": "account_status", "type": {"type": "enum", "name": "AccountStatus", "symbols": ["ACTIVE", "INACTIVE", "SUSPENDED"]}} + ] +} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Function.cs b/examples/Kafka/Avro/src/Function.cs new file mode 100644 index 000000000..6ca9ebdb5 --- /dev/null +++ b/examples/Kafka/Avro/src/Function.cs @@ -0,0 +1,21 @@ +using Amazon.Lambda.Core; +using Amazon.Lambda.RuntimeSupport; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Avro; +using AWS.Lambda.Powertools.Logging; +using com.example; + +string Handler(ConsumerRecords records, ILambdaContext context) +{ + foreach (var record in records) + { + Logger.LogInformation("Record Value: {@record}", record.Value); + } + + return "Processed " + records.Count() + " records"; +} + +await 
LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs b/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs new file mode 100644 index 000000000..c7809f518 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/AccountStatus.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum AccountStatus + { + ACTIVE, + INACTIVE, + SUSPENDED, + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/Address.cs b/examples/Kafka/Avro/src/Generated/com/example/Address.cs new file mode 100644 index 000000000..e2053e0f2 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/Address.cs @@ -0,0 +1,115 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class Address : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"Address\",\"namespace\":\"com.example\",\"fields\":[{\"name\":\"st" + + "reet\",\"type\":\"string\"},{\"name\":\"city\",\"type\":\"string\"},{\"name\":\"state\",\"type\":\"s" + + "tring\"},{\"name\":\"country\",\"type\":\"string\"},{\"name\":\"zip_code\",\"type\":\"string\"}]}" + + ""); + private string _street; + private string _city; + private string _state; + private string _country; + private string _zip_code; + public virtual global::Avro.Schema Schema + { + get + { + return Address._SCHEMA; + } + } + public string street + { + get + { + return this._street; + } + set + { + this._street = value; + } + } + public string city + { + get + { + return this._city; + } + set + { + this._city = value; + } + } + public string state + { + get + { + return this._state; + } + set + { + this._state = value; + } + } + public string country + { + get + { + return this._country; + } + set + { + this._country = value; + } + } + public string zip_code + { + get + { + return this._zip_code; + } + set + { + this._zip_code = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.street; + case 1: return this.city; + case 2: return this.state; + case 3: return this.country; + case 4: return 
this.zip_code; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.street = (System.String)fieldValue; break; + case 1: this.city = (System.String)fieldValue; break; + case 2: this.state = (System.String)fieldValue; break; + case 3: this.country = (System.String)fieldValue; break; + case 4: this.zip_code = (System.String)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs b/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs new file mode 100644 index 000000000..15d62095d --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/CustomerProfile.cs @@ -0,0 +1,154 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class CustomerProfile : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""CustomerProfile"",""namespace"":""com.example"",""fields"":[{""name"":""user_id"",""type"":""string""},{""name"":""full_name"",""type"":""string""},{""name"":""email"",""type"":{""type"":""record"",""name"":""EmailAddress"",""namespace"":""com.example"",""fields"":[{""name"":""address"",""type"":""string""},{""name"":""verified"",""type"":""boolean""},{""name"":""primary"",""type"":""boolean""}]}},{""name"":""age"",""type"":""int""},{""name"":""address"",""type"":{""type"":""record"",""name"":""Address"",""namespace"":""com.example"",""fields"":[{""name"":""street"",""type"":""string""},{""name"":""city"",""type"":""string""},{""name"":""state"",""type"":""string""},{""name"":""country"",""type"":""string""},{""name"":""zip_code"",""type"":""string""}]}},{""name"":""phone_numbers"",""type"":{""type"":""array"",""items"":{""type"":""record"",""name"":""PhoneNumber"",""namespace"":""com.example"",""fields"":[{""name"":""number"",""type"":""string""},{""name"":""type"",""type"":{""type"":""enum"",""name"":""PhoneType"",""namespace"":""com.example"",""symbols"":[""HOME"",""WORK"",""MOBILE""]}}]}}},{""name"":""preferences"",""type"":{""type"":""map"",""values"":""string""}},{""name"":""account_status"",""type"":{""type"":""enum"",""name"":""AccountStatus"",""namespace"":""com.example"",""symbols"":[""ACTIVE"",""INACTIVE"",""SUSPENDED""]}}]}"); + private string _user_id; + private string _full_name; + private com.example.EmailAddress _email; + private int _age; + private com.example.Address _address; + private IList _phone_numbers; + private IDictionary _preferences; + private com.example.AccountStatus _account_status; + public virtual global::Avro.Schema Schema + { + get + { + return CustomerProfile._SCHEMA; + } + } + public string user_id + { + get + { + return this._user_id; + } + set + { + this._user_id 
= value; + } + } + public string full_name + { + get + { + return this._full_name; + } + set + { + this._full_name = value; + } + } + public com.example.EmailAddress email + { + get + { + return this._email; + } + set + { + this._email = value; + } + } + public int age + { + get + { + return this._age; + } + set + { + this._age = value; + } + } + public com.example.Address address + { + get + { + return this._address; + } + set + { + this._address = value; + } + } + public IList phone_numbers + { + get + { + return this._phone_numbers; + } + set + { + this._phone_numbers = value; + } + } + public IDictionary preferences + { + get + { + return this._preferences; + } + set + { + this._preferences = value; + } + } + public com.example.AccountStatus account_status + { + get + { + return this._account_status; + } + set + { + this._account_status = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.user_id; + case 1: return this.full_name; + case 2: return this.email; + case 3: return this.age; + case 4: return this.address; + case 5: return this.phone_numbers; + case 6: return this.preferences; + case 7: return this.account_status; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.user_id = (System.String)fieldValue; break; + case 1: this.full_name = (System.String)fieldValue; break; + case 2: this.email = (com.example.EmailAddress)fieldValue; break; + case 3: this.age = (System.Int32)fieldValue; break; + case 4: this.address = (com.example.Address)fieldValue; break; + case 5: this.phone_numbers = (IList)fieldValue; break; + case 6: this.preferences = (IDictionary)fieldValue; break; + case 7: this.account_status = (com.example.AccountStatus)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs b/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs new file mode 100644 index 000000000..4a25a6e0b --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/EmailAddress.cs @@ -0,0 +1,86 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class EmailAddress : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"EmailAddress\",\"namespace\":\"com.example\",\"fields\":[{\"name" + + "\":\"address\",\"type\":\"string\"},{\"name\":\"verified\",\"type\":\"boolean\"},{\"name\":\"prima" + + "ry\",\"type\":\"boolean\"}]}"); + private string _address; + private bool _verified; + private bool _primary; + public virtual global::Avro.Schema Schema + { + get + { + return EmailAddress._SCHEMA; + } + } + public string address + { + get + { + return this._address; + } 
+ set + { + this._address = value; + } + } + public bool verified + { + get + { + return this._verified; + } + set + { + this._verified = value; + } + } + public bool primary + { + get + { + return this._primary; + } + set + { + this._primary = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.address; + case 1: return this.verified; + case 2: return this.primary; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.address = (System.String)fieldValue; break; + case 1: this.verified = (System.Boolean)fieldValue; break; + case 2: this.primary = (System.Boolean)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs b/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs new file mode 100644 index 000000000..ea3d2b8ed --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/PhoneNumber.cs @@ -0,0 +1,72 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class PhoneNumber : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"PhoneNumber\",\"namespace\":\"com.example\",\"fields\":[{\"name\"" + + ":\"number\",\"type\":\"string\"},{\"name\":\"type\",\"type\":{\"type\":\"enum\",\"name\":\"PhoneTyp" + + "e\",\"namespace\":\"com.example\",\"symbols\":[\"HOME\",\"WORK\",\"MOBILE\"]}}]}"); + private string _number; + private com.example.PhoneType _type; + public virtual global::Avro.Schema Schema + { + get + { + return PhoneNumber._SCHEMA; + } + } + public string number + { + get + { + return this._number; + } + set + { + this._number = value; + } + } + public com.example.PhoneType type + { + get + { + return this._type; + } + set + { + this._type = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.number; + case 1: return this.type; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.number = (System.String)fieldValue; break; + case 1: this.type = (com.example.PhoneType)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs b/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs new file mode 100644 index 000000000..f592d8692 --- /dev/null +++ b/examples/Kafka/Avro/src/Generated/com/example/PhoneType.cs @@ -0,0 +1,23 @@ +// 
------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace com.example +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum PhoneType + { + HOME, + WORK, + MOBILE, + } +} diff --git a/examples/Kafka/Avro/src/Readme.md b/examples/Kafka/Avro/src/Readme.md new file mode 100644 index 000000000..23e64e8e2 --- /dev/null +++ b/examples/Kafka/Avro/src/Readme.md @@ -0,0 +1,131 @@ +# AWS Powertools for AWS Lambda .NET - Kafka Avro Example + +This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics. + +## Overview + +This example showcases a Lambda functions that consume messages from Kafka topics with Avro serialization format. + +It uses the `AWS.Lambda.Powertools.Kafka.Avro` NuGet package to easily deserialize and process Kafka records. + +## Project Structure + +```bash +examples/Kafka/Avro/src/ +├── Function.cs # Entry point for the Lambda function +├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment +├── template.yaml # AWS SAM template for deploying the function +├── CustomerProfile.avsc # Avro schema definition file for the data structure used in the Kafka messages +└── kafka-avro-event.json # Sample Avro event to test the function +``` + +## Prerequisites + +- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later) +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) +- [AWS CLI](https://aws.amazon.com/cli/) +- An AWS account with appropriate permissions +- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from +- [AWS.Lambda.Powertools.Kafka.Avro](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Avro/) NuGet package installed in your project +- [Avro Tools](https://www.nuget.org/packages/Apache.Avro.Tools/) codegen tool to generate C# classes from the Avro schema + +## Installation + +1. Clone the repository: + + ```bash + git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git + ``` + +2. Navigate to the project directory: + + ```bash + cd powertools-lambda-dotnet/examples/Kafka/Avro/src + ``` + +3. Build the project: + + ```bash + dotnet build + ``` +4. Install the Avro Tools globally to generate C# classes from the Avro schema: + + ```bash + dotnet tool install --global Apache.Avro.Tools + ``` + +## Deployment + +Deploy the application using the AWS SAM CLI: + +```bash +sam build +sam deploy --guided +``` + +Follow the prompts to configure your deployment. + +## Avro Format +Avro is a binary serialization format that provides a compact and efficient way to serialize structured data. It uses schemas to define the structure of the data, which allows for robust data evolution. + +In this example we provide a schema called `CustomerProfile.avsc`. The schema is used to serialize and deserialize the data in the Kafka messages. 
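+
+For reference, the C# classes under `Generated/` can be produced from `CustomerProfile.avsc` with the `avrogen` CLI that ships with the `Apache.Avro.Tools` global tool installed above. This is a minimal sketch; the output directory shown here is an assumption and can be adjusted:
+
+```bash
+# Generate C# classes from the Avro schema into the Generated/ folder
+avrogen -s CustomerProfile.avsc ./Generated
+```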
+ +The classes are generated from the .cs file using the Avro Tools command: + +```xml + + + +``` + +## Usage Examples + +Once deployed, you can test the Lambda function by sending a sample Avro event to the configured Kafka topic. +You can use the `kafka-avro-event.json` file as a sample event to test the function. + +### Testing + +You can test the function locally using the AWS SAM CLI (Requires Docker to be installed): + +```bash +sam local invoke AvroDeserializationFunction --event kafka-avro-event.json +``` + +This command simulates an invocation of the Lambda function with the provided event data. + +## How It Works + +1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source. +2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format. +3. **Processing**: Each record is processed within the handler function. + +## Event Deserialization + +Pass the `PowertoolsKafkaAvroSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable Avro deserialization of Kafka records: + +```csharp +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaAvroSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); + ``` + +## Configuration + +The SAM template (`template.yaml`) defines three Lambda function: + +- **AvroDeserializationFunction**: Handles Avro-formatted Kafka messages + +## Customization + +To customize the examples: + +1. Modify the schema definitions to match your data structures +2. Update the handler logic to process the records according to your requirements + +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) +- [Apache Avro Documentation](https://avro.apache.org/docs/) \ No newline at end of file diff --git a/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..cd93437eb --- /dev/null +++ b/examples/Kafka/Avro/src/aws-lambda-tools-defaults.json @@ -0,0 +1,15 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." 
+ ], + "profile": "", + "region": "", + "configuration": "Release", + "function-runtime": "dotnet8", + "function-memory-size": 512, + "function-timeout": 30, + "function-handler": "Avro.Example" +} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/kafka-avro-event.json b/examples/Kafka/Avro/src/kafka-avro-event.json new file mode 100644 index 000000000..6f5e045e3 --- /dev/null +++ b/examples/Kafka/Avro/src/kafka-avro-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "EnVzZXJfOTc1NBxVc2VyIHVzZXJfOTc1NCh1c2VyXzk3NTRAaWNsb3VkLmNvbQABahg5MzQwIE1haW4gU3QQU2FuIEpvc2UEQ0EGVVNBCjM5NTk2AhgyNDQtNDA3LTg4NzECAAYQdGltZXpvbmUOZW5hYmxlZBBsYW5ndWFnZRBkaXNhYmxlZBpub3RpZmljYXRpb25zCGRhcmsABA==", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/Avro/src/template.yaml b/examples/Kafka/Avro/src/template.yaml new file mode 100644 index 000000000..a08325be2 --- /dev/null +++ b/examples/Kafka/Avro/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + AvroDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: Avro.Example + Architectures: + - x86_64 + Tracing: Active + Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file diff --git a/examples/Kafka/Json/src/Function.cs b/examples/Kafka/Json/src/Function.cs new file mode 100644 index 000000000..d7d96bfca --- /dev/null +++ b/examples/Kafka/Json/src/Function.cs @@ -0,0 +1,21 @@ +using Amazon.Lambda.Core; +using Amazon.Lambda.RuntimeSupport; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Json; +using AWS.Lambda.Powertools.Logging; +using Json.Models; + +string Handler(ConsumerRecords records, ILambdaContext context) +{ + foreach (var record in records) + { + Logger.LogInformation("Record Value: {@record}", record.Value); + } + + return "Processed " + records.Count() + " records"; +} + +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaJsonSerializer for Json serialization + .Build() + .RunAsync(); \ No newline at end of file diff --git a/examples/Kafka/Json/src/Json.csproj b/examples/Kafka/Json/src/Json.csproj new file mode 100644 index 000000000..aba6cde89 --- /dev/null +++ b/examples/Kafka/Json/src/Json.csproj @@ -0,0 +1,30 @@ + + + Exe + net8.0 + enable + enable + true + Lambda + + true + + true + + + + + + + + + + + + 
+ + PreserveNewest + + + + \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/Address.cs b/examples/Kafka/Json/src/Models/Address.cs new file mode 100644 index 000000000..a011b3cee --- /dev/null +++ b/examples/Kafka/Json/src/Models/Address.cs @@ -0,0 +1,16 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class Address +{ + [JsonPropertyName("street")] public string Street { get; set; } + + [JsonPropertyName("city")] public string City { get; set; } + + [JsonPropertyName("state")] public string State { get; set; } + + [JsonPropertyName("country")] public string Country { get; set; } + + [JsonPropertyName("zip_code")] public string ZipCode { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/CustomerProfile.cs b/examples/Kafka/Json/src/Models/CustomerProfile.cs new file mode 100644 index 000000000..1e7ab62b6 --- /dev/null +++ b/examples/Kafka/Json/src/Models/CustomerProfile.cs @@ -0,0 +1,22 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class CustomerProfile +{ + [JsonPropertyName("user_id")] public string UserId { get; set; } + + [JsonPropertyName("full_name")] public string FullName { get; set; } + + [JsonPropertyName("email")] public Email Email { get; set; } + + [JsonPropertyName("age")] public long Age { get; set; } + + [JsonPropertyName("address")] public Address Address { get; set; } + + [JsonPropertyName("phone_numbers")] public List PhoneNumbers { get; set; } + + [JsonPropertyName("preferences")] public Preferences Preferences { get; set; } + + [JsonPropertyName("account_status")] public string AccountStatus { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/Email.cs b/examples/Kafka/Json/src/Models/Email.cs new file mode 100644 index 000000000..045118baf --- /dev/null +++ b/examples/Kafka/Json/src/Models/Email.cs @@ -0,0 +1,12 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class Email +{ + [JsonPropertyName("address")] public string Address { get; set; } + + [JsonPropertyName("verified")] public bool Verified { get; set; } + + [JsonPropertyName("primary")] public bool Primary { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/PhoneNumber.cs b/examples/Kafka/Json/src/Models/PhoneNumber.cs new file mode 100644 index 000000000..7681265d1 --- /dev/null +++ b/examples/Kafka/Json/src/Models/PhoneNumber.cs @@ -0,0 +1,10 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class PhoneNumber +{ + [JsonPropertyName("number")] public string Number { get; set; } + + [JsonPropertyName("type")] public string Type { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Models/Preferences.cs b/examples/Kafka/Json/src/Models/Preferences.cs new file mode 100644 index 000000000..5dd84aa99 --- /dev/null +++ b/examples/Kafka/Json/src/Models/Preferences.cs @@ -0,0 +1,12 @@ +using System.Text.Json.Serialization; + +namespace Json.Models; + +public partial class Preferences +{ + [JsonPropertyName("language")] public string Language { get; set; } + + [JsonPropertyName("notifications")] public string Notifications { get; set; } + + [JsonPropertyName("timezone")] public string Timezone { get; set; } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/Readme.md b/examples/Kafka/Json/src/Readme.md new file mode 100644 index 000000000..4315f2da7 --- /dev/null +++ 
b/examples/Kafka/Json/src/Readme.md @@ -0,0 +1,111 @@ +# AWS Powertools for AWS Lambda .NET - Kafka Json Example + +This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics. + +## Overview + +This example showcases a Lambda functions that consume messages from Kafka topics with Json serialization format. + +It uses the `AWS.Lambda.Powertools.Kafka.Json` NuGet package to easily deserialize and process Kafka records. + +## Project Structure + +```bash +examples/Kafka/Json/src/ +├── Function.cs # Entry point for the Lambda function +├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment +├── template.yaml # AWS SAM template for deploying the function +└── kafka-json-event.json # Sample Json event to test the function +``` + +## Prerequisites + +- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later) +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) +- [AWS CLI](https://aws.amazon.com/cli/) +- An AWS account with appropriate permissions +- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from +- [AWS.Lambda.Powertools.Kafka.Json](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Json/) NuGet package installed in your project + +## Installation + +1. Clone the repository: + + ```bash + git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git + ``` + +2. Navigate to the project directory: + + ```bash + cd powertools-lambda-dotnet/examples/Kafka/Json/src + ``` + +3. Build the project: + + ```bash + dotnet build + ``` + +## Deployment + +Deploy the application using the AWS SAM CLI: + +```bash +sam build +sam deploy --guided +``` + +Follow the prompts to configure your deployment. + + +## Usage Examples + +Once deployed, you can test the Lambda function by sending a sample Json event to the configured Kafka topic. +You can use the `kafka-json-event.json` file as a sample event to test the function. + +### Testing + +You can test the function locally using the AWS SAM CLI (Requires Docker to be installed): + +```bash +sam local invoke JsonDeserializationFunction --event kafka-json-event.json +``` + +This command simulates an invocation of the Lambda function with the provided event data. + +## How It Works + +1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source. +2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format. +3. **Processing**: Each record is processed within the handler function. + +## Event Deserialization + +Pass the `PowertoolsKafkaJsonSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable JSON deserialization of Kafka records: + +```csharp +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaJsonSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); + ``` + +## Configuration + +The SAM template (`template.yaml`) defines three Lambda function: + +- **JsonDeserializationFunction**: Handles json-formatted Kafka messages + +## Customization + +To customize the examples: + +1. Modify the schema definitions to match your data structures +2. 
Update the handler logic to process the records according to your requirements + +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) \ No newline at end of file diff --git a/examples/Kafka/Json/src/aws-lambda-tools-defaults.json b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..fb3240903 --- /dev/null +++ b/examples/Kafka/Json/src/aws-lambda-tools-defaults.json @@ -0,0 +1,15 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." + ], + "profile": "", + "region": "", + "configuration": "Release", + "function-runtime": "dotnet8", + "function-memory-size": 512, + "function-timeout": 30, + "function-handler": "Json" +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/kafka-json-event.json b/examples/Kafka/Json/src/kafka-json-event.json new file mode 100644 index 000000000..66dc2ab5a --- /dev/null +++ b/examples/Kafka/Json/src/kafka-json-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "eyJwaG9uZV9udW1iZXJzIjpbeyJudW1iZXIiOiIyNDQtNDA3LTg4NzEiLCJ0eXBlIjoiV09SSyJ9XSwicHJlZmVyZW5jZXMiOnsidGltZXpvbmUiOiJlbmFibGVkIiwibGFuZ3VhZ2UiOiJkaXNhYmxlZCIsIm5vdGlmaWNhdGlvbnMiOiJkYXJrIn0sImZ1bGxfbmFtZSI6IlVzZXIgdXNlcl85NzU0IiwiYWRkcmVzcyI6eyJjb3VudHJ5IjoiVVNBIiwiY2l0eSI6IlNhbiBKb3NlIiwic3RyZWV0IjoiOTM0MCBNYWluIFN0Iiwic3RhdGUiOiJDQSIsInppcF9jb2RlIjoiMzk1OTYifSwidXNlcl9pZCI6InVzZXJfOTc1NCIsImFjY291bnRfc3RhdHVzIjoiU1VTUEVOREVEIiwiYWdlIjo1MywiZW1haWwiOnsiYWRkcmVzcyI6InVzZXJfOTc1NEBpY2xvdWQuY29tIiwidmVyaWZpZWQiOmZhbHNlLCJwcmltYXJ5Ijp0cnVlfX0=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/Json/src/template.yaml b/examples/Kafka/Json/src/template.yaml new file mode 100644 index 000000000..dd4bfb9ff --- /dev/null +++ b/examples/Kafka/Json/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + JsonDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: Json + Architectures: + - x86_64 + Tracing: Active + Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: 
PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto b/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto new file mode 100644 index 000000000..9c69b1c41 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/CustomerProfile.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; + +package com.example; + +enum PhoneType { + HOME = 0; + WORK = 1; + MOBILE = 2; +} + +enum AccountStatus { + ACTIVE = 0; + INACTIVE = 1; + SUSPENDED = 2; +} + +// EmailAddress message +message EmailAddress { + string address = 1; + bool verified = 2; + bool primary = 3; +} + +// Address message +message Address { + string street = 1; + string city = 2; + string state = 3; + string country = 4; + string zip_code = 5; +} + +// PhoneNumber message +message PhoneNumber { + string number = 1; + PhoneType type = 2; +} + +// CustomerProfile message +message CustomerProfile { + string user_id = 1; + string full_name = 2; + EmailAddress email = 3; + int32 age = 4; + Address address = 5; + repeated PhoneNumber phone_numbers = 6; + map preferences = 7; + AccountStatus account_status = 8; +} \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/Function.cs b/examples/Kafka/JsonClassLibrary/src/Function.cs new file mode 100644 index 000000000..98795029e --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/Function.cs @@ -0,0 +1,32 @@ +using Amazon.Lambda.Core; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Protobuf; +using AWS.Lambda.Powertools.Logging; +using Com.Example; + +// Assembly attribute to enable the Lambda function's JSON input to be converted into a .NET class. 
+[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))] + +namespace ProtoBufClassLibrary; + +public class Function +{ + public string FunctionHandler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Logger.LogInformation("Processing messagem from topic: {topic}", record.Topic); + Logger.LogInformation("Partition: {partition}, Offset: {offset}", record.Partition, record.Offset); + Logger.LogInformation("Produced at: {timestamp}", record.Timestamp); + + foreach (var header in record.Headers.DecodedValues()) + { + Logger.LogInformation($"{header.Key}: {header.Value}"); + } + + Logger.LogInformation("Processing order for: {fullName}", record.Value.FullName); + } + + return "Processed " + records.Count() + " records"; + } +} \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj b/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj new file mode 100644 index 000000000..a28e1a2f8 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/ProtoBufClassLibrary.csproj @@ -0,0 +1,42 @@ + + + net8.0 + enable + enable + true + Lambda + + true + + true + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + PreserveNewest + + + + + Client + Public + True + True + obj/Debug/net8.0/ + MSBuild:Compile + PreserveNewest + + + + \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/Readme.md b/examples/Kafka/JsonClassLibrary/src/Readme.md new file mode 100644 index 000000000..ae7e610f4 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/Readme.md @@ -0,0 +1,130 @@ +# AWS Powertools for AWS Lambda .NET - Kafka Protobuf Example + +This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics. + +## Overview + +This example showcases a Lambda functions that consume messages from Kafka topics with Protocol Buffers serialization format. + +It uses the `AWS.Lambda.Powertools.Kafka.Protobuf` NuGet package to easily deserialize and process Kafka records. + +## Project Structure + +```bash +examples/Kafka/Protobuf/src/ +├── Function.cs # Entry point for the Lambda function +├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment +├── template.yaml # AWS SAM template for deploying the function +├── CustomerProfile.proto # Protocol Buffers definition file for the data structure used in the Kafka messages +└── kafka-protobuf-event.json # Sample Protocol Buffers event to test the function +``` + +## Prerequisites + +- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later) +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) +- [AWS CLI](https://aws.amazon.com/cli/) +- An AWS account with appropriate permissions +- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from +- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) NuGet package installed in your project + +## Installation + +1. Clone the repository: + + ```bash + git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git + ``` + +2. Navigate to the project directory: + + ```bash + cd powertools-lambda-dotnet/examples/Kafka/Protobuf/src + ``` + +3. 
Build the project: + + ```bash + dotnet build + ``` + +## Deployment + +Deploy the application using the AWS SAM CLI: + +```bash +sam build +sam deploy --guided +``` + +Follow the prompts to configure your deployment. + +## Protocol Buffers Format + +The Protobuf example handles messages serialized with Protocol Buffers. The schema is defined in a `.proto` file (which would need to be created), and the C# code is generated from that schema. + +This requires the `Grpc.Tools` package to deserialize the messages correctly. + +And update the `.csproj` file to include the `.proto` files. + +```xml + + Client + Public + True + True + obj\Debug/net8.0/ + MSBuild:Compile + PreserveNewest + +``` + +## Usage Examples + +Once deployed, you can test the Lambda function by sending a sample Protocol Buffers event to the configured Kafka topic. +You can use the `kafka-protobuf-event.json` file as a sample event to test the function. + +### Testing + +You can test the function locally using the AWS SAM CLI (Requires Docker to be installed): + +```bash +sam local invoke ProtobufDeserializationFunction --event kafka-protobuf-event.json +``` + +This command simulates an invocation of the Lambda function with the provided event data. + +## How It Works + +1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source. +2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format. +3. **Processing**: Each record is processed within the handler function. + +## Event Deserialization + +Pass the `PowertoolsKafkaProtobufSerializer` to the `[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))]`: + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))] + ``` + +## Configuration + +The SAM template (`template.yaml`) defines three Lambda function: + +- **ProtobufDeserializationFunction**: Handles Protobuf-formatted Kafka messages + +## Customization + +To customize the examples: + +1. Modify the schema definitions to match your data structures +2. Update the handler logic to process the records according to your requirements +3. Ensure you have the proper `.proto` files and that they are included in your project for Protocol Buffers serialization/deserialization. + +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) +- [Protocol Buffers Documentation](https://developers.google.com/protocol-buffers) \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json b/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..d4ec43f14 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/aws-lambda-tools-defaults.json @@ -0,0 +1,16 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." 
+ ], + "profile": "", + "region": "", + "configuration": "Release", + "function-architecture": "x86_64", + "function-runtime": "dotnet8", + "function-memory-size": 512, + "function-timeout": 30, + "function-handler": "ProtoBufClassLibrary::ProtoBufClassLibrary.Function::FunctionHandler" +} \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json b/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json new file mode 100644 index 000000000..6731ceb40 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/kafka-protobuf-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "Cgl1c2VyXzk3NTQSDlVzZXIgdXNlcl85NzU0GhgKFHVzZXJfOTc1NEBpY2xvdWQuY29tGAEgNSooCgw5MzQwIE1haW4gU3QSCFNhbiBKb3NlGgJDQSIDVVNBKgUzOTU5NjIQCgwyNDQtNDA3LTg4NzEQAToUCghsYW5ndWFnZRIIZGlzYWJsZWQ6FQoNbm90aWZpY2F0aW9ucxIEZGFyazoTCgh0aW1lem9uZRIHZW5hYmxlZEAC", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/JsonClassLibrary/src/template.yaml b/examples/Kafka/JsonClassLibrary/src/template.yaml new file mode 100644 index 000000000..0df5feaa2 --- /dev/null +++ b/examples/Kafka/JsonClassLibrary/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + ProtobufClassLibraryDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: ProtoBufClassLibrary::ProtoBufClassLibrary.Function::FunctionHandler + Architectures: + - x86_64 + Tracing: Active + Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/CustomerProfile.proto b/examples/Kafka/Protobuf/src/CustomerProfile.proto new file mode 100644 index 000000000..9c69b1c41 --- /dev/null +++ b/examples/Kafka/Protobuf/src/CustomerProfile.proto @@ -0,0 +1,49 @@ +syntax = "proto3"; + +package com.example; + +enum PhoneType { + HOME = 0; + WORK = 1; + MOBILE = 2; +} + +enum AccountStatus { + ACTIVE = 0; + INACTIVE = 1; + SUSPENDED = 2; +} + +// EmailAddress message +message EmailAddress { + string address = 1; + bool verified = 2; + bool primary = 3; +} + +// Address message +message Address { + string street = 1; + string city = 2; + string state = 3; + string country = 4; + string zip_code = 5; +} + +// PhoneNumber message +message PhoneNumber { + string number = 1; + PhoneType type = 2; +} + +// CustomerProfile message +message CustomerProfile { + string user_id = 1; + string full_name = 2; + 
EmailAddress email = 3; + int32 age = 4; + Address address = 5; + repeated PhoneNumber phone_numbers = 6; + map preferences = 7; + AccountStatus account_status = 8; +} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Function.cs b/examples/Kafka/Protobuf/src/Function.cs new file mode 100644 index 000000000..446328696 --- /dev/null +++ b/examples/Kafka/Protobuf/src/Function.cs @@ -0,0 +1,22 @@ +using Amazon.Lambda.Core; +using Amazon.Lambda.RuntimeSupport; +using AWS.Lambda.Powertools.Kafka; +using AWS.Lambda.Powertools.Kafka.Protobuf; +using AWS.Lambda.Powertools.Logging; +using Com.Example; + +string Handler(ConsumerRecords records, ILambdaContext context) +{ + foreach (var record in records) + { + Logger.LogInformation("Record Value: {@record}", record.Value); + } + + return "Processed " + records.Count() + " records"; +} + +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaProtobufSerializer for Protobuf serialization + .Build() + .RunAsync(); + diff --git a/examples/Kafka/Protobuf/src/Protobuf.csproj b/examples/Kafka/Protobuf/src/Protobuf.csproj new file mode 100644 index 000000000..275fa84ec --- /dev/null +++ b/examples/Kafka/Protobuf/src/Protobuf.csproj @@ -0,0 +1,43 @@ + + + Exe + net8.0 + enable + enable + true + Lambda + + true + + true + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + PreserveNewest + + + + + Client + Public + True + True + obj\Debug/net8.0/ + MSBuild:Compile + PreserveNewest + + + + + + \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/Readme.md b/examples/Kafka/Protobuf/src/Readme.md new file mode 100644 index 000000000..886bbffa1 --- /dev/null +++ b/examples/Kafka/Protobuf/src/Readme.md @@ -0,0 +1,133 @@ +# AWS Powertools for AWS Lambda .NET - Kafka Protobuf Example + +This project demonstrates how to use AWS Lambda Powertools for .NET with Amazon MSK (Managed Streaming for Kafka) to process events from Kafka topics. + +## Overview + +This example showcases a Lambda functions that consume messages from Kafka topics with Protocol Buffers serialization format. + +It uses the `AWS.Lambda.Powertools.Kafka.Protobuf` NuGet package to easily deserialize and process Kafka records. + +## Project Structure + +```bash +examples/Kafka/Protobuf/src/ +├── Function.cs # Entry point for the Lambda function +├── aws-lambda-tools-defaults.json # Default argument settings for AWS Lambda deployment +├── template.yaml # AWS SAM template for deploying the function +├── CustomerProfile.proto # Protocol Buffers definition file for the data structure used in the Kafka messages +└── kafka-protobuf-event.json # Sample Protocol Buffers event to test the function +``` + +## Prerequisites + +- [Dotnet](https://dotnet.microsoft.com/en-us/download/dotnet) (dotnet8 or later) +- [AWS SAM CLI](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/install-sam-cli.html) +- [AWS CLI](https://aws.amazon.com/cli/) +- An AWS account with appropriate permissions +- [Amazon MSK](https://aws.amazon.com/msk/) cluster set up with a topic to consume messages from +- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) NuGet package installed in your project + +## Installation + +1. Clone the repository: + + ```bash + git clone https://github.com/aws-powertools/powertools-lambda-dotnet.git + ``` + +2. 
Navigate to the project directory: + + ```bash + cd powertools-lambda-dotnet/examples/Kafka/Protobuf/src + ``` + +3. Build the project: + + ```bash + dotnet build + ``` + +## Deployment + +Deploy the application using the AWS SAM CLI: + +```bash +sam build +sam deploy --guided +``` + +Follow the prompts to configure your deployment. + +## Protocol Buffers Format + +The Protobuf example handles messages serialized with Protocol Buffers. The schema is defined in a `.proto` file (which would need to be created), and the C# code is generated from that schema. + +This requires the `Grpc.Tools` package to deserialize the messages correctly. + +And update the `.csproj` file to include the `.proto` files. + +```xml + + Client + Public + True + True + obj\Debug/net8.0/ + MSBuild:Compile + PreserveNewest + +``` + +## Usage Examples + +Once deployed, you can test the Lambda function by sending a sample Protocol Buffers event to the configured Kafka topic. +You can use the `kafka-protobuf-event.json` file as a sample event to test the function. + +### Testing + +You can test the function locally using the AWS SAM CLI (Requires Docker to be installed): + +```bash +sam local invoke ProtobufDeserializationFunction --event kafka-protobuf-event.json +``` + +This command simulates an invocation of the Lambda function with the provided event data. + +## How It Works + +1. **Event Source**: Configure your Lambda functions with an MSK or self-managed Kafka cluster as an event source. +2. **Deserializing Records**: Powertools handles deserializing the records based on the specified format. +3. **Processing**: Each record is processed within the handler function. + +## Event Deserialization + +Pass the `PowertoolsKafkaProtobufSerializer` to the `LambdaBootstrapBuilder.Create()` method to enable Protobuf deserialization of Kafka records: + +```csharp +await LambdaBootstrapBuilder.Create((Func, ILambdaContext, string>?)Handler, + new PowertoolsKafkaProtobufSerializer()) // Use PowertoolsKafkaAvroSerializer for Avro serialization + .Build() + .RunAsync(); + ``` + +## Configuration + +The SAM template (`template.yaml`) defines three Lambda function: + +- **ProtobufDeserializationFunction**: Handles Protobuf-formatted Kafka messages + +## Customization + +To customize the examples: + +1. Modify the schema definitions to match your data structures +2. Update the handler logic to process the records according to your requirements +3. Ensure you have the proper `.proto` files and that they are included in your project for Protocol Buffers serialization/deserialization. 
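+
+As a minimal sketch of the `.csproj` wiring mentioned in step 3 and in the Protocol Buffers Format section above (attribute names assume the standard `Grpc.Tools` MSBuild integration; the values mirror those used in this example's project file):
+
+```xml
+<ItemGroup>
+  <!-- Compile CustomerProfile.proto at build time and copy it to the output directory -->
+  <Protobuf Include="CustomerProfile.proto" GrpcServices="Client" Access="Public"
+            ProtoCompile="True" CompileOutputs="True" OutputDir="obj/Debug/net8.0/"
+            CopyToOutputDirectory="PreserveNewest" />
+</ItemGroup>
+```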
+ +## Resources + +- [AWS Lambda Powertools for .NET Documentation](https://docs.powertools.aws.dev/lambda/dotnet/) +- [Amazon MSK Documentation](https://docs.aws.amazon.com/msk/) +- [AWS Lambda Developer Guide](https://docs.aws.amazon.com/lambda/) +- [Protocol Buffers Documentation](https://developers.google.com/protocol-buffers) \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json new file mode 100644 index 000000000..1a1c5de1d --- /dev/null +++ b/examples/Kafka/Protobuf/src/aws-lambda-tools-defaults.json @@ -0,0 +1,15 @@ +{ + "Information": [ + "This file provides default values for the deployment wizard inside Visual Studio and the AWS Lambda commands added to the .NET Core CLI.", + "To learn more about the Lambda commands with the .NET Core CLI execute the following command at the command line in the project root directory.", + "dotnet lambda help", + "All the command line options for the Lambda command can be specified in this file." + ], + "profile": "", + "region": "", + "configuration": "Release", + "function-runtime": "dotnet8", + "function-memory-size": 512, + "function-timeout": 30, + "function-handler": "Protobuf" +} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/kafka-protobuf-event.json b/examples/Kafka/Protobuf/src/kafka-protobuf-event.json new file mode 100644 index 000000000..6731ceb40 --- /dev/null +++ b/examples/Kafka/Protobuf/src/kafka-protobuf-event.json @@ -0,0 +1,23 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/CustomerCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "customer-topic-0": [ + { + "topic": "customer-topic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "dXNlcl85NzU0", + "value": "Cgl1c2VyXzk3NTQSDlVzZXIgdXNlcl85NzU0GhgKFHVzZXJfOTc1NEBpY2xvdWQuY29tGAEgNSooCgw5MzQwIE1haW4gU3QSCFNhbiBKb3NlGgJDQSIDVVNBKgUzOTU5NjIQCgwyNDQtNDA3LTg4NzEQAToUCghsYW5ndWFnZRIIZGlzYWJsZWQ6FQoNbm90aWZpY2F0aW9ucxIEZGFyazoTCgh0aW1lem9uZRIHZW5hYmxlZEAC", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} \ No newline at end of file diff --git a/examples/Kafka/Protobuf/src/template.yaml b/examples/Kafka/Protobuf/src/template.yaml new file mode 100644 index 000000000..b8f7df6a5 --- /dev/null +++ b/examples/Kafka/Protobuf/src/template.yaml @@ -0,0 +1,27 @@ +AWSTemplateFormatVersion: '2010-09-09' +Transform: AWS::Serverless-2016-10-31 +Description: > + kafka + + Sample SAM Template for kafka + +# More info about Globals: https://github.com/awslabs/serverless-application-model/blob/master/docs/globals.rst +Globals: + Function: + Timeout: 15 + MemorySize: 512 + Runtime: dotnet8 + +Resources: + ProtobufDeserializationFunction: + Type: AWS::Serverless::Function + Properties: + Handler: Protobuf + Architectures: + - x86_64 + Tracing: Active + Environment: # Powertools env vars: https://awslabs.github.io/aws-lambda-powertools-python/#environment-variables + Variables: + POWERTOOLS_SERVICE_NAME: PowertoolsHelloWorld + POWERTOOLS_LOG_LEVEL: Info + POWERTOOLS_LOGGER_CASE: PascalCase # Allowed values are: CamelCase, PascalCase and SnakeCase (Default) \ No newline at end of file diff --git a/examples/examples.sln 
b/examples/examples.sln index 10ec48509..6b9fa877a 100644 --- a/examples/examples.sln +++ b/examples/examples.sln @@ -109,6 +109,16 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT_Logging", "AOT\AOT_Logg EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AOT_Logging.Tests", "AOT\AOT_Logging\test\AOT_Logging.Tests\AOT_Logging.Tests.csproj", "{FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Kafka", "Kafka", "{71027B81-CA39-498C-9A50-ADDAFA2AC2F5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Json", "Kafka\Json\src\Json.csproj", "{58EC305E-353A-4996-A541-3CF7FC0EDD80}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Protobuf", "Kafka\Protobuf\src\Protobuf.csproj", "{853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Avro", "Kafka\Avro\src\Avro.csproj", "{B03F22B2-315C-429B-9CC0-C15BE94CBF77}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ProtoBufClassLibrary", "Kafka\JsonClassLibrary\src\ProtoBufClassLibrary.csproj", "{B6B3136D-B739-4917-AD3D-30F19FE12D3F}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -202,6 +212,22 @@ Global {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Debug|Any CPU.Build.0 = Debug|Any CPU {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.ActiveCfg = Release|Any CPU {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5}.Release|Any CPU.Build.0 = Release|Any CPU + {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Debug|Any CPU.Build.0 = Debug|Any CPU + {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Release|Any CPU.ActiveCfg = Release|Any CPU + {58EC305E-353A-4996-A541-3CF7FC0EDD80}.Release|Any CPU.Build.0 = Release|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF}.Release|Any CPU.Build.0 = Release|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B03F22B2-315C-429B-9CC0-C15BE94CBF77}.Release|Any CPU.Build.0 = Release|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B6B3136D-B739-4917-AD3D-30F19FE12D3F}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution {0CC66DBC-C1DF-4AF6-8EEB-FFED6C578BF4} = {526F1EF7-5A9C-4BFF-ABAE-75992ACD8F78} @@ -249,5 +275,9 @@ Global {343CF6B9-C006-43F8-924C-BF5BF5B6D051} = {FE1CAA26-87E9-4B71-800E-81D2997A7B53} {FC02CF45-DE15-4413-958A-D86808B99146} = {FEE72EAB-494F-403B-A75A-825E713C3D43} {FC010A0E-64A9-4440-97FE-DEDA8CEE0BE5} = {F3480212-EE7F-46FE-9ED5-24ACAB5B681D} + {58EC305E-353A-4996-A541-3CF7FC0EDD80} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} + {853F6FE9-1762-4BA3-BAF4-2FCD605B81CF} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} + {B03F22B2-315C-429B-9CC0-C15BE94CBF77} = {71027B81-CA39-498C-9A50-ADDAFA2AC2F5} + {B6B3136D-B739-4917-AD3D-30F19FE12D3F} = 
{71027B81-CA39-498C-9A50-ADDAFA2AC2F5} EndGlobalSection EndGlobal diff --git a/libraries/AWS.Lambda.Powertools.sln b/libraries/AWS.Lambda.Powertools.sln index c3056d147..325c683e0 100644 --- a/libraries/AWS.Lambda.Powertools.sln +++ b/libraries/AWS.Lambda.Powertools.sln @@ -113,6 +113,16 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Event EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore", "src\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore\AWS.Lambda.Powertools.EventHandler.Resolvers.BedrockAgentFunction.AspNetCore.csproj", "{8A22F22E-D10A-4897-A89A-DC76C267F6BB}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka", "src\AWS.Lambda.Powertools.Kafka\AWS.Lambda.Powertools.Kafka.csproj", "{5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Tests", "tests\AWS.Lambda.Powertools.Kafka.Tests\AWS.Lambda.Powertools.Kafka.Tests.csproj", "{FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Avro", "src\AWS.Lambda.Powertools.Kafka.Avro\AWS.Lambda.Powertools.Kafka.Avro.csproj", "{25F0929B-2E04-4ED6-A0ED-5379A0A755B0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Json", "src\AWS.Lambda.Powertools.Kafka.Json\AWS.Lambda.Powertools.Kafka.Json.csproj", "{9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "AWS.Lambda.Powertools.Kafka.Protobuf", "src\AWS.Lambda.Powertools.Kafka.Protobuf\AWS.Lambda.Powertools.Kafka.Protobuf.csproj", "{B640DB80-C982-407B-A2EC-CD29AC77DDB8}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -618,6 +628,66 @@ Global {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x64.Build.0 = Release|Any CPU {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x86.ActiveCfg = Release|Any CPU {8A22F22E-D10A-4897-A89A-DC76C267F6BB}.Release|x86.Build.0 = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x64.ActiveCfg = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x64.Build.0 = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x86.ActiveCfg = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Debug|x86.Build.0 = Debug|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|Any CPU.Build.0 = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x64.ActiveCfg = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x64.Build.0 = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x86.ActiveCfg = Release|Any CPU + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3}.Release|x86.Build.0 = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x64.ActiveCfg = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x64.Build.0 = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x86.ActiveCfg = Debug|Any CPU + 
{FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Debug|x86.Build.0 = Debug|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|Any CPU.Build.0 = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x64.ActiveCfg = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x64.Build.0 = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.ActiveCfg = Release|Any CPU + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645}.Release|x86.Build.0 = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x64.ActiveCfg = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x64.Build.0 = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x86.ActiveCfg = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Debug|x86.Build.0 = Debug|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|Any CPU.Build.0 = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x64.ActiveCfg = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x64.Build.0 = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x86.ActiveCfg = Release|Any CPU + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0}.Release|x86.Build.0 = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x64.ActiveCfg = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x64.Build.0 = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x86.ActiveCfg = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Debug|x86.Build.0 = Debug|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|Any CPU.Build.0 = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x64.ActiveCfg = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x64.Build.0 = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x86.ActiveCfg = Release|Any CPU + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E}.Release|x86.Build.0 = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x64.ActiveCfg = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x64.Build.0 = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x86.ActiveCfg = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Debug|x86.Build.0 = Debug|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|Any CPU.Build.0 = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x64.ActiveCfg = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x64.Build.0 = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x86.ActiveCfg = Release|Any CPU + {B640DB80-C982-407B-A2EC-CD29AC77DDB8}.Release|x86.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(NestedProjects) = preSolution @@ -671,5 +741,10 @@ Global {F4B8D5AF-D3CA-4910-A14D-E5BAEF0FD1DE} = 
{73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} {281F7EB5-ACE5-458F-BC88-46A8899DF3BA} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} {8A22F22E-D10A-4897-A89A-DC76C267F6BB} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {5B0DDE6F-ED16-452F-90D3-F0B6086D51B3} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {FDBDB9F8-B3E2-4ACA-9FC6-E12FF3D95645} = {1CFF5568-8486-475F-81F6-06105C437528} + {25F0929B-2E04-4ED6-A0ED-5379A0A755B0} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {9E2B8160-3E76-4B33-86AB-DE35A5FCDB1E} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} + {B640DB80-C982-407B-A2EC-CD29AC77DDB8} = {73C9B1E5-3893-47E8-B373-17E5F5D7E6F5} EndGlobalSection EndGlobal diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/AWS.Lambda.Powertools.Kafka.Avro.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/AWS.Lambda.Powertools.Kafka.Avro.csproj new file mode 100644 index 000000000..255e852a6 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/AWS.Lambda.Powertools.Kafka.Avro.csproj @@ -0,0 +1,21 @@ + + + + + + AWS.Lambda.Powertools.Kafka.Avro + Powertools for AWS Lambda (.NET) - Kafka Avro consumer package. + AWS.Lambda.Powertools.Kafka.Avro + AWS.Lambda.Powertools.Kafka.Avro + net8.0 + false + enable + enable + + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs new file mode 100644 index 000000000..4bf3ea7cb --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/PowertoolsKafkaAvroSerializer.cs @@ -0,0 +1,134 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Diagnostics.CodeAnalysis; +using System.Reflection; +using System.Text.Json; +using System.Text.Json.Serialization; +using Avro; +using Avro.IO; +using Avro.Specific; + +namespace AWS.Lambda.Powertools.Kafka.Avro; + +/// +/// A Lambda serializer for Kafka events that handles Avro-formatted data. +/// This serializer automatically deserializes the Avro binary format from base64-encoded strings +/// in Kafka records and converts them to strongly-typed objects. +/// +/// +/// +/// [assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))] +/// +/// // Your Lambda handler will receive properly deserialized objects +/// public class Function +/// { +/// public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context) +/// { +/// foreach (var record in records) +/// { +/// Customer customer = record.Value; +/// context.Logger.LogInformation($"Processed customer {customer.Name}, age {customer.Age}"); +/// } +/// } +/// } +/// +/// +public class PowertoolsKafkaAvroSerializer : PowertoolsKafkaSerializerBase +{ + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + public PowertoolsKafkaAvroSerializer() : base() + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. 
+ public PowertoolsKafkaAvroSerializer(JsonSerializerOptions jsonOptions) : base(jsonOptions) + { + } + + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization. + /// + /// JSON serializer context for AOT compatibility. + public PowertoolsKafkaAvroSerializer(JsonSerializerContext serializerContext) : base(serializerContext) + { + } + + /// + /// Gets the Avro schema for the specified type. + /// The type must have a public static _SCHEMA field defined. + /// + /// The type to get the Avro schema for. + /// The Avro Schema object. + /// Thrown if no schema is found for the type. + [RequiresDynamicCode("Avro schema access requires reflection which may be incompatible with AOT.")] + [RequiresUnreferencedCode("Avro schema access requires reflection which may be incompatible with trimming.")] + private Schema? GetAvroSchema([DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] Type payloadType) + { + var schemaField = payloadType.GetField("_SCHEMA", + BindingFlags.Public | BindingFlags.Static); + + if (schemaField == null) + return null; + + return schemaField.GetValue(null) as Schema; + } + + /// + /// Deserializes complex (non-primitive) types using Avro format. + /// + /// The binary data to deserialize. + /// The type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. + [RequiresDynamicCode("Avro deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Avro deserialization might require types that cannot be statically analyzed.")] + protected override object? DeserializeComplexTypeFormat(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) + { + try + { + // Try to get Avro schema for the type + var schema = GetAvroSchema(targetType); + + if (schema != null) + { + using var stream = new MemoryStream(data); + var decoder = new BinaryDecoder(stream); + var reader = new SpecificDatumReader(schema, schema); + return reader.Read(null!, decoder); + } + + // If no Avro schema was found, throw an exception + throw new InvalidOperationException($"Unsupported type for Avro deserialization: {targetType.Name}. " + + "Avro deserialization requires a type with a static _SCHEMA field. " + + "Consider using an alternative Deserializer."); + } + catch (Exception ex) + { + // Preserve the error message while wrapping in SerializationException for consistent error handling + throw new System.Runtime.Serialization.SerializationException($"Failed to deserialize {(isKey ? "key" : "value")} data: {ex.Message}", ex); + } + } +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md new file mode 100644 index 000000000..942f526cf --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Avro/Readme.md @@ -0,0 +1,134 @@ +# Powertools for AWS Lambda (.NET) - Kafka Avro + +A specialized Lambda serializer for handling Kafka events with Avro-formatted data in .NET Lambda functions. 
+ +## Features + +- **Automatic Avro Deserialization**: Seamlessly converts Avro binary data from Kafka records into strongly-typed .NET objects +- **Base64 Decoding**: Handles base64-encoded Avro data from Kafka events automatically +- **Type Safety**: Leverages compile-time type checking with Avro-generated classes +- **Flexible Configuration**: Supports custom JSON serialization options and AOT-compatible contexts +- **Error Handling**: Provides clear error messages for serialization failures + +## Installation + +```bash +dotnet add package AWS.Lambda.Powertools.Kafka.Avro +``` + +## Quick Start + +### 1. Configure the Serializer + +Add the serializer to your Lambda function assembly: + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))] +``` + +### 2. Define Your Avro Model + +Ensure your Avro-generated classes have the required `_SCHEMA` field: + +```csharp +public partial class Customer : ISpecificRecord +{ + public static Schema _SCHEMA = Schema.Parse(@"{ + ""type"": ""record"", + ""name"": ""Customer"", + ""fields"": [ + {""name"": ""id"", ""type"": ""string""}, + {""name"": ""name"", ""type"": ""string""}, + {""name"": ""age"", ""type"": ""int""} + ] + }"); + + public string Id { get; set; } + public string Name { get; set; } + public int Age { get; set; } +} +``` + +### 3. Create Your Lambda Handler + +```csharp +public class Function +{ + public void Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Customer customer = record.Value; // Automatically deserialized from Avro + context.Logger.LogInformation($"Processing customer: {customer.Name}, Age: {customer.Age}"); + } + } +} +``` + +## Advanced Configuration + +### Custom JSON Options + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))] + +// In your startup or configuration +var jsonOptions = new JsonSerializerOptions +{ + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = true +}; + +var serializer = new PowertoolsKafkaAvroSerializer(jsonOptions); +``` + +### AOT-Compatible Serialization + +```csharp +[JsonSerializable(typeof(ConsumerRecords))] +public partial class MyJsonContext : JsonSerializerContext { } + +[assembly: LambdaSerializer(typeof(PowertoolsKafkaAvroSerializer))] + +// Configure with AOT context +var serializer = new PowertoolsKafkaAvroSerializer(MyJsonContext.Default); +``` + +## Requirements + +- **.NET 6.0+**: This library targets .NET 6.0 and later versions +- **Avro.NET**: Requires the Apache Avro library for .NET +- **Avro Schema**: Your data classes must include a public static `_SCHEMA` field +- **AWS Lambda**: Designed specifically for AWS Lambda runtime environments + +## Error Handling + +The serializer provides detailed error messages for common issues: + +```csharp +// Missing _SCHEMA field +InvalidOperationException: "Unsupported type for Avro deserialization: MyClass. +Avro deserialization requires a type with a static _SCHEMA field." 
+ +// Deserialization failures +SerializationException: "Failed to deserialize value data: [specific error details]" +``` + +## Compatibility Notes + +- **Reflection Requirements**: Uses reflection to access Avro schemas, which may impact AOT compilation +- **Trimming**: May require additional configuration for self-contained deployments with trimming enabled +- **Performance**: Optimized for typical Lambda cold start and execution patterns + +## Related Packages + +- [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages/AWS.Lambda.Powertools.Logging/) - Structured logging +- [AWS.Lambda.Powertools.Tracing](https://www.nuget.org/packages/AWS.Lambda.Powertools.Tracing/) - Distributed tracing + +## Documentation + +For more detailed documentation and examples, visit the [official documentation](https://docs.powertools.aws.dev/lambda/dotnet/). + +## License + +This library is licensed under the Apache License 2.0. \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/AWS.Lambda.Powertools.Kafka.Json.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/AWS.Lambda.Powertools.Kafka.Json.csproj new file mode 100644 index 000000000..db093159d --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/AWS.Lambda.Powertools.Kafka.Json.csproj @@ -0,0 +1,19 @@ + + + + + AWS.Lambda.Powertools.Kafka.Json + Powertools for AWS Lambda (.NET) - Kafka Json consumer package. + AWS.Lambda.Powertools.Kafka.Json + AWS.Lambda.Powertools.Kafka.Json + net8.0 + false + enable + enable + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs new file mode 100644 index 000000000..f70ac6a9d --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/PowertoolsKafkaJsonSerializer.cs @@ -0,0 +1,113 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Diagnostics.CodeAnalysis; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace AWS.Lambda.Powertools.Kafka.Json; + +/// +/// A Lambda serializer for Kafka events that handles JSON-formatted data. +/// This serializer automatically deserializes the JSON format from base64-encoded strings +/// in Kafka records and converts them to strongly-typed objects. +/// +public class PowertoolsKafkaJsonSerializer : PowertoolsKafkaSerializerBase +{ + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + public PowertoolsKafkaJsonSerializer() : base() + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. + public PowertoolsKafkaJsonSerializer(JsonSerializerOptions jsonOptions) : base(jsonOptions) + { + } + + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization. 
+ /// + /// JSON serializer context for AOT compatibility. + public PowertoolsKafkaJsonSerializer(JsonSerializerContext serializerContext) : base(serializerContext) + { + } + + /// + /// Deserializes complex (non-primitive) types using JSON format. + /// + /// The binary data to deserialize. + /// The type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. + [RequiresDynamicCode("JSON deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("JSON deserialization might require types that cannot be statically analyzed.")] + protected override object? DeserializeComplexTypeFormat(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) + { + if (data == null || data.Length == 0) + { + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; + } + + try + { + // Convert bytes to JSON string + var jsonStr = Encoding.UTF8.GetString(data); + + // First try context-based deserialization if available + if (SerializerContext != null) + { + // Try to get type info from context for AOT compatibility + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) + { + try + { + var result = JsonSerializer.Deserialize(jsonStr, typeInfo); + if (result != null) + { + return result; + } + } + catch + { + // Continue to fallback if context-based deserialization fails + } + } + } + + // Fallback to regular deserialization - this should handle types not in the context + #pragma warning disable IL2026, IL3050 + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); + #pragma warning restore IL2026, IL3050 + } + catch + { + // If all deserialization attempts fail, return null or default + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; + } + } +} diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md new file mode 100644 index 000000000..b8b6df378 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Json/Readme.md @@ -0,0 +1,274 @@ +# Powertools for AWS Lambda (.NET) - Kafka JSON + +A specialized Lambda serializer for handling Kafka events with JSON-formatted data in .NET Lambda functions. + +## Features + +- **Automatic JSON Deserialization**: Seamlessly converts JSON data from Kafka records into strongly-typed .NET objects +- **Base64 Decoding**: Handles base64-encoded JSON data from Kafka events automatically +- **Type Safety**: Leverages compile-time type checking with .NET classes +- **Flexible Configuration**: Supports custom JSON serialization options and AOT-compatible contexts +- **High Performance**: Optimized JSON processing using System.Text.Json +- **Error Handling**: Provides clear error messages for serialization failures + +## Installation + +```bash +dotnet add package AWS.Lambda.Powertools.Kafka.Json +``` + +## Quick Start + +### 1. Configure the Serializer + +Add the serializer to your Lambda function assembly: + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] +``` + +### 2. 
Define Your Data Model + +Create your .NET classes with JSON serialization attributes: + +```csharp +public class Customer +{ + [JsonPropertyName("id")] + public string Id { get; set; } = ""; + + [JsonPropertyName("name")] + public string Name { get; set; } = ""; + + [JsonPropertyName("age")] + public int Age { get; set; } + + [JsonPropertyName("email")] + public string Email { get; set; } = ""; +} +``` + +### 3. Create Your Lambda Handler + +```csharp +public class Function +{ + public void Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Customer customer = record.Value; // Automatically deserialized from JSON + context.Logger.LogInformation($"Processing customer: {customer.Name}, Age: {customer.Age}"); + } + } +} +``` + +## Advanced Configuration + +### Custom JSON Options + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] + +// In your startup or configuration +var jsonOptions = new JsonSerializerOptions +{ + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + PropertyNameCaseInsensitive = true, + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull +}; + +var serializer = new PowertoolsKafkaJsonSerializer(jsonOptions); +``` + +### AOT-Compatible Serialization + +```csharp +[JsonSerializable(typeof(ConsumerRecords))] +[JsonSerializable(typeof(Customer))] +public partial class MyJsonContext : JsonSerializerContext { } + +[assembly: LambdaSerializer(typeof(PowertoolsKafkaJsonSerializer))] + +// Configure with AOT context +var serializer = new PowertoolsKafkaJsonSerializer(MyJsonContext.Default); +``` + +### Complex Object Handling + +```csharp +public class Order +{ + [JsonPropertyName("id")] + public string Id { get; set; } = ""; + + [JsonPropertyName("customer")] + public Customer Customer { get; set; } = new(); + + [JsonPropertyName("items")] + public List Items { get; set; } = new(); + + [JsonPropertyName("total")] + public decimal Total { get; set; } + + [JsonPropertyName("created_at")] + public DateTime CreatedAt { get; set; } +} + +public class Function +{ + public void Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Order order = record.Value; + context.Logger.LogInformation($"Order {order.Id} from {order.Customer.Name}"); + context.Logger.LogInformation($"Total: ${order.Total:F2}, Items: {order.Items.Count}"); + } + } +} +``` + +## Requirements + +- **.NET 6.0+**: This library targets .NET 6.0 and later versions +- **System.Text.Json**: Uses the high-performance JSON library from .NET +- **JSON Serializable Types**: Your data classes should be compatible with System.Text.Json +- **AWS Lambda**: Designed specifically for AWS Lambda runtime environments + +## JSON Serialization Best Practices + +### Property Naming + +```csharp +// Use JsonPropertyName for explicit mapping +public class Product +{ + [JsonPropertyName("product_id")] + public string ProductId { get; set; } = ""; + + [JsonPropertyName("display_name")] + public string DisplayName { get; set; } = ""; +} + +// Or configure global naming policy +var options = new JsonSerializerOptions +{ + PropertyNamingPolicy = JsonNamingPolicy.SnakeCaseLower +}; +``` + +### Handling Nullable Types + +```csharp +public class Customer +{ + [JsonPropertyName("id")] + public string Id { get; set; } = ""; + + [JsonPropertyName("email")] + public string? Email { get; set; } // Nullable reference type + + [JsonPropertyName("age")] + public int? 
Age { get; set; } // Nullable value type +} +``` + +### Custom Converters + +```csharp +public class DateTimeConverter : JsonConverter +{ + public override DateTime Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + return DateTime.Parse(reader.GetString()!); + } + + public override void Write(Utf8JsonWriter writer, DateTime value, JsonSerializerOptions options) + { + writer.WriteStringValue(value.ToString("yyyy-MM-ddTHH:mm:ssZ")); + } +} + +// Register the converter +var options = new JsonSerializerOptions(); +options.Converters.Add(new DateTimeConverter()); +``` + +## Error Handling + +The serializer provides detailed error messages for common issues: + +```csharp +// JSON parsing errors +JsonException: "The JSON value could not be converted to [Type]. Path: [path] | LineNumber: [line] | BytePositionInLine: [position]." + +// Type conversion errors +SerializationException: "Failed to deserialize value data: [specific error details]" +``` + +## Performance Optimization + +### Source Generation (AOT) + +```csharp +[JsonSerializable(typeof(Customer))] +[JsonSerializable(typeof(Order))] +[JsonSerializable(typeof(ConsumerRecords))] +[JsonSerializable(typeof(ConsumerRecords))] +[JsonSourceGenerationOptions( + PropertyNamingPolicy = JsonKnownNamingPolicy.CamelCase, + WriteIndented = false, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull)] +public partial class AppJsonContext : JsonSerializerContext { } +``` + +### Memory Optimization + +```csharp +// Configure for minimal memory allocation +var options = new JsonSerializerOptions +{ + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultBufferSize = 4096, // Adjust based on typical message size + MaxDepth = 32 // Prevent deep recursion +}; +``` + +## Compatibility Notes + +- **AOT Support**: Full support for Native AOT when using source generation +- **Trimming**: Compatible with IL trimming when properly configured +- **Performance**: Optimized for high-throughput Lambda scenarios +- **Memory Usage**: Efficient memory allocation patterns for serverless environments + +## Migration from Newtonsoft.Json + +If migrating from Newtonsoft.Json, consider these differences: + +```csharp +// Newtonsoft.Json attribute +[JsonProperty("customer_name")] +public string CustomerName { get; set; } + +// System.Text.Json equivalent +[JsonPropertyName("customer_name")] +public string CustomerName { get; set; } +``` + +## Related Packages + +- [AWS.Lambda.Powertools.Kafka.Avro](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Avro/) - Avro serialization +- [AWS.Lambda.Powertools.Kafka.Protobuf](https://www.nuget.org/packages/AWS.Lambda.Powertools.Kafka.Protobuf/) - Protobuf serialization +- [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages/AWS.Lambda.Powertools.Logging/) - Structured logging +- [AWS.Lambda.Powertools.Tracing](https://www.nuget.org/packages/AWS.Lambda.Powertools.Tracing/) - Distributed tracing + +## Documentation + +For more detailed documentation and examples, visit the [official documentation](https://docs.powertools.aws.dev/lambda/dotnet/). + +## License + +This library is licensed under the Apache License 2.0. 
\ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/AWS.Lambda.Powertools.Kafka.Protobuf.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/AWS.Lambda.Powertools.Kafka.Protobuf.csproj new file mode 100644 index 000000000..ab1c3844f --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/AWS.Lambda.Powertools.Kafka.Protobuf.csproj @@ -0,0 +1,24 @@ + + + + + + AWS.Lambda.Powertools.Kafka.Protobuf + Powertools for AWS Lambda (.NET) - Kafka Protobuf consumer package. + AWS.Lambda.Powertools.Kafka.Protobuf + AWS.Lambda.Powertools.Kafka.Protobuf + net8.0 + false + enable + enable + + + + + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs new file mode 100644 index 000000000..c7d9fb7ef --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/PowertoolsKafkaProtobufSerializer.cs @@ -0,0 +1,229 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Collections.Concurrent; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; +using System.Text.Json; +using System.Text.Json.Serialization; +using Google.Protobuf; + + +namespace AWS.Lambda.Powertools.Kafka.Protobuf; + +/// +/// A Lambda serializer for Kafka events that handles Protobuf-formatted data. +/// This serializer automatically deserializes the Protobuf binary format from base64-encoded strings +/// in Kafka records and converts them to strongly-typed objects. +/// +/// +/// +/// [assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))] +/// +/// // Your Lambda handler will receive properly deserialized objects +/// public class Function +/// { +/// public void Handler(ConsumerRecords<string, Customer> records, ILambdaContext context) +/// { +/// foreach (var record in records) +/// { +/// Customer customer = record.Value; +/// context.Logger.LogInformation($"Processed customer {customer.Name}"); +/// } +/// } +/// } +/// +/// +public class PowertoolsKafkaProtobufSerializer : PowertoolsKafkaSerializerBase +{ + // Cache for Protobuf parsers to improve performance + private static readonly ConcurrentDictionary _parserCache = new(); + + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + public PowertoolsKafkaProtobufSerializer() : base() + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. + public PowertoolsKafkaProtobufSerializer(JsonSerializerOptions jsonOptions) : base(jsonOptions) + { + } + + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization. + /// + /// JSON serializer context for AOT compatibility. 
+ public PowertoolsKafkaProtobufSerializer(JsonSerializerContext serializerContext) : base(serializerContext) + { + } + + /// + /// Deserializes complex (non-primitive) types using Protobuf format. + /// Handles both standard protobuf serialization and Confluent Schema Registry serialization. + /// + /// The binary data to deserialize. + /// The type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. + [RequiresDynamicCode("Protobuf deserialization might require runtime code generation.")] + [RequiresUnreferencedCode( + "Protobuf deserialization might require types that cannot be statically analyzed.")] + protected override object? DeserializeComplexTypeFormat(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) + { + try + { + // Check if it's a Protobuf message type + if (typeof(IMessage).IsAssignableFrom(targetType)) + { + // This is a Protobuf message type - try to get the parser + var parser = GetProtobufParser(targetType); + if (parser == null) + { + throw new InvalidOperationException($"Could not find Protobuf parser for type {targetType.Name}"); + } + + try + { + // First, try standard protobuf deserialization + return parser.ParseFrom(data); + } + catch + { + try + { + // If standard deserialization fails, try message index handling + return DeserializeWithMessageIndex(data, parser); + } + catch (Exception ex) + { + // If both methods fail, throw with helpful message + throw new InvalidOperationException( + $"Failed to deserialize {targetType.Name} using Protobuf. " + + "The data may not be in a valid Protobuf format.", ex); + } + } + } + else + { + // For non-Protobuf complex types, throw the specific expected exception + throw new InvalidOperationException($"Unsupported type for Protobuf deserialization: {targetType.Name}. " + + "Protobuf deserialization requires a type of com.google.protobuf.Message. " + + "Consider using an alternative Deserializer."); + } + } + catch (Exception ex) + { + // Preserve the error message while wrapping in SerializationException for consistent error handling + throw new System.Runtime.Serialization.SerializationException($"Failed to deserialize {(isKey ? "key" : "value")} data: {ex.Message}", ex); + } + } + + /// + /// Gets a Protobuf parser for the specified type, using a cache for better performance. + /// + /// The Protobuf message type. + /// A MessageParser for the specified type, or null if not found. + private MessageParser? GetProtobufParser(Type messageType) + { + return _parserCache.GetOrAdd(messageType, type => + { + try + { + var parserProperty = type.GetProperty("Parser", + BindingFlags.Public | BindingFlags.Static); + + if (parserProperty == null) + { + return null!; + } + + var parser = parserProperty.GetValue(null) as MessageParser; + if (parser == null) + { + return null!; + } + + return parser; + } + catch + { + return null!; + } + }); + } + + /// + /// Deserializes Protobuf data that may include a Confluent Schema Registry message index. + /// Handles both the simple case (single 0) and complex case (length-prefixed array of indexes). + /// + /// The binary data to deserialize. + /// The Protobuf message parser. + /// The deserialized Protobuf message or throws an exception if parsing fails. 
+ private IMessage DeserializeWithMessageIndex(byte[] data, MessageParser parser) + { + using var inputStream = new MemoryStream(data); + using var codedInput = new CodedInputStream(inputStream); + + try + { + // Read the first varint - this could be either a simple 0 or the length of message index array + var firstValue = codedInput.ReadUInt32(); + + if (firstValue == 0) + { + // Simple case: Single 0 byte means first message type + return parser.ParseFrom(codedInput); + } + else + { + // Complex case: firstValue is the length of the message index array + // Skip each message index value + for (int i = 0; i < firstValue; i++) + { + codedInput.ReadUInt32(); + } + + // Now the remaining data should be the actual protobuf message + return parser.ParseFrom(codedInput); + } + } + catch (Exception ex) + { + // If reading message index fails, try another approach with the remaining data + try + { + // Reset stream position and try again with the whole data + inputStream.Position = 0; + return parser.ParseFrom(inputStream); + } + catch + { + // If that also fails, throw the original exception + throw new InvalidOperationException("Failed to parse protobuf data with or without message index", ex); + } + } + } +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md new file mode 100644 index 000000000..2d10be09c --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka.Protobuf/Readme.md @@ -0,0 +1,213 @@ +# Powertools for AWS Lambda (.NET) - Kafka Protobuf + +A specialized Lambda serializer for handling Kafka events with Protocol Buffers (Protobuf) formatted data in .NET Lambda functions. + +## Features + +- **Automatic Protobuf Deserialization**: Seamlessly converts Protobuf binary data from Kafka records into strongly-typed .NET objects +- **Base64 Decoding**: Handles base64-encoded Protobuf data from Kafka events automatically +- **Type Safety**: Leverages compile-time type checking with Protobuf-generated classes +- **Flexible Configuration**: Supports custom JSON serialization options and AOT-compatible contexts +- **Performance Optimized**: Efficient binary serialization format for high-throughput scenarios +- **Error Handling**: Provides clear error messages for serialization failures + +## Installation + +```bash +dotnet add package AWS.Lambda.Powertools.Kafka.Protobuf +``` + +## Quick Start + +### 1. Configure the Serializer + +Add the serializer to your Lambda function assembly: + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))] +``` + +### 2. Define Your Protobuf Model + +Create your `.proto` file and generate C# classes: + +```protobuf +syntax = "proto3"; + +message Customer { + string id = 1; + string name = 2; + int32 age = 3; + string email = 4; +} +``` + +Generated C# class will implement `IMessage`: + +```csharp +public partial class Customer : IMessage +{ + public string Id { get; set; } = ""; + public string Name { get; set; } = ""; + public int Age { get; set; } + public string Email { get; set; } = ""; + + // Generated Protobuf methods... +} +``` + +### 3. 
Create Your Lambda Handler + +```csharp +public class Function +{ + public void Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Customer customer = record.Value; // Automatically deserialized from Protobuf + context.Logger.LogInformation($"Processing customer: {customer.Name}, Age: {customer.Age}"); + } + } +} +``` + +## Advanced Configuration + +### Custom JSON Options + +```csharp +[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))] + +// In your startup or configuration +var jsonOptions = new JsonSerializerOptions +{ + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = true +}; + +var serializer = new PowertoolsKafkaProtobufSerializer(jsonOptions); +``` + +### AOT-Compatible Serialization + +```csharp +[JsonSerializable(typeof(ConsumerRecords))] +public partial class MyJsonContext : JsonSerializerContext { } + +[assembly: LambdaSerializer(typeof(PowertoolsKafkaProtobufSerializer))] + +// Configure with AOT context +var serializer = new PowertoolsKafkaProtobufSerializer(MyJsonContext.Default); +``` + +### Complex Message Types + +```csharp +// Nested message example +public class Function +{ + public void Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + Order order = record.Value; + context.Logger.LogInformation($"Order {order.Id} from {order.Customer.Name}"); + + foreach (var item in order.Items) + { + context.Logger.LogInformation($" Item: {item.Name}, Qty: {item.Quantity}"); + } + } + } +} +``` + +## Requirements + +- **.NET 6.0+**: This library targets .NET 6.0 and later versions +- **Google.Protobuf**: Requires the Google Protocol Buffers library for .NET +- **Protobuf Compiler**: Use `protoc` to generate C# classes from `.proto` files +- **IMessage Implementation**: Your data classes must implement `IMessage` +- **AWS Lambda**: Designed specifically for AWS Lambda runtime environments + +## Protobuf Code Generation + +### Using protoc directly + +```bash +protoc --csharp_out=. customer.proto +``` + +### Using MSBuild integration + +Add to your `.csproj`: + +```xml + + + +``` + +## Error Handling + +The serializer provides detailed error messages for common issues: + +```csharp +// Missing IMessage implementation +InvalidOperationException: "Unsupported type for Protobuf deserialization: MyClass. +Protobuf deserialization requires a type that implements IMessage." 
+ +// Deserialization failures +SerializationException: "Failed to deserialize value data: [specific error details]" +``` + +## Performance Benefits + +Protocol Buffers offer several advantages for high-throughput Lambda functions: + +- **Compact Binary Format**: Smaller message sizes compared to JSON +- **Fast Serialization**: Optimized binary encoding/decoding +- **Schema Evolution**: Forward and backward compatibility +- **Strong Typing**: Compile-time validation of message structure + +## Schema Evolution + +Protobuf supports schema evolution while maintaining compatibility: + +```protobuf +// Version 1 +message Customer { + string id = 1; + string name = 2; +} + +// Version 2 - Added optional field +message Customer { + string id = 1; + string name = 2; + int32 age = 3; // New optional field + string email = 4; // Another new field +} +``` + +## Compatibility Notes + +- **Reflection Requirements**: Uses reflection to instantiate Protobuf types, which may impact AOT compilation +- **Trimming**: May require additional configuration for self-contained deployments with trimming enabled +- **Performance**: Optimized for high-throughput scenarios and Lambda execution patterns +- **Schema Registry**: Compatible with Confluent Schema Registry for centralized schema management + +## Related Packages + +- [AWS.Lambda.Powertools.Logging](https://www.nuget.org/packages/AWS.Lambda.Powertools.Logging/) - Structured logging +- [AWS.Lambda.Powertools.Tracing](https://www.nuget.org/packages/AWS.Lambda.Powertools.Tracing/) - Distributed tracing +- [Google.Protobuf](https://www.nuget.org/packages/Google.Protobuf/) - Protocol Buffers runtime library + +## Documentation + +For more detailed documentation and examples, visit the [official documentation](https://docs.powertools.aws.dev/lambda/dotnet/). + +## License + +This library is licensed under the Apache License 2.0. \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj new file mode 100644 index 000000000..8461809d4 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/AWS.Lambda.Powertools.Kafka.csproj @@ -0,0 +1,19 @@ + + + + + AWS.Lambda.Powertools.Kafka + Powertools for AWS Lambda (.NET) - Kafka consumer package. + AWS.Lambda.Powertools.Kafka + AWS.Lambda.Powertools.Kafka + net8.0 + false + enable + enable + + + + + + + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs new file mode 100644 index 000000000..61fe9b743 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecord.cs @@ -0,0 +1,85 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// Represents a single record consumed from a Kafka topic. +/// +/// The type of the record's value. 
+/// The type of the key value +/// +/// +/// var record = new ConsumerRecord<Customer> +/// { +/// Topic = "customers", +/// Partition = 0, +/// Offset = 42, +/// Value = new Customer { Id = 123, Name = "John Doe" } +/// }; +/// +/// +public class ConsumerRecord +{ + /// + /// Gets or sets the Kafka topic name from which the record was consumed. + /// + public string Topic { get; internal set; } = null!; + + /// + /// Gets the Kafka partition from which the record was consumed. + /// + public int Partition { get; internal set; } + + /// + /// Gets the offset of the record within its Kafka partition. + /// + public long Offset { get; internal set; } + + /// + /// Gets the timestamp of the record (typically in Unix time). + /// + public long Timestamp { get; internal set; } + + /// + /// Gets the type of timestamp (e.g., "CREATE_TIME" or "LOG_APPEND_TIME"). + /// + public string TimestampType { get; internal set; } = null!; + + /// + /// Gets the key of the record (often used for partitioning). + /// + public TK Key { get; internal set; } = default!; + + /// + /// Gets the deserialized value of the record. + /// + public T Value { get; internal set; } = default!; + + /// + /// Gets the headers associated with the record. + /// + public Dictionary Headers { get; internal set; } = null!; + + /// + /// Gets the schema metadata for the record's value. + /// + public SchemaMetadata ValueSchemaMetadata { get; internal set; } = null!; + + /// + /// Gets the schema metadata for the record's key. + /// + public SchemaMetadata KeySchemaMetadata { get; internal set; } = null!; +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs new file mode 100644 index 000000000..972ae7cd7 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/ConsumerRecords.cs @@ -0,0 +1,65 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Collections; + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// Represents a collection of Kafka consumer records that can be enumerated. +/// Contains event metadata and records organized by topics. +/// +/// The type of the record values from the event. +/// The type of Key values from the event. +public class ConsumerRecords : IEnumerable> +{ + /// + /// Gets the event source (typically "aws:kafka"). + /// + public string EventSource { get; internal set; } = null!; + + /// + /// Gets the ARN of the event source (MSK cluster or Self-managed Kafka). + /// + public string EventSourceArn { get; internal set; } = null!; + + /// + /// Gets the Kafka bootstrap servers connection string. + /// + public string BootstrapServers { get; internal set; } = null!; + + internal Dictionary>> Records { get; set; } = new(); + + /// + /// Returns an enumerator that iterates through all consumer records across all topics. + /// + /// An enumerator of ConsumerRecord<T> objects. 
+ public IEnumerator> GetEnumerator() + { + foreach (var topicRecords in Records) + { + foreach (var record in topicRecords.Value) + { + yield return record; + } + } + } + + // Implement non-generic IEnumerable (required) + IEnumerator IEnumerable.GetEnumerator() + { + return GetEnumerator(); + } +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs new file mode 100644 index 000000000..892cf9516 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/HeaderExtensions.cs @@ -0,0 +1,56 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Text; + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// Extension methods for Kafka headers in ConsumerRecord. +/// +public static class HeaderExtensions +{ + /// + /// Gets the decoded value of a Kafka header from the ConsumerRecord's Headers dictionary. + /// + /// The header key-value pair from ConsumerRecord.Headers + /// The decoded string value. + public static Dictionary DecodedValues(this Dictionary headers) + { + if (headers == null) + { + return new Dictionary(); + } + + return headers.ToDictionary( + pair => pair.Key, + pair => pair.Value.DecodedValue() + ); + } + + /// + /// Decodes a byte array from a Kafka header into a UTF-8 string. + /// Returns an empty string if the byte array is null or empty. + /// + public static string DecodedValue(this byte[]? headerBytes) + { + if (headerBytes == null || headerBytes.Length == 0) + { + return string.Empty; + } + + return Encoding.UTF8.GetString(headerBytes); + } +} \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/InternalsVisibleTo.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/InternalsVisibleTo.cs new file mode 100644 index 000000000..35c17ea16 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/InternalsVisibleTo.cs @@ -0,0 +1,18 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("AWS.Lambda.Powertools.Kafka.Tests")] \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs new file mode 100644 index 000000000..c00b34f16 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/PowertoolsKafkaSerializerBase.cs @@ -0,0 +1,707 @@ +/* + * Copyright JsonCons.Net authors. 
All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using Amazon.Lambda.Core; +using System.Diagnostics.CodeAnalysis; +using System.Reflection; +using System.Runtime.Serialization; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; + +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// Base class for Kafka event serializers that provides common functionality +/// for deserializing Kafka event structures in Lambda functions. +/// +/// +/// Inherit from this class to implement specific formats like Avro, Protobuf or JSON. +/// +public abstract class PowertoolsKafkaSerializerBase : ILambdaSerializer +{ + /// + /// JSON serializer options used for deserialization. + /// + protected readonly JsonSerializerOptions JsonOptions; + + /// + /// JSON serializer context used for AOT-compatible serialization/deserialization. + /// + protected readonly JsonSerializerContext? SerializerContext; + + /// + /// Initializes a new instance of the class + /// with default JSON serialization options. + /// + protected PowertoolsKafkaSerializerBase() : this(new JsonSerializerOptions + { + PropertyNameCaseInsensitive = true + }, null) + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options. + /// + /// Custom JSON serializer options to use during deserialization. + protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions) : this(jsonOptions, null) + { + } + + /// + /// Initializes a new instance of the class + /// with a JSON serializer context for AOT-compatible serialization/deserialization. + /// + /// The JSON serializer context for AOT compatibility. + protected PowertoolsKafkaSerializerBase(JsonSerializerContext serializerContext) : this(serializerContext.Options, + serializerContext) + { + } + + /// + /// Initializes a new instance of the class + /// with custom JSON serialization options and an optional serializer context. + /// + /// Custom JSON serializer options to use during deserialization. + /// Optional JSON serializer context for AOT compatibility. + protected PowertoolsKafkaSerializerBase(JsonSerializerOptions jsonOptions, JsonSerializerContext? serializerContext) + { + JsonOptions = jsonOptions ?? new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; + SerializerContext = serializerContext; + } + + /// + /// Deserializes the Lambda input stream into the specified type. + /// Handles Kafka events with various serialization formats. + /// + /// The type to deserialize to. For Kafka events, typically ConsumerRecords<TKey,TValue>. + /// The stream containing the serialized Lambda event. + /// The deserialized object of type T. + public T Deserialize(Stream requestStream) + { + if (SerializerContext != null && typeof(T) != typeof(ConsumerRecords<,>)) + { + // Fast path for regular JSON types when serializer context is provided + var typeInfo = GetJsonTypeInfo(); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(requestStream, typeInfo) ?? 
throw new InvalidOperationException(); + } + } + + using var reader = new StreamReader(requestStream); + var json = reader.ReadToEnd(); + + var targetType = typeof(T); + + if (targetType.IsGenericType && targetType.GetGenericTypeDefinition() == typeof(ConsumerRecords<,>)) + { + return DeserializeConsumerRecords(json); + } + + if (SerializerContext != null) + { + // Try to find type info in context + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) + { + return (T)JsonSerializer.Deserialize(json, typeInfo)!; + } + } + + // Fallback to regular deserialization with warning +#pragma warning disable IL2026, IL3050 + var result = JsonSerializer.Deserialize(json, JsonOptions); +#pragma warning restore IL2026, IL3050 + + if (!EqualityComparer.Default.Equals(result, default(T))) + { + return result!; + } + + throw new InvalidOperationException($"Failed to deserialize to type {typeof(T).Name}"); + } + + /// + /// Deserializes a Kafka ConsumerRecords event from JSON string. + /// + /// The ConsumerRecords type with key and value generics. + /// The JSON string to deserialize. + /// The deserialized ConsumerRecords object. + [RequiresUnreferencedCode("ConsumerRecords deserialization uses reflection and may be incompatible with trimming.")] + [RequiresDynamicCode( + "ConsumerRecords deserialization dynamically creates generic types and may be incompatible with NativeAOT.")] + private T DeserializeConsumerRecords(string json) + { + var targetType = typeof(T); + var typeArgs = targetType.GetGenericArguments(); + var keyType = typeArgs[0]; + var valueType = typeArgs[1]; + + using var document = JsonDocument.Parse(json); + var root = document.RootElement; + + // Create the typed instance and set basic properties + var typedEvent = CreateConsumerRecordsInstance(targetType); + SetBasicProperties(root, typedEvent, targetType); + + // Create and populate records dictionary + if (root.TryGetProperty("records", out var recordsElement)) + { + var records = CreateRecordsDictionary(recordsElement, keyType, valueType); + targetType.GetProperty("Records", BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance) + ?.SetValue(typedEvent, records); + } + + return (T)typedEvent; + } + + private object CreateConsumerRecordsInstance(Type targetType) + { + return Activator.CreateInstance(targetType) ?? + throw new InvalidOperationException($"Failed to create instance of {targetType.Name}"); + } + + private void SetBasicProperties(JsonElement root, object instance, Type targetType) + { + if (root.TryGetProperty("eventSource", out var eventSource)) + targetType.GetProperty("EventSource", BindingFlags.Public | BindingFlags.Instance) + ?.SetValue(instance, eventSource.GetString()); + + if (root.TryGetProperty("eventSourceArn", out var eventSourceArn)) + targetType.GetProperty("EventSourceArn")?.SetValue(instance, eventSourceArn.GetString()); + + if (root.TryGetProperty("bootstrapServers", out var bootstrapServers)) + targetType.GetProperty("BootstrapServers")?.SetValue(instance, bootstrapServers.GetString()); + } + + private object CreateRecordsDictionary(JsonElement recordsElement, Type keyType, Type valueType) + { + // Create dictionary with correct generic types + var dictType = typeof(Dictionary<,>).MakeGenericType( + typeof(string), + typeof(List<>).MakeGenericType(typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)) + ); + var records = Activator.CreateInstance(dictType) ?? 
+ throw new InvalidOperationException($"Failed to create dictionary of type {dictType.Name}"); + var dictAddMethod = dictType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on dictionary type"); + + // Process each topic partition + foreach (var topicPartition in recordsElement.EnumerateObject()) + { + var topicName = topicPartition.Name; + var recordsList = ProcessTopicPartition(topicPartition.Value, keyType, valueType); + dictAddMethod.Invoke(records, new[] { topicName, recordsList }); + } + + return records; + } + + private object ProcessTopicPartition(JsonElement partitionData, Type keyType, Type valueType) + { + // Create list type with correct generics + var listType = typeof(List<>).MakeGenericType( + typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType)); + var recordsList = Activator.CreateInstance(listType) ?? + throw new InvalidOperationException($"Failed to create list of type {listType.Name}"); + var listAddMethod = listType.GetMethod("Add") ?? + throw new InvalidOperationException("Add method not found on list type"); + + // Process each record + foreach (var recordElement in partitionData.EnumerateArray()) + { + var record = CreateAndPopulateRecord(recordElement, keyType, valueType); + if (record != null) + { + listAddMethod.Invoke(recordsList, new[] { record }); + } + } + + return recordsList; + } + + private object? CreateAndPopulateRecord(JsonElement recordElement, Type keyType, Type valueType) + { + // Create record instance + var recordType = typeof(ConsumerRecord<,>).MakeGenericType(keyType, valueType); + var record = Activator.CreateInstance(recordType); + if (record == null) + return null; + + // Set basic properties + SetProperty(recordType, record, "Topic", recordElement, "topic"); + SetProperty(recordType, record, "Partition", recordElement, "partition"); + SetProperty(recordType, record, "Offset", recordElement, "offset"); + SetProperty(recordType, record, "Timestamp", recordElement, "timestamp"); + SetProperty(recordType, record, "TimestampType", recordElement, "timestampType"); + + // Process key + ProcessKey(recordElement, record, recordType, keyType); + + // Process value + ProcessValue(recordElement, record, recordType, valueType); + + // Process headers + ProcessHeaders(recordElement, record, recordType); + + // Process schema metadata for both key and value + ProcessSchemaMetadata(recordElement, record, recordType, "keySchemaMetadata", "KeySchemaMetadata"); + ProcessSchemaMetadata(recordElement, record, recordType, "valueSchemaMetadata", "ValueSchemaMetadata"); + + + return record; + } + + private void ProcessSchemaMetadata(JsonElement recordElement, object record, Type recordType, + string jsonPropertyName, string recordPropertyName) + { + if (recordElement.TryGetProperty(jsonPropertyName, out var metadataElement)) + { + var schemaMetadata = new SchemaMetadata(); + + if (metadataElement.TryGetProperty("dataFormat", out var dataFormatElement)) + { + schemaMetadata.DataFormat = dataFormatElement.GetString() ?? string.Empty; + } + + if (metadataElement.TryGetProperty("schemaId", out var schemaIdElement)) + { + schemaMetadata.SchemaId = schemaIdElement.GetString() ?? 
string.Empty; + } + + recordType.GetProperty(recordPropertyName)?.SetValue(record, schemaMetadata); + } + } + + private void ProcessKey(JsonElement recordElement, object record, Type recordType, Type keyType) + { + if (recordElement.TryGetProperty("key", out var keyElement) && keyElement.ValueKind == JsonValueKind.String) + { + var base64Key = keyElement.GetString(); + if (!string.IsNullOrEmpty(base64Key)) + { + try + { + var keyBytes = Convert.FromBase64String(base64Key); + var decodedKey = DeserializeKey(keyBytes, keyType); + recordType.GetProperty("Key")?.SetValue(record, decodedKey); + } + catch (Exception ex) + { + throw new SerializationException($"Failed to deserialize key data: {ex.Message}", ex); + } + } + } + } + + private void ProcessValue(JsonElement recordElement, object record, Type recordType, Type valueType) + { + if (recordElement.TryGetProperty("value", out var valueElement) && valueElement.ValueKind == JsonValueKind.String) + { + var base64Value = valueElement.GetString(); + var valueProperty = recordType.GetProperty("Value"); + + if (base64Value != null && valueProperty != null) + { + try + { + var deserializedValue = DeserializeValue(base64Value, valueType); + valueProperty.SetValue(record, deserializedValue); + } + catch (Exception ex) + { + throw new SerializationException($"Failed to deserialize value data: {ex.Message}", ex); + } + } + } + } + + private void ProcessHeaders(JsonElement recordElement, object record, Type recordType) + { + if (recordElement.TryGetProperty("headers", out var headersElement) && + headersElement.ValueKind == JsonValueKind.Array) + { + var headers = new Dictionary(); + + foreach (var headerObj in headersElement.EnumerateArray()) + { + foreach (var header in headerObj.EnumerateObject()) + { + if (header.Value.ValueKind == JsonValueKind.Array) + { + headers[header.Name] = ExtractHeaderBytes(header.Value); + } + } + } + + var headersProperty = recordType.GetProperty("Headers", + BindingFlags.Public | BindingFlags.Instance); + headersProperty?.SetValue(record, headers); + } + } + + private byte[] ExtractHeaderBytes(JsonElement headerArray) + { + var headerBytes = new byte[headerArray.GetArrayLength()]; + var i = 0; + foreach (var byteVal in headerArray.EnumerateArray()) + { + headerBytes[i++] = (byte)byteVal.GetInt32(); + } + + return headerBytes; + } + + /// + /// Deserializes a key from bytes based on the specified key type. + /// + /// The key bytes to deserialize. + /// The target type for the key. + /// The deserialized key object. + private object? DeserializeKey(byte[] keyBytes, Type keyType) + { + // ReSharper disable once ConditionIsAlwaysTrueOrFalseAccordingToNullableAPIContract + if (keyBytes == null || keyBytes.Length == 0) + return null; + + if (IsPrimitiveOrSimpleType(keyType)) + { + return DeserializePrimitiveValue(keyBytes, keyType); + } + + // For complex types, use format-specific deserialization + return DeserializeFormatSpecific(keyBytes, keyType, isKey: true); + } + + /// + /// Sets a property value on an object instance from a JsonElement. + /// + /// The type of the object. + /// The object instance. + /// The name of the property to set. + /// The JsonElement containing the source data. + /// The property name within the JsonElement. 
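+    // Worked illustration (descriptive comment only): for a record element such as
+    //   { "topic": "mytopic", "partition": 0, "offset": 15 }
+    // SetProperty(recordType, record, "Partition", recordElement, "partition") looks up
+    // the public Partition property via reflection, reads the JSON number with GetInt32(),
+    // and assigns it. Properties whose CLR type is not int, long, double or string are skipped.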
+ [RequiresDynamicCode("Dynamically accesses properties which might be trimmed.")] + [RequiresUnreferencedCode("Dynamically accesses properties which might be trimmed.")] + private void SetProperty( + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties)] + Type type, object instance, string propertyName, + JsonElement element, string jsonPropertyName) + { + if (!element.TryGetProperty(jsonPropertyName, out var jsonValue) || + jsonValue.ValueKind == JsonValueKind.Null) + return; + + // Add BindingFlags to find internal properties too + var property = type.GetProperty(propertyName, + BindingFlags.Public | BindingFlags.Instance); + if (property == null) return; + var propertyType = property.PropertyType; + + object value; + if (propertyType == typeof(int)) value = jsonValue.GetInt32(); + else if (propertyType == typeof(long)) value = jsonValue.GetInt64(); + else if (propertyType == typeof(double)) value = jsonValue.GetDouble(); + else if (propertyType == typeof(string)) value = jsonValue.GetString()!; + else return; + + property.SetValue(instance, value); + } + + /// + /// Serializes an object to JSON and writes it to the provided stream. + /// + /// The type of object to serialize. + /// The object to serialize. + /// The stream to write the serialized data to. + public void Serialize(T response, Stream responseStream) + { + if (EqualityComparer.Default.Equals(response, default(T))) + { + // According to ILambdaSerializer contract, if response is null, an empty stream or "null" should be written. + // AWS's default System.Text.Json serializer writes "null". + // Let's ensure the stream is written to, as HandlerWrapper might expect some output. + if (responseStream.CanWrite) + { + var nullBytes = Encoding.UTF8.GetBytes("null"); + responseStream.Write(nullBytes, 0, nullBytes.Length); + } + + return; + } + + if (SerializerContext != null) + { + // Attempt to get TypeInfo for the actual type of the response. + // This is important if T is object or an interface. + var typeInfo = SerializerContext.GetTypeInfo(response.GetType()); + + if (typeInfo != null) + { + // JsonSerializer.Serialize to a stream does not close it by default. + JsonSerializer.Serialize(responseStream, response, typeInfo); + return; + } + + // Fallback: if specific type info not found, try with typeof(T) from context + // This might be useful if T is concrete and response.GetType() is the same. + typeInfo = GetJsonTypeInfoFromContext(typeof(T)); + if (typeInfo != null) + { + // Need to cast typeInfo to non-generic JsonTypeInfo for the Serialize overload + JsonSerializer.Serialize(responseStream, response, typeInfo); + return; + } + } + + // Fallback to default JsonSerializer with options, ensuring the stream is left open. + // StreamWriter by default uses UTF-8 encoding. We specify it explicitly for clarity. + // The buffer size -1 can be used for default, or a specific size like 1024. + // Crucially, leaveOpen: true prevents the StreamWriter from disposing responseStream. + using var writer = new StreamWriter(responseStream, encoding: Encoding.UTF8, bufferSize: 1024, leaveOpen: true); +#pragma warning disable IL2026, IL3050 + var jsonResponse = JsonSerializer.Serialize(response, JsonOptions); +#pragma warning restore IL2026, IL3050 + writer.Write(jsonResponse); + writer.Flush(); // Ensure all data is written to the stream before writer is disposed. + } + + // Helper to get non-generic JsonTypeInfo from context based on a Type argument + private JsonTypeInfo? 
GetJsonTypeInfoFromContext(Type type) + { + if (SerializerContext == null) + return null; + + return SerializerContext.GetTypeInfo(type); + } + + // Adjusted GetJsonTypeInfo to return non-generic JsonTypeInfo for consistency, + // or keep it if it's used elsewhere for JsonTypeInfo specifically. + // For Serialize, GetJsonTypeInfoFromContext(typeof(T)) is more direct. + private JsonTypeInfo? GetJsonTypeInfo() // This is the original generic helper + { + if (SerializerContext == null) + return null; + + // Use reflection to find the right JsonTypeInfo property + // This is specific to how a user might structure their JsonSerializerContext. + // A more robust way for general types is SerializerContext.GetTypeInfo(typeof(T)). + foreach (var prop in SerializerContext.GetType().GetProperties()) + { + if (prop.PropertyType == typeof(JsonTypeInfo)) + { + return prop.GetValue(SerializerContext) as JsonTypeInfo; + } + } + + return null; + } + + /// + /// Deserializes a base64-encoded value into an object using the appropriate format. + /// + /// The base64-encoded binary data. + /// The target type to deserialize to. + /// The deserialized object. + [RequiresDynamicCode("Deserializing values might require runtime code generation depending on format.")] + [RequiresUnreferencedCode("Deserializing values might require types that cannot be statically analyzed.")] + protected virtual object DeserializeValue(string base64Value, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type valueType) + { + // Handle primitive types first + if (IsPrimitiveOrSimpleType(valueType)) + { + var bytes = Convert.FromBase64String(base64Value); + return DeserializePrimitiveValue(bytes, valueType); + } + + // For complex types, decode base64 and use format-specific deserialization + var data = Convert.FromBase64String(base64Value); + return DeserializeFormatSpecific(data, valueType, isKey: false); + } + + /// + /// Deserializes binary data into an object using the format-specific implementation. + /// This method handles primitive types directly and delegates complex types to derived classes. + /// + /// The binary data to deserialize. + /// The target type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. + [RequiresDynamicCode("Format-specific deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Format-specific deserialization might require types that cannot be statically analyzed.")] + protected virtual object? DeserializeFormatSpecific(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey) + { + // Handle primitive types directly in the base class + if (IsPrimitiveOrSimpleType(targetType)) + { + return DeserializePrimitiveValue(data, targetType); + } + + // For complex types, delegate to format-specific implementation in derived classes + return DeserializeComplexTypeFormat(data, targetType, isKey); + } + + /// + /// Deserializes complex (non-primitive) types using format-specific implementation. + /// Each derived class must implement this method to handle its specific format. + /// + /// The binary data to deserialize. + /// The target type to deserialize to. + /// Whether this data represents a key (true) or a value (false). + /// The deserialized object. 
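+    // A minimal sketch of what a derived serializer might do here, assuming the
+    // Apache.Avro SpecificDatumReader/BinaryDecoder APIs and a generated record type
+    // exposing a public static _SCHEMA field (the shipped PowertoolsKafkaAvroSerializer
+    // may be implemented differently):
+    //
+    //   protected override object? DeserializeComplexTypeFormat(byte[] data, Type targetType, bool isKey)
+    //   {
+    //       var schema = (global::Avro.Schema)targetType
+    //           .GetField("_SCHEMA", BindingFlags.Public | BindingFlags.Static)!
+    //           .GetValue(null)!;
+    //       var reader = new global::Avro.Specific.SpecificDatumReader<object>(schema, schema);
+    //       using var ms = new MemoryStream(data);
+    //       return reader.Read(null!, new global::Avro.IO.BinaryDecoder(ms));
+    //   }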
+ [RequiresDynamicCode("Format-specific deserialization might require runtime code generation.")] + [RequiresUnreferencedCode("Format-specific deserialization might require types that cannot be statically analyzed.")] + protected abstract object? DeserializeComplexTypeFormat(byte[] data, + [DynamicallyAccessedMembers(DynamicallyAccessedMemberTypes.PublicProperties | + DynamicallyAccessedMemberTypes.PublicFields)] + Type targetType, bool isKey); + + /// + /// Checks if the specified type is a primitive or simple type. + /// + protected bool IsPrimitiveOrSimpleType(Type type) + { + return type.IsPrimitive || + type == typeof(string) || + type == typeof(decimal) || + type == typeof(DateTime) || + type == typeof(Guid); + } + + /// + /// Deserializes a primitive value from bytes based on the specified type. + /// Handles common primitive types like int, long, double, bool, string, and Guid. + /// If the bytes are empty or null, returns null. + /// If the type is not recognized, attempts to convert from string. + /// + protected object? DeserializePrimitiveValue(byte[] bytes, Type valueType) + { + // Early return for empty data + if (bytes == null! || bytes.Length == 0) + return null!; + + // String is the most common case, handle first + if (valueType == typeof(string)) + { + return Encoding.UTF8.GetString(bytes); + } + + // For numeric and boolean types, try string parsing first + var stringValue = Encoding.UTF8.GetString(bytes); + + // Handle numeric types + if (valueType == typeof(int)) + return DeserializeIntValue(bytes, stringValue); + + if (valueType == typeof(long)) + return DeserializeLongValue(bytes, stringValue); + + if (valueType == typeof(double)) + return DeserializeDoubleValue(bytes, stringValue); + + if (valueType == typeof(bool)) + return DeserializeBoolValue(bytes, stringValue); + + // Handle Guid values + if (valueType == typeof(Guid)) + return DeserializeGuidValue(bytes, stringValue); + + // For any other type, try converting from string + return DeserializeGenericValue(stringValue, valueType); + } + + private object DeserializeIntValue(byte[] bytes, string stringValue) + { + // Try string parsing first + if (int.TryParse(stringValue, out var parsedValue)) + return parsedValue; + + // Fall back to binary representation + return bytes.Length switch + { + >= 4 => BitConverter.ToInt32(bytes, 0), + 1 => bytes[0], + _ => 0 + }; + } + + private object DeserializeLongValue(byte[] bytes, string stringValue) + { + if (long.TryParse(stringValue, out var parsedValue)) + return parsedValue; + + return bytes.Length switch + { + >= 8 => BitConverter.ToInt64(bytes, 0), + >= 4 => BitConverter.ToInt32(bytes, 0), + _ => 0L + }; + } + + private object DeserializeDoubleValue(byte[] bytes, string stringValue) + { + if (double.TryParse(stringValue, out var doubleValue)) + return doubleValue; + + return bytes.Length >= 8 ? BitConverter.ToDouble(bytes, 0) : 0.0; + } + + private object DeserializeBoolValue(byte[] bytes, string stringValue) + { + if (bool.TryParse(stringValue, out var boolValue)) + return boolValue; + + return bytes[0] != 0; + } + + private object? DeserializeGuidValue(byte[] bytes, string stringValue) + { + if (bytes.Length < 16) + return Guid.Empty; + + try + { + return new Guid(bytes); + } + catch + { + // If binary parsing fails, try string parsing + return Guid.TryParse(stringValue, out var guidValue) ? guidValue : Guid.Empty; + } + } + + private object? 
DeserializeGenericValue(string stringValue, Type valueType) + { + try + { + return Convert.ChangeType(stringValue, valueType); + } + catch + { + return valueType.IsValueType ? Activator.CreateInstance(valueType) : null; + } + } +} + diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/Readme.md b/libraries/src/AWS.Lambda.Powertools.Kafka/Readme.md new file mode 100644 index 000000000..16da5ccb4 --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/Readme.md @@ -0,0 +1 @@ +# Powertools for AWS Lambda (.NET) - Kafka \ No newline at end of file diff --git a/libraries/src/AWS.Lambda.Powertools.Kafka/SchemaMetadata.cs b/libraries/src/AWS.Lambda.Powertools.Kafka/SchemaMetadata.cs new file mode 100644 index 000000000..4f2c9828f --- /dev/null +++ b/libraries/src/AWS.Lambda.Powertools.Kafka/SchemaMetadata.cs @@ -0,0 +1,17 @@ +namespace AWS.Lambda.Powertools.Kafka; + +/// +/// Represents metadata about the schema used for serializing the record's value or key. +/// +public class SchemaMetadata +{ + /// + /// Gets or sets the format of the data (e.g., "JSON", "AVRO" "Protobuf"). + /// /// + public string DataFormat { get; internal set; } = null!; + + /// + /// Gets or sets the schema ID associated with the record's value or key. + /// + public string SchemaId { get; internal set; } = null!; +} \ No newline at end of file diff --git a/libraries/src/Directory.Packages.props b/libraries/src/Directory.Packages.props index a4421f6fc..be5d56855 100644 --- a/libraries/src/Directory.Packages.props +++ b/libraries/src/Directory.Packages.props @@ -5,6 +5,7 @@ + @@ -12,6 +13,7 @@ + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj new file mode 100644 index 000000000..455134b24 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/AWS.Lambda.Powertools.Kafka.Tests.csproj @@ -0,0 +1,81 @@ + + + + + + AWS.Lambda.Powertools.Kafka.Tests + AWS.Lambda.Powertools.Kafka.Tests + net8.0 + enable + enable + + false + true + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + + + + + + PreserveNewest + + + + PreserveNewest + + + + PreserveNewest + + + + PreserveNewest + + + + PreserveNewest + + + + Client + PreserveNewest + MSBuild:Compile + + + + PreserveNewest + + + + PreserveNewest + + + + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs new file mode 100644 index 000000000..96d09316e --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroKey.cs @@ -0,0 +1,70 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + 
[global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroKey : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse(@"{""type"":""record"",""name"":""AvroKey"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""fields"":[{""name"":""id"",""type"":""int""},{""name"":""color"",""type"":{""type"":""enum"",""name"":""Color"",""namespace"":""AWS.Lambda.Powertools.Kafka.Tests"",""symbols"":[""UNKNOWN"",""GREEN"",""RED""],""default"":""UNKNOWN""}}]}"); + private int _id; + private AWS.Lambda.Powertools.Kafka.Tests.Color _color = AWS.Lambda.Powertools.Kafka.Tests.Color.UNKNOWN; + public virtual global::Avro.Schema Schema + { + get + { + return AvroKey._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public AWS.Lambda.Powertools.Kafka.Tests.Color color + { + get + { + return this._color; + } + set + { + this._color = value; + } + } + public virtual object Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.color; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.color = (AWS.Lambda.Powertools.Kafka.Tests.Color)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs new file mode 100644 index 000000000..f1c6aa8d4 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/AvroProduct.cs @@ -0,0 +1,86 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public partial class AvroProduct : global::Avro.Specific.ISpecificRecord + { + public static global::Avro.Schema _SCHEMA = global::Avro.Schema.Parse("{\"type\":\"record\",\"name\":\"AvroProduct\",\"namespace\":\"AWS.Lambda.Powertools.Kafka.Te" + + "sts\",\"fields\":[{\"name\":\"id\",\"type\":\"int\"},{\"name\":\"name\",\"type\":\"string\"},{\"name" + + "\":\"price\",\"type\":\"double\"}]}"); + private int _id; + private string _name; + private double _price; + public virtual global::Avro.Schema Schema + { + get + { + return AvroProduct._SCHEMA; + } + } + public int id + { + get + { + return this._id; + } + set + { + this._id = value; + } + } + public string name + { + get + { + return this._name; + } + set + { + this._name = value; + } + } + public double price + { + get + { + return this._price; + } + set + { + this._price = value; + } + } + public virtual object 
Get(int fieldPos) + { + switch (fieldPos) + { + case 0: return this.id; + case 1: return this.name; + case 2: return this.price; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Get()"); + }; + } + public virtual void Put(int fieldPos, object fieldValue) + { + switch (fieldPos) + { + case 0: this.id = (System.Int32)fieldValue; break; + case 1: this.name = (System.String)fieldValue; break; + case 2: this.price = (System.Double)fieldValue; break; + default: throw new global::Avro.AvroRuntimeException("Bad index " + fieldPos + " in Put()"); + }; + } + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/Color.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/Color.cs new file mode 100644 index 000000000..963233679 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AWS/Lambda/Powertools/Kafka/Tests/Color.cs @@ -0,0 +1,23 @@ +// ------------------------------------------------------------------------------ +// +// Generated by avrogen, version 1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e +// Changes to this file may cause incorrect behavior and will be lost if code +// is regenerated +// +// ------------------------------------------------------------------------------ +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + using System; + using System.Collections.Generic; + using System.Text; + using global::Avro; + using global::Avro.Specific; + + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("avrogen", "1.12.0+8c27801dc8d42ccc00997f25c0b8f45f8d4a233e")] + public enum Color + { + UNKNOWN, + GREEN, + RED, + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroKey.avsc b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroKey.avsc new file mode 100644 index 000000000..cc15c9e72 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroKey.avsc @@ -0,0 +1,24 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroKey", + "fields": [ + { + "name": "id", + "type": "int" + }, + { + "name": "color", + "type": { + "type": "enum", + "name": "Color", + "symbols": [ + "UNKNOWN", + "GREEN", + "RED" + ], + "default": "UNKNOWN" + } + } + ] +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroProduct.avsc b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroProduct.avsc new file mode 100644 index 000000000..60b8ed002 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/AvroProduct.avsc @@ -0,0 +1,10 @@ +{ + "namespace": "AWS.Lambda.Powertools.Kafka.Tests", + "type": "record", + "name": "AvroProduct", + "fields": [ + {"name": "id", "type": "int"}, + {"name": "name", "type": "string"}, + {"name": "price", "type": "double"} + ] +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs new file mode 100644 index 000000000..34ff74bf2 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/HandlerTests.cs @@ -0,0 +1,423 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. 
+ * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Text; +using Amazon.Lambda.Core; +using Amazon.Lambda.TestUtilities; +using Avro.IO; +using Avro.Specific; +using AWS.Lambda.Powertools.Kafka.Avro; + +namespace AWS.Lambda.Powertools.Kafka.Tests.Avro; + +public class KafkaHandlerTests +{ + [Fact] + public async Task Handler_ProcessesKafkaEvent_Successfully() + { + // Arrange + var kafkaJson = GetMockKafkaEvent(); + var mockContext = new TestLambdaContext(); + var serializer = new PowertoolsKafkaAvroSerializer(); + + // Convert JSON string to stream for deserialization + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); + + // Act - Deserialize and process + var kafkaEvent = serializer.Deserialize>(stream); + var response = await Handler(kafkaEvent, mockContext); + + // Assert + Assert.Equal("Successfully processed Kafka events", response); + + // Verify event structure + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Single(kafkaEvent.Records); + + // Verify record content + var records = kafkaEvent.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + + // Verify deserialized value + var product = firstRecord.Value; + Assert.Equal("Laptop", product.name); + Assert.Equal(999.99, product.price); + + // Verify decoded key and headers + Assert.Equal(42, firstRecord.Key); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); + + var secondRecord = records[1]; + Assert.Equal(43, secondRecord.Key); + + var thirdRecord = records[2]; + Assert.Equal(0, thirdRecord.Key); + } + + [Fact] + public async Task Handler_ProcessesKafkaEvent_Primitive_Successfully() + { + // Arrange + var kafkaJson = GetSimpleMockKafkaEvent(); + var mockContext = new TestLambdaContext(); + var serializer = new PowertoolsKafkaAvroSerializer(); + + // Convert JSON string to stream for deserialization + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); + + // Act - Deserialize and process + var kafkaEvent = serializer.Deserialize>(stream); + var response = await HandlerSimple(kafkaEvent, mockContext); + + // Assert + Assert.Equal("Successfully processed Kafka events", response); + + // Verify event structure + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Single(kafkaEvent.Records); + + // Verify record content + var records = kafkaEvent.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + + // Verify deserialized value + Assert.Equal("Laptop", firstRecord.Value); + + // Verify decoded key and headers + Assert.Equal(42, firstRecord.Key); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); + + var secondRecord = records[1]; + Assert.Equal(43, secondRecord.Key); + Assert.Equal("Smartphone", secondRecord.Value); + + var thirdRecord = records[2]; + Assert.Equal(0, thirdRecord.Key); + 
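// The third record's key and value are null in the mock event: the int key surfaces as default(int) == 0 (asserted above) and the string value stays null. +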
Assert.Null(thirdRecord.Value); + } + + private string GetMockKafkaEvent() + { + // For testing, we'll create base64-encoded Avro data for our test products + var laptop = new AvroProduct { name = "Laptop", price = 999.99 }; + var smartphone = new AvroProduct { name = "Smartphone", price = 499.99 }; + var headphones = new AvroProduct { name = "Headphones", price = 99.99 }; + + // Convert to base64-encoded Avro + string laptopBase64 = ConvertToAvroBase64(laptop); + string smartphoneBase64 = ConvertToAvroBase64(smartphone); + string headphonesBase64 = ConvertToAvroBase64(headphones); + + string firstRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("42")); // Example key + string secondRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("43")); // Example key for second record + + // Create mock Kafka event JSON + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{firstRecordKey}"", + ""value"": ""{laptopBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 16, + ""timestamp"": 1545084650988, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{secondRecordKey}"", + ""value"": ""{smartphoneBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 17, + ""timestamp"": 1545084650989, + ""timestampType"": ""CREATE_TIME"", + ""key"": null, + ""value"": ""{headphonesBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + private string GetSimpleMockKafkaEvent() + { + // For testing, we'll create base64-encoded Avro data for our test products + + // Convert to base64-encoded Avro + string laptopBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("Laptop")); + string smartphoneBase64 = Convert.ToBase64String(Encoding.UTF8.GetBytes("Smartphone")); + + string firstRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("42")); // Example key + string secondRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("43")); // Example key for second record + + // Create mock Kafka event JSON + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{firstRecordKey}"", + ""value"": ""{laptopBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 16, + ""timestamp"": 1545084650988, + ""timestampType"": ""CREATE_TIME"", + ""key"": 
""{secondRecordKey}"", + ""value"": ""{smartphoneBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 17, + ""timestamp"": 1545084650989, + ""timestampType"": ""CREATE_TIME"", + ""key"": null, + ""value"": null, + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + private string ConvertToAvroBase64(AvroProduct product) + { + using var stream = new MemoryStream(); + var encoder = new BinaryEncoder(stream); + var writer = new SpecificDatumWriter(AvroProduct._SCHEMA); + + writer.Write(product, encoder); + encoder.Flush(); + + return Convert.ToBase64String(stream.ToArray()); + } + + // Define the test handler method + private async Task Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + var product = record.Value; + context.Logger.LogInformation($"Processing {product.name} at ${product.price}"); + } + + return "Successfully processed Kafka events"; + } + + private async Task HandlerSimple(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + var product = record.Value; + context.Logger.LogInformation($"Processing {product}"); + } + + return "Successfully processed Kafka events"; + } + + [Fact] + public async Task Handler_ProcessesKafkaEvent_WithAvroKey_Successfully() + { + // Arrange + var kafkaJson = GetMockKafkaEventWithAvroKeys(); + var mockContext = new TestLambdaContext(); + var serializer = new PowertoolsKafkaAvroSerializer(); + + // Convert JSON string to stream for deserialization + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); + + // Act - Deserialize and process + var kafkaEvent = serializer.Deserialize>(stream); + var response = await HandlerWithAvroKeys(kafkaEvent, mockContext); + + // Assert + Assert.Equal("Successfully processed Kafka events", response); + + // Verify event structure + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Single(kafkaEvent.Records); + + // Verify record content + var records = kafkaEvent.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + + // Verify deserialized Avro key and value + Assert.Equal("Laptop", firstRecord.Value.name); + Assert.Equal(999.99, firstRecord.Value.price); + Assert.Equal(1, firstRecord.Key.id); + Assert.Equal(Color.GREEN, firstRecord.Key.color); + + // Verify headers + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); + + var secondRecord = records[1]; + Assert.Equal(2, secondRecord.Key.id); + Assert.Equal(Color.UNKNOWN, secondRecord.Key.color); + + var thirdRecord = records[2]; + Assert.Equal(3, thirdRecord.Key.id); + Assert.Equal(Color.RED, thirdRecord.Key.color); + } + + private string GetMockKafkaEventWithAvroKeys() + { + // Create test products + var laptop = new AvroProduct { name = "Laptop", price = 999.99 }; + var smartphone = new AvroProduct { name = "Smartphone", price = 499.99 }; + var headphones = new AvroProduct { name = "Headphones", price = 99.99 }; + + // Create test keys + var key1 = new AvroKey { id = 1, color = Color.GREEN }; + var key2 = new AvroKey { id = 2 }; + var key3 = new AvroKey { id = 3, color = Color.RED }; + + // Convert values to base64-encoded Avro + string 
laptopBase64 = ConvertToAvroBase64(laptop); + string smartphoneBase64 = ConvertToAvroBase64(smartphone); + string headphonesBase64 = ConvertToAvroBase64(headphones); + + // Convert keys to base64-encoded Avro + string key1Base64 = ConvertKeyToAvroBase64(key1); + string key2Base64 = ConvertKeyToAvroBase64(key2); + string key3Base64 = ConvertKeyToAvroBase64(key3); + + // Create mock Kafka event JSON + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{key1Base64}"", + ""value"": ""{laptopBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 16, + ""timestamp"": 1545084650988, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{key2Base64}"", + ""value"": ""{smartphoneBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 17, + ""timestamp"": 1545084650989, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{key3Base64}"", + ""value"": ""{headphonesBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + private string ConvertKeyToAvroBase64(AvroKey key) + { + using var stream = new MemoryStream(); + var encoder = new BinaryEncoder(stream); + var writer = new SpecificDatumWriter(AvroKey._SCHEMA); + + writer.Write(key, encoder); + encoder.Flush(); + + return Convert.ToBase64String(stream.ToArray()); + } + + private async Task HandlerWithAvroKeys(ConsumerRecords records, + ILambdaContext context) + { + foreach (var record in records) + { + var key = record.Key.id; + var product = record.Value; + } + + return "Successfully processed Kafka events"; + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs new file mode 100644 index 000000000..43c474d9c --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/PowertoolsKafkaAvroSerializerTests.cs @@ -0,0 +1,158 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +using System.Runtime.Serialization; +using System.Text; +using AWS.Lambda.Powertools.Kafka.Avro; + +namespace AWS.Lambda.Powertools.Kafka.Tests.Avro; + +public class PowertoolsKafkaAvroSerializerTests +{ + [Fact] + public void Deserialize_KafkaEventWithAvroPayload_DeserializesToCorrectType() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + string kafkaEventJson = File.ReadAllText("Avro/kafka-avro-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("aws:kafka", result.EventSource); + + // Verify records were deserialized + Assert.True(result.Records.ContainsKey("mytopic-0")); + var records = result.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record's content + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + Assert.Equal(42, firstRecord.Key); + + // Verify deserialized Avro value + var product = firstRecord.Value; + Assert.Equal("Laptop", product.name); + Assert.Equal(1001, product.id); + Assert.Equal(999.99000000000001, product.price); + + // Verify second record + var secondRecord = records[1]; + var smartphone = secondRecord.Value; + Assert.Equal("Smartphone", smartphone.name); + } + + [Fact] + public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + string kafkaEventJson = File.ReadAllText("Avro/kafka-avro-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert - Test enumeration + int count = 0; + var products = new List(); + + // Directly iterate over ConsumerRecords + foreach (var record in result) + { + count++; + products.Add(record.Value.name); + } + + // Verify correct count and values + Assert.Equal(3, count); + Assert.Contains("Laptop", products); + Assert.Contains("Smartphone", products); + Assert.Equal(3, products.Count); + + // Get first record directly through Linq extension + var firstRecord = result.First(); + Assert.Equal("Laptop", firstRecord.Value.name); + Assert.Equal(1001, firstRecord.Value.id); + } + + [Fact] + public void Primitive_Deserialization() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + string kafkaEventJson = + CreateKafkaEvent(Convert.ToBase64String("MyKey"u8.ToArray()), + Convert.ToBase64String("Myvalue"u8.ToArray())); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + var firstRecord = result.First(); + Assert.Equal("Myvalue", firstRecord.Value); + Assert.Equal("MyKey", firstRecord.Key); + } + + [Fact] + public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsException() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + // Invalid JSON and not Avro binary + byte[] invalidBytes = { 0xDE, 0xAD, 0xBE, 0xEF }; + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(invalidBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + Assert.Throws(() => + serializer.Deserialize>(stream)); + } + + private string CreateKafkaEvent(string keyValue, string 
valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/kafka-avro-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/kafka-avro-event.json new file mode 100644 index 000000000..8d6ef2210 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/kafka-avro-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "0g8MTGFwdG9wUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "1A8UU21hcnRwaG9uZVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "1g8USGVhZHBob25lc0jhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs new file mode 100644 index 000000000..574f79a30 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/HeaderExtensionsTests.cs @@ -0,0 +1,103 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +using System.Text; + +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + public class HeaderExtensionsTests + { + [Fact] + public void DecodedValues_WithValidHeaders_DecodesCorrectly() + { + // Arrange + var headers = new Dictionary + { + { "header1", Encoding.UTF8.GetBytes("value1") }, + { "header2", Encoding.UTF8.GetBytes("value2") } + }; + + // Act + var decoded = headers.DecodedValues(); + + // Assert + Assert.Equal(2, decoded.Count); + Assert.Equal("value1", decoded["header1"]); + Assert.Equal("value2", decoded["header2"]); + } + + [Fact] + public void DecodedValues_WithEmptyDictionary_ReturnsEmptyDictionary() + { + // Arrange + var headers = new Dictionary(); + + // Act + var decoded = headers.DecodedValues(); + + // Assert + Assert.Empty(decoded); + } + + [Fact] + public void DecodedValues_WithNullDictionary_ReturnsEmptyDictionary() + { + // Arrange + Dictionary headers = null; + + // Act + var decoded = headers.DecodedValues(); + + // Assert + Assert.Empty(decoded); + } + + [Fact] + public void DecodedValue_WithValidBytes_DecodesCorrectly() + { + // Arrange + var bytes = Encoding.UTF8.GetBytes("test-value"); + + // Act + var decoded = bytes.DecodedValue(); + + // Assert + Assert.Equal("test-value", decoded); + } + + [Fact] + public void DecodedValue_WithEmptyBytes_ReturnsEmptyString() + { + // Arrange + var bytes = Array.Empty(); + + // Act + var decoded = bytes.DecodedValue(); + + // Assert + Assert.Equal("", decoded); + } + + [Fact] + public void DecodedValue_WithNullBytes_ReturnsEmptyString() + { + // Act + var decoded = ((byte[])null).DecodedValue(); + + // Assert + Assert.Equal("", decoded); + } + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs new file mode 100644 index 000000000..a40ee8efd --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/PowertoolsKafkaJsonSerializerTests.cs @@ -0,0 +1,554 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using AWS.Lambda.Powertools.Kafka.Json; + +namespace AWS.Lambda.Powertools.Kafka.Tests.Json; + +public class PowertoolsKafkaJsonSerializerTests +{ + [Fact] + public void Deserialize_KafkaEventWithJsonPayload_DeserializesToCorrectType() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + string kafkaEventJson = File.ReadAllText("Json/kafka-json-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("aws:kafka", result.EventSource); + + // Verify records were deserialized + Assert.True(result.Records.ContainsKey("mytopic-0")); + var records = result.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record's content + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + Assert.Equal("recordKey", firstRecord.Key); + + // Verify deserialized JSON value + var product = firstRecord.Value; + Assert.Equal("product5", product.Name); + Assert.Equal(12345, product.Id); + Assert.Equal(45, product.Price); + + // Verify second record + var secondRecord = records[1]; + var p2 = secondRecord.Value; + Assert.Equal("product5", p2.Name); + Assert.Equal(12345, p2.Id); + Assert.Equal(45, p2.Price); + + // Verify third record + var thirdRecord = records[2]; + var p3 = thirdRecord.Value; + Assert.Equal("product5", p3.Name); + Assert.Equal(12345, p3.Id); + Assert.Equal(45, p3.Price); + } + + [Fact] + public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + string kafkaEventJson = File.ReadAllText("Json/kafka-json-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert - Test enumeration + int count = 0; + var products = new List(); + + // Directly iterate over ConsumerRecords + foreach (var record in result) + { + count++; + products.Add(record.Value.Name); + } + + // Verify correct count and values + Assert.Equal(3, count); + Assert.Contains("product5", products); + + // Get first record directly through Linq extension + var firstRecord = result.First(); + Assert.Equal("product5", firstRecord.Value.Name); + Assert.Equal(12345, firstRecord.Value.Id); + } + + [Fact] + public void Primitive_Deserialization() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + string kafkaEventJson = + CreateKafkaEvent(Convert.ToBase64String("MyKey"u8.ToArray()), + Convert.ToBase64String("Myvalue"u8.ToArray())); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + var firstRecord = result.First(); + Assert.Equal("Myvalue", firstRecord.Value); + Assert.Equal("MyKey", firstRecord.Key); + } + + [Fact] + public void DeserializeComplexKey_StandardJsonDeserialization_Works() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + var complexObject = new { Name = "Test", Id = 123 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + 
+ using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize, string>>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("Test", record.Key["Name"].ToString()); + Assert.Equal(123, int.Parse(record.Key["Id"].ToString())); + } + + [Fact] + public void DeserializeComplexKey_WithSerializerContext_UsesContext() + { + // Arrange + // Create custom context + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + var serializer = new PowertoolsKafkaJsonSerializer(context); + + // Create test data with the registered type + var testModel = new TestModel { Name = "TestFromContext", Value = 456 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(jsonBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record.Key); + Assert.Equal("TestFromContext", record.Key.Name); + Assert.Equal(456, record.Key.Value); + } + + [Fact] + public void DeserializeComplexKey_WhenDeserializationFails_ReturnsNull() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + // Invalid JSON + byte[] invalidBytes = { 0xDE, 0xAD, 0xBE, 0xEF }; + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(invalidBytes), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test")) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + // This shouldn't throw but return a record with null key + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Null(record.Key); + } + + [Fact] + public void DeserializeComplexValue_WithSerializerContext_UsesContext() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + var serializer = new PowertoolsKafkaJsonSerializer(context); + + // Create test data with the registered type + var testModel = new TestModel { Name = "ValueFromContext", Value = 789 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(jsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.NotNull(record.Value); + Assert.Equal("ValueFromContext", record.Value.Name); + Assert.Equal(789, record.Value.Value); + } + + [Fact] + public void DeserializeComplexValue_WithInvalidJson_ReturnsNullForReferenceTypes() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + byte[] invalidJsonBytes = Encoding.UTF8.GetBytes("{ this is not valid json }"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(invalidJsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + 
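+        // The invalid value bytes should be contained per record: the event-level Deserialize
+        // call above still succeeds, and only this record's Value is expected to be null.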
+ // Assert - value should be null because it's a reference type + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Null(record.Value); + } + + [Fact] + public void DeserializeComplexValue_WithInvalidJson_ReturnsDefaultForValueTypes() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + byte[] invalidJsonBytes = Encoding.UTF8.GetBytes("{ bad json"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(invalidJsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert - value should be default because it's a value type + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Equal(0, record.Value.Id); + Assert.Equal(default, record.Value.Name); + Assert.Equal(0, record.Value.Price); + } + + [Fact] + public void DeserializeComplexValue_WithCustomJsonOptions_RespectsOptions() + { + // Arrange - create custom options with different naming policy + var options = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + PropertyNameCaseInsensitive = false // Force exact case match + }; + var serializer = new PowertoolsKafkaJsonSerializer(options); + + // Create test data with camelCase property names + var jsonBytes = Encoding.UTF8.GetBytes(@"{""id"":999,""name"":""camelCase"",""price"":29.99}"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(jsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal(999, record.Value.Id); + Assert.Equal("camelCase", record.Value.Name); + Assert.Equal(29.99m, record.Value.Price); + } + + [Fact] + public void DeserializeComplexValue_WithEmptyData_ReturnsNullOrDefault() + { + // Arrange + var serializer = new PowertoolsKafkaJsonSerializer(); + // Empty JSON data + byte[] emptyBytes = Array.Empty(); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(emptyBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Null(record.Value); // Should be null for empty input + } + + [Fact] + public void DeserializeComplexValue_WithContextAndNullResult_ReturnsNull() + { + // Arrange - create a context with JsonNullHandling.Include + var options = new JsonSerializerOptions + { + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + IgnoreNullValues = false + }; + var context = new TestJsonSerializerContext(options); + var serializer = new PowertoolsKafkaJsonSerializer(context); + + // JSON that explicitly sets the value to null + var jsonBytes = Encoding.UTF8.GetBytes("null"); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: Convert.ToBase64String(jsonBytes) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + 
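+        // Even with the source-generated context registered, a literal JSON "null" payload
+        // is expected to surface as a null Value rather than throw.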
Assert.Equal("testKey", record.Key); + Assert.Null(record.Value); + } + + + /// + /// Helper method to create Kafka event JSON with specified key and value in base64 format + /// + private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + [Fact] + public void DirectJsonSerializerTest_InvokesFormatSpecificMethod() + { + // This test directly tests the JSON serializer methods + var serializer = new TestJsonDeserializer(); + + // Create test data with valid JSON + var testModel = new TestModel { Name = "DirectTest", Value = 555 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + // Act + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), false); + + // Assert + Assert.NotNull(result); + var model = result as TestModel; + Assert.NotNull(model); + Assert.Equal("DirectTest", model!.Name); + Assert.Equal(555, model.Value); + } + + [Fact] + public void DirectJsonSerializerTest_WithContext_UsesContext() + { + // Create a context that includes TestModel + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + + // Create the serializer with context + var serializer = new TestJsonDeserializer(context); + + // Create test data with valid JSON + var testModel = new TestModel { Name = "ContextTest", Value = 999 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(testModel)); + + // Act - directly test the protected method + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), false); + + // Assert + Assert.NotNull(result); + var model = result as TestModel; + Assert.NotNull(model); + Assert.Equal("ContextTest", model!.Name); + Assert.Equal(999, model.Value); + } + + [Fact] + public void DirectJsonSerializerTest_WithInvalidJson_ReturnsNullForReferenceType() + { + // Create the serializer + var serializer = new TestJsonDeserializer(); + + // Create invalid JSON data + var invalidJsonBytes = Encoding.UTF8.GetBytes("{ not valid json"); + + // Act - directly test the protected method + var result = serializer.TestDeserializeFormatSpecific(invalidJsonBytes, typeof(TestModel), false); + + // Assert - should return null for reference type when JSON is invalid + Assert.Null(result); + } + + [Fact] + public void DirectJsonSerializerTest_WithInvalidJson_ReturnsDefaultForValueType() + { + // Create the serializer + var serializer = new TestJsonDeserializer(); + + // Create invalid JSON data + var invalidJsonBytes = Encoding.UTF8.GetBytes("{ not valid json"); + + // Act - directly test the protected method with a value type + var result = serializer.TestDeserializeFormatSpecific(invalidJsonBytes, typeof(int), false); + + // Assert - should return default (0) for value type when JSON is invalid + Assert.Equal(0, result); + } + + [Fact] + public void DirectJsonSerializerTest_WithEmptyJson_ReturnsNullOrDefault() + { + // Create the serializer + var serializer = new 
TestJsonDeserializer(); + + // Create empty JSON data + var emptyJsonBytes = Array.Empty(); + + // Act - test with reference type + var resultRef = serializer.TestDeserializeFormatSpecific(emptyJsonBytes, typeof(TestModel), false); + // Act - test with value type + var resultVal = serializer.TestDeserializeFormatSpecific(emptyJsonBytes, typeof(int), false); + + // Assert + Assert.Null(resultRef); // Reference type should get null + Assert.Equal(0, resultVal); // Value type should get default + } + + [Fact] + public void DirectJsonSerializerTest_WithContextResultingInNull_ReturnsNull() + { + // Create context + var options = new JsonSerializerOptions(); + var context = new TestJsonSerializerContext(options); + + // Create serializer with context + var serializer = new TestJsonDeserializer(context); + + // Create JSON that is "null" + var jsonBytes = Encoding.UTF8.GetBytes("null"); + + // Act - even with context, null JSON should return null + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), false); + + // Assert + Assert.Null(result); + } + + /// + /// Test helper to directly access protected methods + /// + private class TestJsonDeserializer : PowertoolsKafkaJsonSerializer + { + public TestJsonDeserializer() : base() { } + + public TestJsonDeserializer(JsonSerializerOptions options) : base(options) { } + + public TestJsonDeserializer(JsonSerializerContext context) : base(context) { } + + public object? TestDeserializeFormatSpecific(byte[] data, Type targetType, bool isKey) + { + // Call the protected method directly + return base.DeserializeComplexTypeFormat(data, targetType, isKey); + } + } +} + +[JsonSerializable(typeof(TestModel))] +public partial class TestJsonSerializerContext : JsonSerializerContext +{ +} + +public class TestModel +{ + public string Name { get; set; } = string.Empty; + public int Value { get; set; } +} + +public record JsonProduct +{ + public int Id { get; set; } + public string Name { get; set; } = string.Empty; + public decimal Price { get; set; } +} + +public struct ValueTypeProduct +{ + public int Id { get; set; } + public string Name { get; set; } + public decimal Price { get; set; } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/kafka-json-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/kafka-json-event.json new file mode 100644 index 000000000..d85c40654 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Json/kafka-json-event.json @@ -0,0 +1,50 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "cmVjb3JkS2V5", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + 
"offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": null, + "value": "ewogICJpZCI6IDEyMzQ1LAogICJuYW1lIjogInByb2R1Y3Q1IiwKICAicHJpY2UiOiA0NQp9", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs new file mode 100644 index 000000000..d41bfb18a --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/KafkaHandlerFunctionalTests.cs @@ -0,0 +1,569 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +/* + These tests cover the key use cases you requested: + + 1. Basic Functionality: + Processing single records + Processing multiple records + Accessing record metadata + + 2. Data Formats: + JSON deserialization + Avro deserialization + Protobuf deserialization + Raw/default deserialization + + 3. Key Processing: + Processing various key formats (string, int, complex objects) + Handling null keys + + 4.Error Handling: + Invalid JSON data + Missing schemas with fallback mechanisms + + 5.Headers & Metadata: + Accessing and parsing record headers + */ + +using System.Runtime.Serialization; +using System.Text; +using Amazon.Lambda.Core; +using Amazon.Lambda.TestUtilities; +using AWS.Lambda.Powertools.Kafka.Json; +using AWS.Lambda.Powertools.Kafka.Avro; +using TestKafka; + +namespace AWS.Lambda.Powertools.Kafka.Tests; + +public class KafkaHandlerFunctionalTests +{ + #region JSON Serializer Tests + + [Fact] + public void Given_SingleJsonRecord_When_ProcessedWithHandler_Then_SuccessfullyDeserializedAndProcessed() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Processing {record.Value.Name} at ${record.Value.Price}"); + } + return "Successfully processed JSON Kafka events"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create a single record + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "mytopic", + Partition = 0, + Offset = 15, + Timestamp = 1645084650987, + TimestampType = "CREATE_TIME", + Key = "product-123", + Value = new JsonProduct { Name = "Laptop", Price = 999.99m, Id = 123 }, + Headers = new Dictionary + { + { "source", Encoding.UTF8.GetBytes("online-store") } + } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed JSON Kafka events", result); + Assert.Contains("Processing Laptop at $999.99", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_MultipleJsonRecords_When_ProcessedWithHandler_Then_AllRecordsProcessed() + { + // Given + int processedCount = 0; + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record 
in records) + { + context.Logger.LogInformation($"Processing {record.Value.Name}"); + processedCount++; + } + return $"Processed {processedCount} records"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create multiple records + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() { Topic = "mytopic", Value = new JsonProduct { Name = "Laptop" } }, + new() { Topic = "mytopic", Value = new JsonProduct { Name = "Phone" } }, + new() { Topic = "mytopic", Value = new JsonProduct { Name = "Tablet" } } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Processed 3 records", result); + Assert.Contains("Processing Laptop", mockLogger.Buffer.ToString()); + Assert.Contains("Processing Phone", mockLogger.Buffer.ToString()); + Assert.Contains("Processing Tablet", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_JsonRecordWithMetadata_When_ProcessedWithHandler_Then_MetadataIsAccessible() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + var record = records.First(); + context.Logger.LogInformation($"Topic: {record.Topic}, Partition: {record.Partition}, Offset: {record.Offset}, Time: {record.Timestamp}"); + return "Metadata accessed"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "sales-data", + Partition = 3, + Offset = 42, + Timestamp = 1645084650987, + TimestampType = "CREATE_TIME", + Value = new JsonProduct { Name = "Metadata Test" } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Metadata accessed", result); + Assert.Contains("Topic: sales-data, Partition: 3, Offset: 42", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_JsonStreamInput_When_DeserializedWithJsonSerializer_Then_CorrectlyDeserializes() + { + // Given + var serializer = new PowertoolsKafkaJsonSerializer(); + string json = @"{ + ""eventSource"": ""aws:kafka"", + ""records"": { + ""mytopic-0"": [ + { + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""key"": """ + Convert.ToBase64String(Encoding.UTF8.GetBytes("key1")) + @""", + ""value"": """ + Convert.ToBase64String(Encoding.UTF8.GetBytes("{\"Name\":\"JSON Test\",\"Price\":199.99,\"Id\":456}")) + @""" + } + ] + } + }"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // When + var result = serializer.Deserialize>(stream); + + // Then + Assert.Equal("aws:kafka", result.EventSource); + Assert.Single(result.Records); + var record = result.First(); + Assert.Equal("key1", record.Key); + Assert.Equal("JSON Test", record.Value.Name); + Assert.Equal(199.99m, record.Value.Price); + Assert.Equal(456, record.Value.Id); + } + + [Fact] + public void Given_InvalidJsonData_When_DeserializedWithJsonSerializer_Then_Returns_Null() + { + // Given + var serializer = new PowertoolsKafkaJsonSerializer(); + string json = @"{ + ""eventSource"": ""aws:kafka"", + ""records"": { + ""mytopic-0"": [ + { + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""key"": """ + Convert.ToBase64String(Encoding.UTF8.GetBytes("key1")) + @""", + ""value"": """ + 
Convert.ToBase64String(Encoding.UTF8.GetBytes("{invalid-json}")) + @""" + } + ] + } + }"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + var output = serializer.Deserialize>(stream); + + // Act & Assert + Assert.Single(output.Records); + Assert.Equal("key1", output.Records.First().Value[0].Key); + Assert.Null(output.Records.First().Value[0].Value); + } + + [Fact] + public void Given_JsonRecordWithHeaders_When_ProcessedWithHandler_Then_HeadersAreAccessible() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + var record = records.First(); + var source = record.Headers["source"].DecodedValue(); + var contentType = record.Headers["content-type"].DecodedValue(); + context.Logger.LogInformation($"Headers: source={source}, content-type={contentType}"); + return "Headers processed"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Value = new JsonProduct { Name = "Header Test" }, + Headers = new Dictionary + { + { "source", Encoding.UTF8.GetBytes("web-app") }, + { "content-type", Encoding.UTF8.GetBytes("application/json") } + } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Headers processed", result); + Assert.Contains("Headers: source=web-app, content-type=application/json", mockLogger.Buffer.ToString()); + } + + #endregion + + #region Avro Serializer Tests + + [Fact] + public void Given_SingleAvroRecord_When_ProcessedWithHandler_Then_SuccessfullyDeserializedAndProcessed() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Processing {record.Value.name} at ${record.Value.price}"); + } + return "Successfully processed Avro Kafka events"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create a single record + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "mytopic", + Partition = 0, + Offset = 15, + Key = "avro-key", + Value = new AvroProduct { name = "Camera", price = 349.95 } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed Avro Kafka events", result); + Assert.Contains("Processing Camera at $349.95", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_ComplexAvroKey_When_ProcessedWithHandler_Then_KeyIsCorrectlyDeserialized() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + var record = records.First(); + context.Logger.LogInformation($"Processing product with key ID: {record.Key.id}, color: {record.Key.color}"); + return "Successfully processed complex keys"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Key = new AvroKey { id = 42, color = Color.GREEN }, + Value = new AvroProduct { name = "Green Item", price = 49.99 } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed complex keys", result); + Assert.Contains("Processing product with key ID: 
42, color: GREEN", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_MissingAvroSchema_When_DeserializedWithAvroSerializer_Then_ReturnsException() + { + // Arrange + var serializer = new PowertoolsKafkaAvroSerializer(); + + // Create data that looks like Avro but without schema + byte[] invalidAvroData = { 0x01, 0x02, 0x03, 0x04 }; // Just some random bytes + string base64Data = Convert.ToBase64String(invalidAvroData); + + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("test-key"))}"", + ""value"": ""{base64Data}"" + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + Assert.Throws(() => + serializer.Deserialize>(stream)); + } + + #endregion + + #region Protobuf Serializer Tests + + [Fact] + public void Given_SingleProtobufRecord_When_ProcessedWithHandler_Then_SuccessfullyDeserializedAndProcessed() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Processing {record.Value.Name} at ${record.Value.Price}"); + } + return "Successfully processed Protobuf Kafka events"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create a single record + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "mytopic", + Partition = 0, + Offset = 15, + Key = 42, + Value = new ProtobufProduct { Name = "Smart Watch", Id = 789, Price = 249.99 } + } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Successfully processed Protobuf Kafka events", result); + Assert.Contains("Processing Smart Watch at $249.99", mockLogger.Buffer.ToString()); + } + + [Fact] + public void Given_NullKeyOrValue_When_ProcessedWithHandler_Then_HandlesNullsCorrectly() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + string keyInfo = record.Key.HasValue ? record.Key.Value.ToString() : "null"; + string valueInfo = record.Value != null ? 
record.Value.Name : "null"; + context.Logger.LogInformation($"Key: {keyInfo}, Value: {valueInfo}"); + } + return "Processed records with nulls"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() { Key = 1, Value = new ProtobufProduct { Name = "Valid Product" } }, + new() { Key = null, Value = new ProtobufProduct { Name = "No Key" } }, + new() { Key = 3, Value = null } + } + } + } + }; + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Processed records with nulls", result); + Assert.Contains("Key: 1, Value: Valid Product", mockLogger.Buffer.ToString()); + Assert.Contains("Key: null, Value: No Key", mockLogger.Buffer.ToString()); + Assert.Contains("Key: 3, Value: null", mockLogger.Buffer.ToString()); + } + + #endregion + + #region Raw/Default Deserialization Tests + + [Fact] + public void Given_RawUtf8Data_When_ProcessedWithDefaultHandler_Then_DeserializesToStrings() + { + // Given + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + context.Logger.LogInformation($"Key: {record.Key}, Value: {record.Value}"); + } + return "Processed raw data"; + } + + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext { Logger = mockLogger }; + + // Create Kafka event with raw base64-encoded strings + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("simple-key"))}"", + ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("Simple UTF-8 text value"))}"", + ""headers"": [ + {{ ""content-type"": [{(int)'t'}, {(int)'e'}, {(int)'x'}, {(int)'t'}] }} + ] + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Use the default serializer which handles base64 → UTF-8 conversion + var serializer = new PowertoolsKafkaJsonSerializer(); + var records = serializer.Deserialize>(stream); + + // When + var result = Handler(records, mockContext); + + // Then + Assert.Equal("Processed raw data", result); + Assert.Contains("Key: simple-key, Value: Simple UTF-8 text value", mockLogger.Buffer.ToString()); + } + + #endregion +} + +// Model classes for testing +public class JsonProduct +{ + public string Name { get; set; } + public int Id { get; set; } + public decimal Price { get; set; } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs new file mode 100644 index 000000000..ff2512f33 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/PowertoolsKafkaSerializerBaseTests.cs @@ -0,0 +1,746 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. 
See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Runtime.Serialization; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace AWS.Lambda.Powertools.Kafka.Tests +{ + /// + /// Additional tests for PowertoolsKafkaSerializerBase + /// + public class PowertoolsKafkaSerializerBaseTests + { + /// + /// Simple serializer implementation for testing base class + /// + private class TestKafkaSerializer : PowertoolsKafkaSerializerBase + { + public TestKafkaSerializer() : base() + { + } + + public TestKafkaSerializer(JsonSerializerOptions options) : base(options) + { + } + + public TestKafkaSerializer(JsonSerializerContext context) : base(context) + { + } + + public TestKafkaSerializer(JsonSerializerOptions options, JsonSerializerContext context) + : base(options, context) + { + } + + // Implementation of the abstract method for test purposes + protected override object? DeserializeComplexTypeFormat(byte[] data, + Type targetType, bool isKey) + { + try + { + // Test implementation using JSON for all complex types + var jsonStr = Encoding.UTF8.GetString(data); + + if (SerializerContext != null) + { + var typeInfo = SerializerContext.GetTypeInfo(targetType); + if (typeInfo != null) + { + return JsonSerializer.Deserialize(jsonStr, typeInfo); + } + } + + return JsonSerializer.Deserialize(jsonStr, targetType, JsonOptions); + } + catch + { + return null; + } + } + + // Expose protected methods for direct testing + public object? TestDeserializeFormatSpecific(byte[] data, Type targetType, bool isKey) + { + return DeserializeFormatSpecific(data, targetType, isKey); + } + + public object? TestDeserializeComplexTypeFormat(byte[] data, Type targetType, bool isKey) + { + return DeserializeComplexTypeFormat(data, targetType, isKey); + } + + public object? 
TestDeserializePrimitiveValue(byte[] data, Type targetType) + { + return DeserializePrimitiveValue(data, targetType); + } + + public bool TestIsPrimitiveOrSimpleType(Type type) + { + return IsPrimitiveOrSimpleType(type); + } + + public object TestDeserializeValue(string base64Value, Type valueType) + { + return DeserializeValue(base64Value, valueType); + } + } + + [Fact] + public void Deserialize_BooleanValues_HandlesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: "dHJ1ZQ==", // "true" in base64 + valueValue: "AQ==" // byte[1] = {1} in base64 + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var firstRecord = result.First(); + Assert.Equal("true", firstRecord.Key); + Assert.True(firstRecord.Value); + } + + [Fact] + public void Deserialize_NumericValues_HandlesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: "NDI=", // "42" in base64 + valueValue: "MTIzNA==" // "1234" in base64 + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var firstRecord = result.First(); + Assert.Equal(42, firstRecord.Key); + Assert.Equal(1234, firstRecord.Value); + } + + [Fact] + public void Deserialize_GuidValues_HandlesCorrectly() + { + // Arrange + var guid = Guid.NewGuid(); + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(guid.ToByteArray()), + valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes(guid.ToString())) + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var firstRecord = result.First(); + Assert.Equal(guid, firstRecord.Key); + Assert.Equal(guid.ToString(), firstRecord.Value); + } + + [Fact] + public void Deserialize_InvalidJson_ThrowsException() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string invalidJson = "{ this is not valid json }"; + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(invalidJson)); + + // Act & Assert + Assert.ThrowsAny(() => + serializer.Deserialize>(stream)); + } + + [Fact] + public void Deserialize_MalformedBase64_ThrowsException() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = CreateKafkaEvent( + keyValue: "not-base64!", + valueValue: "valid-base64==" + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act & Assert + var ex = Assert.Throws(() => + serializer.Deserialize>(stream)); + + Assert.Contains("Failed to deserialize key data", ex.Message); + } + + [Fact] + public void Serialize_ValidObject_WritesToStream() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var testObject = new { Name = "Test", Value = 42 }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(testObject, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Name\":\"Test\"", result); + Assert.Contains("\"Value\":42", result); + } + + [Fact] + public void Serialize_NullObject_WritesNullToStream() + { + // 
Arrange + var serializer = new TestKafkaSerializer(); + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(null, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Equal("null", result); + } + + [Fact] + public void DeserializePrimitiveValue_EmptyBytes_ReturnsNull() + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act + var result = serializer.TestDeserializePrimitiveValue(Array.Empty(), typeof(string)); + + // Assert + Assert.Null(result); + } + + [Fact] + public void DeserializePrimitiveValue_LongValue_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var longBytes = BitConverter.GetBytes(long.MaxValue); + + // Act + var result = serializer.TestDeserializePrimitiveValue(longBytes, typeof(long)); + + // Assert + Assert.Equal(long.MaxValue, result); + } + + [Fact] + public void DeserializePrimitiveValue_DoubleValue_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var doubleBytes = BitConverter.GetBytes(3.14159); + + // Act + var result = serializer.TestDeserializePrimitiveValue(doubleBytes, typeof(double)); + + // Assert + Assert.Equal(3.14159, result); + } + + [Fact] + public void ProcessHeaders_MultipleHeaders_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("key"))}"", + ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("value"))}"", + ""headers"": [ + {{ ""header1"": [104, 101, 108, 108, 111] }}, + {{ ""header2"": [119, 111, 114, 108, 100] }} + ] + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.Equal(2, record.Headers.Count); + Assert.Equal("hello", Encoding.ASCII.GetString(record.Headers["header1"])); + Assert.Equal("world", Encoding.ASCII.GetString(record.Headers["header2"])); + } + + [Fact] + public void Deserialize_WithSerializerContext_UsesContextForRegisteredTypes() + { + // Arrange + var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; + var context = new TestSerializerContext(options); + // Use only options for constructor, but we'll make the context available for the model deserialization + var serializer = new TestKafkaSerializer(options); + + var testModel = new TestModel { Name = "Test", Value = 123 }; + var modelJson = JsonSerializer.Serialize(testModel, context.TestModel); + var base64Value = Convert.ToBase64String(Encoding.UTF8.GetBytes(modelJson)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: base64Value + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Equal("Test", record.Value.Name); + Assert.Equal(123, record.Value.Value); + } + + [Fact] + public void Serialize_WithSerializerContext_UsesContextForRegisteredTypes() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = 
new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + var testModel = new TestModel { Name = "Test", Value = 123 }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(testModel, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Name\":\"Test\"", result); + Assert.Contains("\"Value\":123", result); + } + + [Fact] + public void Deserialize_WithSerializerContext_FallsBackWhenTypeNotRegistered() + { + // Arrange + var options = new JsonSerializerOptions { PropertyNameCaseInsensitive = true }; + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + // Using a non-registered type (Dictionary instead of TestModel) + var dictionary = new Dictionary { ["Key"] = 42 }; + var dictJson = JsonSerializer.Serialize(dictionary); + var base64Value = Convert.ToBase64String(Encoding.UTF8.GetBytes(dictJson)); + + string kafkaEventJson = CreateKafkaEvent( + keyValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey")), + valueValue: base64Value + ); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>>(stream); + + // Assert + Assert.NotNull(result); + var record = result.First(); + Assert.Equal("testKey", record.Key); + Assert.Single(record.Value); + Assert.Equal(42, record.Value["Key"]); + } + + [Fact] + public void Serialize_NonRegisteredType_FallsBackToRegularSerialization() + { + // Arrange + var options = new JsonSerializerOptions(); + // Use serializer WITHOUT context to test the fallback path + var serializer = new TestKafkaSerializer(options); + + // Using a non-registered type + var nonRegisteredType = new { Id = Guid.NewGuid(), Message = "Not in context" }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(nonRegisteredType, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Id\":", result); + Assert.Contains("\"Message\":\"Not in context\"", result); + } + + [Fact] + public void Deserialize_NonConsumerRecordWithSerializerContext_UsesTypeInfo() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + var testModel = new TestModel { Name = "DirectDeserialization", Value = 42 }; + var json = JsonSerializer.Serialize(testModel); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("DirectDeserialization", result.Name); + Assert.Equal(42, result.Value); + } + + [Fact] + public void Deserialize_NonConsumerRecordWithoutTypeInfo_UsesRegularDeserialize() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + // Dictionary is not registered in TestSerializerContext + var dict = new Dictionary { ["test"] = 123 }; + var json = JsonSerializer.Serialize(dict); + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(json)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal(123, result["test"]); + } + + [Fact] + public 
void Deserialize_NonConsumerRecordFailed_ThrowsException() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var invalidJson = "{ invalid json"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(invalidJson)); + + // Act & Assert + // With invalid JSON input, JsonSerializer throws JsonException directly + var ex = Assert.Throws(() => + serializer.Deserialize(stream)); + + // Check that we're getting a JSON parsing error + Assert.Contains("invalid", ex.Message.ToLower()); + } + + [Theory] + [InlineData(new byte[] { 42 }, 42)] // Single byte + [InlineData(new byte[] { 0x2A, 0x00, 0x00, 0x00 }, 42)] // Four bytes + public void DeserializePrimitiveValue_IntWithDifferentByteFormats_DeserializesCorrectly(byte[] bytes, + int expected) + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act + var result = serializer.TestDeserializePrimitiveValue(bytes, typeof(int)); + + // Assert + Assert.Equal(expected, result); + } + + [Theory] + [InlineData(new byte[] { 0x2A, 0x00, 0x00, 0x00 }, 42L)] // Four bytes as int + [InlineData(new byte[] { 0x2A, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }, 42L)] // Eight bytes as long + public void DeserializePrimitiveValue_LongWithDifferentByteFormats_DeserializesCorrectly(byte[] bytes, + long expected) + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act + var result = serializer.TestDeserializePrimitiveValue(bytes, typeof(long)); + + // Assert + Assert.Equal(expected, result); + } + + [Fact] + public void DeserializePrimitiveValue_DoubleWithShortBytes_ReturnsZero() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var shortBytes = new byte[] { 0x00, 0x00, 0x00, 0x00 }; // Less than 8 bytes + + // Act + var result = serializer.TestDeserializePrimitiveValue(shortBytes, typeof(double)); + + // Assert + Assert.Equal(0.0, result); + } + + [Fact] + public void Serialize_WithTypeInfoFromContext_WritesToStream() + { + // Arrange + var options = new JsonSerializerOptions(); + var context = new TestSerializerContext(options); + var serializer = new TestKafkaSerializer(options, context); + + var testModel = new TestModel { Name = "ContextSerialization", Value = 555 }; + using var responseStream = new MemoryStream(); + + // Act + serializer.Serialize(testModel, responseStream); + responseStream.Position = 0; + string result = Encoding.UTF8.GetString(responseStream.ToArray()); + + // Assert + Assert.Contains("\"Name\":\"ContextSerialization\"", result); + Assert.Contains("\"Value\":555", result); + } + + [Fact] + public void Deserialize_WithSchemaMetadata_PopulatesSchemaMetadataProperties() + { + // Arrange + var serializer = new TestKafkaSerializer(); + + string kafkaEventJson = @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("testKey"))}"", + ""value"": ""{Convert.ToBase64String(Encoding.UTF8.GetBytes("testValue"))}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ], + ""keySchemaMetadata"": {{ + ""dataFormat"": ""JSON"", + ""schemaId"": ""key-schema-001"" + }}, + ""valueSchemaMetadata"": {{ + ""dataFormat"": ""AVRO"", + ""schemaId"": 
""value-schema-002"" + }} + }} + ] + }} + }}"; + + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + var record = result.First(); + + // Assert key schema metadata + Assert.NotNull(record.KeySchemaMetadata); + Assert.Equal("JSON", record.KeySchemaMetadata.DataFormat); + Assert.Equal("key-schema-001", record.KeySchemaMetadata.SchemaId); + + // Assert value schema metadata + Assert.NotNull(record.ValueSchemaMetadata); + Assert.Equal("AVRO", record.ValueSchemaMetadata.DataFormat); + Assert.Equal("value-schema-002", record.ValueSchemaMetadata.SchemaId); + } + + // NEW TESTS FOR LATEST CHANGES + + [Fact] + public void DeserializeFormatSpecific_PrimitiveType_UsesDeserializePrimitiveValue() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var stringBytes = Encoding.UTF8.GetBytes("primitive-test"); + + // Act + var result = serializer.TestDeserializeFormatSpecific(stringBytes, typeof(string), isKey: false); + + // Assert + Assert.Equal("primitive-test", result); + } + + [Fact] + public void DeserializeFormatSpecific_ComplexType_UsesDeserializeComplexTypeFormat() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var complexObject = new TestModel { Name = "complex-test", Value = 42 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + // Act + var result = serializer.TestDeserializeFormatSpecific(jsonBytes, typeof(TestModel), isKey: false); + + // Assert + Assert.NotNull(result); + var testModel = (TestModel)result!; + Assert.Equal("complex-test", testModel.Name); + Assert.Equal(42, testModel.Value); + } + + [Fact] + public void DeserializeComplexTypeFormat_ValidJson_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var complexObject = new TestModel { Name = "direct-test", Value = 123 }; + var jsonBytes = Encoding.UTF8.GetBytes(JsonSerializer.Serialize(complexObject)); + + // Act + var result = serializer.TestDeserializeComplexTypeFormat(jsonBytes, typeof(TestModel), isKey: true); + + // Assert + Assert.NotNull(result); + var testModel = (TestModel)result!; + Assert.Equal("direct-test", testModel.Name); + Assert.Equal(123, testModel.Value); + } + + [Fact] + public void DeserializeComplexTypeFormat_InvalidJson_ReturnsNull() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var invalidBytes = new byte[] { 0xDE, 0xAD, 0xBE, 0xEF }; // Invalid JSON data + + // Act + var result = serializer.TestDeserializeComplexTypeFormat(invalidBytes, typeof(TestModel), isKey: true); + + // Assert + Assert.Null(result); + } + + [Fact] + public void DeserializeValue_Base64String_DeserializesCorrectly() + { + // Arrange + var serializer = new TestKafkaSerializer(); + var testValue = "test-value-123"; + var base64Value = Convert.ToBase64String(Encoding.UTF8.GetBytes(testValue)); + + // Act + var result = serializer.TestDeserializeValue(base64Value, typeof(string)); + + // Assert + Assert.Equal(testValue, result); + } + + [Fact] + public void IsPrimitiveOrSimpleType_ChecksVariousTypes() + { + // Arrange + var serializer = new TestKafkaSerializer(); + + // Act & Assert + // Primitive types + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(int))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(long))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(bool))); + + // Simple types + 
Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(string))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(Guid))); + Assert.True(serializer.TestIsPrimitiveOrSimpleType(typeof(DateTime))); + + // Complex types + Assert.False(serializer.TestIsPrimitiveOrSimpleType(typeof(TestModel))); + Assert.False(serializer.TestIsPrimitiveOrSimpleType(typeof(Dictionary))); + } + + // Helper method to create Kafka event JSON with specified key and value + private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + } + + [JsonSerializable(typeof(TestModel))] + [JsonSerializable(typeof(ConsumerRecords))] + [JsonSerializable(typeof(Dictionary))] + public partial class TestSerializerContext : JsonSerializerContext + { + } + + public class TestModel + { + public string Name { get; set; } + public int Value { get; set; } + } +} + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs new file mode 100644 index 000000000..33271a938 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/HandlerTests.cs @@ -0,0 +1,377 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. 
+ */ + +using System.Text; +using Amazon.Lambda.Core; +using Amazon.Lambda.TestUtilities; +using AWS.Lambda.Powertools.Kafka.Protobuf; +using Google.Protobuf; +using TestKafka; + +namespace AWS.Lambda.Powertools.Kafka.Tests.Protobuf; + +public class ProtobufHandlerTests +{ + [Fact] + public async Task Handler_ProcessesKafkaEvent_Successfully() + { + // Arrange + var kafkaJson = GetMockKafkaEvent(); + var mockContext = new TestLambdaContext(); + var serializer = new PowertoolsKafkaProtobufSerializer(); + + // Convert JSON string to stream for deserialization + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); + + // Act - Deserialize and process + var kafkaEvent = serializer.Deserialize>(stream); + var response = await Handler(kafkaEvent, mockContext); + + // Assert + Assert.Equal("Successfully processed Protobuf Kafka events", response); + + // Verify event structure + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Single(kafkaEvent.Records); + + // Verify record content + var records = kafkaEvent.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + + // Verify deserialized value + var product = firstRecord.Value; + Assert.Equal("Laptop", product.Name); + Assert.Equal(999.99, product.Price); + + // Verify decoded key and headers + Assert.Equal(42, firstRecord.Key); + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); + + var secondRecord = records[1]; + Assert.Equal(43, secondRecord.Key); + + var thirdRecord = records[2]; + Assert.Equal(0, thirdRecord.Key); + } + + [Fact] + public async Task Handler_ProcessesKafkaEvent_WithProtobufKey_Successfully() + { + // Arrange + var kafkaJson = GetMockKafkaEventWithProtobufKeys(); + var mockContext = new TestLambdaContext(); + var serializer = new PowertoolsKafkaProtobufSerializer(); + + // Convert JSON string to stream for deserialization + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaJson)); + + // Act - Deserialize and process + var kafkaEvent = serializer.Deserialize>(stream); + var response = await HandlerWithProtobufKeys(kafkaEvent, mockContext); + + // Assert + Assert.Equal("Successfully processed Protobuf Kafka events with complex keys", response); + + // Verify event structure + Assert.Equal("aws:kafka", kafkaEvent.EventSource); + Assert.Single(kafkaEvent.Records); + + // Verify record content + var records = kafkaEvent.Records["mytopic-0"]; + Assert.Equal(3, records.Count); + + // Verify first record + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + + // Verify deserialized Protobuf key and value + Assert.Equal("Laptop", firstRecord.Value.Name); + Assert.Equal(999.99, firstRecord.Value.Price); + Assert.Equal(1, firstRecord.Key.Id); + Assert.Equal(TestKafka.Color.Green, firstRecord.Key.Color); + + // Verify headers + Assert.Equal("headerValue", firstRecord.Headers["headerKey"].DecodedValue()); + + var secondRecord = records[1]; + Assert.Equal(2, secondRecord.Key.Id); + Assert.Equal(TestKafka.Color.Unknown, secondRecord.Key.Color); + + var thirdRecord = records[2]; + Assert.Equal(3, thirdRecord.Key.Id); + Assert.Equal(TestKafka.Color.Red, thirdRecord.Key.Color); + } + + private string GetMockKafkaEvent() + { + // For testing, we'll create base64-encoded Protobuf 
data for our test products + var laptop = new ProtobufProduct + { + Name = "Laptop", + Id = 1001, + Price = 999.99 + }; + + var smartphone = new ProtobufProduct + { + Name = "Smartphone", + Id = 1002, + Price = 499.99 + }; + + var headphones = new ProtobufProduct + { + Name = "Headphones", + Id = 1003, + Price = 99.99 + }; + + // Convert to base64-encoded Protobuf + string laptopBase64 = Convert.ToBase64String(laptop.ToByteArray()); + string smartphoneBase64 = Convert.ToBase64String(smartphone.ToByteArray()); + string headphonesBase64 = Convert.ToBase64String(headphones.ToByteArray()); + + string firstRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("42")); // Example key + string secondRecordKey = Convert.ToBase64String(Encoding.UTF8.GetBytes("43")); // Example key for second record + + // Create mock Kafka event JSON + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": ""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{firstRecordKey}"", + ""value"": ""{laptopBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 16, + ""timestamp"": 1545084650988, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{secondRecordKey}"", + ""value"": ""{smartphoneBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 17, + ""timestamp"": 1545084650989, + ""timestampType"": ""CREATE_TIME"", + ""key"": null, + ""value"": ""{headphonesBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + private string GetMockKafkaEventWithProtobufKeys() + { + // Create test products + var laptop = new ProtobufProduct + { + Name = "Laptop", + Id = 1001, + Price = 999.99 + }; + + var smartphone = new ProtobufProduct + { + Name = "Smartphone", + Id = 1002, + Price = 499.99 + }; + + var headphones = new ProtobufProduct + { + Name = "Headphones", + Id = 1003, + Price = 99.99 + }; + + // Create test keys + var key1 = new ProtobufKey { Id = 1, Color = TestKafka.Color.Green }; + var key2 = new ProtobufKey { Id = 2 }; + var key3 = new ProtobufKey { Id = 3, Color = TestKafka.Color.Red }; + + // Convert values to base64-encoded Protobuf + string laptopBase64 = Convert.ToBase64String(laptop.ToByteArray()); + string smartphoneBase64 = Convert.ToBase64String(smartphone.ToByteArray()); + string headphonesBase64 = Convert.ToBase64String(headphones.ToByteArray()); + + // Convert keys to base64-encoded Protobuf + string key1Base64 = Convert.ToBase64String(key1.ToByteArray()); + string key2Base64 = Convert.ToBase64String(key2.ToByteArray()); + string key3Base64 = Convert.ToBase64String(key3.ToByteArray()); + + // Create mock Kafka event JSON + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4"", + ""bootstrapServers"": 
""b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1545084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{key1Base64}"", + ""value"": ""{laptopBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 16, + ""timestamp"": 1545084650988, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{key2Base64}"", + ""value"": ""{smartphoneBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }}, + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 17, + ""timestamp"": 1545084650989, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{key3Base64}"", + ""value"": ""{headphonesBase64}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } + + // Define the test handler method + private async Task Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + var product = record.Value; + context.Logger.LogInformation($"Processing {product.Name} at ${product.Price}"); + } + + return "Successfully processed Protobuf Kafka events"; + } + + private async Task HandlerWithProtobufKeys(ConsumerRecords records, + ILambdaContext context) + { + foreach (var record in records) + { + var key = record.Key; + var product = record.Value; + context.Logger.LogInformation($"Processing key {key.Id} - {product.Name} at ${product.Price}"); + } + + return "Successfully processed Protobuf Kafka events with complex keys"; + } + + [Fact] + public void SimpleHandlerTest() + { + string Handler(ConsumerRecords records, ILambdaContext context) + { + foreach (var record in records) + { + var product = record.Value; + context.Logger.LogInformation($"Processing {product.Name} at ${product.Price}"); + } + + return "Successfully processed Protobuf Kafka events"; + } + // Simulate the handler execution + var mockLogger = new TestLambdaLogger(); + var mockContext = new TestLambdaContext + { + Logger = mockLogger + }; + + var records = new ConsumerRecords + { + Records = new Dictionary>> + { + { "mytopic-0", new List> + { + new() + { + Topic = "mytopic", + Partition = 0, + Offset = 15, + Key = 42, + Value = new ProtobufProduct { Name = "Test Product", Id = 1, Price = 99.99 } + } + } + } + } + }; + + // Call the handler + var result = Handler(records, mockContext); + + // Assert the result + Assert.Equal("Successfully processed Protobuf Kafka events", result); + + // Verify the context logger output + Assert.Contains("Processing Test Product at $99.99", mockLogger.Buffer.ToString()); + + // Verify the records were processed + Assert.Single(records.Records); + Assert.Contains("mytopic-0", records.Records.Keys); + Assert.Single(records.Records["mytopic-0"]); + Assert.Equal("mytopic", records.Records["mytopic-0"][0].Topic); + Assert.Equal(0, records.Records["mytopic-0"][0].Partition); + Assert.Equal(15, records.Records["mytopic-0"][0].Offset); + Assert.Equal(42, records.Records["mytopic-0"][0].Key); + Assert.Equal("Test Product", records.Records["mytopic-0"][0].Value.Name); + Assert.Equal(1, records.Records["mytopic-0"][0].Value.Id); + Assert.Equal(99.99, records.Records["mytopic-0"][0].Value.Price); + } +} diff --git 
a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Key.proto b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Key.proto new file mode 100644 index 000000000..deedcf5dc --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Key.proto @@ -0,0 +1,14 @@ +syntax = "proto3"; + +option csharp_namespace = "TestKafka"; + +message ProtobufKey { + int32 id = 1; + Color color = 2; +} + +enum Color { + UNKNOWN = 0; + GREEN = 1; + RED = 2; +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs new file mode 100644 index 000000000..8d2abd951 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/PowertoolsKafkaProtobufSerializerTests.cs @@ -0,0 +1,241 @@ +/* + * Copyright JsonCons.Net authors. All Rights Reserved. + * + * Licensed under the Apache License, Version 2.0 (the "License"). + * You may not use this file except in compliance with the License. + * A copy of the License is located at + * + * http://aws.amazon.com/apache2.0 + * + * or in the "license" file accompanying this file. This file is distributed + * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language governing + * permissions and limitations under the License. + */ + +using System.Runtime.Serialization; +using System.Text; +using AWS.Lambda.Powertools.Kafka.Protobuf; +using TestKafka; + +namespace AWS.Lambda.Powertools.Kafka.Tests.Protobuf; + +public class PowertoolsKafkaProtobufSerializerTests +{ + [Fact] + public void Deserialize_KafkaEventWithProtobufPayload_DeserializesToCorrectType() + { + // Arrange + var serializer = new PowertoolsKafkaProtobufSerializer(); + string kafkaEventJson = File.ReadAllText("Protobuf/kafka-protobuf-event.json"); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + + // Act + var result = serializer.Deserialize>(stream); + + // Assert + Assert.NotNull(result); + Assert.Equal("aws:kafka", result.EventSource); + + // Verify records were deserialized + Assert.True(result.Records.ContainsKey("mytopic-0")); + var records = result.Records["mytopic-0"]; + Assert.Equal(3, records.Count); // Fixed to expect 3 records instead of 1 + + // Verify first record's content + var firstRecord = records[0]; + Assert.Equal("mytopic", firstRecord.Topic); + Assert.Equal(0, firstRecord.Partition); + Assert.Equal(15, firstRecord.Offset); + Assert.Equal(42, firstRecord.Key); + + // Verify deserialized Protobuf value + var product = firstRecord.Value; + Assert.Equal("Laptop", product.Name); + Assert.Equal(1001, product.Id); + Assert.Equal(999.99, product.Price); + + // Verify second record + var secondRecord = records[1]; + var smartphone = secondRecord.Value; + Assert.Equal("Smartphone", smartphone.Name); + Assert.Equal(1002, smartphone.Id); + Assert.Equal(599.99, smartphone.Price); + + // Verify third record + var thirdRecord = records[2]; + var headphones = thirdRecord.Value; + Assert.Equal("Headphones", headphones.Name); + Assert.Equal(1003, headphones.Id); + Assert.Equal(149.99, headphones.Price); + } + + [Fact] + public void KafkaEvent_ImplementsIEnumerable_ForDirectIteration() + { + // Arrange + var serializer = new PowertoolsKafkaProtobufSerializer(); + string kafkaEventJson = File.ReadAllText("Protobuf/kafka-protobuf-event.json"); + using var stream = 
new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<int, ProtobufProduct>>(stream);
+
+        // Assert - Test enumeration
+        int count = 0;
+        var products = new List<string>();
+
+        // Directly iterate over ConsumerRecords
+        foreach (var record in result)
+        {
+            count++;
+            products.Add(record.Value.Name);
+        }
+
+        // Verify correct count and values
+        Assert.Equal(3, count);
+        Assert.Contains("Laptop", products);
+        Assert.Contains("Smartphone", products);
+        Assert.Contains("Headphones", products);
+
+        // Get first record directly through Linq extension
+        var firstRecord = result.First();
+        Assert.Equal("Laptop", firstRecord.Value.Name);
+        Assert.Equal(1001, firstRecord.Value.Id);
+    }
+
+    [Fact]
+    public void Primitive_Deserialization()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+        string kafkaEventJson =
+            CreateKafkaEvent(Convert.ToBase64String("MyKey"u8.ToArray()),
+                Convert.ToBase64String("Myvalue"u8.ToArray()));
+
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<string, string>>(stream);
+        var firstRecord = result.First();
+        Assert.Equal("Myvalue", firstRecord.Value);
+        Assert.Equal("MyKey", firstRecord.Key);
+    }
+
+    [Fact]
+    public void DeserializeComplexKey_WhenAllDeserializationMethodsFail_ReturnsException()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+        // Invalid JSON and not Protobuf binary
+        byte[] invalidBytes = { 0xDE, 0xAD, 0xBE, 0xEF };
+
+        string kafkaEventJson = CreateKafkaEvent(
+            keyValue: Convert.ToBase64String(invalidBytes),
+            valueValue: Convert.ToBase64String(Encoding.UTF8.GetBytes("test"))
+        );
+
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var message = Assert.Throws<SerializationException>(() => serializer.Deserialize<ConsumerRecords<ProtobufKey, string>>(stream));
+        Assert.Contains("Failed to deserialize key data: Failed to deserialize", message.Message);
+    }
+
+    [Fact]
+    public void Deserialize_ConfluentMessageIndexFormats_AllFormatsDeserializeCorrectly()
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+        string kafkaEventJson = File.ReadAllText("Protobuf/kafka-protobuf-confluent-event.json");
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var result = serializer.Deserialize<ConsumerRecords<int, ProtobufProduct>>(stream);
+
+        // Assert
+        Assert.NotNull(result);
+        Assert.Equal("aws:kafka", result.EventSource);
+
+        // Verify records
+        Assert.True(result.Records.ContainsKey("mytopic-0"));
+        var records = result.Records["mytopic-0"];
+        Assert.Equal(3, records.Count);
+
+        // Verify all records have been deserialized correctly (all should have the same content)
+        foreach (var record in records)
+        {
+            Assert.Equal("Laptop", record.Value.Name);
+            Assert.Equal(1001, record.Value.Id);
+            Assert.Equal(999.99, record.Value.Price);
+        }
+    }
+
+    [Theory]
+    [InlineData("COkHEgZMYXB0b3AZUrgehes/j0A=", "Standard Protobuf")] // Standard protobuf
+    [InlineData("AAjpBxIGTGFwdG9wGVK4HoXrP49A", "Single Index")] // Confluent with single 0 index
+    [InlineData("AgEACOkHEgZMYXB0b3AZUrgehes/j0A=", "Complex Index")] // Confluent with index array [1, 0]
+    public void Deserialize_SpecificConfluentFormats_EachFormatDeserializesCorrectly(string base64Value, string testCase)
+    {
+        // Arrange
+        var serializer = new PowertoolsKafkaProtobufSerializer();
+        string kafkaEventJson = CreateKafkaEvent("NDI=", base64Value); // Key is 42 in base64
+        using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson));
+
+        // Act
+        var 
result = serializer.Deserialize>(stream); + + // Assert + var record = result.First(); + Assert.NotNull(record); + Assert.Equal(42, record.Key); // Key should be 42 + + // Value should be the same regardless of message index format + Assert.Equal("Laptop", record.Value.Name); + Assert.Equal(1001, record.Value.Id); + Assert.Equal(999.99, record.Value.Price); + } + + [Fact] + public void Deserialize_MessageIndexWithCorruptData_HandlesError() + { + // Arrange - Create invalid message index data (starts with 5 but doesn't have 5 entries) + byte[] invalidData = [5, 1, 2]; // Claims to have 5 entries but only has 2 + string kafkaEventJson = CreateKafkaEvent("NDI=", Convert.ToBase64String(invalidData)); + using var stream = new MemoryStream(Encoding.UTF8.GetBytes(kafkaEventJson)); + var serializer = new PowertoolsKafkaProtobufSerializer(); + + // Act & Assert + var ex = Assert.Throws(() => + serializer.Deserialize>(stream)); + + // Verify the exception message contains useful information + Assert.Contains("Failed to deserialize value data:", ex.Message); + } + + private string CreateKafkaEvent(string keyValue, string valueValue) + { + return @$"{{ + ""eventSource"": ""aws:kafka"", + ""eventSourceArn"": ""arn:aws:kafka:us-east-1:0123456789019:cluster/TestCluster/abcd1234"", + ""bootstrapServers"": ""b-1.test-cluster.kafka.us-east-1.amazonaws.com:9092"", + ""records"": {{ + ""mytopic-0"": [ + {{ + ""topic"": ""mytopic"", + ""partition"": 0, + ""offset"": 15, + ""timestamp"": 1645084650987, + ""timestampType"": ""CREATE_TIME"", + ""key"": ""{keyValue}"", + ""value"": ""{valueValue}"", + ""headers"": [ + {{ ""headerKey"": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] }} + ] + }} + ] + }} + }}"; + } +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Product.proto b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Product.proto new file mode 100644 index 000000000..1d4c64e90 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/Product.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +option csharp_namespace = "TestKafka"; + +message ProtobufProduct { + int32 id = 1; + string name = 2; + double price = 3; +} \ No newline at end of file diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-confluent-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-confluent-event.json new file mode 100644 index 000000000..d76b109d3 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-confluent-event.json @@ -0,0 +1,52 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "AAjpBxIGTGFwdG9wGVK4HoXrP49A", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + 
"topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "AgEACOkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} + diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-event.json b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-event.json new file mode 100644 index 000000000..b3e0139e3 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Protobuf/kafka-protobuf-event.json @@ -0,0 +1,51 @@ +{ + "eventSource": "aws:kafka", + "eventSourceArn": "arn:aws:kafka:us-east-1:0123456789019:cluster/SalesCluster/abcd1234-abcd-cafe-abab-9876543210ab-4", + "bootstrapServers": "b-2.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092,b-1.demo-cluster-1.a1bcde.c1.kafka.us-east-1.amazonaws.com:9092", + "records": { + "mytopic-0": [ + { + "topic": "mytopic", + "partition": 0, + "offset": 15, + "timestamp": 1545084650987, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COkHEgZMYXB0b3AZUrgehes/j0A=", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 16, + "timestamp": 1545084650988, + "timestampType": "CREATE_TIME", + "key": "NDI=", + "value": "COoHEgpTbWFydHBob25lGVK4HoXrv4JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + }, + { + "topic": "mytopic", + "partition": 0, + "offset": 17, + "timestamp": 1545084650989, + "timestampType": "CREATE_TIME", + "key": null, + "value": "COsHEgpIZWFkcGhvbmVzGUjhehSuv2JA", + "headers": [ + { + "headerKey": [104, 101, 97, 100, 101, 114, 86, 97, 108, 117, 101] + } + ] + } + ] + } +} diff --git a/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md new file mode 100644 index 000000000..4df25b4b8 --- /dev/null +++ b/libraries/tests/AWS.Lambda.Powertools.Kafka.Tests/Readme.md @@ -0,0 +1,31 @@ +# Avro + +```bash +dotnet tool install --global Apache.Avro.Tools + +cd tests/AWS.Lambda.Powertools.Kafka.Tests/Avro/ +avrogen -s AvroProduct.avsc ./ +``` + +```xml + + + + + + + +``` + +# Protobuf + +```xml + + + + PreserveNewest + + + + +``` \ No newline at end of file diff --git a/libraries/tests/Directory.Packages.props b/libraries/tests/Directory.Packages.props index 88751caf4..804b073e2 100644 --- a/libraries/tests/Directory.Packages.props +++ b/libraries/tests/Directory.Packages.props @@ -6,6 +6,7 @@ + diff --git a/version.json b/version.json index fd3b95021..332a97927 100644 --- a/version.json +++ b/version.json @@ -10,6 +10,9 @@ "Idempotency": "1.3.0", "BatchProcessing": "1.2.1", "EventHandler": "1.0.0", - "EventHandler.Resolvers.BedrockAgentFunction": "1.0.0" + "EventHandler.Resolvers.BedrockAgentFunction": "1.0.0", + "Kafka.Json" : "1.0.0", + "Kafka.Avro" : "1.0.0", + "Kafka.Protobuf" : "1.0.0" } }