@@ -175,10 +175,15 @@ function parseBinary<T>(message: KafkaMessage<T>): CloudEvent<T> {
   const eventObj: { [key: string]: unknown } = {};
   const headers = { ...message.headers };
 
+  eventObj.datacontenttype = headers[CONSTANTS.HEADER_CONTENT_TYPE];
+
   for (const key in KAFKA_CE_HEADERS) {
     const h = KAFKA_CE_HEADERS[key];
     if (!!headers[h]) {
       eventObj[HEADER_MAP[h]] = headers[h];
+      if (h === KAFKA_CE_HEADERS.TIME) {
+        eventObj.time = new Date(eventObj.time as string).toISOString();
+      }
       delete headers[h];
     }
   }
@@ -211,9 +216,10 @@ function parseStructured<T>(message: KafkaMessage<T>): CloudEvent<T> {
   if (!message.headers[CONSTANTS.HEADER_CONTENT_TYPE]?.startsWith(CONSTANTS.MIME_CE_JSON)) {
     throw new ValidationError(`Unsupported event encoding ${message.headers[CONSTANTS.HEADER_CONTENT_TYPE]}`);
   }
-
+  const eventObj = JSON.parse(message.value as string);
+  eventObj.time = new Date(eventObj.time).toISOString();
   return new CloudEvent({
-    ...JSON.parse(message.value as string),
+    ...eventObj,
     partitionkey: message.key,
   }, false);
 }
@@ -242,7 +248,7 @@ function parseBatched<T>(message: KafkaMessage<T>): CloudEvent<T>[] {
 function extractBinaryData<T>(message: KafkaMessage<T>): T {
   let data = message.value as T;
   // If the event data is JSON, go ahead and parse it
-  const datacontenttype = message.headers[KAFKA_CE_HEADERS.DATACONTENTTYPE] as string;
+  const datacontenttype = message.headers[CONSTANTS.HEADER_CONTENT_TYPE] as string;
   if (!!datacontenttype && datacontenttype.startsWith(CONSTANTS.MIME_JSON)) {
     if (typeof message.value === "string") {
       data = JSON.parse(message.value);