 *
 */

-use std::collections::HashMap;
-use std::sync::Arc;
-
-use actix_web::http::header::ContentType;
-use actix_web::{HttpRequest, HttpResponse};
+use actix_web::{http::header::ContentType, HttpRequest, HttpResponse};
 use arrow_schema::Field;
 use bytes::Bytes;
 use http::StatusCode;
 use serde_json::Value;
+use std::collections::{BTreeMap, HashMap};
+use std::sync::Arc;

 use crate::event::error::EventError;
 use crate::event::format::EventFormat;
 use crate::event::{self, format};
-use crate::handlers::{PREFIX_META, PREFIX_TAGS, SEPARATOR, STREAM_NAME_HEADER_KEY, LOG_SOURCE_KEY};
+use crate::handlers::{
+    LOG_SOURCE_KEY, PREFIX_META, PREFIX_TAGS, SEPARATOR, STREAM_NAME_HEADER_KEY,
+    LOG_SOURCE_VALUE_FOR_KINEIS, LOG_SOURCE_VALUE_FOR_OTEL,
+};
 use crate::metadata::STREAM_INFO;
 use crate::utils::header_parsing::{collect_labelled_headers, ParseHeaderError};

-use super::logstream::error::CreateStreamError;
 use super::kinesis;
+use super::logstream::error::CreateStreamError;

 // Handler for POST /api/v1/ingest
 // ingests events by extracting stream name from header
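
The reworked import block references two constants that are defined in `crate::handlers` and do not appear in this diff: `LOG_SOURCE_VALUE_FOR_KINEIS` and `LOG_SOURCE_VALUE_FOR_OTEL`. A minimal sketch of what such definitions could look like; the header name and string values below are assumed placeholders, not taken from the repository.

```rust
// Sketch only: the real definitions live in crate::handlers, outside this diff.
pub const LOG_SOURCE_KEY: &str = "x-p-log-source"; // assumed header name
pub const LOG_SOURCE_VALUE_FOR_KINEIS: &str = "kinesis"; // assumed value
pub const LOG_SOURCE_VALUE_FOR_OTEL: &str = "otel"; // assumed value
```
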
@@ -48,40 +49,44 @@ pub async fn ingest(req: HttpRequest, body: Bytes) -> Result<HttpResponse, PostE
         let stream_name = stream_name.to_str().unwrap().to_owned();
         create_stream_if_not_exists(&stream_name).await?;

-        //section to flatten ingested log data
-        let cloned_req = req.clone();
-        flatten_logs(cloned_req, &body);
-        //section ends
-        push_logs(stream_name, req, body).await?;
+        flatten_and_push_logs(req, body, stream_name).await?;
         Ok(HttpResponse::Ok().finish())
     } else {
         Err(PostError::Header(ParseHeaderError::MissingStreamName))
     }
 }

-fn flatten_logs(req: HttpRequest, body: &Bytes) {
+async fn flatten_and_push_logs(
+    req: HttpRequest,
+    body: Bytes,
+    stream_name: String,
+) -> Result<(), PostError> {
     //flatten logs
-    if let Some((_, log_source)) = req
-        .headers()
-        .iter()
-        .find(|&(key, _)| key == LOG_SOURCE_KEY)
-    {
+    if let Some((_, log_source)) = req.headers().iter().find(|&(key, _)| key == LOG_SOURCE_KEY) {
+        let mut json: Vec<BTreeMap<String, Value>> = Vec::new();
         let log_source: String = log_source.to_str().unwrap().to_owned();
-        //println!("log source: {}", log_source);
-        match log_source.as_str() {
-            "kinesis_firehose" => kinesis::flatten_kinesis_logs(body),
-            _ => {} //do nothing so far
-        };
+        match log_source.as_str() {
+            LOG_SOURCE_VALUE_FOR_KINEIS => json = kinesis::flatten_kinesis_logs(&body),
+            LOG_SOURCE_VALUE_FOR_OTEL => {}
+            _ => {}
+        }
+        for record in json.iter_mut() {
+            let body: Bytes = serde_json::to_vec(record).unwrap().into();
+            push_logs(stream_name.to_string(), req.clone(), body).await?;
+        }
+    } else {
+        push_logs(stream_name.to_string(), req, body).await?;
     }
+    Ok(())
 }

-
 // Handler for POST /api/v1/logstream/{logstream}
 // only ingests events into the specified logstream
 // fails if the logstream does not exist
 pub async fn post_event(req: HttpRequest, body: Bytes) -> Result<HttpResponse, PostError> {
     let stream_name: String = req.match_info().get("logstream").unwrap().parse().unwrap();
-    push_logs(stream_name, req, body).await?;
+
+    flatten_and_push_logs(req, body, stream_name).await?;
     Ok(HttpResponse::Ok().finish())
 }

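
The new `flatten_and_push_logs` turns a recognized payload into a `Vec<BTreeMap<String, Value>>` and re-serializes each flattened record to `Bytes` before handing it to `push_logs`. The sketch below isolates that re-serialization step with a hypothetical helper (`records_to_bodies` is not part of the PR), assuming only the `bytes` and `serde_json` crates:

```rust
use std::collections::BTreeMap;

use bytes::Bytes;
use serde_json::{json, Value};

// Mirrors the loop in flatten_and_push_logs: each flattened record becomes
// its own JSON body, so every record is pushed as a separate event.
fn records_to_bodies(records: &[BTreeMap<String, Value>]) -> Vec<Bytes> {
    records
        .iter()
        .map(|record| {
            // A BTreeMap<String, Value> always serializes, so expect() is safe here.
            Bytes::from(serde_json::to_vec(record).expect("record serializes to JSON"))
        })
        .collect()
}

fn main() {
    let mut record = BTreeMap::new();
    record.insert("level".to_string(), json!("info"));
    record.insert("message".to_string(), json!("hello"));
    assert_eq!(records_to_bodies(&[record]).len(), 1);
}
```
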
@@ -93,7 +98,6 @@ async fn push_logs(stream_name: String, req: HttpRequest, body: Bytes) -> Result
             .ok_or(PostError::StreamNotFound(stream_name.clone()))?
             .schema
             .clone();
-
         into_event_batch(req, body, schema)?
     };

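
For context, the schema lookup above converts a missing stream into `PostError::StreamNotFound` via `Option::ok_or` before `?` propagates it. The same pattern in isolation, with a stand-in error type rather than the crate's `PostError`:

```rust
use std::collections::HashMap;

// Stand-in error type for illustration; not the crate's PostError.
#[derive(Debug)]
struct StreamNotFound(String);

// Option -> Result conversion, as used for the schema lookup in push_logs.
fn lookup_schema(streams: &HashMap<String, String>, name: &str) -> Result<String, StreamNotFound> {
    streams
        .get(name)
        .cloned()
        .ok_or(StreamNotFound(name.to_string()))
}

fn main() {
    let streams = HashMap::new();
    assert!(lookup_schema(&streams, "demo").is_err());
}
```
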
@@ -118,15 +122,12 @@ fn into_event_batch(
     let tags = collect_labelled_headers(&req, PREFIX_TAGS, SEPARATOR)?;
     let metadata = collect_labelled_headers(&req, PREFIX_META, SEPARATOR)?;
     let size = body.len();
-    println!("{:?}", body);
     let body: Value = serde_json::from_slice(&body)?;
     let event = format::json::Event {
         data: body,
         tags,
         metadata,
     };
-
-    println!("{:?}", event);
     let (rb, is_first) = event.into_recordbatch(schema)?;
     Ok((size, rb, is_first))
 }
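
With the two debug `println!` calls dropped, `into_event_batch` now just records the raw payload size and parses the bytes into a `serde_json::Value`. A small standalone sketch of that order of operations (the function name here is ours, not the crate's):

```rust
use bytes::Bytes;
use serde_json::Value;

// Record the size of the raw body first, then parse it, as into_event_batch does.
fn size_and_value(body: Bytes) -> Result<(usize, Value), serde_json::Error> {
    let size = body.len();
    let value: Value = serde_json::from_slice(&body)?;
    Ok((size, value))
}

fn main() {
    let body = Bytes::from_static(b"{\"message\":\"hello\"}");
    let (size, value) = size_and_value(body).expect("valid JSON");
    assert_eq!(size, 19);
    assert_eq!(value["message"], "hello");
}
```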