@@ -22,20 +22,20 @@ use std::{
     sync::Arc,
 };
 
+use arrow::error::Result;
 use arrow_array::{ArrayRef, RecordBatch, StringArray, TimestampMillisecondArray, UInt64Array};
-use arrow_schema::{ArrowError, DataType, Field, Schema, TimeUnit};
+use arrow_schema::{DataType, Field, Schema, TimeUnit};
 use arrow_select::take::take;
+pub use batch_adapter::adapt_batch;
 use chrono::{DateTime, Utc};
 use itertools::Itertools;
-
-pub mod batch_adapter;
-pub mod flight;
-
-pub use batch_adapter::adapt_batch;
 use serde_json::{Map, Value};
 
 use crate::event::DEFAULT_TIMESTAMP_KEY;
 
+pub mod batch_adapter;
+pub mod flight;
+
 /// Converts a slice of record batches to JSON.
 ///
 /// # Arguments
@@ -46,9 +46,10 @@ use crate::event::DEFAULT_TIMESTAMP_KEY;
 /// * Result<Vec<Map<String, Value>>>
 ///
 /// A vector of JSON objects representing the record batches.
-pub fn record_batches_to_json(
-    records: &[RecordBatch],
-) -> Result<Vec<Map<String, Value>>, ArrowError> {
+///
+/// TODO: maybe this can be further optimized by directly converting `arrow`
+/// to an in-memory type instead of serializing to bytes.
+pub fn record_batches_to_json(records: &[RecordBatch]) -> Result<Vec<Map<String, Value>>> {
     let buf = vec![];
     let mut writer = arrow_json::ArrayWriter::new(buf);
     for record in records {
@@ -101,7 +102,7 @@ pub fn add_parseable_fields(
     rb: RecordBatch,
     p_timestamp: DateTime<Utc>,
     p_custom_fields: &HashMap<String, String>,
-) -> Result<RecordBatch, ArrowError> {
+) -> Result<RecordBatch> {
     // Return Result for proper error handling
 
     // Add custom fields in sorted order
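
Note for reviewers: the first hunk only shows the prelude of `record_batches_to_json`. A minimal self-contained sketch of the shape the visible lines imply, a round trip through `arrow_json::ArrayWriter`; everything past the `for` loop, including the `unwrap_or_default` fallback, is an assumption rather than this commit's actual body:

```rust
use arrow::error::Result;
use arrow_array::RecordBatch;
use serde_json::{Map, Value};

// Sketch: write the batches through `arrow_json::ArrayWriter`, then
// parse the buffered bytes back into JSON objects. With the new
// `arrow::error::Result` alias, `ArrowError` no longer has to be
// spelled out in the signature.
pub fn record_batches_to_json(records: &[RecordBatch]) -> Result<Vec<Map<String, Value>>> {
    let buf = vec![];
    let mut writer = arrow_json::ArrayWriter::new(buf);
    for record in records {
        writer.write(record)?;
    }
    writer.finish()?;

    // The writer hands back the Vec<u8> it was constructed with; the
    // empty-vector fallback on a parse failure is assumed, not shown.
    let buf = writer.into_inner();
    let json_rows: Vec<Map<String, Value>> =
        serde_json::from_reader(buf.as_slice()).unwrap_or_default();
    Ok(json_rows)
}
```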
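Likewise, only the signature of `add_parseable_fields` appears in the second hunk. Going by the imports (`StringArray`, `TimestampMillisecondArray`, `Itertools`) and the `// Add custom fields in sorted order` comment, a plausible sketch of the body follows; the `DEFAULT_TIMESTAMP_KEY` value, the column layout, and the append order are all guesses, not this commit's code:

```rust
use std::{collections::HashMap, sync::Arc};

use arrow::error::Result;
use arrow_array::{ArrayRef, RecordBatch, StringArray, TimestampMillisecondArray};
use arrow_schema::{DataType, Field, Schema, TimeUnit};
use chrono::{DateTime, Utc};
use itertools::Itertools;

// Hypothetical: the real constant lives in crate::event; its value is
// assumed here so the sketch stands alone.
const DEFAULT_TIMESTAMP_KEY: &str = "p_timestamp";

pub fn add_parseable_fields(
    rb: RecordBatch,
    p_timestamp: DateTime<Utc>,
    p_custom_fields: &HashMap<String, String>,
) -> Result<RecordBatch> {
    let num_rows = rb.num_rows();

    // Start from the batch's existing schema and columns.
    let mut fields: Vec<Field> = rb
        .schema()
        .fields()
        .iter()
        .map(|f| f.as_ref().clone())
        .collect();
    let mut columns: Vec<ArrayRef> = rb.columns().to_vec();

    // One ingestion timestamp, repeated for every row in the batch.
    fields.push(Field::new(
        DEFAULT_TIMESTAMP_KEY,
        DataType::Timestamp(TimeUnit::Millisecond, None),
        true,
    ));
    columns.push(Arc::new(TimestampMillisecondArray::from(vec![
        p_timestamp.timestamp_millis();
        num_rows
    ])) as ArrayRef);

    // Custom fields appended in sorted key order, each as a constant
    // string column (matching the "sorted order" comment in the diff).
    for key in p_custom_fields.keys().sorted() {
        fields.push(Field::new(key, DataType::Utf8, true));
        columns.push(Arc::new(StringArray::from(vec![
            p_custom_fields[key].as_str();
            num_rows
        ])) as ArrayRef);
    }

    // `try_new` already returns Result<_, ArrowError>, so the narrowed
    // `Result<RecordBatch>` return type needs no conversion.
    RecordBatch::try_new(Arc::new(Schema::new(fields)), columns)
}
```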