@@ -55,13 +55,13 @@ fn flatten_exemplar(exemplars: &[Exemplar]) -> BTreeMap<String, Value> {
         match value {
             ExemplarValue::AsDouble(double_val) => {
                 exemplar_json.insert(
-                    "exemplar_value_as_double".to_string(),
+                    "exemplar_value".to_string(),
                     Value::Number(serde_json::Number::from_f64(*double_val).unwrap()),
                 );
             }
             ExemplarValue::AsInt(int_val) => {
                 exemplar_json.insert(
-                    "exemplar_value_as_int".to_string(),
+                    "exemplar_value".to_string(),
                     Value::Number(serde_json::Number::from(*int_val)),
                 );
             }
@@ -102,13 +102,13 @@ fn flatten_number_data_points(data_points: &[NumberDataPoint]) -> Vec<BTreeMap<S
         match value {
             NumberDataPointValue::AsDouble(double_val) => {
                 data_point_json.insert(
-                    "data_point_value_as_double".to_string(),
+                    "data_point_value".to_string(),
                     Value::Number(serde_json::Number::from_f64(*double_val).unwrap()),
                 );
             }
             NumberDataPointValue::AsInt(int_val) => {
                 data_point_json.insert(
-                    "data_point_value_as_int".to_string(),
+                    "data_point_value".to_string(),
                     Value::Number(serde_json::Number::from(*int_val)),
                 );
             }
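
Both hunks above collapse the type-suffixed keys into a single key, so consumers of the flattened output read one field (`exemplar_value`, `data_point_value`) instead of branching on `_as_double` vs `_as_int`. A minimal sketch of the resulting shapes, with hypothetical values:

```rust
use serde_json::{json, Value};

fn main() {
    // The key is the same whether the wire value was an integer or a
    // double; only the JSON number type differs.
    let double_point: Value = json!({ "data_point_value": 0.25 });
    let int_point: Value = json!({ "data_point_value": 42 });

    assert!(double_point["data_point_value"].is_f64());
    assert!(int_point["data_point_value"].is_i64());
}
```

One caveat carried over from the original code: `serde_json::Number::from_f64` returns `None` for NaN and infinity, so the `.unwrap()` in the `AsDouble` arms panics on non-finite values.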
@@ -129,7 +129,7 @@ fn flatten_gauge(gauge: &Gauge) -> Vec<BTreeMap<String, Value>> {
     for data_point_json in data_points_json {
         let mut gauge_json = BTreeMap::new();
         for (key, value) in &data_point_json {
-            gauge_json.insert(format!("gauge_{}", key), value.clone());
+            gauge_json.insert(key.clone(), value.clone());
         }
         vec_gauge_json.push(gauge_json);
     }
@@ -146,16 +146,13 @@ fn flatten_sum(sum: &Sum) -> Vec<BTreeMap<String, Value>> {
     for data_point_json in data_points_json {
         let mut sum_json = BTreeMap::new();
         for (key, value) in &data_point_json {
-            sum_json.insert(format!("sum_{}", key), value.clone());
+            sum_json.insert(key.clone(), value.clone());
         }
         vec_sum_json.push(sum_json);
     }
     let mut sum_json = BTreeMap::new();
     sum_json.extend(flatten_aggregation_temporality(sum.aggregation_temporality));
-    sum_json.insert(
-        "sum_is_monotonic".to_string(),
-        Value::Bool(sum.is_monotonic),
-    );
+    sum_json.insert("is_monotonic".to_string(), Value::Bool(sum.is_monotonic));
     for data_point_json in &mut vec_sum_json {
         for (key, value) in &sum_json {
             data_point_json.insert(key.clone(), value.clone());
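
The two hunks above drop the `gauge_`/`sum_` key prefixes, so gauge and sum data points now flatten to one shared schema; only sum-specific fields such as `is_monotonic` distinguish them. A rough sketch of the effect, with hypothetical keys and values:

```rust
use serde_json::Value;
use std::collections::BTreeMap;

fn main() {
    // Before: "gauge_data_point_value" vs "sum_data_point_value".
    // After: both metric types emit the same unprefixed key.
    let mut gauge_point: BTreeMap<String, Value> = BTreeMap::new();
    gauge_point.insert("data_point_value".to_string(), Value::from(1.5));

    let mut sum_point: BTreeMap<String, Value> = BTreeMap::new();
    sum_point.insert("data_point_value".to_string(), Value::from(10));
    sum_point.insert("is_monotonic".to_string(), Value::Bool(true));

    // Both maps now share a column name downstream.
    assert!(gauge_point.contains_key("data_point_value"));
    assert!(sum_point.contains_key("data_point_value"));
}
```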
@@ -174,45 +171,49 @@ fn flatten_histogram(histogram: &Histogram) -> Vec<BTreeMap<String, Value>> {
         let mut data_point_json = BTreeMap::new();
         insert_attributes(&mut data_point_json, &data_point.attributes);
         data_point_json.insert(
-            "histogram_start_time_unix_nano".to_string(),
+            "start_time_unix_nano".to_string(),
             Value::String(convert_epoch_nano_to_timestamp(
                 data_point.start_time_unix_nano as i64,
             )),
         );
         data_point_json.insert(
-            "histogram_time_unix_nano".to_string(),
+            "time_unix_nano".to_string(),
             Value::String(convert_epoch_nano_to_timestamp(
                 data_point.time_unix_nano as i64,
             )),
         );
         data_point_json.insert(
-            "histogram_data_point_count".to_string(),
+            "data_point_count".to_string(),
             Value::Number(data_point.count.into()),
         );
-        insert_number_if_some(
-            &mut data_point_json,
-            "histogram_data_point_sum",
-            &data_point.sum,
+        insert_number_if_some(&mut data_point_json, "data_point_sum", &data_point.sum);
+        data_point_json.insert(
+            "data_point_bucket_counts".to_string(),
+            Value::Array(
+                data_point
+                    .bucket_counts
+                    .iter()
+                    .map(|&count| Value::Number(count.into()))
+                    .collect(),
+            ),
+        );
+        data_point_json.insert(
+            "data_point_explicit_bounds".to_string(),
+            Value::Array(
+                data_point
+                    .explicit_bounds
+                    .iter()
+                    .map(|bound| Value::String(bound.to_string()))
+                    .collect(),
+            ),
         );
-        for (index, bucket_count) in data_point.bucket_counts.iter().enumerate() {
-            data_point_json.insert(
-                format!("histogram_data_point_bucket_count_{}", index + 1),
-                Value::String(bucket_count.to_string()),
-            );
-        }
-        for (index, explicit_bound) in data_point.explicit_bounds.iter().enumerate() {
-            data_point_json.insert(
-                format!("histogram_data_point_explicit_bound_{}", index + 1),
-                Value::String(explicit_bound.to_string()),
-            );
-        }
         let exemplar_json = flatten_exemplar(&data_point.exemplars);
         for (key, value) in exemplar_json {
-            data_point_json.insert(format!("histogram_{}", key), value);
+            data_point_json.insert(key.to_string(), value);
         }
         data_point_json.extend(flatten_data_point_flags(data_point.flags));
-        insert_number_if_some(&mut data_point_json, "histogram_min", &data_point.min);
-        insert_number_if_some(&mut data_point_json, "histogram_max", &data_point.max);
+        insert_number_if_some(&mut data_point_json, "min", &data_point.min);
+        insert_number_if_some(&mut data_point_json, "max", &data_point.max);
         data_points_json.push(data_point_json);
     }
     let mut histogram_json = BTreeMap::new();
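
Replacing the indexed `histogram_data_point_bucket_count_{n}` / `histogram_data_point_explicit_bound_{n}` keys with two arrays keeps the flattened schema fixed regardless of bucket count, and bucket counts stay numeric instead of being stringified. A sketch of one flattened data point under the new shape, with hypothetical values:

```rust
use serde_json::json;

fn main() {
    // Counts are JSON numbers, bounds are stringified, matching the two
    // `.map(...)` closures in the hunk above.
    let point = json!({
        "data_point_count": 3,
        "data_point_sum": 4.5,
        "data_point_bucket_counts": [1, 2],
        "data_point_explicit_bounds": ["0.5"],
        "min": 0.1,
        "max": 4.0
    });
    assert!(point["data_point_bucket_counts"][0].is_u64());
}
```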
@@ -233,13 +234,16 @@ fn flatten_histogram(histogram: &Histogram) -> Vec<BTreeMap<String, Value>> {
 fn flatten_buckets(bucket: &Buckets) -> BTreeMap<String, Value> {
     let mut bucket_json = BTreeMap::new();
     bucket_json.insert("offset".to_string(), Value::Number(bucket.offset.into()));
-
-    for (index, bucket_count) in bucket.bucket_counts.iter().enumerate() {
-        bucket_json.insert(
-            format!("bucket_count_{}", index + 1),
-            Value::String(bucket_count.to_string()),
-        );
-    }
+    bucket_json.insert(
+        "bucket_count".to_string(),
+        Value::Array(
+            bucket
+                .bucket_counts
+                .iter()
+                .map(|&count| Value::Number(count.into()))
+                .collect(),
+        ),
+    );
     bucket_json
 }

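`flatten_buckets` now returns a fixed pair of keys, `offset` plus a single `bucket_count` array, instead of a variable number of indexed entries. A self-contained sketch of the new behavior, with `Buckets` stubbed to the two fields this function reads (the real type comes from the OTLP protos):

```rust
use serde_json::Value;
use std::collections::BTreeMap;

// Stand-in for the OTLP `Buckets` message; only the fields used by
// `flatten_buckets` are modeled here.
struct Buckets {
    offset: i32,
    bucket_counts: Vec<u64>,
}

fn flatten_buckets(bucket: &Buckets) -> BTreeMap<String, Value> {
    let mut bucket_json = BTreeMap::new();
    bucket_json.insert("offset".to_string(), Value::Number(bucket.offset.into()));
    bucket_json.insert(
        "bucket_count".to_string(),
        Value::Array(
            bucket
                .bucket_counts
                .iter()
                .map(|&count| Value::Number(count.into()))
                .collect(),
        ),
    );
    bucket_json
}

fn main() {
    let b = Buckets { offset: -2, bucket_counts: vec![3, 0, 7] };
    // Two stable keys, independent of how many buckets there are.
    assert_eq!(flatten_buckets(&b).len(), 2);
}
```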
@@ -253,49 +257,45 @@ fn flatten_exp_histogram(exp_histogram: &ExponentialHistogram) -> Vec<BTreeMap<S
         let mut data_point_json = BTreeMap::new();
         insert_attributes(&mut data_point_json, &data_point.attributes);
         data_point_json.insert(
-            "exponential_histogram_start_time_unix_nano".to_string(),
+            "start_time_unix_nano".to_string(),
             Value::String(convert_epoch_nano_to_timestamp(
                 data_point.start_time_unix_nano as i64,
             )),
         );
         data_point_json.insert(
-            "exponential_histogram_time_unix_nano".to_string(),
+            "time_unix_nano".to_string(),
             Value::String(convert_epoch_nano_to_timestamp(
                 data_point.time_unix_nano as i64,
             )),
         );
         data_point_json.insert(
-            "exponential_histogram_data_point_count".to_string(),
+            "data_point_count".to_string(),
             Value::Number(data_point.count.into()),
         );
-        insert_number_if_some(
-            &mut data_point_json,
-            "exponential_histogram_data_point_sum",
-            &data_point.sum,
-        );
+        insert_number_if_some(&mut data_point_json, "data_point_sum", &data_point.sum);
         data_point_json.insert(
-            "exponential_histogram_data_point_scale".to_string(),
+            "data_point_scale".to_string(),
             Value::Number(data_point.scale.into()),
         );
         data_point_json.insert(
-            "exponential_histogram_data_point_zero_count".to_string(),
+            "data_point_zero_count".to_string(),
             Value::Number(data_point.zero_count.into()),
         );
         if let Some(positive) = &data_point.positive {
             let positive_json = flatten_buckets(positive);
             for (key, value) in positive_json {
-                data_point_json.insert(format!("exponential_histogram_positive_{}", key), value);
+                data_point_json.insert(format!("positive_{}", key), value);
             }
         }
         if let Some(negative) = &data_point.negative {
             let negative_json = flatten_buckets(negative);
             for (key, value) in negative_json {
-                data_point_json.insert(format!("exponential_histogram_negative_{}", key), value);
+                data_point_json.insert(format!("negative_{}", key), value);
             }
         }
         let exemplar_json = flatten_exemplar(&data_point.exemplars);
         for (key, value) in exemplar_json {
-            data_point_json.insert(format!("exponential_histogram_{}", key), value);
+            data_point_json.insert(key, value);
         }
         data_points_json.push(data_point_json);
     }
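
With the long `exponential_histogram_` prefix gone but the `positive_`/`negative_` qualifiers kept, each populated side of the histogram contributes exactly the two keys produced by `flatten_buckets`, prefixed accordingly. A sketch of the resulting keys, with hypothetical values:

```rust
use serde_json::json;

fn main() {
    // The "positive_"/"negative_" prefixes come from the loops above;
    // the "offset"/"bucket_count" suffixes come from flatten_buckets.
    let point = json!({
        "data_point_scale": 2,
        "data_point_zero_count": 1,
        "positive_offset": 0,
        "positive_bucket_count": [4, 2],
        "negative_offset": -1,
        "negative_bucket_count": [1]
    });
    assert!(point["positive_bucket_count"].is_array());
}
```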
@@ -321,35 +321,56 @@ fn flatten_summary(summary: &Summary) -> Vec<BTreeMap<String, Value>> {
         let mut data_point_json = BTreeMap::new();
         insert_attributes(&mut data_point_json, &data_point.attributes);
         data_point_json.insert(
-            "summary_start_time_unix_nano".to_string(),
+            "start_time_unix_nano".to_string(),
             Value::String(convert_epoch_nano_to_timestamp(
                 data_point.start_time_unix_nano as i64,
             )),
         );
         data_point_json.insert(
-            "summary_time_unix_nano".to_string(),
+            "time_unix_nano".to_string(),
             Value::String(convert_epoch_nano_to_timestamp(
                 data_point.time_unix_nano as i64,
             )),
         );
         data_point_json.insert(
-            "summary_data_point_count".to_string(),
+            "data_point_count".to_string(),
             Value::Number(data_point.count.into()),
         );
         data_point_json.insert(
-            "summary_data_point_sum".to_string(),
+            "data_point_sum".to_string(),
             Value::Number(serde_json::Number::from_f64(data_point.sum).unwrap()),
         );
-        for (index, quantile_value) in data_point.quantile_values.iter().enumerate() {
-            data_point_json.insert(
-                format!("summary_quantile_value_quantile_{}", index + 1),
-                Value::Number(serde_json::Number::from_f64(quantile_value.quantile).unwrap()),
-            );
-            data_point_json.insert(
-                format!("summary_quantile_value_value_{}", index + 1),
-                Value::Number(serde_json::Number::from_f64(quantile_value.value).unwrap()),
-            );
-        }
+        data_point_json.insert(
+            "data_point_quantile_values".to_string(),
+            Value::Array(
+                data_point
+                    .quantile_values
+                    .iter()
+                    .map(|quantile_value| {
+                        Value::Object(
+                            vec![
+                                (
+                                    "quantile",
+                                    Value::Number(
+                                        serde_json::Number::from_f64(quantile_value.quantile)
+                                            .unwrap(),
+                                    ),
+                                ),
+                                (
+                                    "value",
+                                    Value::Number(
+                                        serde_json::Number::from_f64(quantile_value.value).unwrap(),
+                                    ),
+                                ),
+                            ]
+                            .into_iter()
+                            .map(|(k, v)| (k.to_string(), v))
+                            .collect(),
+                        )
+                    })
+                    .collect(),
+            ),
+        );
         data_points_json.push(data_point_json);
     }
     data_points_json
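
Summary quantiles likewise collapse from indexed key pairs (`summary_quantile_value_quantile_{n}`, `summary_quantile_value_value_{n}`) into a single array of `{quantile, value}` objects. A sketch of the output shape, with hypothetical values; the vec-of-tuples construction above works, though the same objects could be built more tersely with the `serde_json::json!` macro:

```rust
use serde_json::json;

fn main() {
    // One flattened summary data point under the new shape.
    let point = json!({
        "data_point_count": 10,
        "data_point_sum": 12.5,
        "data_point_quantile_values": [
            { "quantile": 0.5, "value": 1.0 },
            { "quantile": 0.99, "value": 7.5 }
        ]
    });
    assert_eq!(point["data_point_quantile_values"][0]["quantile"], 0.5);
}
```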