From 9ea295b7df26c954cbb4f5f3c0dc669b14a2389d Mon Sep 17 00:00:00 2001
From: Corey Richardson
Date: Fri, 9 Aug 2013 20:05:24 -0400
Subject: [PATCH 1/3] extra::json: use a different encoding for enums.

It now uses `{"variant": VariantName, "fields": [...]}`, which, according to
@Seldaek, will optimize better in JavaScript JITs, since every enum value now
has the same "shape" rather than being a weird ad-hoc array. It also looks
prettier and makes more sense.
---
 src/libextra/json.rs | 39 ++++++++++++++++++++++++---------------
 1 file changed, 24 insertions(+), 15 deletions(-)

diff --git a/src/libextra/json.rs b/src/libextra/json.rs
index f76dc05b2773d..e0e860d102e05 100644
--- a/src/libextra/json.rs
+++ b/src/libextra/json.rs
@@ -135,18 +135,21 @@ impl serialize::Encoder for Encoder {
                          _id: uint,
                          cnt: uint,
                          f: &fn(&mut Encoder)) {
-        // enums are encoded as strings or vectors:
+        // enums are encoded as strings or objects
         // Bunny => "Bunny"
-        // Kangaroo(34,"William") => ["Kangaroo",[34,"William"]]
-
+        // Kangaroo(34,"William") => {"variant": "Kangaroo", "fields": [34,"William"]}
         if cnt == 0 {
             self.wr.write_str(escape_str(name));
         } else {
-            self.wr.write_char('[');
+            self.wr.write_char('{');
+            self.wr.write_str("\"variant\"");
+            self.wr.write_char(':');
             self.wr.write_str(escape_str(name));
             self.wr.write_char(',');
+            self.wr.write_str("\"fields\"");
+            self.wr.write_str(":[");
             f(self);
-            self.wr.write_char(']');
+            self.wr.write_str("]}");
         }
     }
 
@@ -947,14 +950,20 @@ impl serialize::Decoder for Decoder {
         debug!("read_enum_variant(names=%?)", names);
         let name = match self.stack.pop() {
             String(s) => s,
-            List(list) => {
-                for v in list.move_rev_iter() {
-                    self.stack.push(v);
-                }
-                match self.stack.pop() {
-                    String(s) => s,
-                    value => fail!("invalid variant name: %?", value),
+            Object(o) => {
+                let n = match o.find(&~"variant").expect("invalidly encoded json") {
+                    &String(ref s) => s.clone(),
+                    _ => fail!("invalidly encoded json"),
+                };
+                match o.find(&~"fields").expect("invalidly encoded json") {
+                    &List(ref l) => {
+                        for field in l.rev_iter() {
+                            self.stack.push(field.clone());
+                        }
+                    },
+                    _ => fail!("invalidly encoded json")
                 }
+                n
             }
             ref json => fail!("invalid variant: %?", *json),
         };
@@ -1517,7 +1526,7 @@ mod tests {
             let mut encoder = Encoder(wr);
             animal.encode(&mut encoder);
         },
-        ~"[\"Frog\",\"Henry\",349]"
+        ~"{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}"
     );
     assert_eq!(
         do io::with_str_writer |wr| {
@@ -1921,14 +1930,14 @@ mod tests {
         assert_eq!(value, Dog);
 
         let mut decoder =
-            Decoder(from_str("[\"Frog\",\"Henry\",349]").unwrap());
+            Decoder(from_str("{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}").unwrap());
         let value: Animal = Decodable::decode(&mut decoder);
         assert_eq!(value, Frog(~"Henry", 349));
     }
 
     #[test]
     fn test_decode_map() {
-        let s = ~"{\"a\": \"Dog\", \"b\": [\"Frog\", \"Henry\", 349]}";
+        let s = ~"{\"a\": \"Dog\", \"b\": {\"variant\":\"Frog\",\"fields\":[\"Henry\", 349]}}";
         let mut decoder = Decoder(from_str(s).unwrap());
         let mut map: TreeMap<~str, Animal> = Decodable::decode(&mut decoder);

From bb4d4d7eb9e7a3abe3dc2fd26ad62408d83ed036 Mon Sep 17 00:00:00 2001
From: Corey Richardson
Date: Mon, 16 Sep 2013 17:12:53 -0400
Subject: [PATCH 2/3] Fix hardcoded string in libsyntax

---
 src/libsyntax/parse/mod.rs | 26 ++------------------------
 1 file changed, 2 insertions(+), 24 deletions(-)

diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index f7c76fee18093..bb88c686284a3 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -398,30 +398,8 @@ mod test {
     #[test] fn string_to_tts_1 () {
         let (tts,_ps) = string_to_tts_and_sess(@"fn a (b : int) { b; }");
         assert_eq!(to_json_str(@tts),
-                   ~"[\
-                      [\"tt_tok\",null,[\"IDENT\",\"fn\",false]],\
-                      [\"tt_tok\",null,[\"IDENT\",\"a\",false]],\
-                      [\
-                          \"tt_delim\",\
-                          [\
-                              [\"tt_tok\",null,\"LPAREN\"],\
-                              [\"tt_tok\",null,[\"IDENT\",\"b\",false]],\
-                              [\"tt_tok\",null,\"COLON\"],\
-                              [\"tt_tok\",null,[\"IDENT\",\"int\",false]],\
-                              [\"tt_tok\",null,\"RPAREN\"]\
-                          ]\
-                      ],\
-                      [\
-                          \"tt_delim\",\
-                          [\
-                              [\"tt_tok\",null,\"LBRACE\"],\
-                              [\"tt_tok\",null,[\"IDENT\",\"b\",false]],\
-                              [\"tt_tok\",null,\"SEMI\"],\
-                              [\"tt_tok\",null,\"RBRACE\"]\
-                          ]\
-                      ]\
-                   ]"
-                   );
+                   ~"[{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"fn\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"a\",false]}]},{\"variant\":\"tt_delim\",\"fields\":[[{\"variant\":\"tt_tok\",\"fields\":[null,\"LPAREN\"]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"b\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,\"COLON\"]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"int\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,\"RPAREN\"]}]]},{\"variant\":\"tt_delim\",\"fields\":[[{\"variant\":\"tt_tok\",\"fields\":[null,\"LBRACE\"]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"b\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,\"SEMI\"]},{\"variant\":\"tt_tok\",\"fields\":[null,\"RBRACE\"]}]]}]"
+        );
     }
 
     #[test] fn ret_expr() {

From d0e0c336d081005da157882e95f4bb57f3bd70b8 Mon Sep 17 00:00:00 2001
From: Corey Richardson
Date: Mon, 16 Sep 2013 19:12:54 -0400
Subject: [PATCH 3/3] Update syntax test

---
 src/libsyntax/parse/mod.rs | 124 ++++++++++++++++++++++++++++++++++++-
 1 file changed, 123 insertions(+), 1 deletion(-)

diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index bb88c686284a3..9645dab4e8b7b 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -398,7 +398,129 @@ mod test {
     #[test] fn string_to_tts_1 () {
         let (tts,_ps) = string_to_tts_and_sess(@"fn a (b : int) { b; }");
         assert_eq!(to_json_str(@tts),
-                   ~"[{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"fn\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"a\",false]}]},{\"variant\":\"tt_delim\",\"fields\":[[{\"variant\":\"tt_tok\",\"fields\":[null,\"LPAREN\"]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"b\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,\"COLON\"]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"int\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,\"RPAREN\"]}]]},{\"variant\":\"tt_delim\",\"fields\":[[{\"variant\":\"tt_tok\",\"fields\":[null,\"LBRACE\"]},{\"variant\":\"tt_tok\",\"fields\":[null,{\"variant\":\"IDENT\",\"fields\":[\"b\",false]}]},{\"variant\":\"tt_tok\",\"fields\":[null,\"SEMI\"]},{\"variant\":\"tt_tok\",\"fields\":[null,\"RBRACE\"]}]]}]"
+                   ~"[\
+    {\
+        \"variant\":\"tt_tok\",\
+        \"fields\":[\
+            null,\
+            {\
+                \"variant\":\"IDENT\",\
+                \"fields\":[\
+                    \"fn\",\
+                    false\
+                ]\
+            }\
+        ]\
+    },\
+    {\
+        \"variant\":\"tt_tok\",\
+        \"fields\":[\
+            null,\
+            {\
+                \"variant\":\"IDENT\",\
+                \"fields\":[\
+                    \"a\",\
+                    false\
+                ]\
+            }\
+        ]\
+    },\
+    {\
+        \"variant\":\"tt_delim\",\
+        \"fields\":[\
+            [\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        \"LPAREN\"\
+                    ]\
+                },\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        {\
+                            \"variant\":\"IDENT\",\
+                            \"fields\":[\
+                                \"b\",\
+                                false\
+                            ]\
+                        }\
+                    ]\
+                },\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        \"COLON\"\
+                    ]\
+                },\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        {\
+                            \"variant\":\"IDENT\",\
+                            \"fields\":[\
+                                \"int\",\
+                                false\
+                            ]\
+                        }\
+                    ]\
+                },\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        \"RPAREN\"\
+                    ]\
+                }\
+            ]\
+        ]\
+    },\
+    {\
+        \"variant\":\"tt_delim\",\
+        \"fields\":[\
+            [\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        \"LBRACE\"\
+                    ]\
+                },\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        {\
+                            \"variant\":\"IDENT\",\
+                            \"fields\":[\
+                                \"b\",\
+                                false\
+                            ]\
+                        }\
+                    ]\
+                },\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        \"SEMI\"\
+                    ]\
+                },\
+                {\
+                    \"variant\":\"tt_tok\",\
+                    \"fields\":[\
+                        null,\
+                        \"RBRACE\"\
+                    ]\
+                }\
+            ]\
+        ]\
+    }\
+]"
         );
     }
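
For reference, a minimal sketch of the round trip this series establishes, using the Animal enum from the json.rs test module above. It assumes the 2013-era extra::json and extra::serialize APIs exercised by those tests, and is illustrative only, not part of the patches:

    extern mod extra;

    use std::io;
    use extra::json;
    use extra::serialize::{Encodable, Decodable};

    #[deriving(Eq, Encodable, Decodable)]
    enum Animal {
        Dog,
        Frog(~str, int),
    }

    fn main() {
        // Fieldless variants still encode as bare strings ("Dog");
        // variants with fields now encode as {"variant": ..., "fields": [...]}.
        let animal = Frog(~"Henry", 349);
        let encoded = do io::with_str_writer |wr| {
            let mut encoder = json::Encoder(wr);
            animal.encode(&mut encoder);
        };
        assert_eq!(encoded, ~"{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}");

        // Decoding accepts the same shape back.
        let mut decoder =
            json::Decoder(json::from_str("{\"variant\":\"Frog\",\"fields\":[\"Henry\",349]}").unwrap());
        let decoded: Animal = Decodable::decode(&mut decoder);
        assert_eq!(decoded, Frog(~"Henry", 349));
    }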