From f559fff87491d850f714f7a895d7a6fc6f022056 Mon Sep 17 00:00:00 2001 From: numToStr Date: Mon, 23 Jan 2023 15:52:02 +0530 Subject: [PATCH 1/4] chore: update deps --- Cargo.lock | 100 +++++++++++++++++++++++++++++++++++++++++++---------- Cargo.toml | 6 ++-- src/cli.rs | 20 ++++------- 3 files changed, 92 insertions(+), 34 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index de1a7b4..210eaaf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,28 +2,68 @@ # It is not intended for manual editing. version = 3 +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + [[package]] name = "chumsky" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d02796e4586c6c41aeb68eae9bfb4558a522c35f1430c14b40136c3706e09e4" +checksum = "c4d619fba796986dd538d82660b76e0b9756c6e19b2e4d4559ba5a57f9f00810" +dependencies = [ + "hashbrown", +] [[package]] name = "comfy-table" -version = "6.1.2" +version = "6.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1090f39f45786ec6dc6286f8ea9c75d0a7ef0a0d3cda674cef0c3af7b307fbc2" +checksum = "6e7b787b0dc42e8111badfdbe4c3059158ccb2db8780352fa1b01e8ccf45cc4d" dependencies = [ "strum", "strum_macros", "unicode-width", ] +[[package]] +name = "getrandom" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + [[package]] name = "heck" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "lemmy-help" @@ -37,33 +77,45 @@ dependencies = [ [[package]] name = "lexopt" -version = "0.2.1" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baff4b617f7df3d896f97fe922b64817f6cd9a756bb81d40f8883f2f66dcb401" + +[[package]] +name = "libc" +version = "0.2.139" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" + +[[package]] +name = "once_cell" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478ee9e62aaeaf5b140bd4138753d1f109765488581444218d3ddda43234f3e8" +checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" [[package]] name = "proc-macro2" -version = "1.0.47" +version = "1.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" +checksum = "5d727cae5b39d21da60fa540906919ad737832fe0b1c165da3a34d6548c849d6" dependencies = [ "unicode-ident", ] [[package]] name = "quote" -version = 
"1.0.21" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" +checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" dependencies = [ "proc-macro2", ] [[package]] name = "rustversion" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" +checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70" [[package]] name = "strum" @@ -86,9 +138,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.103" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" +checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5" dependencies = [ "proc-macro2", "quote", @@ -103,12 +155,24 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "unicode-ident" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" +checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc" [[package]] name = "unicode-width" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" diff --git a/Cargo.toml b/Cargo.toml index a6653c2..88f6996 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,10 +26,10 @@ name = "lemmy-help" required-features = ["cli"] [dependencies] -chumsky = { version = "0.8.0", default-features = false } +chumsky = { version = "0.9.0", default-features = false } textwrap = { version = "0.16.0", default-features = false, optional = true } -comfy-table = { version = "6.1.2", default-features = false, optional = true } -lexopt = { version = "0.2.1", default-features = false, optional = true } +comfy-table = { version = "6.1.4", default-features = false, optional = true } +lexopt = { version = "0.3.0", default-features = false, optional = true } [features] vimdoc = ["dep:textwrap", "dep:comfy-table"] diff --git a/src/cli.rs b/src/cli.rs index 4dfc7c2..c115f7d 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -4,7 +4,7 @@ use lexopt::{ Arg::{Long, Short, Value}, Parser, ValueExt, }; -use std::{ffi::OsString, fs::read_to_string, path::PathBuf, str::FromStr}; +use std::{fs::read_to_string, path::PathBuf, str::FromStr}; pub const NAME: &str = env!("CARGO_PKG_NAME"); pub const VERSION: &str = env!("CARGO_PKG_VERSION"); @@ -43,16 +43,11 @@ impl Cli { std::process::exit(0); } Short('l') | Long("layout") => { - let layout = parser.value()?; - let Some(l) = layout.to_str() else { - return Err(lexopt::Error::MissingValue { - option: Some("layout".into()), - }); - }; + let layout = parser.value()?.string()?; c.settings.layout = - Layout::from_str(l).map_err(|_| lexopt::Error::UnexpectedValue { + 
Layout::from_str(&layout).map_err(|_| lexopt::Error::UnexpectedValue { option: "layout".into(), - value: l.into(), + value: layout.into(), })?; } Short('i') | Long("indent") => { @@ -67,10 +62,9 @@ impl Cli { Value(val) => { let file = PathBuf::from(&val); if !file.is_file() { - return Err(lexopt::Error::UnexpectedArgument(OsString::from(format!( - "{} is not a file!", - file.display() - )))); + return Err(lexopt::Error::UnexpectedArgument( + format!("{} is not a file!", file.display()).into(), + )); } c.files.push(file) } From 23bd6ea21cacd72be7b53d51b4d5e3419667c21a Mon Sep 17 00:00:00 2001 From: numToStr Date: Thu, 23 Mar 2023 11:20:25 +0530 Subject: [PATCH 2/4] init-zero --- Cargo.lock | 41 +-- Cargo.toml | 2 +- run.sh | 5 + src/cli.rs | 11 +- src/lexer.rs | 669 ++++++++++++++++++++----------------- src/lexer/token.rs | 85 +++-- src/lib.rs | 200 ++++++----- src/parser.rs | 33 +- src/parser/node.rs | 126 +++---- src/parser/tags/alias.rs | 70 ++-- src/parser/tags/brief.rs | 29 +- src/parser/tags/class.rs | 109 +++--- src/parser/tags/divider.rs | 14 +- src/parser/tags/func.rs | 168 +++++----- src/parser/tags/mod.rs | 8 +- src/parser/tags/module.rs | 22 +- src/parser/tags/see.rs | 19 +- src/parser/tags/tag.rs | 18 +- src/parser/tags/type.rs | 69 ++-- src/parser/tags/usage.rs | 53 +-- src/vimdoc.rs | 54 +-- 21 files changed, 943 insertions(+), 862 deletions(-) create mode 100755 run.sh diff --git a/Cargo.lock b/Cargo.lock index 210eaaf..6f7fe7d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4,11 +4,11 @@ version = 3 [[package]] name = "ahash" -version = "0.7.6" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ - "getrandom", + "cfg-if", "once_cell", "version_check", ] @@ -21,9 +21,9 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "chumsky" -version = "0.9.0" +version = "1.0.0-alpha.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4d619fba796986dd538d82660b76e0b9756c6e19b2e4d4559ba5a57f9f00810" +checksum = "379cdc19530b72a1e76d94a350676eaea1455375533eb38f18dfa712f9996902" dependencies = [ "hashbrown", ] @@ -39,22 +39,11 @@ dependencies = [ "unicode-width", ] -[[package]] -name = "getrandom" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c05aeb6a22b8f62540c194aac980f2115af067bfe15a0734d7277a768d396b31" -dependencies = [ - "cfg-if", - "libc", - "wasi", -] - [[package]] name = "hashbrown" -version = "0.12.3" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ "ahash", ] @@ -81,17 +70,11 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baff4b617f7df3d896f97fe922b64817f6cd9a756bb81d40f8883f2f66dcb401" -[[package]] -name = "libc" -version = "0.2.139" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "201de327520df007757c1f0adce6e827fe8562fbc28bfd9c15571c66ca1f5f79" - [[package]] name = "once_cell" -version = "1.17.0" +version = "1.17.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f61fba1741ea2b3d6a1e3178721804bb716a68a6aeba1149b5d52e3d464ea66" 
+checksum = "b7e5500299e16ebb147ae15a00a942af264cf3688f47923b8fc2cd5858f23ad3" [[package]] name = "proc-macro2" @@ -170,9 +153,3 @@ name = "version_check" version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "wasi" -version = "0.11.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" diff --git a/Cargo.toml b/Cargo.toml index 88f6996..98a4069 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -26,7 +26,7 @@ name = "lemmy-help" required-features = ["cli"] [dependencies] -chumsky = { version = "0.9.0", default-features = false } +chumsky = { version = "1.0.0-alpha.3", default-features = false } textwrap = { version = "0.16.0", default-features = false, optional = true } comfy-table = { version = "6.1.4", default-features = false, optional = true } lexopt = { version = "0.3.0", default-features = false, optional = true } diff --git a/run.sh b/run.sh new file mode 100755 index 0000000..fd97025 --- /dev/null +++ b/run.sh @@ -0,0 +1,5 @@ +#!/bin/bash + +cargo rr -- -fact \ + ~/Code/Comment.nvim/lua/Comment/{init.lua,config.lua} ~/Code/Comment.nvim/plugin/Comment.lua \ + ~/Code/Comment.nvim/lua/Comment/{api.lua,ft.lua,utils.lua,opfunc.lua,extra.lua} > ~/Code/Comment.nvim/doc/Comment.txt diff --git a/src/cli.rs b/src/cli.rs index c115f7d..a395010 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -1,4 +1,4 @@ -use lemmy_help::{vimdoc::VimDoc, FromEmmy, Layout, LemmyHelp, Settings}; +use lemmy_help::{parser, vimdoc::VimDoc, FromEmmy, Layout, Settings}; use lexopt::{ Arg::{Long, Short, Value}, @@ -76,14 +76,17 @@ impl Cli { } pub fn run(self) { - let mut lemmy = LemmyHelp::new(); + let mut help_doc = String::new(); + // FIXME: toc entries for f in self.files { let source = read_to_string(f).unwrap(); - lemmy.for_help(&source, &self.settings).unwrap(); + let ast = parser(&source, &self.settings); + let doc = VimDoc::from_emmy(&ast, &self.settings); + help_doc.push_str(&doc.to_string()); } - print!("{}", VimDoc::from_emmy(&lemmy, &self.settings)); + print!("{help_doc}"); if self.modeline { println!("vim:tw=78:ts=8:noet:ft=help:norl:"); diff --git a/src/lexer.rs b/src/lexer.rs index 558cc48..10d0739 100644 --- a/src/lexer.rs +++ b/src/lexer.rs @@ -1,332 +1,381 @@ -mod token; -pub use token::*; - -use std::ops::Range; +// FIXME: +// - [x] takeuntil(end()).to(Tag::Skip) +// - [x] Trailing whitespace in Tag::Comment - It is part of comment +// - array_union! macro is no-go +mod token; use chumsky::{ - prelude::{any, choice, end, filter, just, take_until, Simple}, + extra, + prelude::Rich, + primitive::{any, choice, end, just, one_of}, + recovery::skip_then_retry_until, recursive::recursive, - text::{ident, keyword, newline, whitespace, TextParser}, - Parser, + span::SimpleSpan, + text::{ident, keyword, newline, whitespace}, + IterParser, Parser, }; +pub use token::*; -type Spanned = (TagType, Range); +macro_rules! 
array_union { + ($p: expr, $typ: expr) => { + $p.foldl(just("[]").repeated(), |arr, _| Ty::Array(Box::new(arr))) + // NOTE: Not the way I wanted i.e., Ty::Union(Vec) it to be, but it's better than nothing + .foldl(just('|').padded().ignore_then($typ).repeated(), |x, y| { + Ty::Union(Box::new(x), Box::new(y)) + }) + } +} const C: [char; 3] = ['.', '_', '-']; -#[derive(Debug)] -pub struct Lexer; - -impl Lexer { - /// Parse emmylua/lua files into rust token - pub fn init() -> impl Parser, Error = Simple> { - let triple = just("---"); - let space = just(' ').repeated().at_least(1); - let till_eol = take_until(newline()); - - let comment = till_eol.map(|(x, _)| x.iter().collect()); - let desc = space.ignore_then(comment).or_not(); - - let public = keyword("public").to(Scope::Public); - let private = keyword("private") - .to(Scope::Private) - .or(keyword("protected").to(Scope::Protected)) - .or(keyword("package").to(Scope::Package)); +/// Parse emmylua/lua files into rust token +pub fn lexer<'src>( +) -> impl Parser<'src, &'src str, Vec<(Token<'src>, SimpleSpan)>, extra::Err>> +{ + let triple = just("---"); + let till_cr = any().and_is(newline().not()).repeated().slice(); + let space = just(' ').repeated().at_least(1); + let desc = space.ignore_then(till_cr).or_not(); + let name = any() + .filter(|c: &char| c.is_alphanumeric() || C.contains(c)) + .repeated() + .slice(); + + let block_start = just("[["); + let block_end = just("]]"); + + let optional = just('?').or_not().map(|c| match c { + Some(_) => Name::Opt as fn(_) -> _, + None => Name::Req as fn(_) -> _, + }); + + let backtick_string = just('`') + .ignore_then(any().and_is(just('`').not()).repeated().slice()) + .then_ignore(just('`')); + + let union_literal = choice(( + just('\'') + .ignore_then(any().and_is(just('\'').not()).repeated().slice()) + .then_ignore(just('\'')) + .map(Member::Literal), + backtick_string.map(Member::Ident), + )); + + // Private/Protected/Public + let public_kw = keyword("public").to(Scope::Public); + let private_kw = keyword("private") + .to(Scope::Private) + .or(keyword("protected").to(Scope::Protected)) + .or(keyword("package").to(Scope::Package)); + + let any_typ = keyword("any").to(Ty::Any); + let unknown = keyword("unknown").to(Ty::Unknown); + let nil = keyword("nil").to(Ty::Nil); + let boolean = keyword("boolean").to(Ty::Boolean); + let string = keyword("string").to(Ty::String); + let num = keyword("number").to(Ty::Number); + let int = keyword("integer").to(Ty::Integer); + let function = keyword("function").to(Ty::Function); + let thread = keyword("thread").to(Ty::Thread); + let userdata = keyword("userdata").to(Ty::Userdata); + let lightuserdata = keyword("lightuserdata").to(Ty::Lightuserdata); + + let ty = recursive(|inner| { + let comma = just(',').padded(); + let colon = just(':').padded(); + + let list_like = ident() + .padded() + .then(optional) + .then( + colon + .ignore_then(inner.clone()) + .or_not() + // NOTE: if param type is missing then LLS treats it as `any` + .map(|x| x.unwrap_or(Ty::Any)), + ) + .map(|((n, attr), t)| (attr(n), t)) + .separated_by(comma) + .allow_trailing() + .collect(); - let hidden = private - .clone() - .ignore_then(newline()) - .then_ignore(choice(( - // eat up all the emmylua, if any, then one valid token - triple - .then(till_eol) - .padded() - .repeated() - .ignore_then(ident()), - // if there is no emmylua, just eat the next token - // so the next parser won't recognize the code - ident().padded(), - ))) - .ignored(); - - let union_literal = choice(( - just('\'') - 
.ignore_then(filter(|c| c != &'\'').repeated()) - .then_ignore(just('\'')) - .collect() - .map(Member::Literal), - just('`') - .ignore_then(filter(|c| c != &'`').repeated()) - .then_ignore(just('`')) - .collect() - .map(Member::Ident), - )); - - let variant = just('|') - .then_ignore(space) - .ignore_then(union_literal) + let fun = keyword("fun") + .ignore_then( + list_like + .clone() + .delimited_by(just('(').then(whitespace()), whitespace().then(just(')'))), + ) .then( - space - .ignore_then(just('#').ignore_then(space).ignore_then(comment)) + colon + .ignore_then(inner.clone().separated_by(comma).collect()) .or_not(), ) - .map(|(t, d)| TagType::Variant(t, d)); - - let optional = just('?').or_not().map(|c| match c { - Some(_) => Name::Opt as fn(_) -> _, - None => Name::Req as fn(_) -> _, - }); - - let name = filter(|x: &char| x.is_alphanumeric() || C.contains(x)) - .repeated() - .collect(); + .map(|(param, ret)| Ty::Fun(param, ret)); + + let table = keyword("table") + .ignore_then( + just('<') + .ignore_then(inner.clone().map(Box::new)) + .then_ignore(comma) + .then(inner.clone().map(Box::new)) + .then_ignore(just('>')) + .or_not(), + ) + .map(Ty::Table); - let ty = recursive(|inner| { - let comma = just(',').padded(); - let colon = just(':').padded(); - - let any = just("any").to(Ty::Any); - let unknown = just("unknown").to(Ty::Unknown); - let nil = just("nil").to(Ty::Nil); - let boolean = just("boolean").to(Ty::Boolean); - let string = just("string").to(Ty::String); - let num = just("number").to(Ty::Number); - let int = just("integer").to(Ty::Integer); - let function = just("function").to(Ty::Function); - let thread = just("thread").to(Ty::Thread); - let userdata = just("userdata").to(Ty::Userdata); - let lightuserdata = just("lightuserdata").to(Ty::Lightuserdata); - - #[inline] - fn array_union( - p: impl Parser>, - inner: impl Parser>, - ) -> impl Parser> { - p.then(just("[]").repeated()) - .foldl(|arr, _| Ty::Array(Box::new(arr))) - // NOTE: Not the way I wanted i.e., Ty::Union(Vec) it to be, but it's better than nothing - .then(just('|').padded().ignore_then(inner).repeated()) - .foldl(|x, y| Ty::Union(Box::new(x), Box::new(y))) - } - - let list_like = ident() - .padded() - .then(optional) - .then( - colon - .ignore_then(inner.clone()) - .or_not() - // NOTE: if param type is missing then LLS treats it as `any` - .map(|x| x.unwrap_or(Ty::Any)), - ) - .map(|((n, attr), t)| (attr(n), t)) - .separated_by(comma) - .allow_trailing(); - - let fun = just("fun") - .ignore_then( - list_like - .clone() - .delimited_by(just('(').then(whitespace()), whitespace().then(just(')'))), - ) - .then( - colon - .ignore_then(inner.clone().separated_by(comma)) - .or_not(), - ) - .map(|(param, ret)| Ty::Fun(param, ret)); - - let table = just("table") - .ignore_then( - just('<') - .ignore_then(inner.clone().map(Box::new)) - .then_ignore(comma) - .then(inner.clone().map(Box::new)) - .then_ignore(just('>')) - .or_not(), - ) - .map(Ty::Table); - - let dict = list_like - .delimited_by(just('{').then(whitespace()), whitespace().then(just('}'))) - .map(Ty::Dict); - - let ty_name = name.map(Ty::Ref); - - let parens = inner - .clone() - .delimited_by(just('(').padded(), just(')').padded()); - - // Union of string literals: '"g@"'|'"g@$"' - let string_literal = union_literal.map(Ty::Member); - - choice(( - array_union(any, inner.clone()), - array_union(unknown, inner.clone()), - array_union(nil, inner.clone()), - array_union(boolean, inner.clone()), - array_union(string, inner.clone()), - array_union(num, 
inner.clone()), - array_union(int, inner.clone()), - array_union(function, inner.clone()), - array_union(thread, inner.clone()), - array_union(userdata, inner.clone()), - array_union(lightuserdata, inner.clone()), - array_union(fun, inner.clone()), - array_union(table, inner.clone()), - array_union(dict, inner.clone()), - array_union(parens, inner.clone()), - array_union(string_literal, inner.clone()), - array_union(ty_name, inner), - )) - }); + let dict = list_like + .delimited_by(just('{').then(whitespace()), whitespace().then(just('}'))) + .map(Ty::Dict); - let code_lang = ident().then_ignore(space).or_not(); - - let tag = just('@').ignore_then(choice(( - hidden.or(public.clone().ignored()).to(TagType::Skip), - just("toc") - .ignore_then(space) - .ignore_then(comment) - .map(TagType::Toc), - just("mod") - .then_ignore(space) - .ignore_then(name) - .then(desc) - .map(|(name, desc)| TagType::Module(name, desc)), - just("divider") - .ignore_then(space) - .ignore_then(any()) - .map(TagType::Divider), - just("brief").ignore_then(space).ignore_then(choice(( - just("[[").to(TagType::BriefStart), - just("]]").to(TagType::BriefEnd), - ))), - just("param") - .ignore_then(space) - .ignore_then(choice(( - just("...").map(|n| Name::Req(n.to_string())), - ident().then(optional).map(|(n, o)| o(n)), - ))) - .then_ignore(space) - .then(ty.clone()) - .then(desc) - .map(|((name, ty), desc)| TagType::Param(name, ty, desc)), - just("return") - .ignore_then(space) - .ignore_then(ty.clone()) - .then(choice(( - newline().to((None, None)), - space.ignore_then(choice(( - just('#').ignore_then(comment).map(|x| (None, Some(x))), - ident().then(desc).map(|(name, desc)| (Some(name), desc)), - ))), - ))) - .map(|(ty, (name, desc))| TagType::Return(ty, name, desc)), - just("class") - .ignore_then(space) - .ignore_then(name) - .then(just(':').padded().ignore_then(ident()).or_not()) - .map(|(name, parent)| TagType::Class(name, parent)), - just("field") - .ignore_then(space.ignore_then(private.or(public)).or_not()) - .then_ignore(space) - .then(ident()) - .then(optional) - .then_ignore(space) - .then(ty.clone()) - .then(desc) - .map(|((((scope, name), opt), ty), desc)| { - TagType::Field(scope.unwrap_or(Scope::Public), opt(name), ty, desc) - }), - just("alias") - .ignore_then(space) - .ignore_then(name) - .then(space.ignore_then(ty.clone()).or_not()) - .map(|(name, ty)| TagType::Alias(name, ty)), - just("type") - .ignore_then(space) - .ignore_then(ty) - .then(desc) - .map(|(ty, desc)| TagType::Type(ty, desc)), - just("tag") - .ignore_then(space) - .ignore_then(comment) - .map(TagType::Tag), - just("see") - .ignore_then(space) - .ignore_then(comment) - .map(TagType::See), - just("usage").ignore_then(space).ignore_then(choice(( - code_lang - .then( - just('`') - .ignore_then(filter(|c| *c != '`').repeated()) - .then_ignore(just('`')) - .collect(), - ) - .map(|(lang, code)| TagType::Usage(lang, code)), - code_lang.then_ignore(just("[[")).map(TagType::UsageStart), - just("]]").to(TagType::UsageEnd), - ))), - just("export") - .ignore_then(space) - .ignore_then(ident()) - .then_ignore(take_until(end())) - .map(TagType::Export), - ))); - - let func = keyword("function").padded(); - let ret = keyword("return"); - let assign = just('=').padded(); - - // obj = ID (prop)+ "=" - // fn = ID (prop | colon_op) - // prop = (dot_op)+ ("(" | colon_op) - // dot_op = "." 
ID - // colon_op = ":" ID "(" - let colon_op = just(':') - .ignore_then(ident()) - .then_ignore(just('(')) - .map(Op::Colon); - - let dot_op = just('.') - .ignore_then(ident().map(Op::Dot)) - .repeated() - .at_least(1); - - let prop = dot_op - .then(choice((just('(').to(None), colon_op.map(Some)))) - .map(|(mut props, meth)| { - if let Some(x) = meth { - props.push(x) - } - Op::Deep(props) - }); + let ty_name = name.map(Ty::Ref); - let dotted = ident() - .then(choice((prop, colon_op))) - .map(|(prefix, op)| (prefix, op)); + let parens = inner + .clone() + .delimited_by(just('(').padded(), just(')').padded()); - let expr = ident().then(dot_op).then_ignore(assign); + // Union of string literals: '"g@"'|'"g@$"' + let string_literal = union_literal.map(Ty::Member); choice(( - triple.ignore_then(choice((tag, variant, comment.map(TagType::Comment)))), - func.clone() - .ignore_then(dotted) - .map(|(prefix, op)| TagType::Func(prefix, op)), - expr.then(func.or_not()) - .map(|((prefix, op), is_fn)| match is_fn { - Some(_) => TagType::Func(prefix, Op::Deep(op)), - None => TagType::Expr(prefix, Op::Deep(op)), - }), - ret.ignore_then(ident().padded()) - .then_ignore(end()) - .map(TagType::Export), - till_eol.to(TagType::Skip), + array_union!(any_typ, inner.clone()), + array_union!(unknown, inner.clone()), + array_union!(nil, inner.clone()), + array_union!(boolean, inner.clone()), + array_union!(string, inner.clone()), + array_union!(num, inner.clone()), + array_union!(int, inner.clone()), + array_union!(function, inner.clone()), + array_union!(thread, inner.clone()), + array_union!(userdata, inner.clone()), + array_union!(lightuserdata, inner.clone()), + array_union!(fun, inner.clone()), + array_union!(table, inner.clone()), + array_union!(dict, inner.clone()), + array_union!(parens, inner.clone()), + array_union!(string_literal, inner.clone()), + array_union!(ty_name, inner), )) + }); + + // ---@brief [[ + // ---@brief ]] + let brief = keyword("brief").then_ignore(space).ignore_then( + block_end + .to(Token::BriefEnd) + .or(block_start.to(Token::BriefStart)), + ); + + // ---@toc + let toc_tag = keyword("toc").then(space).ignore_then(name).map(Token::Toc); + + // ---@mod [desc] + let mod_tag = keyword("mod") + .then(space) + .ignore_then(name) + .then(desc) + .map(|(name, desc)| Token::Module(name, desc)); + + // ---@divider + let divider_tag = keyword("divider") + .then(space) + .ignore_then(one_of("~-=")) + .map(Token::Divider); + + // ---@param [description] + let param_tag = keyword("param") + .then(space) + .ignore_then(ident().then(optional)) + .then_ignore(space) + .then(ty.clone()) + .then(desc) + .map(|(((name, op), typ), desc)| Token::Param(op(name), typ, desc)); + + // ---@return [ [comment] | [name] #] + let return_tag = keyword("return") + .ignore_then(space) + .ignore_then(ty.clone()) + .then(choice(( + newline().to((None, None)), + space.ignore_then(choice(( + just('#').ignore_then(till_cr).map(|x| (None, Some(x))), + ident().then(desc).map(|(name, desc)| (Some(name), desc)), + ))), + ))) + .map(|(ty, (name, desc))| Token::Return(ty, name, desc)); + + // ---@class [: ] + let class_tag = keyword("class") + .then(space) + .ignore_then(name) + .then(just(':').padded().ignore_then(ident()).or_not()) + .map(|(this, parent)| Token::Class(this, parent)); + + // ---@field [public|protected|private] [desc] + let field_tag = keyword("field") + .ignore_then( + space + .ignore_then(private_kw.clone().or(public_kw.clone())) + .or_not(), + ) + .then_ignore(space) + .then(ident().then(optional)) + 
.then_ignore(space) + .then(ty.clone()) + .then(desc) + .map(|(((scope, (name, opt)), ty), desc)| { + Token::Field(scope.unwrap_or(Scope::Public), opt(name), ty, desc) + }); + + // -- Simple Alias + // ---@alias + // -- Enum alias + // ---@alias + let alias_tag = keyword("alias") + .then(space) + .ignore_then(name) + .then(space.ignore_then(ty.clone()).or_not()) + .map(|(name, ty)| Token::Alias(name, ty)); + + // ---| '' [# description] + // or + // ---| `` [# description] + let enum_member = just('|') + .then_ignore(space) + .ignore_then(union_literal) + .then( + space + .ignore_then(just('#').padded().ignore_then(till_cr)) + .or_not(), + ) + .map(|(t, d)| Token::Variant(t, d)); + + // ---@type [desc] + let type_tag = keyword("type") + .then(space) + .ignore_then(ty) + .then(desc) + .map(|(ty, desc)| Token::Type(ty, desc)); + + // ---@tag + let tag_tag = keyword("tag").then(space).ignore_then(name).map(Token::Tag); + + // ---@see + let see_tag = keyword("see") + .then(space) + .ignore_then(till_cr.padded()) + .map(Token::See); + + // - Single Line + // ---@usage [lang] `` + // - Multi Line + // ---@usage [lang] [[ + // ---@usage ]] + let usage_tag = { + let lang = ident().then_ignore(space).or_not(); + keyword("usage").then(space).ignore_then(choice(( + lang.then(backtick_string) + .map(|(lang, code)| Token::Usage(lang, code)), + lang.then_ignore(just("[[")).map(Token::UsageStart), + just("]]").to(Token::UsageEnd), + ))) + }; + + // ---@export + let export_tag = keyword("export") + .then(space) + .ignore_then(ident()) + .then_ignore(any().repeated()) + .map(Token::Export); + + // ---@private + let private_tag = private_kw .padded() - .map_with_span(|t, r| (t, r)) + .then(choice(( + // eat up all the emmylua, if any, then one valid token + triple + .then(till_cr) + .padded() + .repeated() + .ignore_then(ident()), + // if there is no emmylua, just eat the next token + // so the next parser won't recognize the code + ident().padded(), + ))) + .ignored(); + + // emmylua tags + let tags = just('@').ignore_then(choice(( + brief, + toc_tag, + mod_tag, + divider_tag, + param_tag, + return_tag, + class_tag, + field_tag, + alias_tag, + type_tag, + tag_tag, + see_tag, + usage_tag, + export_tag, + private_tag.to(Token::Skip), + public_kw.to(Token::Skip), + ))); + + // lua-src + let dotted = just('.') + .ignore_then(ident()) + .map(Op::Dot) .repeated() - } + .collect::>>(); + + // one.two.three = + let expr = ident() + .then(dotted) + .then_ignore(just('=').padded()) + .then(keyword("function").or_not()) + .map(|((name, op), is_fn)| match is_fn { + Some(_) => Token::Func(name, op), + None => Token::Expr(name, op), + }); + + // function one.two.three + // function one.two:three + // function one:two + let function = { + let func_name = dotted + .then(just(':').ignore_then(ident()).or_not()) + .map(|(mut x, y)| { + if let Some(c) = y { + x.push(Op::Colon(c)); + } + x + }); + keyword("function") + .padded() + .ignore_then(ident()) + .then(func_name) + .map(|(name, func_name)| Token::Func(name, func_name)) + }; + + // return \eof + let final_return = keyword("return") + .ignore_then(ident().padded()) + .then_ignore(end()) + .map(Token::Export); + + choice(( + triple.ignore_then(choice((tags, enum_member, till_cr.map(Token::Comment)))), + function, + expr, + final_return, + )) + .padded() + // Ignore Useless Nodes + .recover_with(skip_then_retry_until(any().ignored(), end())) + .map_with_span(|tok, span| (tok, span)) + .repeated() + .collect() + .then_ignore(end()) } diff --git a/src/lexer/token.rs 
b/src/lexer/token.rs index aaf29e3..8c80196 100644 --- a/src/lexer/token.rs +++ b/src/lexer/token.rs @@ -1,12 +1,12 @@ use std::fmt::Display; #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Member { - Literal(String), - Ident(String), +pub enum Member<'m> { + Literal(&'m str), + Ident(&'m str), } -impl Display for Member { +impl Display for Member<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Literal(lit) => f.write_str(&format!( @@ -19,15 +19,15 @@ impl Display for Member { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum TagType { +pub enum Token<'tt> { /// ```lua /// ---@toc /// ``` - Toc(String), + Toc(&'tt str), /// ```lua /// ---@mod [desc] /// ``` - Module(String, Option), + Module(&'tt str, Option<&'tt str>), /// ```lua /// ---@divider /// ``` @@ -36,18 +36,18 @@ pub enum TagType { /// function one.two() end /// one.two = function() end /// ``` - Func(String, Op), + Func(&'tt str, Vec>), /// ```lua /// one = 1 /// one.two = 12 /// ``` - Expr(String, Op), + Expr(&'tt str, Vec>), /// ```lua /// ---@export /// or /// return \eof /// ``` - Export(String), + Export(&'tt str), /// ```lua /// ---@brief [[ /// ``` @@ -59,19 +59,19 @@ pub enum TagType { /// ```lua /// ---@param [description] /// ``` - Param(Name, Ty, Option), + Param(Name<'tt>, Ty<'tt>, Option<&'tt str>), /// ```lua /// ---@return [ [comment] | [name] #] /// ``` - Return(Ty, Option, Option), + Return(Ty<'tt>, Option<&'tt str>, Option<&'tt str>), /// ```lua /// ---@class [: ] /// ``` - Class(String, Option), + Class(&'tt str, Option<&'tt str>), /// ```lua /// ---@field [public|private|protected] [description] /// ``` - Field(Scope, Name, Ty, Option), + Field(Scope, Name<'tt>, Ty<'tt>, Option<&'tt str>), /// ```lua /// -- Simple Alias /// ---@alias @@ -79,7 +79,7 @@ pub enum TagType { /// -- Enum alias /// ---@alias /// ``` - Alias(String, Option), + Alias(&'tt str, Option>), /// ```lua /// ---| '' [# description] /// @@ -87,27 +87,27 @@ pub enum TagType { /// /// ---| `` [# description] /// ``` - Variant(Member, Option), + Variant(Member<'tt>, Option<&'tt str>), /// ```lua /// ---@type [desc] /// ``` - Type(Ty, Option), + Type(Ty<'tt>, Option<&'tt str>), /// ```lua /// ---@tag /// ``` - Tag(String), + Tag(&'tt str), /// ```lua /// ---@see /// ``` - See(String), + See(&'tt str), /// ```lua /// ---@usage [lang] `` /// ``` - Usage(Option, String), + Usage(Option<&'tt str>, &'tt str), /// ```lua /// ---@usage [lang] [[ /// ``` - UsageStart(Option), + UsageStart(Option<&'tt str>), /// ```lua /// ---@usage ]] /// ``` @@ -115,27 +115,20 @@ pub enum TagType { /// ```lua /// ---TEXT /// ``` - Comment(String), + Comment(&'tt str), /// Text nodes which are not needed Skip, } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Op { - Deep(Vec), - Dot(String), - Colon(String), +pub enum Op<'op> { + Dot(&'op str), + Colon(&'op str), } -impl Display for Op { +impl Display for Op<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::Deep(mixed) => { - for mix in mixed { - mix.fmt(f)?; - } - Ok(()) - } Self::Dot(dot) => { f.write_str(".")?; f.write_str(dot) @@ -157,12 +150,12 @@ pub enum Scope { } #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Name { - Req(String), - Opt(String), +pub enum Name<'nm> { + Req(&'nm str), + Opt(&'nm str), } -impl Display for Name { +impl Display for Name<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { Self::Req(n) => f.write_str(n), @@ -176,7 +169,7 @@ impl 
Display for Name { // Source: https://github.com/sumneko/lua-language-server/wiki/Annotations#documenting-types #[derive(Debug, Clone, PartialEq, Eq, Hash)] -pub enum Ty { +pub enum Ty<'ty> { Nil, Any, Unknown, @@ -188,16 +181,16 @@ pub enum Ty { Thread, Userdata, Lightuserdata, - Ref(String), - Member(Member), - Array(Box), - Table(Option<(Box, Box)>), - Fun(Vec<(Name, Ty)>, Option>), - Dict(Vec<(Name, Ty)>), - Union(Box, Box), + Ref(&'ty str), + Member(Member<'ty>), + Array(Box>), + Table(Option<(Box>, Box>)>), + Fun(Vec<(Name<'ty>, Ty<'ty>)>, Option>>), + Dict(Vec<(Name<'ty>, Ty<'ty>)>), + Union(Box>, Box>), } -impl Display for Ty { +impl Display for Ty<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn list_like(args: &[(Name, Ty)]) -> String { args.iter() diff --git a/src/lib.rs b/src/lib.rs index 584f59a..01b03b2 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -6,13 +6,12 @@ pub mod parser; use std::{fmt::Display, str::FromStr}; -use chumsky::prelude::Simple; - +use chumsky::{prelude::Input, IterParser, Parser}; use parser::{ Alias, Brief, Class, Divider, Field, Func, Module, Node, Param, Return, See, Tag, Type, Usage, }; -use crate::lexer::TagType; +use crate::{lexer::lexer, parser::node_parser}; pub trait Visitor { type R; @@ -37,17 +36,18 @@ pub trait Accept { fn accept(&self, n: &T, s: &T::S) -> T::R; } -pub trait Nodes { - fn nodes(&self) -> &Vec; +pub trait Nodes<'src> { + fn nodes(&'src self) -> &'src [Node<'src>]; + fn into_nodes(self) -> Vec>; } -pub trait FromEmmy: Display { +pub trait FromEmmy<'src>: Display { type Settings; - fn from_emmy(t: &impl Nodes, s: &Self::Settings) -> Self; + fn from_emmy(t: &'src impl Nodes<'src>, s: &Self::Settings) -> Self; } -pub trait AsDoc { - fn as_doc(&self, s: &T::Settings) -> T; +pub trait AsDoc<'src, T: FromEmmy<'src>> { + fn as_doc(&'src self, s: &T::Settings) -> T; } #[derive(Debug, Default, PartialEq, Eq)] @@ -112,112 +112,110 @@ impl Default for Settings { } #[derive(Debug, Default)] -pub struct LemmyHelp { - nodes: Vec, +pub struct Document<'src> { + nodes: Vec>, } -impl Nodes for LemmyHelp { - fn nodes(&self) -> &Vec { - &self.nodes +impl<'src> Document<'src> { + pub fn new(nodes: Vec>) -> Self { + Self { nodes } } } -impl AsDoc for LemmyHelp { - fn as_doc(&self, s: &T::Settings) -> T { - T::from_emmy(self, s) +impl<'src> Nodes<'src> for Document<'src> { + fn nodes(&'src self) -> &[Node<'src>] { + self.nodes.as_ref() } -} -impl LemmyHelp { - /// Creates a new parser instance - /// - /// ``` - /// use lemmy_help::LemmyHelp; - /// - /// LemmyHelp::new(); - /// ``` - pub fn new() -> Self { - Self { nodes: vec![] } + fn into_nodes(self) -> Vec> { + self.nodes } +} - /// Parse given lua source code to generate AST representation - /// - /// ``` - /// use lemmy_help::{LemmyHelp, Nodes}; - /// - /// let mut lemmy = LemmyHelp::default(); - /// let src = r#" - /// local U = {} - /// - /// ---Add two integar and print it - /// ---@param this number First number - /// ---@param that number Second number - /// function U.sum(this, that) - /// print(this + that) - /// end - /// - /// return U - /// "#; - /// - /// let ast = lemmy.parse(&src).unwrap(); - /// assert!(!ast.nodes().is_empty()); - /// ``` - pub fn parse(&mut self, src: &str) -> Result<&Self, Vec>> { - self.nodes.append(&mut Node::new(src)?); - - Ok(self) +impl<'src, T: FromEmmy<'src>> AsDoc<'src, T> for Document<'src> { + fn as_doc(&'src self, s: &T::Settings) -> T { + T::from_emmy(self, s) } +} - /// Similar to [`LemmyHelp::parse`], but specifically used 
for generating vimdoc - pub fn for_help( - &mut self, - src: &str, - settings: &Settings, - ) -> Result<&Self, Vec>> { - let mut nodes = Node::new(src)?; - - if let Some(Node::Export(export)) = nodes.pop() { - let module = match nodes.iter().rev().find(|x| matches!(x, Node::Module(_))) { - Some(Node::Module(m)) => m.name.to_owned(), - _ => export.to_owned(), - }; - - for ele in nodes { - match ele { - Node::Export(..) => {} - Node::Func(mut func) => { - if func.prefix.left.as_deref() == Some(&export) { - if settings.prefix_func { - func.prefix.right = Some(module.to_owned()); - } - self.nodes.push(Node::Func(func)); - } - } - Node::Type(mut typ) => { - if typ.prefix.left.as_deref() == Some(&export) { - if settings.prefix_type { - typ.prefix.right = Some(module.to_owned()); - } - self.nodes.push(Node::Type(typ)); - } +/// Parse given lua source code to generate AST representation +/// +/// ``` +/// let src = r#" +/// local U = {} +/// +/// ---Add two integar and print it +/// ---@param this number First number +/// ---@param that number Second number +/// function U.sum(this, that) +/// print(this + that) +/// end +/// +/// return U +/// "#; +/// +/// let ast = lemmy_help::parse(&src).unwrap(); +/// assert!(!ast.nodes().is_empty()); +/// ``` +pub fn parser<'src>(src: &'src str, settings: &'src Settings) -> Document<'src> { + let Some(tokens) = lexer().parse(src).into_output() else { + return Document::default() + }; + + let Some(mut emmynode) = node_parser() + .repeated() + .collect::>() + .parse(tokens.as_slice().spanned((src.len()..src.len()).into())) + .into_output() + else { + return Document::default() + }; + + let Some(Node::Export(export)) = emmynode.pop() else { + return Document::default() + }; + + let mut nodes = vec![]; + + let module = match emmynode.iter().rev().find(|x| matches!(x, Node::Module(_))) { + Some(Node::Module(m)) => m.name, + _ => export, + }; + + for ele in emmynode { + match ele { + Node::Export(..) => {} + Node::Func(mut func) => { + if func.prefix.left == Some(export) { + if settings.prefix_func { + func.prefix.right = Some(module); } - Node::Alias(mut alias) => { - if settings.prefix_alias { - alias.prefix.right = Some(module.to_owned()); - } - self.nodes.push(Node::Alias(alias)) - } - Node::Class(mut class) => { - if settings.prefix_class { - class.prefix.right = Some(module.to_owned()); - } - self.nodes.push(Node::Class(class)) + nodes.push(Node::Func(func)); + } + } + Node::Type(mut typ) => { + if typ.prefix.left == Some(export) { + if settings.prefix_type { + typ.prefix.right = Some(module); } - _ => self.nodes.push(ele), + nodes.push(Node::Type(typ)); } } - }; - - Ok(self) + Node::Alias(mut alias) => { + if settings.prefix_alias { + alias.prefix.right = Some(module); + } + nodes.push(Node::Alias(alias)) + } + Node::Class(mut class) => { + if settings.prefix_class { + class.prefix.right = Some(module); + } + nodes.push(Node::Class(class)) + } + x => nodes.push(x), + } } + + Document { nodes } } diff --git a/src/parser.rs b/src/parser.rs index 800ab74..ded2c9a 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -3,21 +3,22 @@ pub use node::*; mod tags; pub use tags::*; -macro_rules! 
impl_parse { - ($id: ident, $ret: ty, $body: expr) => { - impl $id { - pub fn parse() -> impl chumsky::Parser< - $crate::lexer::TagType, - $ret, - Error = chumsky::prelude::Simple<$crate::lexer::TagType>, - > { - $body - } - } - }; - ($id: ident, $body: expr) => { - crate::parser::impl_parse!($id, Self, $body); - }; +use crate::lexer::Token; +use chumsky::{input::SpannedInput, prelude::Rich, span::SimpleSpan, Parser}; + +pub type ParserInput<'tokens, 'src> = + SpannedInput, SimpleSpan, &'tokens [(Token<'src>, SimpleSpan)]>; + +pub type ParserErr<'tokens, 'src> = chumsky::extra::Err, SimpleSpan>>; + +pub trait LemmyParser<'tokens, 'src: 'tokens, O>: + Parser<'tokens, ParserInput<'tokens, 'src>, O, ParserErr<'tokens, 'src>> + Clone +{ } -pub(super) use impl_parse; +impl<'tokens, 'src, O, P> LemmyParser<'tokens, 'src, O> for P +where + 'src: 'tokens, + P: Parser<'tokens, ParserInput<'tokens, 'src>, O, ParserErr<'tokens, 'src>> + Clone, +{ +} diff --git a/src/parser/node.rs b/src/parser/node.rs index 4bf65e1..26766f1 100644 --- a/src/parser/node.rs +++ b/src/parser/node.rs @@ -1,51 +1,53 @@ use chumsky::{ - prelude::{any, choice, Simple}, - select, Parser, Stream, + prelude::choice, + primitive::{any, end}, + recovery::skip_then_retry_until, + select, Parser, }; use crate::{ - lexer::{Lexer, TagType}, - parser::{Alias, Brief, Class, Divider, Func, Module, Tag, Type}, + parser::{Alias, Brief, Class, Divider, Func, Module, Token, Type}, Accept, Visitor, }; -use super::impl_parse; +use super::{ + alias_parser, brief_parser, class_parser, divider_parser, func_parser, mod_parser, tag_parser, + type_parser, LemmyParser, Tag, +}; #[derive(Debug, Clone)] -pub enum Node { - Module(Module), +pub enum Node<'src> { + Module(Module<'src>), Divider(Divider), - Brief(Brief), - Tag(Tag), - Func(Func), - Class(Class), - Alias(Alias), - Type(Type), - Export(String), - Toc(String), + Brief(Brief<'src>), + Tag(Tag<'src>), + Func(Func<'src>), + Class(Class<'src>), + Alias(Alias<'src>), + Type(Type<'src>), + Export(&'src str), + Toc(&'src str), } -impl_parse!(Node, Option, { +pub fn node_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { choice(( - Module::parse().map(Self::Module), - Divider::parse().map(Self::Divider), - Brief::parse().map(Self::Brief), - Tag::parse().map(Self::Tag), - Func::parse().map(Self::Func), - Class::parse().map(Self::Class), - Alias::parse().map(Self::Alias), - Type::parse().map(Self::Type), + mod_parser(), + divider_parser(), + brief_parser(), + tag_parser(), + func_parser(), + class_parser(), + alias_parser(), + type_parser(), select! 
{ - TagType::Export(x) => Self::Export(x), - TagType::Toc(x) => Self::Toc(x), + Token::Export(x) => Node::Export(x), + Token::Toc(x) => Node::Toc(x), }, )) - .map(Some) - // Skip useless nodes - .or(any().to(None)) -}); + .recover_with(skip_then_retry_until(any().ignored(), end())) +} -impl Accept for Node { +impl<'src, T: Visitor> Accept for Node<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { match self { Self::Brief(x) => x.accept(n, s), @@ -61,34 +63,34 @@ impl Accept for Node { } } -impl Node { - fn init() -> impl Parser, Error = Simple> { - Node::parse().repeated().flatten() - } - - /// Creates stream of AST nodes from emmylua - /// - /// ``` - /// let src = r#" - /// local U = {} - /// - /// ---Add two integar and print it - /// ---@param this number First number - /// ---@param that number Second number - /// function U.sum(this, that) - /// print(this + that) - /// end - /// - /// return U - /// "#; - /// - /// let nodes = lemmy_help::parser::Node::new(src).unwrap(); - /// assert!(!nodes.is_empty()); - /// ``` - pub fn new(src: &str) -> Result, Vec>> { - let tokens = Lexer::init().parse(src).unwrap(); - let stream = Stream::from_iter(src.len()..src.len() + 1, tokens.into_iter()); - - Node::init().parse(stream) - } -} +// impl Node<'_> { +// /// Creates stream of AST nodes from emmylua +// /// +// /// ``` +// /// let src = r#" +// /// local U = {} +// /// +// /// ---Add two integar and print it +// /// ---@param this number First number +// /// ---@param that number Second number +// /// function U.sum(this, that) +// /// print(this + that) +// /// end +// /// +// /// return U +// /// "#; +// /// +// /// let nodes = lemmy_help::parser::Node::new(src).unwrap(); +// /// assert!(!nodes.is_empty()); +// /// ``` +// pub fn init<'src>(src: &'src str) -> Result>, Vec>>> { +// let tokens = Lexer::init().parse(src).into_output().unwrap().as_slice(); +// // return Err(vec![]) +// // }; +// Node::parse() +// .repeated() +// .collect::>>() +// .parse(tokens.spanned((src.len()..src.len()).into())) +// .into_result() +// } +// } diff --git a/src/parser/tags/alias.rs b/src/parser/tags/alias.rs index 7f8faae..7907758 100644 --- a/src/parser/tags/alias.rs +++ b/src/parser/tags/alias.rs @@ -1,51 +1,49 @@ -use chumsky::{prelude::choice, select, Parser}; +use chumsky::{prelude::choice, select, IterParser, Parser}; use crate::{ - lexer::{Member, TagType, Ty}, - parser::{impl_parse, Prefix}, + lexer::{Member, Token, Ty}, + parser::{LemmyParser, Node, Prefix}, Accept, Visitor, }; #[derive(Debug, Clone)] -pub enum AliasKind { - Type(Ty), - Enum(Vec<(Member, Option)>), +pub enum AliasKind<'src> { + Type(Ty<'src>), + Enum(Vec<(Member<'src>, Option<&'src str>)>), } #[derive(Debug, Clone)] -pub struct Alias { - pub name: String, - pub desc: Vec, - pub kind: AliasKind, - pub prefix: Prefix, +pub struct Alias<'src> { + pub name: &'src str, + pub desc: Vec<&'src str>, + pub kind: AliasKind<'src>, + pub prefix: Prefix<'src>, } -impl_parse!(Alias, { - select! { - TagType::Comment(x) => x, - } - .repeated() - .then(choice(( - select! { - TagType::Alias(name, Some(ty)) => (name, AliasKind::Type(ty)) - }, - select! { TagType::Alias(name, ..) => name }.then( - select! { - TagType::Variant(ty, desc) => (ty, desc) - } - .repeated() - .map(AliasKind::Enum), - ), - ))) - .map(|(desc, (name, kind))| Self { - name, - desc, - kind, - prefix: Prefix::default(), - }) -}); +pub fn alias_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! 
{ Token::Comment(x) => x } + .repeated() + .collect() + .then(choice(( + select! { Token::Alias(name,Some(ty)) => (name,AliasKind::Type(ty)) }, + select! { Token::Alias(name, ..) => name }.then( + select! { Token::Variant(ty,desc) => (ty,desc) } + .repeated() + .collect() + .map(AliasKind::Enum), + ), + ))) + .map(|(desc, (name, kind))| { + Node::Alias(Alias { + name, + desc, + kind, + prefix: Prefix::default(), + }) + }) +} -impl Accept for Alias { +impl<'src, T: Visitor> Accept for Alias<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.alias(self, s) } diff --git a/src/parser/tags/brief.rs b/src/parser/tags/brief.rs index e2bd04f..e1cc24b 100644 --- a/src/parser/tags/brief.rs +++ b/src/parser/tags/brief.rs @@ -1,22 +1,25 @@ -use chumsky::{prelude::just, select, Parser}; +use chumsky::{primitive::just, select, IterParser, Parser}; -use crate::{lexer::TagType, parser::impl_parse, Accept, Visitor}; +use crate::{ + lexer::Token, + parser::{LemmyParser, Node}, + Accept, Visitor, +}; #[derive(Debug, Clone)] -pub struct Brief { - pub desc: Vec, +pub struct Brief<'src> { + pub desc: Vec<&'src str>, } -impl_parse!(Brief, { - select! { - TagType::Comment(x) => x, - } - .repeated() - .delimited_by(just(TagType::BriefStart), just(TagType::BriefEnd)) - .map(|desc| Self { desc }) -}); +pub fn brief_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! { Token::Comment(x) => x } + .repeated() + .collect() + .delimited_by(just(Token::BriefStart), just(Token::BriefEnd)) + .map(|desc| Node::Brief(Brief { desc })) +} -impl Accept for Brief { +impl<'src, T: Visitor> Accept for Brief<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.brief(self, s) } diff --git a/src/parser/tags/class.rs b/src/parser/tags/class.rs index abe4705..3ee5284 100644 --- a/src/parser/tags/class.rs +++ b/src/parser/tags/class.rs @@ -1,74 +1,75 @@ -use chumsky::{select, Parser}; +use chumsky::{select, IterParser, Parser}; use crate::{ - lexer::{Name, Scope, TagType, Ty}, - parser::{impl_parse, Prefix, See}, + lexer::{Name, Scope, Token, Ty}, + parser::{LemmyParser, Node, Prefix, See}, Accept, Visitor, }; +use super::see_parser; + #[derive(Debug, Clone)] -pub struct Field { +pub struct Field<'src> { pub scope: Scope, - pub name: Name, - pub ty: Ty, - pub desc: Vec, + pub name: Name<'src>, + pub ty: Ty<'src>, + pub desc: Vec<&'src str>, } -impl_parse!(Field, { - select! { - TagType::Comment(x) => x, - } - .repeated() - .then(select! { - TagType::Field(scope, name, ty, desc) => (scope, name, ty, desc) - }) - .map(|(header, (scope, name, ty, desc))| { - let desc = match desc { - Some(d) => { - let mut new_desc = Vec::with_capacity(header.len() + 1); - new_desc.push(d); - new_desc.extend(header); - new_desc +pub fn field_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Field<'src>> { + select! { Token::Comment(x) => x } + .repeated() + .collect::>() + .then(select! 
{ Token::Field(scope,name,ty,desc) => (scope,name,ty,desc) }) + .map(|(header, (scope, name, ty, desc))| { + let desc = match desc { + Some(d) => { + let mut new_desc = Vec::with_capacity(header.len() + 1); + new_desc.push(d); + new_desc.extend(header); + new_desc + } + None => header, + }; + Field { + scope, + name, + ty, + desc, } - None => header, - }; - - Self { - scope, - name, - ty, - desc, - } - }) -}); + }) +} #[derive(Debug, Clone)] -pub struct Class { - pub name: String, - pub parent: Option, - pub desc: Vec, - pub fields: Vec, - pub see: See, - pub prefix: Prefix, +pub struct Class<'src> { + pub name: &'src str, + pub parent: Option<&'src str>, + pub desc: Vec<&'src str>, + pub fields: Vec>, + pub see: See<'src>, + pub prefix: Prefix<'src>, } -impl_parse!(Class, { - select! { TagType::Comment(c) => c } +pub fn class_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! { Token::Comment(c) => c } .repeated() - .then(select! { TagType::Class(name, parent) => (name, parent) }) - .then(Field::parse().repeated()) - .then(See::parse()) - .map(|(((desc, (name, parent)), fields), see)| Self { - name, - parent, - desc, - fields, - see, - prefix: Prefix::default(), + .collect() + .then(select! { Token::Class(name, parent) => (name,parent) }) + .then(field_parser().repeated().collect()) + .then(see_parser()) + .map(|(((desc, (name, parent)), fields), see)| { + Node::Class(Class { + name, + parent, + desc, + fields, + see, + prefix: Prefix::default(), + }) }) -}); +} -impl Accept for Class { +impl<'src, T: Visitor> Accept for Class<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.class(self, s) } diff --git a/src/parser/tags/divider.rs b/src/parser/tags/divider.rs index d6ced28..169f286 100644 --- a/src/parser/tags/divider.rs +++ b/src/parser/tags/divider.rs @@ -1,13 +1,19 @@ use chumsky::select; -use crate::{lexer::TagType, parser::impl_parse, Accept, Visitor}; +use crate::{ + lexer::Token, + parser::{LemmyParser, Node}, + Accept, Visitor, +}; #[derive(Debug, Clone)] pub struct Divider(pub char); -impl_parse!(Divider, { - select! { TagType::Divider(rune) => Self(rune) } -}); +pub fn divider_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! { + Token::Divider(rune) => Node::Divider(Divider(rune)) + } +} impl Accept for Divider { fn accept(&self, n: &T, s: &T::S) -> T::R { diff --git a/src/parser/tags/func.rs b/src/parser/tags/func.rs index 790d959..bc7e397 100644 --- a/src/parser/tags/func.rs +++ b/src/parser/tags/func.rs @@ -1,104 +1,108 @@ -use chumsky::{select, Parser}; +use chumsky::{select, IterParser, Parser}; use crate::{ - lexer::{Name, Op, TagType, Ty}, - parser::{impl_parse, Prefix, See}, + lexer::{Name, Op, Token, Ty}, + parser::{LemmyParser, Node, Prefix, See}, Accept, Visitor, }; -use super::Usage; +use super::{see_parser, usage_parser, Usage}; #[derive(Debug, Clone)] -pub struct Param { - pub name: Name, - pub ty: Ty, - pub desc: Vec, +pub struct Param<'src> { + pub name: Name<'src>, + pub ty: Ty<'src>, + pub desc: Vec<&'src str>, } -impl_parse!(Param, { - select! { - TagType::Param(name, ty, desc) => (name, ty, desc) - } - .then(select! 
{ TagType::Comment(x) => x }.repeated()) - .map(|((name, ty, desc), extra)| { - let desc = match desc { - Some(d) => { - let mut new_desc = Vec::with_capacity(extra.len() + 1); - new_desc.push(d); - new_desc.extend(extra); - new_desc - } - None => extra, - }; - Self { name, ty, desc } - }) -}); +fn param_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Param<'src>> { + select! { Token::Param(name,ty,desc) => (name,ty,desc) } + .then( + select! { Token::Comment(x) => x } + .repeated() + .collect::>(), + ) + .map(|((name, ty, desc), extra)| { + let desc = match desc { + Some(d) => { + let mut new_desc = Vec::with_capacity(extra.len() + 1); + new_desc.push(d); + new_desc.extend(extra); + new_desc + } + None => extra, + }; + Param { name, ty, desc } + }) +} #[derive(Debug, Clone)] -pub struct Return { - pub ty: Ty, - pub name: Option, - pub desc: Vec, +pub struct Return<'src> { + pub ty: Ty<'src>, + pub name: Option<&'src str>, + pub desc: Vec<&'src str>, } -impl_parse!(Return, { - select! { - TagType::Return(ty, name, desc) => (ty, name, desc) - } - .then(select! { TagType::Comment(x) => x }.repeated()) - .map(|((ty, name, desc), extra)| { - let desc = match desc { - Some(d) => { - let mut new_desc = Vec::with_capacity(extra.len() + 1); - new_desc.push(d); - new_desc.extend(extra); - new_desc - } - None => extra, - }; - - Self { name, ty, desc } - }) -}); +fn return_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Return<'src>> { + select! { Token::Return(ty,name,desc) => (ty,name,desc) } + .then( + select! { Token::Comment(x) => x } + .repeated() + .collect::>(), + ) + .map(|((ty, name, desc), extra)| { + let desc = match desc { + Some(d) => { + let mut new_desc = Vec::with_capacity(extra.len() + 1); + new_desc.push(d); + new_desc.extend(extra); + new_desc + } + None => extra, + }; + Return { name, ty, desc } + }) +} #[derive(Debug, Clone)] -pub struct Func { - pub op: Op, - pub prefix: Prefix, - pub desc: Vec, - pub params: Vec, - pub returns: Vec, - pub see: See, - pub usage: Option, +pub struct Func<'src> { + pub prefix: Prefix<'src>, + pub op: Vec>, + pub desc: Vec<&'src str>, + pub params: Vec>, + pub returns: Vec>, + pub see: See<'src>, + pub usage: Option>, } -impl_parse!(Func, { - select! { - TagType::Comment(x) => x, - } - .repeated() - .then(Param::parse().repeated()) - .then(Return::parse().repeated()) - .then(See::parse()) - .then(Usage::parse().or_not()) - .then(select! { TagType::Func(prefix, op) => (prefix, op) }) - .map( - |(((((desc, params), returns), see), usage), (prefix, op))| Self { - op, - prefix: Prefix { - left: Some(prefix.clone()), - right: Some(prefix), +pub fn func_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! { Token::Comment(x) => x } + .repeated() + .collect() + .then(param_parser().repeated().collect()) + .then(return_parser().repeated().collect()) + .then(see_parser()) + .then(usage_parser().or_not()) + .then(select! 
{ Token::Func(prefix,op) => (prefix,op) }) + .map( + |(((((desc, params), returns), see), usage), (prefix, op))| { + Node::Func(Func { + prefix: Prefix { + left: Some(prefix), + right: Some(prefix), + }, + op, + desc, + params, + returns, + see, + usage, + }) }, - desc, - params, - returns, - see, - usage, - }, - ) -}); + ) +} -impl Accept for Func { +impl<'src, T: Visitor> Accept for Func<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.func(self, s) } diff --git a/src/parser/tags/mod.rs b/src/parser/tags/mod.rs index 976c8b1..6743a7e 100644 --- a/src/parser/tags/mod.rs +++ b/src/parser/tags/mod.rs @@ -19,8 +19,8 @@ pub use see::*; mod usage; pub use usage::*; -#[derive(Debug, Default, Clone)] -pub struct Prefix { - pub left: Option, - pub right: Option, +#[derive(Debug, Clone, Default)] +pub struct Prefix<'src> { + pub left: Option<&'src str>, + pub right: Option<&'src str>, } diff --git a/src/parser/tags/module.rs b/src/parser/tags/module.rs index fc028e2..c4aea14 100644 --- a/src/parser/tags/module.rs +++ b/src/parser/tags/module.rs @@ -1,18 +1,24 @@ use chumsky::select; -use crate::{lexer::TagType, parser::impl_parse, Accept, Visitor}; +use crate::{ + lexer::Token, + parser::{LemmyParser, Node}, + Accept, Visitor, +}; #[derive(Debug, Clone)] -pub struct Module { - pub name: String, - pub desc: Option, +pub struct Module<'src> { + pub name: &'src str, + pub desc: Option<&'src str>, } -impl_parse!(Module, { - select! { TagType::Module(name, desc) => Self { name, desc } } -}); +pub fn mod_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! { + Token::Module(name,desc) => Node::Module(Module { name,desc }) + } +} -impl Accept for Module { +impl<'src, T: Visitor> Accept for Module<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.module(self, s) } diff --git a/src/parser/tags/see.rs b/src/parser/tags/see.rs index adbbe61..87ea0cc 100644 --- a/src/parser/tags/see.rs +++ b/src/parser/tags/see.rs @@ -1,19 +1,20 @@ -use chumsky::{select, Parser}; +use chumsky::{select, IterParser, Parser}; -use crate::{lexer::TagType, parser::impl_parse, Accept, Visitor}; +use crate::{lexer::Token, parser::LemmyParser, Accept, Visitor}; #[derive(Debug, Clone)] -pub struct See { - pub refs: Vec, +pub struct See<'src> { + pub refs: Vec<&'src str>, } -impl_parse!(See, { - select! { TagType::See(x) => x } +pub fn see_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, See<'src>> { + select! { Token::See(x) => x } .repeated() - .map(|refs| Self { refs }) -}); + .collect() + .map(|refs| See { refs }) +} -impl Accept for See { +impl<'src, T: Visitor> Accept for See<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.see(self, s) } diff --git a/src/parser/tags/tag.rs b/src/parser/tags/tag.rs index 0dd6378..6e72245 100644 --- a/src/parser/tags/tag.rs +++ b/src/parser/tags/tag.rs @@ -1,15 +1,21 @@ use chumsky::select; -use crate::{lexer::TagType, parser::impl_parse, Accept, Visitor}; +use crate::{ + lexer::Token, + parser::{LemmyParser, Node}, + Accept, Visitor, +}; #[derive(Debug, Clone)] -pub struct Tag(pub String); +pub struct Tag<'src>(pub &'src str); -impl_parse!(Tag, { - select! { TagType::Tag(x) => Self(x) } -}); +pub fn tag_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! 
{ + Token::Tag(x) => Node::Tag(Tag(x)) + } +} -impl Accept for Tag { +impl<'src, T: Visitor> Accept for Tag<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.tag(self, s) } diff --git a/src/parser/tags/type.rs b/src/parser/tags/type.rs index d8efca6..8f09825 100644 --- a/src/parser/tags/type.rs +++ b/src/parser/tags/type.rs @@ -1,48 +1,47 @@ -use chumsky::{select, Parser}; +use chumsky::{select, IterParser, Parser}; use crate::{ - lexer::{Op, TagType, Ty}, - parser::{impl_parse, Prefix, See}, + lexer::{Op, Token, Ty}, + parser::{LemmyParser, Node, Prefix, See}, Accept, Visitor, }; -use super::Usage; +use super::{see_parser, usage_parser, Usage}; #[derive(Debug, Clone)] -pub struct Type { - pub desc: (Vec, Option), - pub op: Op, - pub prefix: Prefix, - pub ty: Ty, - pub see: See, - pub usage: Option, +pub struct Type<'src> { + pub desc: (Vec<&'src str>, Option<&'src str>), + pub prefix: Prefix<'src>, + pub op: Vec>, + pub ty: Ty<'src>, + pub see: See<'src>, + pub usage: Option>, } -impl_parse!(Type, { - select! { - TagType::Comment(x) => x - } - .repeated() - .then(select! { TagType::Type(ty, desc) => (ty, desc) }) - .then(See::parse()) - .then(Usage::parse().or_not()) - .then(select! { TagType::Expr(prefix, op) => (prefix, op) }) - .map( - |((((extract, (ty, desc)), see), usage), (prefix, op))| Self { - desc: (extract, desc), - prefix: Prefix { - left: Some(prefix.to_owned()), - right: Some(prefix), - }, - op, - ty, - see, - usage, - }, - ) -}); +pub fn type_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Node<'src>> { + select! { Token::Comment(x) => x } + .repeated() + .collect() + .then(select! { Token::Type(ty,desc) => (ty,desc) }) + .then(see_parser()) + .then(usage_parser().or_not()) + .then(select! { Token::Expr(prefix,op) => (prefix,op) }) + .map(|((((extract, (ty, desc)), see), usage), (prefix, op))| { + Node::Type(Type { + desc: (extract, desc), + prefix: Prefix { + left: Some(prefix), + right: Some(prefix), + }, + op, + ty, + see, + usage, + }) + }) +} -impl Accept for Type { +impl<'src, T: Visitor> Accept for Type<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.r#type(self, s) } diff --git a/src/parser/tags/usage.rs b/src/parser/tags/usage.rs index 444192d..a417393 100644 --- a/src/parser/tags/usage.rs +++ b/src/parser/tags/usage.rs @@ -1,34 +1,47 @@ +use std::fmt::Display; + use chumsky::{ primitive::{choice, just}, - select, Parser, + select, IterParser, Parser, }; -use crate::{lexer::TagType, parser::impl_parse, Accept, Visitor}; +use crate::{lexer::Token, parser::LemmyParser, Accept, Visitor}; #[derive(Debug, Clone)] -pub struct Usage { - pub lang: Option, - pub code: String, +pub enum Code<'src> { + InLine(&'src str), + MultiLine(Vec<&'src str>), } -impl_parse!(Usage, { - choice(( - select! { - TagType::UsageStart(lang) => lang +impl<'src> Display for Code<'src> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::InLine(x) => f.write_str(x), + Self::MultiLine(x) => f.write_str(&x.join("\n")), } - .then(select! { TagType::Comment(x) => x }.repeated()) - .then_ignore(just(TagType::UsageEnd)) - .map(|(lang, code)| Self { - lang, - code: code.join("\n"), - }), - select! { - TagType::Usage(lang, code) => Self { lang, code } - }, + } +} + +#[derive(Debug, Clone)] +pub struct Usage<'src> { + pub lang: Option<&'src str>, + pub code: Code<'src>, +} + +pub fn usage_parser<'tokens, 'src: 'tokens>() -> impl LemmyParser<'tokens, 'src, Usage<'src>> { + choice(( + select! 
{ Token::UsageStart(lang) => lang } + .then(select! { Token::Comment(x) => x }.repeated().collect()) + .then_ignore(just(Token::UsageEnd)) + .map(|(lang, code)| Usage { + lang, + code: Code::MultiLine(code), + }), + select! { Token::Usage(lang,code) => Usage { lang,code:Code::InLine(code) } }, )) -}); +} -impl Accept for Usage { +impl<'src, T: Visitor> Accept for Usage<'src> { fn accept(&self, n: &T, s: &T::S) -> T::R { n.usage(self, s) } diff --git a/src/vimdoc.rs b/src/vimdoc.rs index 48535da..3527302 100644 --- a/src/vimdoc.rs +++ b/src/vimdoc.rs @@ -18,7 +18,7 @@ impl Visitor for VimDoc { fn module(&self, n: &Module, s: &Self::S) -> Self::R { let mut doc = String::new(); - let desc = n.desc.as_deref().unwrap_or_default(); + let desc = n.desc.unwrap_or_default(); doc.push_str(&self.divider(&Divider('='), s)); doc.push_str(desc); doc.push_str(&format!( @@ -60,15 +60,23 @@ impl Visitor for VimDoc { .join(", "); format!( "{}{}({args})", - n.prefix.left.as_deref().unwrap_or_default(), - n.op + n.prefix.left.unwrap_or_default(), + n.op.iter().map(|x| x.to_string()).collect::() ) } else { - format!("{}{}()", n.prefix.left.as_deref().unwrap_or_default(), n.op) + format!( + "{}{}()", + n.prefix.left.unwrap_or_default(), + n.op.iter().map(|x| x.to_string()).collect::() + ) }; doc.push_str(&header( &name_with_param, - &format!("{}{}", n.prefix.right.as_deref().unwrap_or_default(), n.op), + &format!( + "{}{}", + n.prefix.right.unwrap_or_default(), + n.op.iter().map(|x| x.to_string()).collect::() + ), )); if !n.desc.is_empty() { doc.push_str(&description(&n.desc.join("\n"), s.indent_width)) @@ -132,7 +140,7 @@ impl Visitor for VimDoc { "({}) {}", entry.ty, if entry.desc.is_empty() { - entry.name.clone().unwrap_or_default() + entry.name.unwrap_or_default().to_owned() } else { entry.desc.join(&format!("\n{}", " ".repeat(n as usize))) } @@ -141,7 +149,7 @@ impl Visitor for VimDoc { table.add_row([ format!("({})", entry.ty), if entry.desc.is_empty() { - entry.name.clone().unwrap_or_default() + entry.name.unwrap_or_default().to_owned() } else { entry.desc.join("\n") }, @@ -163,7 +171,7 @@ impl Visitor for VimDoc { if let Some(prefix) = &n.prefix.right { doc.push_str(&header(&name, &format!("{prefix}.{}", n.name))); } else { - doc.push_str(&header(&name, &n.name)); + doc.push_str(&header(&name, n.name)); } if !n.desc.is_empty() { doc.push_str(&description(&n.desc.join("\n"), s.indent_width)); @@ -216,9 +224,9 @@ impl Visitor for VimDoc { fn alias(&self, n: &crate::parser::Alias, s: &Self::S) -> Self::R { let mut doc = String::new(); if let Some(prefix) = &n.prefix.right { - doc.push_str(&header(&n.name, &format!("{prefix}.{}", n.name))); + doc.push_str(&header(n.name, &format!("{prefix}.{}", n.name))); } else { - doc.push_str(&header(&n.name, &n.name)); + doc.push_str(&header(n.name, n.name)); } if !n.desc.is_empty() { doc.push_str(&description(&n.desc.join("\n"), s.indent_width)); @@ -247,8 +255,16 @@ impl Visitor for VimDoc { fn r#type(&self, n: &crate::parser::Type, s: &Self::S) -> Self::R { let mut doc = String::new(); doc.push_str(&header( - &format!("{}{}", n.prefix.left.as_deref().unwrap_or_default(), n.op), - &format!("{}{}", n.prefix.right.as_deref().unwrap_or_default(), n.op), + &format!( + "{}{}", + n.prefix.left.unwrap_or_default(), + n.op.iter().map(|x| x.to_string()).collect::() + ), + &format!( + "{}{}", + n.prefix.right.unwrap_or_default(), + n.op.iter().map(|x| x.to_string()).collect::() + ), )); let (extract, desc) = &n.desc; if !extract.is_empty() { @@ -284,10 +300,10 @@ impl Visitor 
for VimDoc { let mut doc = String::new(); doc.push_str(&description("Usage: ~", s.indent_width)); doc.push('>'); - doc.push_str(n.lang.as_deref().unwrap_or("lua")); + doc.push_str(n.lang.unwrap_or("lua")); doc.push('\n'); doc.push_str(&textwrap::indent( - &n.code, + &n.code.to_string(), &(" ").repeat(s.indent_width * 2), )); doc.push_str("\n<\n\n"); @@ -298,8 +314,8 @@ impl Visitor for VimDoc { let mut doc = String::new(); let module = self.module( &Module { - name: n.to_string(), - desc: Some("Table of Contents".into()), + name: n, + desc: Some("Table of Contents"), }, s, ); @@ -307,7 +323,7 @@ impl Visitor for VimDoc { doc.push('\n'); for nod in nodes { if let Node::Module(x) = nod { - let desc = x.desc.as_deref().unwrap_or_default(); + let desc = x.desc.unwrap_or_default(); doc.push_str(&format!( "{desc} {:·>w$}\n", format!(" |{}|", x.name), @@ -319,9 +335,9 @@ impl Visitor for VimDoc { } } -impl FromEmmy for VimDoc { +impl<'src> FromEmmy<'src> for VimDoc { type Settings = Settings; - fn from_emmy(t: &impl crate::Nodes, s: &Self::Settings) -> Self { + fn from_emmy(t: &'src impl crate::Nodes<'src>, s: &Self::Settings) -> Self { let mut shelf = Self(String::new()); let nodes = t.nodes(); for node in nodes { From bb62caa43b3ea14e525f8945add8567db35c482a Mon Sep 17 00:00:00 2001 From: numToStr Date: Fri, 24 Mar 2023 18:08:07 +0530 Subject: [PATCH 3/4] fixed tests --- src/lexer.rs | 9 ++++-- src/lib.rs | 6 ++-- src/parser/node.rs | 32 -------------------- tests/basic.rs | 37 ++++++++++++++++------- tests/types.rs | 68 ++++++++++++++++++++---------------------- tests/with_settings.rs | 14 ++++----- 6 files changed, 76 insertions(+), 90 deletions(-) diff --git a/src/lexer.rs b/src/lexer.rs index 10d0739..a3236b4 100644 --- a/src/lexer.rs +++ b/src/lexer.rs @@ -184,11 +184,16 @@ pub fn lexer<'src>( // ---@param [description] let param_tag = keyword("param") .then(space) - .ignore_then(ident().then(optional)) + .ignore_then( + ident() + .then(optional) + .map(|(name, o)| o(name)) + .or(just("...").map(Name::Req)), + ) .then_ignore(space) .then(ty.clone()) .then(desc) - .map(|(((name, op), typ), desc)| Token::Param(op(name), typ, desc)); + .map(|((name, typ), desc)| Token::Param(name, typ, desc)); // ---@return [ [comment] | [name] #] let return_tag = keyword("return") diff --git a/src/lib.rs b/src/lib.rs index 01b03b2..1a80aa1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -141,6 +141,8 @@ impl<'src, T: FromEmmy<'src>> AsDoc<'src, T> for Document<'src> { /// Parse given lua source code to generate AST representation /// /// ``` +/// use lemmy_help::{parser, Nodes, Settings}; +/// /// let src = r#" /// local U = {} /// @@ -153,8 +155,8 @@ impl<'src, T: FromEmmy<'src>> AsDoc<'src, T> for Document<'src> { /// /// return U /// "#; -/// -/// let ast = lemmy_help::parse(&src).unwrap(); +/// let settings = Settings::default(); +/// let ast = lemmy_help::parser(&src, &settings); /// assert!(!ast.nodes().is_empty()); /// ``` pub fn parser<'src>(src: &'src str, settings: &'src Settings) -> Document<'src> { diff --git a/src/parser/node.rs b/src/parser/node.rs index 26766f1..d647d42 100644 --- a/src/parser/node.rs +++ b/src/parser/node.rs @@ -62,35 +62,3 @@ impl<'src, T: Visitor> Accept for Node<'src> { } } } - -// impl Node<'_> { -// /// Creates stream of AST nodes from emmylua -// /// -// /// ``` -// /// let src = r#" -// /// local U = {} -// /// -// /// ---Add two integar and print it -// /// ---@param this number First number -// /// ---@param that number Second number -// /// function U.sum(this, that) 
-// /// print(this + that) -// /// end -// /// -// /// return U -// /// "#; -// /// -// /// let nodes = lemmy_help::parser::Node::new(src).unwrap(); -// /// assert!(!nodes.is_empty()); -// /// ``` -// pub fn init<'src>(src: &'src str) -> Result>, Vec>>> { -// let tokens = Lexer::init().parse(src).into_output().unwrap().as_slice(); -// // return Err(vec![]) -// // }; -// Node::parse() -// .repeated() -// .collect::>>() -// .parse(tokens.spanned((src.len()..src.len()).into())) -// .into_result() -// } -// } diff --git a/tests/basic.rs b/tests/basic.rs index 8cff490..06f7745 100644 --- a/tests/basic.rs +++ b/tests/basic.rs @@ -1,14 +1,18 @@ -use lemmy_help::{vimdoc::VimDoc, FromEmmy, LemmyHelp, Settings}; +use lemmy_help::{parser, vimdoc::VimDoc, FromEmmy, Settings}; macro_rules! lemmy { - ($($src: expr),*) => {{ - let mut lemmy = LemmyHelp::default(); + ($src: expr) => {{ let s = Settings::default(); - $( - lemmy.for_help($src, &s).unwrap(); - )* - VimDoc::from_emmy(&lemmy, &s).to_string() - }}; + let ast = parser($src, &s); + VimDoc::from_emmy(&ast, &s).to_string() + }}; // ($($src: expr),*) => {{ + // let mut lemmy = LemmyHelp::default(); + // let s = Settings::default(); + // $( + // lemmy.for_help($src, &s).unwrap(); + // )* + // VimDoc::from_emmy(&lemmy, &s).to_string() + // }}; } #[test] @@ -467,8 +471,10 @@ fn module() { return U "#; + let docs = String::from_iter([lemmy!(src), lemmy!(src2)]); + assert_eq!( - lemmy!(src, src2), + docs, "\ ============================================================================== Introduction *mod.intro* @@ -536,8 +542,18 @@ fn table_of_contents() { return U "; + let src2 = r#" + ---@mod outside.module Outside Module + + local U = {} + + return U + "#; + + let docs = String::from_iter([lemmy!(src), lemmy!(src2)]); + assert_eq!( - lemmy!(src), + docs, "\ ============================================================================== Table of Contents *my-plugin.contents* @@ -545,6 +561,7 @@ Table of Contents *my-plugin.contents* First Module ···················································· |first.module| Second Module ·················································· |second.module| Third Module ···················································· |third.module| +Outside Module ················································ |outside.module| ============================================================================== First Module *first.module* diff --git a/tests/types.rs b/tests/types.rs index 440c525..c416efa 100644 --- a/tests/types.rs +++ b/tests/types.rs @@ -1,5 +1,5 @@ use chumsky::Parser; -use lemmy_help::lexer::{Lexer, Member, Name, Ty}; +use lemmy_help::lexer::{lexer, Member, Name, Ty}; macro_rules! b { ($t:expr) => { @@ -9,19 +9,20 @@ macro_rules! b { #[test] fn types() { - let type_parse = Lexer::init(); + let type_parse = lexer(); macro_rules! 
check { ($s:expr, $ty:expr) => { assert_eq!( type_parse .parse(concat!("---@type ", $s)) + .into_output() .unwrap() .into_iter() .next() .unwrap() .0, - lemmy_help::lexer::TagType::Type($ty, None) + lemmy_help::lexer::Token::Type($ty, None) ); }; } @@ -37,7 +38,7 @@ fn types() { check!("thread", Ty::Thread); check!("userdata", Ty::Userdata); check!("lightuserdata", Ty::Lightuserdata); - check!("Any-Thing.El_se", Ty::Ref("Any-Thing.El_se".into())); + check!("Any-Thing.El_se", Ty::Ref("Any-Thing.El_se")); check!( "(string|number|table)[]", @@ -58,8 +59,8 @@ fn types() { Ty::Array(b!(Ty::Table(Some(( b!(Ty::String), b!(Ty::Dict(vec![ - (Name::Req("get".into()), Ty::String), - (Name::Req("set".into()), Ty::String), + (Name::Req("get"), Ty::String), + (Name::Req("set"), Ty::String), ])) ))))) ); @@ -69,7 +70,7 @@ fn types() { Ty::Table(Some(( b!(Ty::String), b!(Ty::Fun( - vec![(Name::Req("a".into()), Ty::String)], + vec![(Name::Req("a"), Ty::String)], Some(vec![Ty::String]) )) ))) @@ -100,19 +101,16 @@ fn types() { ): number, string|string[]", Ty::Fun( vec![ - (Name::Req("a".into()), Ty::String), + (Name::Req("a"), Ty::String), ( - Name::Req("b".into()), + Name::Req("b"), Ty::Union( b!(Ty::String), b!(Ty::Union(b!(Ty::Number), b!(Ty::Boolean))) ) ), - ( - Name::Req("c".into()), - Ty::Array(b!(Ty::Array(b!(Ty::Number)))) - ), - (Name::Opt("d".into()), Ty::Ref("SomeClass".into())), + (Name::Req("c"), Ty::Array(b!(Ty::Array(b!(Ty::Number))))), + (Name::Opt("d"), Ty::Ref("SomeClass")), ], Some(vec![ Ty::Number, @@ -133,17 +131,17 @@ fn types() { ): table", Ty::Fun( vec![ - (Name::Req("a".into()), Ty::String), - (Name::Opt("b".into()), Ty::String), - (Name::Req("c".into()), Ty::Function), - (Name::Req( - "d".into()), + (Name::Req("a"), Ty::String), + (Name::Opt("b"), Ty::String), + (Name::Req("c"), Ty::Function), + ( + Name::Req("d"), Ty::Fun(vec![ - (Name::Req("z".into()), Ty::String) + (Name::Req("z"), Ty::String) ], None) ), - (Name::Req( - "e".into()), + ( + Name::Req("e"), Ty::Union( b!(Ty::String), b!(Ty::Union( @@ -151,13 +149,13 @@ fn types() { b!(Ty::Union( b!(Ty::Table(Some((b!(Ty::String), b!(Ty::String))))), b!(Ty::Fun( - vec![(Name::Req( - "y".into()), + vec![( + Name::Req("y"), Ty::Union( b!(Ty::Array(b!(Ty::String))), b!(Ty::Union( b!(Ty::Dict(vec![ - (Name::Req("get".into()), Ty::Function) + (Name::Req("get"), Ty::Function) ])), b!(Ty::String) )) @@ -182,18 +180,18 @@ fn types() { __proto__?: { _?: unknown } }", Ty::Dict(vec![ - (Name::Req("inner".into()), Ty::String), + (Name::Req("inner"), Ty::String), ( - Name::Req("get".into()), - Ty::Fun(vec![(Name::Req("a".into()), Ty::Unknown)], None,) + Name::Req("get"), + Ty::Fun(vec![(Name::Req("a"), Ty::Unknown)], None,) ), ( - Name::Req("set".into()), - Ty::Fun(vec![(Name::Req("a".into()), Ty::Unknown)], None) + Name::Req("set"), + Ty::Fun(vec![(Name::Req("a"), Ty::Unknown)], None) ), ( - Name::Opt("__proto__".into()), - Ty::Dict(vec![(Name::Opt("_".into()), Ty::Unknown)]) + Name::Opt("__proto__"), + Ty::Dict(vec![(Name::Opt("_"), Ty::Unknown)]) ) ]) ); @@ -201,11 +199,11 @@ fn types() { check!( r#"'"g@"'|string[]|'"g@$"'|number"#, Ty::Union( - b!(Ty::Member(Member::Literal(r#""g@""#.into()))), + b!(Ty::Member(Member::Literal(r#""g@""#))), b!(Ty::Union( b!(Ty::Array(b!(Ty::String))), b!(Ty::Union( - b!(Ty::Member(Member::Literal(r#""g@$""#.into()))), + b!(Ty::Member(Member::Literal(r#""g@$""#))), b!(Ty::Number) )) )) @@ -223,7 +221,7 @@ fn types() { b!(Ty::Union( b!(Ty::Array(b!(Ty::Union(b!(Ty::String), b!(Ty::Number))))), b!(Ty::Union( 
- b!(Ty::Fun(vec![(Name::Req("a".into()), Ty::String)], None)), + b!(Ty::Fun(vec![(Name::Req("a"), Ty::String)], None)), b!(Ty::Union( b!(Ty::Table(Some((b!(Ty::String), b!(Ty::Number))))), b!(Ty::Array(b!(Ty::Userdata))) diff --git a/tests/with_settings.rs b/tests/with_settings.rs index c4768cd..4cf8c69 100644 --- a/tests/with_settings.rs +++ b/tests/with_settings.rs @@ -1,4 +1,4 @@ -use lemmy_help::{vimdoc::VimDoc, FromEmmy, LemmyHelp, Settings}; +use lemmy_help::{parser, vimdoc::VimDoc, FromEmmy, Settings}; const CODE: &str = r#" local U = {} @@ -26,7 +26,6 @@ return U #[test] fn rename_with_return() { - let mut lemmy = LemmyHelp::new(); let s = Settings { prefix_func: true, prefix_alias: true, @@ -34,11 +33,10 @@ fn rename_with_return() { prefix_type: true, ..Default::default() }; - - lemmy.for_help(CODE, &s).unwrap(); + let ast = parser(CODE, &s); assert_eq!( - VimDoc::from_emmy(&lemmy, &s).to_string(), + VimDoc::from_emmy(&ast, &s).to_string(), "\ ID *U.ID* @@ -81,7 +79,6 @@ U:create() *U:create* fn rename_with_mod() { let src = format!("---@mod awesome This is working {CODE}"); - let mut lemmy = LemmyHelp::new(); let s = Settings { prefix_func: true, prefix_alias: true, @@ -90,7 +87,7 @@ fn rename_with_mod() { ..Default::default() }; - lemmy.for_help(&src, &s).unwrap(); + let lemmy = parser(&src, &s); assert_eq!( VimDoc::from_emmy(&lemmy, &s).to_string(), @@ -155,13 +152,12 @@ end return M "; - let mut lemmy = LemmyHelp::new(); let s = Settings { expand_opt: true, ..Default::default() }; - lemmy.for_help(src, &s).unwrap(); + let lemmy = parser(src, &s); assert_eq!( VimDoc::from_emmy(&lemmy, &s).to_string(), From ead73bab9e7ab27561fc2181e4b14e09500cb506 Mon Sep 17 00:00:00 2001 From: numToStr Date: Fri, 17 Nov 2023 19:44:59 +0530 Subject: [PATCH 4/4] before changing pc --- src/cli.rs | 2 +- src/lib.rs | 50 ++---------------------------- src/vimdoc.rs | 86 +++++++++++++++++++++++++++++++++++++++++---------- 3 files changed, 73 insertions(+), 65 deletions(-) diff --git a/src/cli.rs b/src/cli.rs index a395010..3a41b44 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -81,7 +81,7 @@ impl Cli { // FIXME: toc entries for f in self.files { let source = read_to_string(f).unwrap(); - let ast = parser(&source, &self.settings); + let ast = parser(&source); let doc = VimDoc::from_emmy(&ast, &self.settings); help_doc.push_str(&doc.to_string()); } diff --git a/src/lib.rs b/src/lib.rs index 1a80aa1..01ea56e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -159,12 +159,12 @@ impl<'src, T: FromEmmy<'src>> AsDoc<'src, T> for Document<'src> { /// let ast = lemmy_help::parser(&src, &settings); /// assert!(!ast.nodes().is_empty()); /// ``` -pub fn parser<'src>(src: &'src str, settings: &'src Settings) -> Document<'src> { +pub fn parser(src: &str) -> Document<'_> { let Some(tokens) = lexer().parse(src).into_output() else { return Document::default() }; - let Some(mut emmynode) = node_parser() + let Some(nodes) = node_parser() .repeated() .collect::>() .parse(tokens.as_slice().spanned((src.len()..src.len()).into())) @@ -173,51 +173,5 @@ pub fn parser<'src>(src: &'src str, settings: &'src Settings) -> Document<'src> return Document::default() }; - let Some(Node::Export(export)) = emmynode.pop() else { - return Document::default() - }; - - let mut nodes = vec![]; - - let module = match emmynode.iter().rev().find(|x| matches!(x, Node::Module(_))) { - Some(Node::Module(m)) => m.name, - _ => export, - }; - - for ele in emmynode { - match ele { - Node::Export(..) 
=> {} - Node::Func(mut func) => { - if func.prefix.left == Some(export) { - if settings.prefix_func { - func.prefix.right = Some(module); - } - nodes.push(Node::Func(func)); - } - } - Node::Type(mut typ) => { - if typ.prefix.left == Some(export) { - if settings.prefix_type { - typ.prefix.right = Some(module); - } - nodes.push(Node::Type(typ)); - } - } - Node::Alias(mut alias) => { - if settings.prefix_alias { - alias.prefix.right = Some(module); - } - nodes.push(Node::Alias(alias)) - } - Node::Class(mut class) => { - if settings.prefix_class { - class.prefix.right = Some(module); - } - nodes.push(Node::Class(class)) - } - x => nodes.push(x), - } - } - Document { nodes } } diff --git a/src/vimdoc.rs b/src/vimdoc.rs index 3527302..9ca4cb1 100644 --- a/src/vimdoc.rs +++ b/src/vimdoc.rs @@ -9,10 +9,10 @@ use crate::{ /// Text Width const TW: usize = 80; -#[derive(Debug)] -pub struct VimDoc(String); +#[derive(Debug, Default)] +pub struct VimDoc<'src>(Vec>); -impl Visitor for VimDoc { +impl Visitor for VimDoc<'_> { type R = String; type S = Settings; @@ -335,26 +335,80 @@ impl Visitor for VimDoc { } } -impl<'src> FromEmmy<'src> for VimDoc { +impl<'src> FromEmmy<'src> for VimDoc<'src> { type Settings = Settings; - fn from_emmy(t: &'src impl crate::Nodes<'src>, s: &Self::Settings) -> Self { - let mut shelf = Self(String::new()); - let nodes = t.nodes(); - for node in nodes { - if let Node::Toc(x) = node { - shelf.0.push_str(&shelf.toc(x, nodes, s)); - } else { - shelf.0.push_str(&node.accept(&shelf, s)); + fn from_emmy(t: &'src impl crate::Nodes<'src>, setting: &Self::Settings) -> Self { + let mut emmynodes = t.nodes(); + + let Some(Node::Export(export)) = emmynodes.pop() else { + return Self::default() + }; + + let mut nodes = vec![]; + + let module = match emmynodes + .iter() + .rev() + .find(|x| matches!(x, Node::Module(_))) + { + Some(Node::Module(m)) => m.name, + _ => export, + }; + + for ele in emmynodes { + match ele { + Node::Export(..) => {} + Node::Func(mut func) => { + if func.prefix.left == Some(export) { + if setting.prefix_func { + func.prefix.right = Some(module); + } + nodes.push(Node::Func(func)); + } + } + Node::Type(mut typ) => { + if typ.prefix.left == Some(export) { + if setting.prefix_type { + typ.prefix.right = Some(module); + } + nodes.push(Node::Type(typ)); + } + } + Node::Alias(mut alias) => { + if setting.prefix_alias { + alias.prefix.right = Some(module); + } + nodes.push(Node::Alias(alias)) + } + Node::Class(mut class) => { + if setting.prefix_class { + class.prefix.right = Some(module); + } + nodes.push(Node::Class(class)) + } + x => nodes.push(x), } - shelf.0.push('\n'); } - shelf + + // let mut shelf = String::new(); + // let nodes = t.nodes(); + // for node in nodes { + // if let Node::Toc(x) = node { + // shelf.push_str(&shelf.toc(x, nodes, s)); + // } else { + // shelf.push_str(&node.accept(&shelf, s)); + // } + // shelf.push('\n'); + // } + // shelf + + Self(nodes) } } -impl Display for VimDoc { +impl Display for VimDoc<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(self.0.as_str()) + todo!() } }
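
A note on the parser helpers introduced by the zero-copy rewrite earlier in the series: `param_parser` and `return_parser` both inline the same match that prepends an optional leading description onto any trailing `---` comment lines. The logic is small enough to factor out if the duplication ever becomes a nuisance. The sketch below is purely illustrative and not part of the patches; `merge_desc` is a hypothetical name, and the snippet is self-contained rather than wired into the chumsky pipeline.

// Hypothetical helper (not in the patches): prepend an optional leading
// description to the follow-up comment lines, keeping every string borrowed
// from the source, as the new zero-copy parsers do.
fn merge_desc<'src>(desc: Option<&'src str>, extra: Vec<&'src str>) -> Vec<&'src str> {
    match desc {
        Some(d) => {
            let mut out = Vec::with_capacity(extra.len() + 1);
            out.push(d);
            out.extend(extra);
            out
        }
        None => extra,
    }
}

fn main() {
    // e.g. `---@param this number First number` followed by `---Second line`
    let desc = merge_desc(Some("First number"), vec!["Second line"]);
    assert_eq!(desc, ["First number", "Second line"]);
    assert_eq!(merge_desc(None, vec!["Only comments"]), ["Only comments"]);
}

With such a helper, the two `.map(...)` closures would reduce to `Param { name, ty, desc: merge_desc(desc, extra) }` and `Return { name, ty, desc: merge_desc(desc, extra) }` respectively.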
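
Two small observations about the borrowed-string switch in the vimdoc renderer: replacing `Option<String>` with `Option<&'src str>` is what allows the earlier `as_deref().unwrap_or_default()` calls to become a plain `unwrap_or_default()` (since `&str: Default` yields `""`), and the new `Code` enum renders a multi-line usage block by joining its borrowed lines with newlines. A minimal, self-contained sketch of both behaviours follows; the `Prefix` and `Code` shapes mirror the patch, while `render` is a hypothetical helper added only for this demonstration.

use std::fmt::{self, Display};

#[derive(Debug, Clone, Default)]
struct Prefix<'src> {
    left: Option<&'src str>,
    right: Option<&'src str>,
}

#[derive(Debug, Clone)]
enum Code<'src> {
    InLine(&'src str),
    MultiLine(Vec<&'src str>),
}

impl Display for Code<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::InLine(x) => f.write_str(x),
            Self::MultiLine(x) => f.write_str(&x.join("\n")),
        }
    }
}

// Hypothetical helper, not part of the patches: an absent prefix renders as "".
fn render(prefix: &Prefix<'_>, name: &str) -> String {
    format!(
        "{}{} -> {}{}",
        prefix.left.unwrap_or_default(),
        name,
        prefix.right.unwrap_or_default(),
        name
    )
}

fn main() {
    let p = Prefix { left: Some("U."), right: Some("M.") };
    assert_eq!(render(&p, "sum"), "U.sum -> M.sum");
    assert_eq!(render(&Prefix::default(), "sum"), "sum -> sum");

    let inline = Code::InLine("require('U').sum(1, 2)");
    assert_eq!(inline.to_string(), "require('U').sum(1, 2)");

    let multi = Code::MultiLine(vec!["local U = require('U')", "U.sum(1, 2)"]);
    assert_eq!(multi.to_string(), "local U = require('U')\nU.sum(1, 2)");
}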