From 648b8b9cd65a6f89384c659ab110251f167e9eb7 Mon Sep 17 00:00:00 2001 From: brian Date: Tue, 20 Sep 2022 17:10:47 -0400 Subject: [PATCH 1/3] don't expose parser and tokenizer modules --- src/lib.rs | 7 +++---- src/tokenizer.rs | 4 ++-- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index 342ca21..ded4de8 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -2,22 +2,21 @@ mod macros; pub mod ast; mod kinds; -pub mod parser; +mod parser; #[cfg(test)] mod tests; mod token_set; -pub mod tokenizer; +mod tokenizer; use std::marker::PhantomData; pub use self::{kinds::SyntaxKind, tokenizer::tokenize}; use ast::AstNode; -use parser::ParseError; +pub use parser::ParseError; use rowan::GreenNode; pub use rowan::{NodeOrToken, TextRange, TextSize, TokenAtOffset, WalkEvent}; pub(crate) use token_set::TokenSet; - use self::tokenizer::Tokenizer; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] diff --git a/src/tokenizer.rs b/src/tokenizer.rs index 4cc734c..b0c52b5 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -47,8 +47,8 @@ impl Eq for State<'_> {} pub type Token<'a> = (SyntaxKind, &'a str); /// A convenience function for tokenizing the given input -pub fn tokenize(input: &str) -> Vec<Token<'_>> { - Tokenizer::new(input).collect() +pub fn tokenize(input: &str) -> impl Iterator<Item = Token<'_>> + '_ { + Tokenizer::new(input) } /// The tokenizer. You may want to use the `tokenize` convenience function from this module instead. 
From 20b9decc5a0f455f585f3882ddb60dd5bd3047f0 Mon Sep 17 00:00:00 2001 From: brian Date: Thu, 22 Sep 2022 16:07:44 -0400 Subject: [PATCH 2/3] update examples --- examples/error-report.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/error-report.rs b/examples/error-report.rs index 56eee82..e14bc9d 100644 --- a/examples/error-report.rs +++ b/examples/error-report.rs @@ -1,4 +1,4 @@ -use rnix::parser::ParseError; +use rnix::ParseError; use std::{env, fs}; fn main() { From a189954ba258ce80682aa17da923a60628b5966d Mon Sep 17 00:00:00 2001 From: brian Date: Thu, 22 Sep 2022 16:13:58 -0400 Subject: [PATCH 3/3] update documentation and expose Token --- src/lib.rs | 4 ++-- src/tokenizer.rs | 5 ++--- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index ded4de8..39e1b8a 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -17,7 +17,7 @@ pub use parser::ParseError; use rowan::GreenNode; pub use rowan::{NodeOrToken, TextRange, TextSize, TokenAtOffset, WalkEvent}; pub(crate) use token_set::TokenSet; -use self::tokenizer::Tokenizer; +pub use tokenizer::Token; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] pub enum NixLanguage {} @@ -44,7 +44,7 @@ pub use ast::Root; impl Root { pub fn parse(s: &str) -> Parse { - let (green, errors) = parser::parse(Tokenizer::new(s)); + let (green, errors) = parser::parse(tokenize(s)); Parse { green, errors, _ty: PhantomData } } } diff --git a/src/tokenizer.rs b/src/tokenizer.rs index b0c52b5..9a30d98 100644 --- a/src/tokenizer.rs +++ b/src/tokenizer.rs @@ -46,13 +46,12 @@ impl Eq for State<'_> {} pub type Token<'a> = (SyntaxKind, &'a str); -/// A convenience function for tokenizing the given input +/// Tokenize the given input pub fn tokenize(input: &str) -> impl Iterator<Item = Token<'_>> + '_ { Tokenizer::new(input) } -/// The tokenizer. You may want to use the `tokenize` convenience function from this module instead. 
-pub struct Tokenizer<'a> { +struct Tokenizer<'a> { ctx: Vec<Context>, state: State<'a>, }