Stream parser

A simple library for parsing data streams.
Parsing is split into two tasks:

- Splitting an iterator into tokens. This is done by a `Lexer`; its contract is sketched below.
- Processing the tokens: the `Tokenized` struct provides helper functions for processing the stream of tokens.
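
Concretely, a lexer implements a single method that consumes the characters of exactly one token from the input iterator and hands them to an output callback. The signature below is taken from the example that follows; writing the error type as `ParserError<Self::Char>` generalizes the concrete `ParserError<char>` used there and is an assumption about the trait's generic form:

```rust
// The `Lexer` contract: each call to `consume_next_token` reads one
// token's worth of characters from `input` and emits them via `output`.
fn consume_next_token(
    &mut self,
    input: &mut (impl Iterator<Item = Self::Char> + PeekingNext),
    output: impl FnMut(Self::Char),
) -> Result<(), ParserError<Self::Char>>;
```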
Example
```rust
use itertools::{Itertools, PeekingNext};
use libreda_stream_parser::*;

struct ArrayLexer {}

impl Lexer for ArrayLexer {
    type Char = char;

    fn consume_next_token(
        &mut self,
        input: &mut (impl Iterator<Item = Self::Char> + PeekingNext),
        mut output: impl FnMut(Self::Char),
    ) -> Result<(), ParserError<char>> {
        // Skip whitespace.
        let _n = input.peeking_take_while(|c| c.is_whitespace()).count();

        // `[`, `]` and `,` are single-character tokens; whitespace ends a token.
        let is_terminal_char = |c: char| -> bool {
            let terminals = "[],";
            c.is_whitespace() || terminals.contains(c)
        };

        if let Some(c) = input.next() {
            output(c);
            // Continue reading the token if `c` was not a terminal character.
            if !is_terminal_char(c) {
                input
                    .peeking_take_while(|&c| !is_terminal_char(c))
                    .for_each(output);
            }
        }

        Ok(())
    }
}

/// Parse an array of the form `[1.0, 2, 3.1324,]`.
///
/// Note: every element, including the last one, must be followed by a
/// comma, since the loop below expects a `,` after each value.
fn parse_array(data: &str) -> Result<Vec<f64>, ParserError<char>> {
    let mut tk = tokenize(data.chars(), ArrayLexer {});
    // Load the first token.
    tk.advance()?;

    let mut arr: Vec<f64> = vec![];

    tk.expect_str("[")?;
    loop {
        if tk.test_str("]")? {
            break;
        }
        let num = tk.take_and_parse()?;
        arr.push(num);
        tk.expect_str(",")?;
    }

    Ok(arr)
}

let data = r#"
    [
        1.23,
        2.34,
        3.456,
    ]
"#;

let arr = parse_array(data).expect("parsing failed");
assert_eq!(arr, vec![1.23, 2.34, 3.456]);
```
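
Malformed input surfaces as a `ParserError`. A minimal negative check, assuming `expect_str` returns an error when the current token differs from the expected string:

```rust
// The separating comma after `1.0` is missing, so `expect_str(",")`
// should fail and the whole parse returns an `Err`.
let bad = parse_array("[ 1.0 2.0, ]");
assert!(bad.is_err());
```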