snekdown/src/tests/lexer_tests.rs

use crate::lexer::tokenize;
use crate::lexer::tokens::{HeaderStartToken, LinebreakToken, WhitespaceToken, WordToken};
use charred::token::UnknownToken;
use std::io::Cursor;

#[tokio::test]
async fn it_tokenizes_everything() {
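    // Sample document covering headers, imports, plain words, whitespace and tables.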
let input = r#"
# A Snekdown Document
With multiple lines
<[import.md]
And some whitespaces
| tables | exist |
|--------|-------|
"#;
    let tokens = tokenize(Cursor::new(input)).await.unwrap();
    let mut tokens = tokens.into_iter();
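    // The raw string begins with a newline followed by "# A ...", so the first
    // tokens should be: linebreak, header start, whitespace, then the first word.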
    assert!(tokens.next().unwrap().is::<LinebreakToken>());
    assert!(tokens.next().unwrap().is::<HeaderStartToken>());
    assert!(tokens.next().unwrap().is::<WhitespaceToken>());
    assert!(tokens.next().unwrap().is::<WordToken>());
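    // None of the remaining tokens should have been left unrecognized.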
    assert!(tokens.all(|t| !t.is::<UnknownToken>()));
}