perf(ecma/parser): swc ecma lexer #10016

Draft: wants to merge 10 commits into base: main
crates/swc_common/src/input.rs (1 addition, 1 deletion)

@@ -40,7 +40,7 @@ impl<'a> StringInput<'a> {
}

#[inline(always)]
pub fn as_str(&self) -> &str {
pub fn as_str(&self) -> &'a str {
self.iter.as_str()
}
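The only change here is the returned lifetime. Tying the slice to `'a` (the source text) instead of to `&self` is what later lets the same source be handed to the new borrowing RawLexer (see the `RawLexer::new(input.as_str(), syntax)` call in lexer/mod.rs below). A minimal sketch with stub types, not the real swc_common API, of what the looser lifetime buys:

```rust
// Sketch with stub types: with `-> &'a str` the returned slice borrows the
// source text, not the `Input` wrapper, so the wrapper can keep being
// advanced while the slice is still alive.
struct Input<'a> {
    iter: std::str::Chars<'a>,
}

impl<'a> Input<'a> {
    fn as_str(&self) -> &'a str {
        // `Chars::as_str` already returns `&'a str`; this simply stops
        // shortening the lifetime at the API boundary.
        self.iter.as_str()
    }
}

fn main() {
    let mut input = Input { iter: "let x = 1;".chars() };
    let rest = input.as_str(); // borrows the source, not `input`
    let _ = input.iter.next(); // would not compile if `rest` borrowed `input`
    println!("{}", rest);
}
```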

crates/swc_common/src/syntax_pos.rs (4 additions, 3 deletions)

@@ -381,9 +381,10 @@ impl Span {

#[inline]
pub fn new(mut lo: BytePos, mut hi: BytePos) -> Self {
if lo > hi {
std::mem::swap(&mut lo, &mut hi);
}
// TODO: perf remove the core::cmp
// if lo > hi {
// std::mem::swap(&mut lo, &mut hi);
// }

Span { lo, hi }
}
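The commented-out branch existed to normalize reversed spans. Removing it drops a compare and a possible swap from a constructor that runs for every token and node, at the cost of making `lo <= hi` a caller-side invariant. A sketch of one alternative, which is not what this diff does, keeping the check in debug builds only (with a stubbed `BytePos`):

```rust
// Sketch with a stubbed BytePos (the real one lives in swc_common): keep
// the `lo <= hi` invariant checked in debug builds while paying nothing
// in release builds.
#[derive(Clone, Copy, PartialEq, PartialOrd)]
struct BytePos(u32);

struct Span {
    lo: BytePos,
    hi: BytePos,
}

impl Span {
    #[inline]
    fn new(lo: BytePos, hi: BytePos) -> Self {
        // Callers are responsible for ordering; this check compiles away
        // when debug assertions are disabled.
        debug_assert!(lo <= hi, "Span::new called with lo > hi");
        Span { lo, hi }
    }
}

fn main() {
    let s = Span::new(BytePos(0), BytePos(4));
    println!("lo={} hi={}", s.lo.0, s.hi.0);
}
```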
crates/swc_ecma_parser/examples/lexer.rs (21 additions, 15 deletions)

@@ -3,7 +3,7 @@ use swc_common::{
sync::Lrc,
FileName, SourceMap,
};
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax};
use swc_ecma_parser::{lexer::Lexer, Capturing, Parser, StringInput, Syntax, Tokens};

fn main() {
let cm: Lrc<SourceMap> = Default::default();
@@ -14,10 +14,7 @@ fn main() {
// .load_file(Path::new("test.js"))
// .expect("failed to load test.js");

let fm = cm.new_source_file(
FileName::Custom("test.js".into()).into(),
"function foo() {}".into(),
);
let fm = cm.new_source_file(FileName::Custom("test.js".into()).into(), "08e1".into());

let lexer = Lexer::new(
Syntax::Es(Default::default()),
@@ -26,18 +23,27 @@
None,
);

let capturing = Capturing::new(lexer);
for token in lexer {
println!("{:?}", token);
}

let mut parser = Parser::new_from(capturing);
// let errors =
// let errors = lexer.take_errors();

for e in parser.take_errors() {
e.into_diagnostic(&handler).emit();
}
// println!("error: \n", errors);

// let capturing = Capturing::new(lexer);

// let mut parser = Parser::new_from(capturing);

// for e in parser.take_errors() {
// e.into_diagnostic(&handler).emit();
// }

let _module = parser
.parse_module()
.map_err(|e| e.into_diagnostic(&handler).emit())
.expect("Failed to parse module.");
// let _module = parser
// .parse_module()
// .map_err(|e| e.into_diagnostic(&handler).emit())
// .expect("Failed to parse module.");

println!("Tokens: {:?}", parser.input().take());
// println!("Tokens: {:?}", parser.input().take());
}
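The example now drives the `Lexer` directly as an iterator instead of going through `Capturing` and `Parser`. A hedged sketch of how the loop could also drain lexer errors afterwards, assuming `take_errors` is available on the lexer via the newly imported `Tokens` trait (as the commented-out `lexer.take_errors()` line suggests); iterating over `&mut lexer` keeps ownership so the errors can still be read:

```rust
// Sketch: print every token, then report how many errors the lexer
// accumulated. Assumes `Lexer` exposes `Tokens::take_errors`.
use swc_ecma_parser::{lexer::Lexer, Tokens};

fn lex_and_report(lexer: &mut Lexer) {
    // Iterating a `&mut Lexer` does not consume the lexer itself.
    for token in &mut *lexer {
        println!("token: {:?}", token);
    }
    let errors = lexer.take_errors();
    if !errors.is_empty() {
        eprintln!("{} lexing error(s)", errors.len());
    }
}
```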
crates/swc_ecma_parser/examples/raw_lex.rs (new file, 23 additions)

@@ -0,0 +1,23 @@
use swc_ecma_parser::{raw_lexer::RawLexer, Syntax};

fn main() {
let source = "hello #!aslk";

let mut lexer = RawLexer::new(source, Syntax::Es(Default::default()));

loop {
match lexer.read_next_token() {
Ok(token) => {
if token.kind.is_eof() {
break;
} else {
println!("token: {:?}", token);
}
}
Err(e) => {
println!("e {:?}", e);
break;
}
}
}
}
crates/swc_ecma_parser/src/error.rs (4 additions, 0 deletions)

@@ -37,6 +37,10 @@ impl Error {
pub fn into_kind(self) -> SyntaxError {
self.error.1
}

pub fn set_kind(&mut self, kind: SyntaxError) {
self.error.1 = kind;
}
}

#[derive(Debug, Clone, PartialEq)]
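`set_kind` allows the `SyntaxError` payload of an existing error to be replaced in place, presumably so the new raw-lexer path can refine an error once more context is known, without rebuilding it and losing its span. A sketch of that pattern with stub types, not the real swc_ecma_parser definitions:

```rust
// Stub types for illustration only: keep the span, swap the kind.
#[derive(Debug, Clone, PartialEq)]
enum SyntaxError {
    Unexpected(char),       // stub variant
    StrictModeViolation,    // stub variant
}

struct Error {
    span: (u32, u32),
    kind: SyntaxError,
}

impl Error {
    fn set_kind(&mut self, kind: SyntaxError) {
        self.kind = kind;
    }
}

fn main() {
    let mut err = Error { span: (3, 4), kind: SyntaxError::Unexpected('0') };
    // Once the surrounding context is known, reclassify the error in place
    // instead of constructing a new one.
    err.set_kind(SyntaxError::StrictModeViolation);
    println!("{:?} at {:?}", err.kind, err.span);
}
```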
crates/swc_ecma_parser/src/lexer/mod.rs (4 additions, 0 deletions)

@@ -21,6 +21,7 @@ use self::{
};
use crate::{
error::{Error, SyntaxError},
raw_lexer::RawLexer,
token::{BinOpToken, IdentLike, Token, Word},
Context, Syntax,
};
@@ -116,6 +117,7 @@ impl FusedIterator for CharIter {}
#[derive(Clone)]
pub struct Lexer<'a> {
comments: Option<&'a dyn Comments>,
raw_lexer: RawLexer<'a>,
/// [Some] if comment parsing is enabled. Otherwise [None]
comments_buffer: Option<CommentsBuffer>,

@@ -147,9 +149,11 @@ impl<'a> Lexer<'a> {
let start_pos = input.last_pos();

Lexer {
raw_lexer: RawLexer::new(input.as_str(), syntax),
comments,
comments_buffer: comments.is_some().then(CommentsBuffer::new),
ctx: Default::default(),
// input,
input,
start_pos,
state: State::new(syntax, start_pos),
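This is where the pieces meet: `Lexer<'a>` now embeds a `RawLexer<'a>` built from `input.as_str()`, so the old input wrapper and the new raw lexer both borrow the same source for `'a`, which only type-checks because of the `&'a str` change to `StringInput::as_str` above. A sketch of that ownership shape with stub types:

```rust
// Stub types showing the ownership picture; none of these are the real
// swc definitions.
struct StringInput<'a> {
    iter: std::str::Chars<'a>,
}

impl<'a> StringInput<'a> {
    fn new(src: &'a str) -> Self {
        StringInput { iter: src.chars() }
    }
    fn as_str(&self) -> &'a str {
        self.iter.as_str()
    }
}

struct RawLexer<'a> {
    rest: &'a str,
}

struct Lexer<'a> {
    raw_lexer: RawLexer<'a>,
    input: StringInput<'a>,
}

impl<'a> Lexer<'a> {
    fn new(input: StringInput<'a>) -> Self {
        // Both fields end up borrowing the same `&'a str`; this only works
        // because `as_str` returns `&'a str` rather than `&str`.
        Lexer {
            raw_lexer: RawLexer { rest: input.as_str() },
            input,
        }
    }
}

fn main() {
    let lexer = Lexer::new(StringInput::new("const a = 1;"));
    println!("{} / {}", lexer.raw_lexer.rest, lexer.input.as_str());
}
```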