diff --git a/.github/workflows/clippy-rustfmt-fix.yml b/.github/workflows/clippy-rustfmt-fix.yml
new file mode 100644
index 00000000..620863af
--- /dev/null
+++ b/.github/workflows/clippy-rustfmt-fix.yml
@@ -0,0 +1,37 @@
+name: Run automated fixes on current branch
+
+on:
+ workflow_dispatch
+
+env:
+ CACHE_PATHS: |
+ ~/.cargo/bin/
+ ~/.cargo/registry/index/
+ ~/.cargo/registry/cache/
+ ~/.cargo/git/db/
+ target/
+
+jobs:
+ run-fixes:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v4
+ - uses: actions/cache@v4
+ with:
+ path: ${{ env.CACHE_PATHS }}
+ key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+
+ - name: Run automated fixes
+ run: |
+ cargo clippy --fix
+ cargo fmt
+
+ - name: Commit and push
+ run: |
+ git config user.name "github-actions[bot]"
+ git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
+
+ git add .
+ git commit -m "Run clippy --fix & formatting"
+
+ git push
diff --git a/.github/workflows/performance-and-size.yml b/.github/workflows/performance-and-size.yml
index 4a9f38de..9ce0f5b3 100644
--- a/.github/workflows/performance-and-size.yml
+++ b/.github/workflows/performance-and-size.yml
@@ -50,23 +50,23 @@ jobs:
shell: bash
run: |
# Generate a file which contains everything that Ezno currently implements
- cargo run -p ezno-parser --example code_blocks_to_script ./checker/specification/specification.md ./demo.ts
- echo "::info::Finished file generation"
+ cargo run -p ezno-parser --example code_blocks_to_script ./checker/specification/specification.md --comment-headers --out ./demo.tsx
- LINES_OF_CODE=$(scc -c --no-cocomo -f json demo.ts | jq ".[0].Code")
+ LINES_OF_CODE=$(scc -c --no-cocomo -f json demo.tsx | jq ".[0].Code")
echo "### Checking
\`\`\`shell
- $(hyperfine -i './target/release/ezno check demo.ts')
+ $(hyperfine -i './target/release/ezno check demo.tsx')
\`\`\`" >> $GITHUB_STEP_SUMMARY
echo "
Input
- \`\`\`ts
+ \`\`\`tsx
// $LINES_OF_CODE lines of TypeScript generated from specification.md
- // this is not meant to accurately represent a program but instead give an idea for how it scales across all the type checking features
- $(cat ./demo.ts)
+ // this is not meant to accurately represent a program but instead give an idea
+ // for how it scales across all the type checking features
+ $(cat ./demo.tsx)
\`\`\`
" >> $GITHUB_STEP_SUMMARY
@@ -77,7 +77,7 @@ jobs:
Diagnostics
\`\`\`
- $(./target/release/ezno check demo.ts --timings --max-diagnostics all 2>&1 || true)
+ $(./target/release/ezno check demo.tsx --timings --max-diagnostics all 2>&1 || true)
\`\`\`
" >> $GITHUB_STEP_SUMMARY
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 841ef39a..9daaaca7 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -223,20 +223,26 @@ jobs:
rustup install nightly
rustup default nightly
- - uses: brndnmtthws/rust-action-cargo-binstall@v1
+ # Current `cargo-fuzz` is broken: https://github.com/kaleidawave/ezno/pull/158#issuecomment-2171431210
+ # However `cargo install --git ...` does work below
+ # - uses: brndnmtthws/rust-action-cargo-binstall@v1
+ # if: steps.changes.outputs.parser == 'true'
+ # with:
+ # packages: cargo-fuzz
+
+ - name: Install cargo-fuzz
if: steps.changes.outputs.parser == 'true'
- with:
- packages: cargo-fuzz
-
+ run: cargo install --git https://github.com/rust-fuzz/cargo-fuzz.git
+
- name: Run fuzzing
env:
SHORT_CIRCUIT: true
if: steps.changes.outputs.parser == 'true'
run: |
if ${{ env.SHORT_CIRCUIT }}; then
- cargo fuzz run -s none ${{ matrix.fuzz-target }} -- -timeout=10 -use_value_profile=1 -max_total_time=120
+ CARGO_TARGET_DIR=../../target cargo fuzz run -s none ${{ matrix.fuzz-target }} -- -timeout=10 -use_value_profile=1 -max_total_time=120
else
- cargo fuzz run -s none ${{ matrix.fuzz-target }} -- -timeout=10 -use_value_profile=1 -max_total_time=300 -fork=1 -ignore_crashes=1
+ CARGO_TARGET_DIR=../../target cargo fuzz run -s none ${{ matrix.fuzz-target }} -- -timeout=10 -use_value_profile=1 -max_total_time=300 -fork=1 -ignore_crashes=1
if test -d fuzz/artifacts; then
find fuzz/artifacts -type f -print -exec xxd {} \; -exec cargo fuzz fmt -s none module_roundtrip_structured {} \;; false;
diff --git a/checker/specification/specification.md b/checker/specification/specification.md
index 7e1b66f1..a5d0d3d5 100644
--- a/checker/specification/specification.md
+++ b/checker/specification/specification.md
@@ -38,10 +38,10 @@ const b: string = a
```ts
let a = 2
a = "hello world"
-let b: boolean = a
+a satisfies number
```
-- Type "hello world" is not assignable to type boolean
+- Expected number, found "hello world"
#### Variable references does not exist
diff --git a/checker/src/context/environment.rs b/checker/src/context/environment.rs
index f22d9a2c..a2283b1b 100644
--- a/checker/src/context/environment.rs
+++ b/checker/src/context/environment.rs
@@ -384,7 +384,7 @@ impl<'a> Environment<'a> {
}
}
}
- Assignable::ObjectDestructuring(assignments) => {
+ Assignable::ObjectDestructuring(members, _spread) => {
debug_assert!(matches!(operator, AssignmentKind::Assign));
let rhs = A::synthesise_expression(
@@ -395,13 +395,13 @@ impl<'a> Environment<'a> {
);
self.assign_to_object_destructure_handle_errors(
- assignments,
+ members,
rhs,
assignment_span,
checking_data,
)
}
- Assignable::ArrayDestructuring(assignments) => {
+ Assignable::ArrayDestructuring(members, _spread) => {
debug_assert!(matches!(operator, AssignmentKind::Assign));
let rhs = A::synthesise_expression(
@@ -412,7 +412,7 @@ impl<'a> Environment<'a> {
);
self.assign_to_array_destructure_handle_errors(
- assignments,
+ members,
rhs,
assignment_span,
checking_data,
@@ -463,14 +463,14 @@ impl<'a> Environment<'a> {
checking_data,
assignment_span,
),
- Assignable::ObjectDestructuring(assignments) => self
+ Assignable::ObjectDestructuring(assignments, _spread) => self
.assign_to_object_destructure_handle_errors(
assignments,
rhs,
assignment_span,
checking_data,
),
- Assignable::ArrayDestructuring(assignments) => self
+ Assignable::ArrayDestructuring(assignments, _spread) => self
.assign_to_array_destructure_handle_errors(
assignments,
rhs,
@@ -549,7 +549,6 @@ impl<'a> Environment<'a> {
checking_data,
);
}
- AssignableObjectDestructuringField::Spread(_, _) => todo!(),
}
}
diff --git a/checker/src/features/assignments.rs b/checker/src/features/assignments.rs
index 0ab6a52b..d9742156 100644
--- a/checker/src/features/assignments.rs
+++ b/checker/src/features/assignments.rs
@@ -10,8 +10,8 @@ use super::operations::{LogicalOperator, MathematicalAndBitwise};
/// A single or multiple items to assign to
pub enum Assignable<A: crate::ASTImplementation> {
Reference(Reference),
- ObjectDestructuring(Vec<AssignableObjectDestructuringField<A>>),
- ArrayDestructuring(Vec<AssignableArrayDestructuringField<A>>),
+ ObjectDestructuring(Vec<AssignableObjectDestructuringField<A>>, Option<AssignableSpread<A>>),
+ ArrayDestructuring(Vec<AssignableArrayDestructuringField<A>>, Option<AssignableSpread<A>>),
}
/// TODO Can this use lifetimes?
@@ -29,12 +29,14 @@ pub enum AssignableObjectDestructuringField<A: crate::ASTImplementation> {
default_value: Option<Box<A::Expression<'static>>>,
position: SpanWithSource,
},
- /// `{ ...x }`
- Spread(Assignable<A>, SpanWithSource),
}
+pub struct AssignableSpread<A: crate::ASTImplementation>(
+ pub Box<Assignable<A>>,
+ pub SpanWithSource,
+);
+
pub enum AssignableArrayDestructuringField<A: crate::ASTImplementation> {
- Spread(Assignable<A>, SpanWithSource),
Name(Assignable<A>, Option<Box<A::Expression<'static>>>),
Comment { content: String, is_multiline: bool, position: SpanWithSource },
None,
diff --git a/checker/src/lib.rs b/checker/src/lib.rs
index d388aa3d..b80a62fd 100644
--- a/checker/src/lib.rs
+++ b/checker/src/lib.rs
@@ -1,5 +1,6 @@
#![doc = include_str!("../README.md")]
#![allow(deprecated, clippy::new_without_default, clippy::too_many_lines, clippy::result_unit_err)]
+#![warn(clippy::must_use_candidate)]
pub mod context;
pub mod diagnostics;
diff --git a/checker/src/synthesis/assignments.rs b/checker/src/synthesis/assignments.rs
index 540269aa..5ad9948a 100644
--- a/checker/src/synthesis/assignments.rs
+++ b/checker/src/synthesis/assignments.rs
@@ -9,7 +9,7 @@ use crate::{
context::Environment,
features::assignments::{
Assignable, AssignableArrayDestructuringField, AssignableObjectDestructuringField,
- Reference,
+ AssignableSpread, Reference,
},
synthesis::expressions::synthesise_expression,
types::properties::{PropertyKey, Publicity},
@@ -35,11 +35,11 @@ impl SynthesiseToAssignable for LHSOfAssignment {
checking_data: &mut CheckingData<T, super::EznoParser>,
) -> Assignable<super::EznoParser> {
match self {
- LHSOfAssignment::ObjectDestructuring(items, _) => {
- synthesise_object_to_reference(items, environment, checking_data)
+ LHSOfAssignment::ObjectDestructuring { members, spread, position: _ } => {
+ synthesise_object_to_reference(members, spread, environment, checking_data)
}
- LHSOfAssignment::ArrayDestructuring(items, _) => {
- synthesise_array_to_reference(items, environment, checking_data)
+ LHSOfAssignment::ArrayDestructuring { members, spread, position: _ } => {
+ synthesise_array_to_reference(members, spread, environment, checking_data)
}
LHSOfAssignment::VariableOrPropertyAccess(access) => Assignable::Reference(
synthesise_access_to_reference(access, environment, checking_data),
@@ -55,11 +55,11 @@ impl SynthesiseToAssignable for VariableField {
checking_data: &mut CheckingData<T, super::EznoParser>,
) -> Assignable<super::EznoParser> {
match self {
- VariableField::Object(items, _) => {
- synthesise_object_to_reference(items, environment, checking_data)
+ VariableField::Object { members, spread, position: _ } => {
+ synthesise_object_to_reference(members, spread, environment, checking_data)
}
- VariableField::Array(items, _) => {
- synthesise_array_to_reference(items, environment, checking_data)
+ VariableField::Array { members, spread, position: _ } => {
+ synthesise_array_to_reference(members, spread, environment, checking_data)
}
VariableField::Name(ident) => Assignable::Reference(match ident {
VariableIdentifier::Standard(name, position) => Reference::Variable(
@@ -79,6 +79,7 @@ fn synthesise_object_to_reference<
U: SynthesiseToAssignable + parser::DestructuringFieldInto,
>(
items: &[parser::WithComment<parser::ObjectDestructuringField<U>>],
+ spread: &Option<parser::SpreadDestructuringField<U>>,
environment: &mut Environment,
checking_data: &mut CheckingData<T, super::EznoParser>,
) -> Assignable<super::EznoParser> {
@@ -98,16 +99,6 @@ fn synthesise_object_to_reference<
position: position.with_source(environment.get_source()),
}
}
- parser::ObjectDestructuringField::Spread(name, position) => {
- AssignableObjectDestructuringField::Spread(
- SynthesiseToAssignable::synthesise_to_assignable(
- name,
- environment,
- checking_data,
- ),
- position.with_source(environment.get_source()),
- )
- }
parser::ObjectDestructuringField::Map {
from,
annotation: _,
@@ -135,6 +126,16 @@ fn synthesise_object_to_reference<
}
})
.collect(),
+ spread.as_ref().map(|spread| {
+ AssignableSpread(
+ Box::new(SynthesiseToAssignable::synthesise_to_assignable(
+ &*spread.0,
+ environment,
+ checking_data,
+ )),
+ spread.1.with_source(environment.get_source()),
+ )
+ }),
)
}
@@ -143,6 +144,7 @@ fn synthesise_array_to_reference<
U: SynthesiseToAssignable + parser::DestructuringFieldInto,
>(
items: &[parser::WithComment<parser::ArrayDestructuringField<U>>],
+ spread: &Option<parser::SpreadDestructuringField<U>>,
environment: &mut Environment,
checking_data: &mut CheckingData<T, super::EznoParser>,
) -> Assignable<super::EznoParser> {
@@ -150,16 +152,6 @@ fn synthesise_array_to_reference<
items
.iter()
.map(|item| match item.get_ast_ref() {
- parser::ArrayDestructuringField::Spread(name, position) => {
- AssignableArrayDestructuringField::Spread(
- SynthesiseToAssignable::synthesise_to_assignable(
- name,
- environment,
- checking_data,
- ),
- position.with_source(environment.get_source()),
- )
- }
parser::ArrayDestructuringField::Name(name, _, default_value) => {
AssignableArrayDestructuringField::Name(
SynthesiseToAssignable::synthesise_to_assignable(
@@ -180,6 +172,16 @@ fn synthesise_array_to_reference<
parser::ArrayDestructuringField::None => AssignableArrayDestructuringField::None,
})
.collect(),
+ spread.as_ref().map(|spread| {
+ AssignableSpread(
+ Box::new(SynthesiseToAssignable::synthesise_to_assignable(
+ &*spread.0,
+ environment,
+ checking_data,
+ )),
+ spread.1.with_source(environment.get_source()),
+ )
+ }),
)
}
diff --git a/checker/src/synthesis/classes.rs b/checker/src/synthesis/classes.rs
index 1ea7884d..9a9b9fc9 100644
--- a/checker/src/synthesis/classes.rs
+++ b/checker/src/synthesis/classes.rs
@@ -109,7 +109,7 @@ pub(super) fn synthesise_class_declaration<
match &member.on {
ClassMember::Method(false, method) => {
let publicity = match method.name.get_ast_ref() {
- ParserPropertyKey::Ident(
+ ParserPropertyKey::Identifier(
_,
_,
parser::property_key::PublicOrPrivate::Private,
@@ -133,7 +133,7 @@ pub(super) fn synthesise_class_declaration<
}
};
- let internal_marker = if let (true, ParserPropertyKey::Ident(name, _, _)) =
+ let internal_marker = if let (true, ParserPropertyKey::Identifier(name, _, _)) =
(is_declare, method.name.get_ast_ref())
{
get_internal_function_effect_from_decorators(
@@ -179,7 +179,7 @@ pub(super) fn synthesise_class_declaration<
}
ClassMember::Property(false, property) => {
let publicity = match property.key.get_ast_ref() {
- ParserPropertyKey::Ident(
+ ParserPropertyKey::Identifier(
_,
_,
parser::property_key::PublicOrPrivate::Private,
@@ -256,7 +256,7 @@ pub(super) fn synthesise_class_declaration<
match &member.on {
ClassMember::Method(true, method) => {
let publicity_kind = match method.name.get_ast_ref() {
- ParserPropertyKey::Ident(
+ ParserPropertyKey::Identifier(
_,
_,
parser::property_key::PublicOrPrivate::Private,
@@ -264,7 +264,7 @@ pub(super) fn synthesise_class_declaration<
_ => Publicity::Public,
};
- let internal_marker = if let (true, ParserPropertyKey::Ident(name, _, _)) =
+ let internal_marker = if let (true, ParserPropertyKey::Identifier(name, _, _)) =
(is_declare, method.name.get_ast_ref())
{
get_internal_function_effect_from_decorators(
@@ -321,7 +321,7 @@ pub(super) fn synthesise_class_declaration<
}
ClassMember::Property(true, property) => {
let publicity_kind = match property.key.get_ast_ref() {
- ParserPropertyKey::Ident(
+ ParserPropertyKey::Identifier(
_,
_,
parser::property_key::PublicOrPrivate::Private,
diff --git a/checker/src/synthesis/functions.rs b/checker/src/synthesis/functions.rs
index 2620b55f..6ae05d48 100644
--- a/checker/src/synthesis/functions.rs
+++ b/checker/src/synthesis/functions.rs
@@ -4,8 +4,8 @@ use iterator_endiate::EndiateIteratorExt;
use parser::{
expressions::ExpressionOrBlock,
functions::{LeadingParameter, ParameterData},
- ASTNode, Block, FunctionBased, Span, TypeAnnotation, TypeParameter, VariableField,
- VariableIdentifier, WithComment,
+ ASTNode, Block, FunctionBased, Span, SpreadDestructuringField, TypeAnnotation, TypeParameter,
+ VariableField, VariableIdentifier, WithComment,
};
use crate::{
@@ -494,14 +494,10 @@ pub(super) fn variable_field_to_string(param: &VariableField) -> String {
String::new()
}
}
- VariableField::Array(items, _) => {
+ VariableField::Array { members, spread, .. } => {
let mut buf = String::from("[");
- for (not_at_end, item) in items.iter().nendiate() {
- match item.get_ast_ref() {
- parser::ArrayDestructuringField::Spread(name, _) => {
- buf.push_str("...");
- buf.push_str(&variable_field_to_string(name));
- }
+ for (not_at_end, member) in members.iter().nendiate() {
+ match member.get_ast_ref() {
parser::ArrayDestructuringField::Name(name, ..) => {
buf.push_str(&variable_field_to_string(name));
}
@@ -512,26 +508,28 @@ pub(super) fn variable_field_to_string(param: &VariableField) -> String {
buf.push_str(", ");
}
}
+ if let Some(SpreadDestructuringField(name, _)) = spread {
+ if !members.is_empty() {
+ buf.push_str(", ");
+ }
+ buf.push_str("...");
+ buf.push_str(&variable_field_to_string(name));
+ }
buf.push(']');
-
buf
}
- VariableField::Object(items, _) => {
+ VariableField::Object { members, spread, .. } => {
let mut buf = String::from("{");
- for (not_at_end, item) in items.iter().nendiate() {
+ for (not_at_end, item) in members.iter().nendiate() {
match item.get_ast_ref() {
parser::ObjectDestructuringField::Name(name, ..) => {
if let VariableIdentifier::Standard(name, ..) = name {
buf.push_str(name);
}
}
- parser::ObjectDestructuringField::Spread(name, _) => {
- buf.push_str("...");
- buf.push_str(&variable_field_to_string(name));
- }
parser::ObjectDestructuringField::Map { from, name, .. } => {
match from {
- parser::PropertyKey::Ident(ident, _, _) => {
+ parser::PropertyKey::Identifier(ident, _, _) => {
buf.push_str(ident);
}
parser::PropertyKey::StringLiteral(_, _, _) => todo!(),
@@ -546,6 +544,13 @@ pub(super) fn variable_field_to_string(param: &VariableField) -> String {
buf.push_str(", ");
}
}
+ if let Some(SpreadDestructuringField(name, _)) = spread {
+ if !members.is_empty() {
+ buf.push_str(", ");
+ }
+ buf.push_str("...");
+ buf.push_str(&variable_field_to_string(name));
+ }
buf.push_str(" }");
buf
@@ -560,8 +565,12 @@ fn get_parameter_name(parameter: &parser::VariableField) -> String {
VariableIdentifier::Standard(ref name, _) => name.to_owned(),
VariableIdentifier::Marker(_, _) => String::new(),
},
- VariableField::Array(_items, _) => "todo".to_owned(),
- VariableField::Object(_, _) => "todo".to_owned(),
+ VariableField::Array { members: _, spread: _, position: _ } => {
+ "todo: VariableField::Array".to_owned()
+ }
+ VariableField::Object { members: _, spread: _, position: _ } => {
+ "todo: VariableField::Object".to_owned()
+ }
}
}
diff --git a/checker/src/synthesis/interfaces.rs b/checker/src/synthesis/interfaces.rs
index ac5e8ff3..cf006657 100644
--- a/checker/src/synthesis/interfaces.rs
+++ b/checker/src/synthesis/interfaces.rs
@@ -61,7 +61,7 @@ impl SynthesiseInterfaceBehavior for OnToType {
(
if matches!(
key,
- parser::PropertyKey::Ident(
+ parser::PropertyKey::Identifier(
_,
_,
parser::property_key::PublicOrPrivate::Private
diff --git a/checker/src/synthesis/mod.rs b/checker/src/synthesis/mod.rs
index 1bda34b3..c3254f74 100644
--- a/checker/src/synthesis/mod.rs
+++ b/checker/src/synthesis/mod.rs
@@ -57,7 +57,7 @@ impl crate::ASTImplementation for EznoParser {
type VariableField<'_a> = parser::VariableField;
- type ForStatementInitiliser<'_a> = parser::statements::ForLoopStatementinitialiser;
+ type ForStatementInitiliser<'_a> = parser::statements::ForLoopStatementInitialiser;
fn module_from_string(
// TODO remove
@@ -163,13 +163,13 @@ impl crate::ASTImplementation for EznoParser {
checking_data: &mut crate::CheckingData,
) {
match for_loop_initialiser {
- parser::statements::ForLoopStatementinitialiser::VariableDeclaration(declaration) => {
+ parser::statements::ForLoopStatementInitialiser::VariableDeclaration(declaration) => {
// TODO is this correct & the best
hoist_variable_declaration(declaration, environment, checking_data);
synthesise_variable_declaration(declaration, environment, checking_data, false);
}
- parser::statements::ForLoopStatementinitialiser::VarStatement(_) => todo!(),
- parser::statements::ForLoopStatementinitialiser::Expression(_) => todo!(),
+ parser::statements::ForLoopStatementInitialiser::VarStatement(_) => todo!(),
+ parser::statements::ForLoopStatementInitialiser::Expression(_) => todo!(),
}
}
@@ -206,7 +206,7 @@ pub(super) fn parser_property_key_to_checker_property_key<
perform_side_effect_computed: bool,
) -> PropertyKey<'static> {
match property_key {
- ParserPropertyKey::StringLiteral(value, ..) | ParserPropertyKey::Ident(value, ..) => {
+ ParserPropertyKey::StringLiteral(value, ..) | ParserPropertyKey::Identifier(value, ..) => {
PropertyKey::String(std::borrow::Cow::Owned(value.clone()))
}
ParserPropertyKey::NumberLiteral(number, _) => {
diff --git a/checker/src/synthesis/type_annotations.rs b/checker/src/synthesis/type_annotations.rs
index c3ad4c49..996d5d60 100644
--- a/checker/src/synthesis/type_annotations.rs
+++ b/checker/src/synthesis/type_annotations.rs
@@ -526,7 +526,10 @@ pub(super) fn synthesise_type_annotation(
}
acc
}
- TypeAnnotation::Infer(name, _pos) => {
+ TypeAnnotation::Infer { name, extends, position: _ } => {
+ if extends.is_some() {
+ crate::utilities::notify!("TODO");
+ }
if let Scope::TypeAnnotationCondition { ref mut infer_parameters } =
environment.context_type.scope
{
diff --git a/checker/src/synthesis/variables.rs b/checker/src/synthesis/variables.rs
index 5ba943f2..34e137be 100644
--- a/checker/src/synthesis/variables.rs
+++ b/checker/src/synthesis/variables.rs
@@ -2,7 +2,7 @@ use std::borrow::Cow;
use parser::{
declarations::VariableDeclarationItem, ASTNode, ArrayDestructuringField,
- ObjectDestructuringField, VariableField, VariableIdentifier,
+ ObjectDestructuringField, SpreadDestructuringField, VariableField, VariableIdentifier,
};
use super::expressions::synthesise_expression;
@@ -62,24 +62,27 @@ pub(crate) fn register_variable(
parser::VariableField::Name(variable) => {
register_variable_identifier(variable, environment, checking_data, argument);
}
- parser::VariableField::Array(items, _) => {
- for (idx, field) in items.iter().enumerate() {
+ parser::VariableField::Array { members, spread, position: _ } => {
+ if let Some(_spread) = spread {
+ todo!()
+ }
+ for (idx, field) in members.iter().enumerate() {
match field.get_ast_ref() {
- ArrayDestructuringField::Spread(variable, _pos) => {
- // TODO
- let argument = VariableRegisterArguments {
- constant: argument.constant,
- space: argument.space,
- initial_value: argument.initial_value,
- };
- register_variable(
- variable,
- environment,
- checking_data,
- // TODO
- argument,
- );
- }
+ // ArrayDestructuringField::Spread(variable, _pos) => {
+ // // TODO
+ // let argument = VariableRegisterArguments {
+ // constant: argument.constant,
+ // space: argument.space,
+ // initial_value: argument.initial_value,
+ // };
+ // register_variable(
+ // variable,
+ // environment,
+ // checking_data,
+ // // TODO
+ // argument,
+ // );
+ // }
ArrayDestructuringField::Name(name, _type, _initial_value) => {
// TODO account for spread in `idx`
let key = PropertyKey::from_usize(idx);
@@ -96,8 +99,8 @@ pub(crate) fn register_variable(
}
}
}
- parser::VariableField::Object(items, _) => {
- for field in items {
+ parser::VariableField::Object { members, spread, .. } => {
+ for field in members {
match field.get_ast_ref() {
ObjectDestructuringField::Name(variable, _type, ..) => {
let name = match variable {
@@ -119,19 +122,6 @@ pub(crate) fn register_variable(
argument,
);
}
- ObjectDestructuringField::Spread(variable, _) => {
- register_variable(
- variable,
- environment,
- checking_data,
- // TODO
- VariableRegisterArguments {
- constant: argument.constant,
- space: argument.space,
- initial_value: argument.initial_value,
- },
- );
- }
ObjectDestructuringField::Map {
from,
// TODO
@@ -157,6 +147,19 @@ pub(crate) fn register_variable(
}
}
}
+ if let Some(SpreadDestructuringField(variable, _position)) = spread {
+ register_variable(
+ variable,
+ environment,
+ checking_data,
+ // TODO
+ VariableRegisterArguments {
+ constant: argument.constant,
+ space: argument.space,
+ initial_value: argument.initial_value,
+ },
+ );
+ }
}
}
}
@@ -249,10 +252,10 @@ fn assign_initial_to_fields(
}
}
}
- VariableField::Array(_items, pos) => {
+ VariableField::Array { members: _, spread: _, position } => {
checking_data.raise_unimplemented_error(
"destructuring array (needs iterator)",
- pos.with_source(environment.get_source()),
+ position.with_source(environment.get_source()),
);
// for (idx, item) in items.iter().enumerate() {
// match item.get_ast_ref() {
@@ -282,10 +285,9 @@ fn assign_initial_to_fields(
// ArrayDestructuringField::Comment { .. } | ArrayDestructuringField::None => {}
// }
}
- VariableField::Object(items, _) => {
- for item in items {
- match item.get_ast_ref() {
- ObjectDestructuringField::Spread(_, _) => todo!(),
+ VariableField::Object { members, spread, .. } => {
+ for member in members {
+ match member.get_ast_ref() {
ObjectDestructuringField::Name(name, _, default_value, _) => {
let position = name.get_position().with_source(environment.get_source());
let id = crate::VariableId(environment.get_source(), position.start);
@@ -435,6 +437,9 @@ fn assign_initial_to_fields(
}
}
}
+ if let Some(_spread) = spread {
+ todo!()
+ }
}
}
}
diff --git a/parser/README.md b/parser/README.md
index f1ff1128..4a8227b3 100644
--- a/parser/README.md
+++ b/parser/README.md
@@ -55,6 +55,7 @@ fn main() {
- Output
- Stripping type annotations can be stripped from output using `ToStringOptions { include_types: false, ..Default::default() }`
- Adding indentation under `pretty: true`, not adding whitespace for production builds
+ - Setting `max_line_length` to a size to wrap certain structures
- Support for source map mapping generation
## Non-goals
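For context on the output options listed above, here is a minimal sketch of how they fit together. It is based on the `ToStringOptions` fields used in the `parse.rs` example later in this diff (`include_type_annotations`, `pretty`, `max_line_length`) and on `Module::from_string_with_options` as called there; the `ezno_parser` import path and the `ASTNode::to_string` call are assumptions, not something this PR confirms.

```rust
use ezno_parser::{ASTNode, Module, ParseOptions, ToStringOptions};

fn main() {
    // A long, fully annotated declaration to exercise stripping and wrapping
    let source =
        "const x: Array<number> = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]".to_owned();
    let (module, _state) =
        Module::from_string_with_options(source, ParseOptions::default(), None)
            .expect("valid input");

    let options = ToStringOptions {
        include_type_annotations: false, // drop `: Array<number>` from the output
        pretty: true,                    // indent for humans
        max_line_length: 60,             // wrap structures that would exceed 60 columns
        ..Default::default()
    };
    println!("{}", module.to_string(&options));
}
```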
diff --git a/parser/examples/code_blocks_to_script.rs b/parser/examples/code_blocks_to_script.rs
index 2c18dc80..eeeaf655 100644
--- a/parser/examples/code_blocks_to_script.rs
+++ b/parser/examples/code_blocks_to_script.rs
@@ -12,6 +12,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let path = args.first().ok_or("expected path to markdown file")?;
let replace_satisfies_with_as = args.iter().any(|item| item == "--satisfies-with-as");
+ let add_headers_as_comments = args.iter().any(|item| item == "--comment-headers");
let into_files_directory_and_extension = args.windows(3).find_map(|item| {
matches!(item[0].as_str(), "--into-files").then_some((item[1].clone(), item[2].clone()))
@@ -31,19 +32,17 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
while let Some(line) = lines.next() {
if line.starts_with("```ts") {
- let mut indented_code = lines
- .by_ref()
- .take_while(|line| !line.starts_with("```"))
- .fold("\t".to_owned(), |mut a, s| {
+ let code = lines.by_ref().take_while(|line| !line.starts_with("```")).fold(
+ String::new(),
+ |mut a, s| {
a.push_str(s);
- a.push_str("\n\t");
+ a.push_str("\n");
a
- });
+ },
+ );
- debug_assert_eq!(indented_code.pop(), Some('\t'));
-
- if !filters.iter().any(|filter| indented_code.contains(filter)) {
- blocks.push((std::mem::take(&mut current), indented_code));
+ if !filters.iter().any(|filter| code.contains(filter)) {
+ blocks.push((std::mem::take(&mut current), code));
}
} else if let Some(header) = line.strip_prefix("#### ") {
current = header.to_owned();
@@ -118,16 +117,36 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
}
}
+ // If there is an existing block with no overlapping names, add to it, otherwise create a new one
if let Some((items, block)) =
final_blocks.iter_mut().find(|(uses, _)| uses.is_disjoint(&names))
{
items.extend(names.into_iter());
- block.push_str("\n// ");
- block.push_str(&header);
+ if add_headers_as_comments {
+ block.push_str("\n\t// ");
+ block.push_str(&header);
+ }
+ for line in code.lines() {
+ block.push_str("\n\t");
+ block.push_str(&line);
+ }
+ // If the block is not terminated, it can change the parsing of the next one
+ if block.ends_with(')') {
+ block.push(';');
+ }
block.push('\n');
- block.push_str(&code);
} else {
- final_blocks.push((names, code));
+ let mut block = String::new();
+ if add_headers_as_comments {
+ block.push_str("\t// ");
+ block.push_str(&header);
+ }
+ for line in code.lines() {
+ block.push_str("\n\t");
+ block.push_str(&line);
+ }
+ block.push('\n');
+ final_blocks.push((names, block));
}
}
diff --git a/parser/examples/parse.rs b/parser/examples/parse.rs
index 038b9e81..f04a2769 100644
--- a/parser/examples/parse.rs
+++ b/parser/examples/parse.rs
@@ -22,14 +22,10 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
let type_definition_module = args.iter().any(|item| item == "--type-definition-module");
let type_annotations = !args.iter().any(|item| item == "--no-type-annotations");
- // `parse -> print -> parse -> print` and compare difference (same as fuzzing process)
- let double = args.iter().any(|item| item == "--double");
-
let print_ast = args.iter().any(|item| item == "--ast");
- // double => pretty and render thus `|| double`
- let render_output = args.iter().any(|item| item == "--render") || double;
- let pretty = args.iter().any(|item| item == "--pretty") || double;
+ let render_output = args.iter().any(|item| item == "--render");
+ let pretty = args.iter().any(|item| item == "--pretty");
let now = Instant::now();
@@ -52,23 +48,10 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
..ParseOptions::default()
};
- // let parse_options = ParseOptions {
- // stack_size: Some(STACK_SIZE_MB * 1024 * 1024),
- // jsx: false,
- // type_annotations: false,
- // retain_blank_lines: true,
- // ..Default::default()
- // };
-
let mut fs = source_map::MapFileStore::<source_map::WithPathMap>::default();
let source = std::fs::read_to_string(path.clone())?;
- // let source = String::from_utf8([0x2f, 0x8, 0x2f, 0xa].to_vec()).unwrap();
- // let source = "if (this) return; else switch (this) {\n}\n\n\n".to_string();
- // let source = ";\n\n".to_string();
- // let source = "{};;;".to_string();
-
let source_id = fs.new_source_id(path.into(), source.clone());
eprintln!("parsing {:?} bytes", source.len());
@@ -83,20 +66,18 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
if print_ast {
println!("{module:#?}");
}
- if source_maps || render_output || double || render_timings {
+ if source_maps || render_output || render_timings {
let now = Instant::now();
- // let to_string_options = ToStringOptions {
- // expect_markers: true,
- // include_type_annotations: type_annotations,
- // pretty,
- // comments: if pretty { Comments::All } else { Comments::None },
- // // 60 is temp
- // max_line_length: if pretty { 60 } else { u8::MAX },
- // ..Default::default()
- // };
-
- let to_string_options = ToStringOptions::default();
+ let to_string_options = ToStringOptions {
+ expect_markers: true,
+ include_type_annotations: type_annotations,
+ pretty,
+ comments: if pretty { Comments::All } else { Comments::None },
+ // 60 is temp
+ max_line_length: if pretty { 60 } else { u8::MAX },
+ ..Default::default()
+ };
let (output, source_map) =
module.to_string_with_source_map(&to_string_options, source_id, &fs);
@@ -111,33 +92,6 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
if render_output {
println!("{output}");
}
-
- if double {
- let result2 =
- Module::from_string_with_options(output.clone(), parse_options, None);
- return match result2 {
- Ok((module2, _state)) => {
- let output2 = module2
- .to_string_with_source_map(&to_string_options, source_id, &fs)
- .0;
-
- if output == output2 {
- eprintln!("{output:?} == {output2:?}");
- eprintln!("re-parse was equal ✅");
- Ok(())
- } else {
- eprintln!("{output:?} != {output2:?} (original = {source:?})");
- eprintln!("initial {:?}", module);
- eprintln!("re-parsed {:?}", module2);
- Err(Box::::from("not equal"))
- }
- }
- Err(parse_err) => {
- eprintln!("error parsing output: {output:?} from {module:?}");
- Err(Box::<dyn std::error::Error>::from(parse_err))
- }
- };
- }
}
if display_keywords {
diff --git a/parser/examples/pretty_printing.rs b/parser/examples/pretty_printing.rs
index 8959538a..22422dff 100644
--- a/parser/examples/pretty_printing.rs
+++ b/parser/examples/pretty_printing.rs
@@ -17,6 +17,8 @@ function x(a: { something: string, another: number, third: "yes" }, b: Array<{ e
for (let i = 0; i < 5; i += 1) {
console.log("here 2")
}
+
+ const x = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 100, 5000, 1000, 122200, 100];
}
"#;
diff --git a/parser/fuzz/Cargo.lock b/parser/fuzz/Cargo.lock
index 8592bec9..6c3c6df6 100644
--- a/parser/fuzz/Cargo.lock
+++ b/parser/fuzz/Cargo.lock
@@ -4,9 +4,9 @@ version = 3
[[package]]
name = "addr2line"
-version = "0.21.0"
+version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
+checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678"
dependencies = [
"gimli",
]
@@ -47,9 +47,9 @@ checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
[[package]]
name = "backtrace"
-version = "0.3.71"
+version = "0.3.73"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d"
+checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a"
dependencies = [
"addr2line",
"cc",
@@ -81,7 +81,7 @@ checksum = "cf4b9d6a944f767f8e5e0db018570623c85f3d925ac718db4e06d0187adb21c1"
[[package]]
name = "boa_ast"
version = "0.18.0"
-source = "git+https://github.com/boa-dev/boa.git#6130adf6d9b4db3fbc9de7491995c5e43925a94b"
+source = "git+https://github.com/boa-dev/boa.git#1eaf9230ae52440a8785d2befb4e380983190f24"
dependencies = [
"arbitrary",
"bitflags 2.5.0",
@@ -95,7 +95,7 @@ dependencies = [
[[package]]
name = "boa_gc"
version = "0.18.0"
-source = "git+https://github.com/boa-dev/boa.git#6130adf6d9b4db3fbc9de7491995c5e43925a94b"
+source = "git+https://github.com/boa-dev/boa.git#1eaf9230ae52440a8785d2befb4e380983190f24"
dependencies = [
"boa_macros",
"boa_profiler",
@@ -105,7 +105,7 @@ dependencies = [
[[package]]
name = "boa_interner"
version = "0.18.0"
-source = "git+https://github.com/boa-dev/boa.git#6130adf6d9b4db3fbc9de7491995c5e43925a94b"
+source = "git+https://github.com/boa-dev/boa.git#1eaf9230ae52440a8785d2befb4e380983190f24"
dependencies = [
"arbitrary",
"boa_gc",
@@ -121,7 +121,7 @@ dependencies = [
[[package]]
name = "boa_macros"
version = "0.18.0"
-source = "git+https://github.com/boa-dev/boa.git#6130adf6d9b4db3fbc9de7491995c5e43925a94b"
+source = "git+https://github.com/boa-dev/boa.git#1eaf9230ae52440a8785d2befb4e380983190f24"
dependencies = [
"proc-macro2",
"quote",
@@ -132,7 +132,7 @@ dependencies = [
[[package]]
name = "boa_profiler"
version = "0.18.0"
-source = "git+https://github.com/boa-dev/boa.git#6130adf6d9b4db3fbc9de7491995c5e43925a94b"
+source = "git+https://github.com/boa-dev/boa.git#1eaf9230ae52440a8785d2befb4e380983190f24"
[[package]]
name = "bumpalo"
@@ -148,9 +148,9 @@ checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9"
[[package]]
name = "cc"
-version = "1.0.98"
+version = "1.0.99"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41c270e7540d725e65ac7f1b212ac8ce349719624d7bcff99f8e2e488e8cf03f"
+checksum = "96c51067fd44124faa7f870b4b1c969379ad32b2ba805aa959430ceaa384f695"
dependencies = [
"jobserver",
"libc",
@@ -256,6 +256,17 @@ version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8"
+[[package]]
+name = "displaydoc"
+version = "0.2.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "either_n"
version = "0.2.0"
@@ -311,7 +322,7 @@ dependencies = [
[[package]]
name = "ezno-parser"
-version = "0.1.4"
+version = "0.1.5"
dependencies = [
"derive-debug-extras",
"derive-enum-from-into",
@@ -348,7 +359,7 @@ dependencies = [
[[package]]
name = "ezno-parser-visitable-derive"
-version = "0.0.6"
+version = "0.0.7"
dependencies = [
"string-cases",
"syn-helpers",
@@ -469,9 +480,9 @@ dependencies = [
[[package]]
name = "gimli"
-version = "0.28.1"
+version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
+checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd"
[[package]]
name = "gloo-utils"
@@ -538,9 +549,9 @@ dependencies = [
[[package]]
name = "httparse"
-version = "1.8.0"
+version = "1.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904"
+checksum = "d0e7a4dd27b9476dc40cb050d3632d3bba3a70ddbff012285f7f8559a1e7e545"
[[package]]
name = "httpdate"
@@ -550,9 +561,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9"
[[package]]
name = "hyper"
-version = "0.14.28"
+version = "0.14.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80"
+checksum = "f361cde2f109281a220d4307746cdfd5ee3f410da58a70377762396775634b33"
dependencies = [
"bytes",
"futures-channel",
@@ -585,14 +596,134 @@ dependencies = [
"tokio-native-tls",
]
+[[package]]
+name = "icu_collections"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "db2fa452206ebee18c4b5c2274dbf1de17008e874b4dc4f0aea9d01ca79e4526"
+dependencies = [
+ "displaydoc",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "13acbb8371917fc971be86fc8057c41a64b521c184808a698c02acc242dbf637"
+dependencies = [
+ "displaydoc",
+ "litemap",
+ "tinystr",
+ "writeable",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid_transform"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "01d11ac35de8e40fdeda00d9e1e9d92525f3f9d887cdd7aa81d727596788b54e"
+dependencies = [
+ "displaydoc",
+ "icu_locid",
+ "icu_locid_transform_data",
+ "icu_provider",
+ "tinystr",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_locid_transform_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
+
+[[package]]
+name = "icu_normalizer"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "19ce3e0da2ec68599d193c93d088142efd7f9c5d6fc9b803774855747dc6a84f"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_normalizer_data",
+ "icu_properties",
+ "icu_provider",
+ "smallvec",
+ "utf16_iter",
+ "utf8_iter",
+ "write16",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_normalizer_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
+
+[[package]]
+name = "icu_properties"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f8ac670d7422d7f76b32e17a5db556510825b29ec9154f235977c9caba61036"
+dependencies = [
+ "displaydoc",
+ "icu_collections",
+ "icu_locid_transform",
+ "icu_properties_data",
+ "icu_provider",
+ "tinystr",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_properties_data"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
+
+[[package]]
+name = "icu_provider"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ed421c8a8ef78d3e2dbc98a973be2f3770cb42b606e3ab18d6237c4dfde68d9"
+dependencies = [
+ "displaydoc",
+ "icu_locid",
+ "icu_provider_macros",
+ "stable_deref_trait",
+ "tinystr",
+ "writeable",
+ "yoke",
+ "zerofrom",
+ "zerovec",
+]
+
+[[package]]
+name = "icu_provider_macros"
+version = "1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ec89e9337638ecdc08744df490b221a7399bf8d164eb52a665454e60e075ad6"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "idna"
-version = "0.5.0"
+version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
+checksum = "4716a3a0933a1d01c2f72450e89596eb51dd34ef3c211ccd875acdf1f8fe47ed"
dependencies = [
- "unicode-bidi",
- "unicode-normalization",
+ "icu_normalizer",
+ "icu_properties",
+ "smallvec",
+ "utf8_iter",
]
[[package]]
@@ -641,12 +772,6 @@ dependencies = [
"wasm-bindgen",
]
-[[package]]
-name = "lazy_static"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646"
-
[[package]]
name = "libc"
version = "0.2.155"
@@ -670,6 +795,12 @@ version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
+[[package]]
+name = "litemap"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "643cb0b8d4fcc284004d5fd0d67ccf61dfffadb7f75e1e71bc420f4688a3a704"
+
[[package]]
name = "log"
version = "0.4.21"
@@ -694,9 +825,9 @@ checksum = "b8dd856d451cc0da70e2ef2ce95a18e39a93b7558bedf10201ad28503f918568"
[[package]]
name = "memchr"
-version = "2.7.2"
+version = "2.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d"
+checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3"
[[package]]
name = "mime"
@@ -726,11 +857,10 @@ dependencies = [
[[package]]
name = "native-tls"
-version = "0.2.11"
+version = "0.2.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e"
+checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466"
dependencies = [
- "lazy_static",
"libc",
"log",
"openssl",
@@ -773,9 +903,9 @@ dependencies = [
[[package]]
name = "object"
-version = "0.32.2"
+version = "0.36.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
+checksum = "576dfe1fc8f9df304abb159d767a29d0476f7750fbf8aa7ad07816004a207434"
dependencies = [
"memchr",
]
@@ -914,9 +1044,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
-version = "1.0.83"
+version = "1.0.85"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0b33eb56c327dec362a9e55b3ad14f9d2f0904fb5a5b03b513ab5465399e9f43"
+checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23"
dependencies = [
"unicode-ident",
]
@@ -1079,18 +1209,18 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.202"
+version = "1.0.203"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "226b61a0d411b2ba5ff6d7f73a476ac4f8bb900373459cd00fab8512828ba395"
+checksum = "7253ab4de971e72fb7be983802300c30b5a7f0c2e56fab8abfc6a214307c0094"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
-version = "1.0.202"
+version = "1.0.203"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6048858004bcff69094cd972ed40a32500f153bd3be9f716b2eed2e8217c4838"
+checksum = "500cbc0ebeb6f46627f50f3f5811ccf6bf00643be300b4c3eabc0ef55dc5b5ba"
dependencies = [
"proc-macro2",
"quote",
@@ -1146,6 +1276,12 @@ dependencies = [
"autocfg",
]
+[[package]]
+name = "smallvec"
+version = "1.13.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67"
+
[[package]]
name = "socket2"
version = "0.5.7"
@@ -1169,6 +1305,12 @@ dependencies = [
"wasm-bindgen",
]
+[[package]]
+name = "stable_deref_trait"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
+
[[package]]
name = "static_assertions"
version = "1.1.0"
@@ -1183,9 +1325,9 @@ checksum = "a31d23461f9e0fbe756cf9d5a36be93740fe12c8b094409a5f78f0f912ee2b6f"
[[package]]
name = "syn"
-version = "2.0.65"
+version = "2.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2863d96a84c6439701d7a38f9de935ec562c8832cc55d1dde0f513b52fad106"
+checksum = "c42f3f41a2de00b01c0aaad383c5a45241efc8b2d1eda5661812fda5f3cdcff5"
dependencies = [
"proc-macro2",
"quote",
@@ -1270,20 +1412,15 @@ dependencies = [
]
[[package]]
-name = "tinyvec"
-version = "1.6.0"
+name = "tinystr"
+version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50"
+checksum = "9117f5d4db391c1cf6927e7bea3db74b9a1c1add8f7eda9ffd5364f40f57b82f"
dependencies = [
- "tinyvec_macros",
+ "displaydoc",
+ "zerovec",
]
-[[package]]
-name = "tinyvec_macros"
-version = "0.1.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
-
[[package]]
name = "tokenizer-lib"
version = "1.6.0"
@@ -1295,9 +1432,9 @@ dependencies = [
[[package]]
name = "tokio"
-version = "1.37.0"
+version = "1.38.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787"
+checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a"
dependencies = [
"backtrace",
"bytes",
@@ -1387,44 +1524,41 @@ dependencies = [
"syn",
]
-[[package]]
-name = "unicode-bidi"
-version = "0.3.15"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75"
-
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"
-[[package]]
-name = "unicode-normalization"
-version = "0.1.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5"
-dependencies = [
- "tinyvec",
-]
-
[[package]]
name = "unicode-width"
-version = "0.1.12"
+version = "0.1.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6"
+checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d"
[[package]]
name = "url"
-version = "2.5.0"
+version = "2.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
+checksum = "f7c25da092f0a868cdf09e8674cd3b7ef3a7d92a24253e663a2fb85e2496de56"
dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
]
+[[package]]
+name = "utf16_iter"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c8232dd3cdaed5356e0f716d285e4b40b932ac434100fe9b7e0e8e935b9e6246"
+
+[[package]]
+name = "utf8_iter"
+version = "1.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be"
+
[[package]]
name = "vcpkg"
version = "0.2.15"
@@ -1686,12 +1820,48 @@ dependencies = [
"windows-sys 0.48.0",
]
+[[package]]
+name = "write16"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936"
+
+[[package]]
+name = "writeable"
+version = "0.5.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1e9df38ee2d2c3c5948ea468a8406ff0db0b29ae1ffde1bcf20ef305bcc95c51"
+
[[package]]
name = "yansi"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec"
+[[package]]
+name = "yoke"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c5b1314b079b0930c31e3af543d8ee1757b1951ae1e1565ec704403a7240ca5"
+dependencies = [
+ "serde",
+ "stable_deref_trait",
+ "yoke-derive",
+ "zerofrom",
+]
+
+[[package]]
+name = "yoke-derive"
+version = "0.7.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28cc31741b18cb6f1d5ff12f5b7523e3d6eb0852bbbad19d73905511d9849b95"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
[[package]]
name = "zerocopy"
version = "0.7.34"
@@ -1711,3 +1881,46 @@ dependencies = [
"quote",
"syn",
]
+
+[[package]]
+name = "zerofrom"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "91ec111ce797d0e0784a1116d0ddcdbea84322cd79e5d5ad173daeba4f93ab55"
+dependencies = [
+ "zerofrom-derive",
+]
+
+[[package]]
+name = "zerofrom-derive"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0ea7b4a3637ea8669cedf0f1fd5c286a17f3de97b8dd5a70a6c167a1730e63a5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+ "synstructure",
+]
+
+[[package]]
+name = "zerovec"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb2cc8827d6c0994478a15c53f374f46fbd41bea663d809b14744bc42e6b109c"
+dependencies = [
+ "yoke",
+ "zerofrom",
+ "zerovec-derive",
+]
+
+[[package]]
+name = "zerovec-derive"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97cf56601ee5052b4417d90c8755c6683473c926039908196cf35d99f893ebe7"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
diff --git a/parser/src/declarations/classes/class_member.rs b/parser/src/declarations/classes/class_member.rs
index 39b96490..07032a07 100644
--- a/parser/src/declarations/classes/class_member.rs
+++ b/parser/src/declarations/classes/class_member.rs
@@ -322,7 +322,7 @@ impl FunctionBased for ClassFunctionBase {
}
fn get_name(name: &Self::Name) -> Option<&str> {
- if let PropertyKey::Ident(name, ..) = name.get_ast_ref() {
+ if let PropertyKey::Identifier(name, ..) = name.get_ast_ref() {
Some(name.as_str())
} else {
None
diff --git a/parser/src/declarations/classes/mod.rs b/parser/src/declarations/classes/mod.rs
index 3c9817dc..56d29ecc 100644
--- a/parser/src/declarations/classes/mod.rs
+++ b/parser/src/declarations/classes/mod.rs
@@ -78,7 +78,7 @@ impl ClassDeclaration {
.is_some()
.then(|| {
crate::parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)
- .map(|(params, _)| params)
+ .map(|(params, _, _)| params)
})
.transpose()?;
diff --git a/parser/src/declarations/export.rs b/parser/src/declarations/export.rs
index fd967927..ad9b696c 100644
--- a/parser/src/declarations/export.rs
+++ b/parser/src/declarations/export.rs
@@ -170,7 +170,7 @@ impl ASTNode for ExportDeclaration {
state.append_keyword_at_pos(reader.next().unwrap().1 .0, TSXKeyword::Type);
let Token(_, start) = reader.next().unwrap(); // OpenBrace
- let (parts, _end) = crate::parse_bracketed::<ExportPart>(
+ let (parts, _, _end) = crate::parse_bracketed::<ExportPart>(
reader,
state,
options,
@@ -213,7 +213,7 @@ impl ASTNode for ExportDeclaration {
});
if let Some(Token(token_type, _)) = after_bracket {
if let TSXToken::Keyword(TSXKeyword::From) = token_type {
- let (parts, _end) = crate::parse_bracketed::<ExportPart>(
+ let (parts, _, _end) = crate::parse_bracketed::<ExportPart>(
reader,
state,
options,
@@ -234,7 +234,7 @@ impl ASTNode for ExportDeclaration {
position: start.union(end),
})
} else {
- let (parts, end) = crate::parse_bracketed::<ExportPart>(
+ let (parts, _, end) = crate::parse_bracketed::<ExportPart>(
reader,
state,
options,
@@ -405,7 +405,9 @@ pub enum ExportPart {
),
}
-impl ListItem for ExportPart {}
+impl ListItem for ExportPart {
+ type LAST = ();
+}
impl ASTNode for ExportPart {
fn get_position(&self) -> Span {
diff --git a/parser/src/declarations/import.rs b/parser/src/declarations/import.rs
index 8bbb3790..04714d43 100644
--- a/parser/src/declarations/import.rs
+++ b/parser/src/declarations/import.rs
@@ -274,7 +274,7 @@ pub(crate) fn parse_import_specifier_and_parts(
let end = under.get_position().get_end();
(ImportedItems::All { under }, end)
} else if let Some(Token(TSXToken::OpenBrace, _)) = peek {
- let (parts, end) = parse_bracketed::<ImportPart>(
+ let (parts, _, end) = parse_bracketed::<ImportPart>(
reader,
state,
options,
@@ -322,7 +322,9 @@ pub enum ImportPart {
),
}
-impl ListItem for ImportPart {}
+impl ListItem for ImportPart {
+ type LAST = ();
+}
impl ASTNode for ImportPart {
fn get_position(&self) -> Span {
diff --git a/parser/src/declarations/variable.rs b/parser/src/declarations/variable.rs
index 2b656880..b72ad3c8 100644
--- a/parser/src/declarations/variable.rs
+++ b/parser/src/declarations/variable.rs
@@ -4,8 +4,9 @@ use iterator_endiate::EndiateIteratorExt;
use crate::{
derive_ASTNode, errors::parse_lexing_error, expressions::operators::COMMA_PRECEDENCE,
- throw_unexpected_token_with_token, ASTNode, Expression, ParseOptions, ParseResult, Span,
- TSXKeyword, TSXToken, Token, TokenReader, TypeAnnotation, VariableField, WithComment,
+ throw_unexpected_token_with_token, ASTNode, Expression, ParseError, ParseErrors, ParseOptions,
+ ParseResult, Span, TSXKeyword, TSXToken, Token, TokenReader, TypeAnnotation, VariableField,
+ WithComment,
};
use visitable_derive::Visitable;
@@ -270,10 +271,19 @@ impl ASTNode for VariableDeclaration {
break;
}
}
- VariableDeclaration::LetDeclaration {
- position: start.union(declarations.last().unwrap().get_position()),
- declarations,
- }
+
+ let position = if let Some(last) = declarations.last() {
+ start.union(last.get_position())
+ } else {
+ let position = start.with_length(3);
+ if options.partial_syntax {
+ position
+ } else {
+ return Err(ParseError::new(ParseErrors::ExpectedDeclaration, position));
+ }
+ };
+
+ VariableDeclaration::LetDeclaration { position, declarations }
}
VariableDeclarationKeyword::Const => {
state.append_keyword_at_pos(start.0, TSXKeyword::Const);
@@ -297,10 +307,19 @@ impl ASTNode for VariableDeclaration {
break;
}
}
- VariableDeclaration::ConstDeclaration {
- position: start.union(declarations.last().unwrap().get_position()),
- declarations,
- }
+
+ let position = if let Some(last) = declarations.last() {
+ start.union(last.get_position())
+ } else {
+ let position = start.with_length(3);
+ if options.partial_syntax {
+ position
+ } else {
+ return Err(ParseError::new(ParseErrors::ExpectedDeclaration, position));
+ }
+ };
+
+ VariableDeclaration::ConstDeclaration { position, declarations }
}
})
}
diff --git a/parser/src/errors.rs b/parser/src/errors.rs
index 25901f6e..a52364d0 100644
--- a/parser/src/errors.rs
+++ b/parser/src/errors.rs
@@ -32,84 +32,8 @@ pub enum ParseErrors<'a> {
NonStandardSyntaxUsedWithoutEnabled,
ExpectRule,
InvalidRegexFlag,
-}
-
-#[allow(missing_docs)]
-#[derive(Debug)]
-pub enum LexingErrors {
- SecondDecimalPoint,
- NumberLiteralCannotHaveDecimalPoint,
- NumberLiteralBaseSpecifierMustPrecededWithZero,
- InvalidCharacterInJSXTag(char),
- UnbalancedJSXClosingTags,
- ExpectedClosingAngleAtEndOfSelfClosingTag,
- InvalidCharacterInAttributeKey(char),
- UnexpectedCharacter(derive_finite_automaton::InvalidCharacter),
- EmptyAttributeName,
- ExpectedJSXEndTag,
- NewLineInStringLiteral,
- ExpectedEndToMultilineComment,
- ExpectedEndToStringLiteral,
- UnexpectedEndToNumberLiteral,
- InvalidNumeralItemBecauseOfLiteralKind,
- ExpectedEndToRegexLiteral,
- ExpectedEndToJSXLiteral,
- ExpectedEndToTemplateLiteral,
- InvalidExponentUsage,
- InvalidUnderscore,
- CannotLoadLargeFile(usize),
- ExpectedDashInComment,
-}
-
-impl Display for LexingErrors {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self {
- LexingErrors::SecondDecimalPoint => {
- f.write_str("Second decimal point found in number literal")
- }
- LexingErrors::NumberLiteralCannotHaveDecimalPoint => {
- f.write_str("Number literal with specified base cannot have decimal point")
- }
- LexingErrors::NumberLiteralBaseSpecifierMustPrecededWithZero => {
- f.write_str("Number literal base character must be proceeded with a zero")
- }
- LexingErrors::InvalidCharacterInJSXTag(chr) => {
- write!(f, "Invalid character {chr:?} in JSX tag")
- }
- LexingErrors::ExpectedClosingAngleAtEndOfSelfClosingTag => {
- f.write_str("Expected closing angle at end of self closing JSX tag")
- }
- LexingErrors::InvalidCharacterInAttributeKey(chr) => {
- write!(f, "Invalid character {chr:?} in JSX attribute name")
- }
- LexingErrors::EmptyAttributeName => f.write_str("Empty JSX attribute name"),
- LexingErrors::ExpectedJSXEndTag => f.write_str("Expected JSX end tag"),
- LexingErrors::NewLineInStringLiteral => {
- f.write_str("String literals cannot contain new lines")
- }
- LexingErrors::ExpectedEndToMultilineComment => {
- f.write_str("Unclosed multiline comment")
- }
- LexingErrors::ExpectedEndToStringLiteral => f.write_str("Unclosed string literal"),
- LexingErrors::UnexpectedEndToNumberLiteral => f.write_str("Unclosed number literal"),
- LexingErrors::ExpectedEndToRegexLiteral => f.write_str("Unclosed regex literal"),
- LexingErrors::ExpectedEndToJSXLiteral => f.write_str("Unclosed JSX literal"),
- LexingErrors::ExpectedEndToTemplateLiteral => f.write_str("Unclosed template literal"),
- LexingErrors::UnexpectedCharacter(err) => Display::fmt(err, f),
- LexingErrors::UnbalancedJSXClosingTags => f.write_str("Too many closing JSX tags"),
- LexingErrors::InvalidExponentUsage => f.write_str("Two e in number literal"),
- LexingErrors::InvalidUnderscore => f.write_str("Numeric separator in invalid place"),
- LexingErrors::InvalidNumeralItemBecauseOfLiteralKind => {
- f.write_str("Invalid item in binary, hex or octal literal")
- }
- LexingErrors::CannotLoadLargeFile(size) => {
- write!(f, "Cannot parse {size:?} byte file (4GB maximum)")
- }
- LexingErrors::ExpectedDashInComment => {
- f.write_str("JSX comments must have two dashes after ` Display for ParseErrors<'a> {
@@ -201,7 +125,91 @@ impl<'a> Display for ParseErrors<'a> {
write!(f, "'-' must be followed by a readonly rule")
}
ParseErrors::InvalidRegexFlag => {
- write!(f, "Regexp flags must be one of 'd', 'g', 'i', 'm', 's', 'u' or 'y'")
+ write!(f, "Regexp flags must be 'd', 'g', 'i', 'm', 's', 'u' or 'y'")
+ }
+ ParseErrors::ExpectedDeclaration => {
+ write!(f, "Expected identifier after variable declaration keyword")
+ }
+ ParseErrors::CannotHaveRegularMemberAfterSpread => {
+ write!(f, "Cannot have regular member after spread")
+ }
+ }
+ }
+}
+
+#[allow(missing_docs)]
+#[derive(Debug)]
+pub enum LexingErrors {
+ SecondDecimalPoint,
+ NumberLiteralCannotHaveDecimalPoint,
+ NumberLiteralBaseSpecifierMustPrecededWithZero,
+ InvalidCharacterInJSXTag(char),
+ UnbalancedJSXClosingTags,
+ ExpectedClosingAngleAtEndOfSelfClosingTag,
+ InvalidCharacterInAttributeKey(char),
+ UnexpectedCharacter(derive_finite_automaton::InvalidCharacter),
+ EmptyAttributeName,
+ ExpectedJSXEndTag,
+ NewLineInStringLiteral,
+ ExpectedEndToMultilineComment,
+ ExpectedEndToStringLiteral,
+ UnexpectedEndToNumberLiteral,
+ InvalidNumeralItemBecauseOfLiteralKind,
+ ExpectedEndToRegexLiteral,
+ ExpectedEndToJSXLiteral,
+ ExpectedEndToTemplateLiteral,
+ InvalidExponentUsage,
+ InvalidUnderscore,
+ CannotLoadLargeFile(usize),
+ ExpectedDashInComment,
+}
+
+impl Display for LexingErrors {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self {
+ LexingErrors::SecondDecimalPoint => {
+ f.write_str("Second decimal point found in number literal")
+ }
+ LexingErrors::NumberLiteralCannotHaveDecimalPoint => {
+ f.write_str("Number literal with specified base cannot have decimal point")
+ }
+ LexingErrors::NumberLiteralBaseSpecifierMustPrecededWithZero => {
+ f.write_str("Number literal base character must be proceeded with a zero")
+ }
+ LexingErrors::InvalidCharacterInJSXTag(chr) => {
+ write!(f, "Invalid character {chr:?} in JSX tag")
+ }
+ LexingErrors::ExpectedClosingAngleAtEndOfSelfClosingTag => {
+ f.write_str("Expected closing angle at end of self closing JSX tag")
+ }
+ LexingErrors::InvalidCharacterInAttributeKey(chr) => {
+ write!(f, "Invalid character {chr:?} in JSX attribute name")
+ }
+ LexingErrors::EmptyAttributeName => f.write_str("Empty JSX attribute name"),
+ LexingErrors::ExpectedJSXEndTag => f.write_str("Expected JSX end tag"),
+ LexingErrors::NewLineInStringLiteral => {
+ f.write_str("String literals cannot contain new lines")
+ }
+ LexingErrors::ExpectedEndToMultilineComment => {
+ f.write_str("Unclosed multiline comment")
+ }
+ LexingErrors::ExpectedEndToStringLiteral => f.write_str("Unclosed string literal"),
+ LexingErrors::UnexpectedEndToNumberLiteral => f.write_str("Unclosed number literal"),
+ LexingErrors::ExpectedEndToRegexLiteral => f.write_str("Unclosed regex literal"),
+ LexingErrors::ExpectedEndToJSXLiteral => f.write_str("Unclosed JSX literal"),
+ LexingErrors::ExpectedEndToTemplateLiteral => f.write_str("Unclosed template literal"),
+ LexingErrors::UnexpectedCharacter(err) => Display::fmt(err, f),
+ LexingErrors::UnbalancedJSXClosingTags => f.write_str("Too many closing JSX tags"),
+ LexingErrors::InvalidExponentUsage => f.write_str("Two e in number literal"),
+ LexingErrors::InvalidUnderscore => f.write_str("Numeric separator in invalid place"),
+ LexingErrors::InvalidNumeralItemBecauseOfLiteralKind => {
+ f.write_str("Invalid item in binary, hex or octal literal")
+ }
+ LexingErrors::CannotLoadLargeFile(size) => {
+ write!(f, "Cannot parse {size:?} byte file (4GB maximum)")
+ }
+ LexingErrors::ExpectedDashInComment => {
+ f.write_str("JSX comments must have two dashes after `>>,
- Span,
- ),
- ObjectDestructuring(
- #[visit_skip_field] Vec<WithComment<ObjectDestructuringField<LHSOfAssignment>>>,
- Span,
- ),
+ ArrayDestructuring {
+ #[visit_skip_field]
+ members: Vec<WithComment<ArrayDestructuringField<LHSOfAssignment>>>,
+ spread: Option<SpreadDestructuringField<LHSOfAssignment>>,
+ position: Span,
+ },
+ ObjectDestructuring {
+ #[visit_skip_field]
+ members: Vec<WithComment<ObjectDestructuringField<LHSOfAssignment>>>,
+ spread: Option<SpreadDestructuringField<LHSOfAssignment>>,
+ position: Span,
+ },
}
impl ASTNode for LHSOfAssignment {
fn get_position(&self) -> Span {
match self {
- LHSOfAssignment::ObjectDestructuring(_, pos)
- | LHSOfAssignment::ArrayDestructuring(_, pos) => *pos,
+ LHSOfAssignment::ObjectDestructuring { position, .. }
+ | LHSOfAssignment::ArrayDestructuring { position, .. } => *position,
LHSOfAssignment::VariableOrPropertyAccess(var_prop_access) => {
var_prop_access.get_position()
}
@@ -216,7 +224,7 @@ impl ASTNode for LHSOfAssignment {
local: crate::LocalToStringInformation,
) {
match self {
- LHSOfAssignment::ObjectDestructuring(members, _) => {
+ LHSOfAssignment::ObjectDestructuring { members, spread, position: _ } => {
buf.push('{');
options.push_gap_optionally(buf);
for (at_end, member) in members.iter().endiate() {
@@ -226,10 +234,18 @@ impl ASTNode for LHSOfAssignment {
options.push_gap_optionally(buf);
}
}
+ if let Some(ref spread) = spread {
+ if !members.is_empty() {
+ buf.push(',');
+ options.push_gap_optionally(buf);
+ }
+ buf.push_str("...");
+ spread.0.to_string_from_buffer(buf, options, local);
+ }
options.push_gap_optionally(buf);
buf.push('}');
}
- LHSOfAssignment::ArrayDestructuring(members, _) => {
+ LHSOfAssignment::ArrayDestructuring { members, spread, position: _ } => {
buf.push('[');
for (at_end, member) in members.iter().endiate() {
member.to_string_from_buffer(buf, options, local);
@@ -238,6 +254,14 @@ impl ASTNode for LHSOfAssignment {
options.push_gap_optionally(buf);
}
}
+ if let Some(ref spread) = spread {
+ if !members.is_empty() {
+ buf.push(',');
+ options.push_gap_optionally(buf);
+ }
+ buf.push_str("...");
+ spread.0.to_string_from_buffer(buf, options, local);
+ }
buf.push(']');
}
LHSOfAssignment::VariableOrPropertyAccess(variable_or_property_access) => {
@@ -252,9 +276,10 @@ impl TryFrom<Expression> for LHSOfAssignment {
fn try_from(value: Expression) -> Result<Self, Self::Error> {
match value {
- Expression::ArrayLiteral(inner, position) => {
- let mut members = Vec::with_capacity(inner.len());
- for member in inner {
+ Expression::ArrayLiteral(members, position) => {
+ let mut new_members = Vec::with_capacity(members.len());
+ let mut iter = members.into_iter();
+ for member in iter.by_ref() {
let new_member = match member.0 {
Some(FunctionArgument::Comment { content, is_multiline: _, position }) => {
WithComment::PrefixComment(
@@ -264,8 +289,19 @@ impl TryFrom<Expression> for LHSOfAssignment {
)
}
Some(FunctionArgument::Spread(expression, span)) => {
- let lhs: LHSOfAssignment = expression.try_into()?;
- WithComment::None(ArrayDestructuringField::Spread(lhs, span))
+ return if let Some(next) = iter.next() {
+ Err(ParseError::new(
+ ParseErrors::CannotHaveRegularMemberAfterSpread,
+ next.get_position(),
+ ))
+ } else {
+ let inner: LHSOfAssignment = expression.try_into()?;
+ Ok(Self::ArrayDestructuring {
+ members: new_members,
+ spread: Some(SpreadDestructuringField(Box::new(inner), span)),
+ position,
+ })
+ }
}
Some(FunctionArgument::Standard(expression)) => {
WithComment::None(match expression {
@@ -279,16 +315,29 @@ impl TryFrom<Expression> for LHSOfAssignment {
}
None => WithComment::None(ArrayDestructuringField::None),
};
- members.push(new_member);
+ new_members.push(new_member);
}
- Ok(Self::ArrayDestructuring(members, position))
+ Ok(Self::ArrayDestructuring { members: new_members, spread: None, position })
}
- Expression::ObjectLiteral(inner) => {
- let mut members = Vec::with_capacity(inner.members.len());
- for member in inner.members {
+ Expression::ObjectLiteral(ObjectLiteral { members, position }) => {
+ let mut new_members = Vec::with_capacity(members.len());
+ let mut iter = members.into_iter();
+ for member in iter.by_ref() {
let new_member: ObjectDestructuringField = match member {
- ObjectLiteralMember::Spread(expression, pos) => {
- ObjectDestructuringField::Spread(expression.try_into()?, pos)
+ ObjectLiteralMember::Spread(expression, span) => {
+ return if let Some(next) = iter.next() {
+ Err(ParseError::new(
+ ParseErrors::CannotHaveRegularMemberAfterSpread,
+ next.get_position(),
+ ))
+ } else {
+ let inner: LHSOfAssignment = expression.try_into()?;
+ Ok(Self::ObjectDestructuring {
+ members: new_members,
+ spread: Some(SpreadDestructuringField(Box::new(inner), span)),
+ position,
+ })
+ }
}
ObjectLiteralMember::Shorthand(name, pos) => {
ObjectDestructuringField::Name(
@@ -300,7 +349,7 @@ impl TryFrom<Expression> for LHSOfAssignment {
}
ObjectLiteralMember::Property { assignment, key, position, value } => {
if assignment {
- if let PropertyKey::Ident(name, pos, _) = key.get_ast() {
+ if let PropertyKey::Identifier(name, pos, _) = key.get_ast() {
ObjectDestructuringField::Name(
crate::VariableIdentifier::Standard(name, pos),
(),
@@ -334,16 +383,16 @@ impl TryFrom<Expression> for LHSOfAssignment {
ObjectLiteralMember::Method(_) => {
return Err(ParseError::new(
crate::ParseErrors::InvalidLHSAssignment,
- inner.position,
+ position,
))
}
ObjectLiteralMember::Comment(..) => {
continue;
}
};
- members.push(WithComment::None(new_member));
+ new_members.push(WithComment::None(new_member));
}
- Ok(Self::ObjectDestructuring(members, inner.position))
+ Ok(Self::ObjectDestructuring { members: new_members, spread: None, position })
}
expression => VariableOrPropertyAccess::try_from(expression)
.map(LHSOfAssignment::VariableOrPropertyAccess),
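// A minimal, self-contained sketch of the spread handling used in the
// `TryFrom` conversion above: drain the members, and if a spread is found it
// must be the final entry, otherwise the conversion fails (mirroring
// `CannotHaveRegularMemberAfterSpread`). `Member` and `ConvertError` are
// illustrative stand-ins, not the parser's own types.
#[derive(Debug)]
enum Member {
    Normal(char),
    Spread(char),
}

#[derive(Debug, PartialEq)]
enum ConvertError {
    MemberAfterSpread,
}

fn split_spread_last(members: Vec<Member>) -> Result<(Vec<char>, Option<char>), ConvertError> {
    let mut normals = Vec::new();
    let mut iter = members.into_iter();
    while let Some(member) = iter.next() {
        match member {
            Member::Normal(name) => normals.push(name),
            Member::Spread(name) => {
                // Anything after the spread is rejected
                return if iter.next().is_some() {
                    Err(ConvertError::MemberAfterSpread)
                } else {
                    Ok((normals, Some(name)))
                };
            }
        }
    }
    Ok((normals, None))
}

fn main() {
    // `[a, ...b] = value` shape: accepted
    assert_eq!(
        split_spread_last(vec![Member::Normal('a'), Member::Spread('b')]),
        Ok((vec!['a'], Some('b')))
    );
    // `[...a, b] = value` shape: rejected
    assert_eq!(
        split_spread_last(vec![Member::Spread('a'), Member::Normal('b')]),
        Err(ConvertError::MemberAfterSpread)
    );
}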
diff --git a/parser/src/expressions/mod.rs b/parser/src/expressions/mod.rs
index b5643ab1..ee2ab8d6 100644
--- a/parser/src/expressions/mod.rs
+++ b/parser/src/expressions/mod.rs
@@ -343,7 +343,7 @@ impl Expression {
)
}
Token(TSXToken::OpenParentheses, _) => {
- let (arguments, end_pos) = parse_bracketed(
+ let (arguments, _, end_pos) = parse_bracketed(
reader,
state,
options,
@@ -394,7 +394,7 @@ impl Expression {
Expression::UnaryOperation { operator, operand: Box::new(operand), position }
}
Token(TSXToken::OpenBracket, start) => {
- let (items, end) = parse_bracketed::<ArrayElement>(
+ let (items, _, end) = parse_bracketed::<ArrayElement>(
reader,
state,
options,
@@ -480,7 +480,7 @@ impl Expression {
.conditional_next(|token| *token == TSXToken::OpenChevron)
.is_some()
{
- let (generic_arguments, end_pos) =
+ let (generic_arguments, _, end_pos) =
parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)?;
(Some(generic_arguments), end_pos)
} else {
@@ -492,7 +492,7 @@ impl Expression {
.is_some()
{
parse_bracketed(reader, state, options, None, TSXToken::CloseParentheses)
- .map(|(args, end)| (Some(args), end))?
+ .map(|(args, _, end)| (Some(args), end))?
} else {
// TODO are type arguments not allowed...?
(None, end)
@@ -889,7 +889,7 @@ impl Expression {
}
let next = reader.next().unwrap();
let is_optional = matches!(next.0, TSXToken::OptionalCall);
- let (arguments, end) =
+ let (arguments, _, end) =
parse_bracketed(reader, state, options, None, TSXToken::CloseParentheses)?;
let position = top.get_position().union(end);
top = Expression::FunctionCall {
@@ -1140,7 +1140,7 @@ impl Expression {
let (type_arguments, _) = generic_arguments_from_reader_sub_open_angle(
reader, state, options, None,
)?;
- let (arguments, end) = parse_bracketed(
+ let (arguments, _, end) = parse_bracketed(
reader,
state,
options,
@@ -1504,7 +1504,7 @@ impl Expression {
}
Self::Assignment { lhs, rhs, .. } => {
let require_parenthesis =
- matches!(lhs, LHSOfAssignment::ObjectDestructuring(..)) && local2.on_left;
+ matches!(lhs, LHSOfAssignment::ObjectDestructuring { .. }) && local2.on_left;
if require_parenthesis {
buf.push('(');
@@ -1663,6 +1663,55 @@ impl Expression {
}
}
Self::ArrayLiteral(values, _) => {
+ // Fix, see: https://github.com/kaleidawave/ezno/pull/158#issuecomment-2169621017
+ if options.pretty && options.enforce_limit_length_limit() {
+ const MAX_INLINE_OBJECT_LITERAL: u32 = 40;
+
+ let values_are_all_booleans_or_numbers =
+ values.first().and_then(ArrayElement::inner_ref).is_some_and(|e| {
+ matches!(
+ e,
+ Expression::BooleanLiteral(..) | Expression::NumberLiteral(..)
+ )
+ }) && values.iter().all(|e| {
+ e.inner_ref().is_some_and(|e| {
+ matches!(
+ e,
+ Expression::BooleanLiteral(..) | Expression::NumberLiteral(..)
+ )
+ })
+ }) && are_nodes_over_length(
+ values.iter(),
+ options,
+ local,
+ Some(MAX_INLINE_OBJECT_LITERAL),
+ true,
+ );
+
+ if values_are_all_booleans_or_numbers {
+ buf.push('[');
+ let inner_local = local.next_level();
+ buf.push_new_line();
+ options.add_indent(inner_local.depth, buf);
+ for (at_end, node) in
+ iterator_endiate::EndiateIteratorExt::endiate(values.iter())
+ {
+ if buf.characters_on_current_line() > MAX_INLINE_OBJECT_LITERAL {
+ buf.push_new_line();
+ options.add_indent(inner_local.depth, buf);
+ }
+ node.to_string_from_buffer(buf, options, inner_local);
+ if !at_end {
+ buf.push(',');
+ options.push_gap_optionally(buf);
+ }
+ }
+ buf.push_new_line();
+ options.add_indent(local.depth, buf);
+ buf.push(']');
+ return;
+ };
+ }
to_string_bracketed(values, ('[', ']'), buf, options, local);
}
Self::ObjectLiteral(object_literal) => {
@@ -1977,6 +2026,11 @@ pub(crate) fn arguments_to_string(
local: crate::LocalToStringInformation,
) {
buf.push('(');
+ if nodes.is_empty() {
+ buf.push(')');
+ return;
+ }
+
let add_new_lines = are_nodes_over_length(
nodes.iter(),
options,
@@ -1984,6 +2038,7 @@ pub(crate) fn arguments_to_string(
Some(u32::from(options.max_line_length).saturating_sub(buf.characters_on_current_line())),
true,
);
+
if add_new_lines {
buf.push_new_line();
options.add_indent(local.depth + 1, buf);
@@ -2082,7 +2137,9 @@ pub enum FunctionArgument {
Comment { content: String, is_multiline: bool, position: Span },
}
-impl ListItem for FunctionArgument {}
+impl ListItem for FunctionArgument {
+ type LAST = ();
+}
impl ASTNode for FunctionArgument {
fn from_reader(
@@ -2214,8 +2271,25 @@ impl ASTNode for ArrayElement {
}
}
+impl ArrayElement {
+ /// For utility purposes! Loses spread information
+ #[must_use]
+ pub fn inner_ref(&self) -> Option<&Expression> {
+ if let Some(ref inner) = self.0 {
+ match inner {
+ FunctionArgument::Spread(expr, _) | FunctionArgument::Standard(expr) => Some(expr),
+ FunctionArgument::Comment { .. } => None,
+ }
+ } else {
+ None
+ }
+ }
+}
+
impl ListItem for ArrayElement {
const EMPTY: Option<Self> = Some(Self(None));
+
+ type LAST = ();
}
// Utils for Expression
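// A rough, standalone sketch of the wrapping strategy introduced above for
// long, literal-only array literals: keep elements on one line until a
// character budget is exceeded, then break. The 40-character budget mirrors
// `MAX_INLINE_OBJECT_LITERAL`; everything else here is illustrative.
fn wrap_number_list(items: &[u32], budget: usize) -> String {
    let mut out = String::from("[\n\t");
    let mut line_length = 0;
    for (index, item) in items.iter().enumerate() {
        let rendered = item.to_string();
        if line_length > budget {
            // Over budget: start a new line, like the `push_new_line` call above
            out.push_str("\n\t");
            line_length = 0;
        }
        out.push_str(&rendered);
        line_length += rendered.len();
        if index + 1 != items.len() {
            out.push_str(", ");
            line_length += 2;
        }
    }
    out.push_str("\n]");
    out
}

fn main() {
    let items: Vec<u32> = (0..30).collect();
    // Prints a bracketed list broken roughly every 40 characters
    println!("{}", wrap_number_list(&items, 40));
}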
diff --git a/parser/src/expressions/object_literal.rs b/parser/src/expressions/object_literal.rs
index 4673c99f..57484b05 100644
--- a/parser/src/expressions/object_literal.rs
+++ b/parser/src/expressions/object_literal.rs
@@ -141,7 +141,7 @@ impl FunctionBased for ObjectLiteralMethodBase {
}
fn get_name(name: &Self::Name) -> Option<&str> {
- if let PropertyKey::Ident(name, ..) = name.get_ast_ref() {
+ if let PropertyKey::Identifier(name, ..) = name.get_ast_ref() {
Some(name.as_str())
} else {
None
@@ -248,7 +248,7 @@ impl ASTNode for ObjectLiteralMember {
return crate::throw_unexpected_token(reader, &[TSXToken::OpenParentheses]);
}
if let Some(Token(TSXToken::Comma | TSXToken::CloseBrace, _)) = reader.peek() {
- if let PropertyKey::Ident(name, position, _) = key.get_ast() {
+ if let PropertyKey::Identifier(name, position, _) = key.get_ast() {
Ok(Self::Shorthand(name, position))
} else {
let token = reader.next().ok_or_else(parse_lexing_error)?;
diff --git a/parser/src/extensions/jsx.rs b/parser/src/extensions/jsx.rs
index 906e1047..88f58aac 100644
--- a/parser/src/extensions/jsx.rs
+++ b/parser/src/extensions/jsx.rs
@@ -441,7 +441,8 @@ impl JSXElement {
let end = if let Token(TSXToken::JSXClosingTagName(closing_tag_name), start) =
reader.next().ok_or_else(parse_lexing_error)?
{
- let end = start.0 + closing_tag_name.len() as u32 + 2;
+ let end =
+ start.0 + u32::try_from(closing_tag_name.len()).expect("4GB tag name") + 2;
if closing_tag_name != tag_name {
return Err(ParseError::new(
crate::ParseErrors::ClosingTagDoesNotMatch {
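// The change above replaces a silent `as u32` cast with a checked conversion
// when computing the closing tag's end position. A tiny illustration of the
// difference (the values here are made up):
fn main() {
    let tag_name_length: usize = "App".len();
    // Lossy: a value over u32::MAX would silently truncate on 64-bit targets
    let lossy = tag_name_length as u32;
    // Checked: panics with a clear message instead of corrupting span arithmetic
    let checked = u32::try_from(tag_name_length).expect("tag name longer than u32::MAX bytes");
    assert_eq!(lossy, checked);
}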
diff --git a/parser/src/functions/mod.rs b/parser/src/functions/mod.rs
index 8de28f03..96f12244 100644
--- a/parser/src/functions/mod.rs
+++ b/parser/src/functions/mod.rs
@@ -219,7 +219,7 @@ impl FunctionBase {
.is_some()
.then(|| {
parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)
- .map(|(params, _)| params)
+ .map(|(params, _, _)| params)
})
.transpose()?;
let parameters = FunctionParameters::from_reader(reader, state, options)?;
@@ -672,7 +672,7 @@ pub(crate) fn get_method_name(
let new_public = T::new_public();
(
MethodHeader::default(),
- WithComment::None(PropertyKey::Ident(name.to_owned(), position, new_public)),
+ WithComment::None(PropertyKey::Identifier(name.to_owned(), position, new_public)),
)
} else {
(MethodHeader::from_reader(reader), WithComment::from_reader(reader, state, options)?)
diff --git a/parser/src/lib.rs b/parser/src/lib.rs
index a33fc0a9..770d30f3 100644
--- a/parser/src/lib.rs
+++ b/parser/src/lib.rs
@@ -1,5 +1,6 @@
#![doc = include_str!("../README.md")]
#![allow(clippy::new_without_default, clippy::too_many_lines)]
+#![warn(clippy::must_use_candidate)]
mod block;
mod comments;
@@ -295,8 +296,13 @@ pub struct LocalToStringInformation {
}
impl LocalToStringInformation {
- pub(crate) fn next_level(self) -> LocalToStringInformation {
- LocalToStringInformation {
+ #[must_use]
+ pub fn new_under(under: SourceId) -> Self {
+ Self { under, depth: 0, should_try_pretty_print: true }
+ }
+
+ pub(crate) fn next_level(self) -> Self {
+ Self {
under: self.under,
depth: self.depth + 1,
should_try_pretty_print: self.should_try_pretty_print,
@@ -304,8 +310,8 @@ impl LocalToStringInformation {
}
/// For printing source maps after bundling
- pub(crate) fn change_source(self, new: SourceId) -> LocalToStringInformation {
- LocalToStringInformation {
+ pub(crate) fn change_source(self, new: SourceId) -> Self {
+ Self {
under: new,
depth: self.depth,
should_try_pretty_print: self.should_try_pretty_print,
@@ -313,12 +319,8 @@ impl LocalToStringInformation {
}
/// Prevents recursion & other excess
- pub(crate) fn do_not_pretty_print(self) -> LocalToStringInformation {
- LocalToStringInformation {
- under: self.under,
- depth: self.depth,
- should_try_pretty_print: false,
- }
+ pub(crate) fn do_not_pretty_print(self) -> Self {
+ Self { under: self.under, depth: self.depth, should_try_pretty_print: false }
}
}
@@ -360,15 +362,8 @@ pub trait ASTNode: Sized + Clone + PartialEq + std::fmt::Debug + Sync + Send + 'static {
/// Returns structure as valid string
fn to_string(&self, options: &crate::ToStringOptions) -> String {
let mut buf = source_map::StringWithOptionalSourceMap::new(false);
- self.to_string_from_buffer(
- &mut buf,
- options,
- LocalToStringInformation {
- under: source_map::Nullable::NULL,
- depth: 0,
- should_try_pretty_print: true,
- },
- );
+ let local = LocalToStringInformation::new_under(source_map::Nullable::NULL);
+ self.to_string_from_buffer(&mut buf, options, local);
buf.source
}
}
@@ -384,7 +379,10 @@ pub fn lex_and_parse_script(
let (mut sender, mut reader) =
tokenizer_lib::ParallelTokenQueue::new_with_buffer_size(options.buffer_size);
let lex_options = options.get_lex_options();
+
+ #[allow(clippy::cast_possible_truncation)]
let length_of_source = script.len() as u32;
+
let mut thread = std::thread::Builder::new().name("AST parsing".into());
if let Some(stack_size) = options.stack_size {
thread = thread.stack_size(stack_size);
@@ -515,7 +513,7 @@ impl ParsingState {
fn new_partial_point_marker(&mut self, at: source_map::Start) -> Marker {
let id = self.partial_points.len();
self.partial_points.push(at);
- Marker(id as u8, Default::default())
+ Marker(u8::try_from(id).expect("more than 256 markers"), Default::default())
}
}
@@ -525,6 +523,7 @@ pub struct KeywordPositions(Vec<(u32, TSXKeyword)>);
impl KeywordPositions {
#[must_use]
+ #[allow(clippy::cast_possible_truncation)]
pub fn try_get_keyword_at_position(&self, pos: u32) -> Option<TSXKeyword> {
// binary search
let mut l: u32 = 0;
@@ -621,7 +620,10 @@ impl TryFrom<NumberRepresentation> for f64 {
NumberRepresentation::Number(value) => Ok(value),
NumberRepresentation::Hex { sign, value, .. }
| NumberRepresentation::Bin { sign, value, .. }
- | NumberRepresentation::Octal { sign, value, .. } => Ok(sign.apply(value as f64)),
+ | NumberRepresentation::Octal { sign, value, .. } => {
+ // TODO `value as f64` can lose information? If so should return f64::INFINITY
+ Ok(sign.apply(value as f64))
+ }
NumberRepresentation::Exponential { sign, value, exponent } => {
Ok(sign.apply(value * 10f64.powi(exponent)))
}
@@ -942,10 +944,17 @@ impl ExpressionOrStatementPosition for ExpressionPosition {
}
pub trait ListItem: Sized {
+ type LAST;
+ const LAST_PREFIX: Option<TSXToken> = None;
const EMPTY: Option<Self> = None;
- fn allow_comma_after(&self) -> bool {
- true
+ #[allow(unused)]
+ fn parse_last_item(
+ reader: &mut impl TokenReader,
+ state: &mut crate::ParsingState,
+ options: &ParseOptions,
+ ) -> ParseResult<Self::LAST> {
+ unreachable!("ListItem::LAST != ASTNode")
}
}
@@ -958,7 +967,7 @@ pub(crate) fn parse_bracketed(
options: &ParseOptions,
start: Option,
end: TSXToken,
-) -> ParseResult<(Vec<T>, TokenEnd)> {
+) -> ParseResult<(Vec<T>, Option<T::LAST>, TokenEnd)> {
if let Some(start) = start {
let _ = reader.expect_next(start)?;
}
@@ -972,31 +981,31 @@ pub(crate) fn parse_bracketed(
}
let Token(token, s) = reader.next().unwrap();
if token == end {
- return Ok((nodes, s.get_end_after(token.length() as usize)));
+ return Ok((nodes, None, s.get_end_after(token.length() as usize)));
}
continue;
}
} else if let Some(token) = reader.conditional_next(|token| *token == end) {
- return Ok((nodes, token.get_end()));
+ return Ok((nodes, None, token.get_end()));
+ }
+
+ if T::LAST_PREFIX.is_some_and(|l| reader.peek().is_some_and(|Token(token, _)| *token == l))
+ {
+ let last = T::parse_last_item(reader, state, options)?;
+ let len = end.length() as usize;
+ let end = reader.expect_next(end)?.get_end_after(len);
+ return Ok((nodes, Some(last), end));
}
let node = T::from_reader(reader, state, options)?;
- let allow_comma = T::allow_comma_after(&node);
nodes.push(node);
match reader.next().ok_or_else(errors::parse_lexing_error)? {
- Token(TSXToken::Comma, s) => {
- if !allow_comma {
- return Err(ParseError::new(
- crate::ParseErrors::TrailingCommaNotAllowedHere,
- s.with_length(1),
- ));
- }
- }
+ Token(TSXToken::Comma, _) => {}
token => {
if token.0 == end {
let get_end = token.get_end();
- return Ok((nodes, get_end));
+ return Ok((nodes, None, get_end));
}
let position = token.get_span();
return Err(ParseError::new(
@@ -1202,7 +1211,7 @@ impl VariableKeyword {
/// Conditionally computes the node length
/// Does nothing under pretty == false or no max line length
pub fn are_nodes_over_length<'a, T: ASTNode>(
- nodes: impl Iterator<Item = &'a T>,
+ nodes: impl ExactSizeIterator<Item = &'a T>,
options: &ToStringOptions,
local: crate::LocalToStringInformation,
// None = 'no space'
@@ -1216,16 +1225,14 @@ pub fn are_nodes_over_length<'a, T: ASTNode>(
source: String::new(),
source_map: None,
quit_after: Some(room),
- since_new_line: 0,
+ // Temp fix for considering delimiters to nodes
+ since_new_line: nodes.len().try_into().expect("4 billion nodes ?"),
};
+
for node in nodes {
node.to_string_from_buffer(&mut buf, options, local);
- let length = if total {
- buf.source.len()
- } else {
- buf.source.find('\n').unwrap_or(buf.source.len())
- };
+ let length = if total { buf.source.len() } else { buf.since_new_line as usize };
let is_over = length > room;
if is_over {
return is_over;
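// A cut-down model of the `ListItem` extension used by `parse_bracketed`
// above: list items may declare a special trailing element (`Last`) introduced
// by a prefix, with an unreachable default for item kinds that have none.
// Token handling is simplified to plain strings; none of these types are the
// parser's own.
trait ListItem: Sized {
    type Last;
    const LAST_PREFIX: Option<char> = None;

    fn parse(token: &str) -> Self;

    fn parse_last(_token: &str) -> Self::Last {
        unreachable!("this item kind has no trailing form")
    }
}

struct Element(String);
struct Spread(String);

impl ListItem for Element {
    type Last = Spread;
    const LAST_PREFIX: Option<char> = Some('.');

    fn parse(token: &str) -> Self {
        Element(token.to_owned())
    }

    fn parse_last(token: &str) -> Spread {
        Spread(token.trim_start_matches('.').to_owned())
    }
}

// Generic driver in the spirit of the new three-value return: the items plus
// an optional trailing item of the associated type.
fn parse_list<T: ListItem>(tokens: &[&str]) -> (Vec<T>, Option<T::Last>) {
    let mut items = Vec::new();
    for (index, token) in tokens.iter().enumerate() {
        if T::LAST_PREFIX.is_some_and(|prefix| token.starts_with(prefix)) {
            assert_eq!(index + 1, tokens.len(), "trailing item must come last");
            return (items, Some(T::parse_last(token)));
        }
        items.push(T::parse(token));
    }
    (items, None)
}

fn main() {
    let (elements, spread) = parse_list::<Element>(&["a", "b", "...rest"]);
    assert_eq!(elements.len(), 2);
    assert_eq!(spread.map(|Spread(name)| name), Some("rest".to_owned()));
}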
diff --git a/parser/src/property_key.rs b/parser/src/property_key.rs
index 98c4b98f..386d4ed9 100644
--- a/parser/src/property_key.rs
+++ b/parser/src/property_key.rs
@@ -15,7 +15,7 @@ use crate::{
};
pub trait PropertyKeyKind: Debug + PartialEq + Eq + Clone + Sized + Send + Sync + 'static {
- fn parse_ident(
+ fn parse_identifier(
first: Token,
reader: &mut impl TokenReader,
) -> ParseResult<(String, Span, Self)>;
@@ -37,7 +37,7 @@ pub struct AlwaysPublic;
// ";
impl PropertyKeyKind for AlwaysPublic {
- fn parse_ident(
+ fn parse_identifier(
first: Token,
_reader: &mut impl TokenReader,
) -> ParseResult<(String, Span, Self)> {
@@ -68,7 +68,7 @@ pub enum PublicOrPrivate {
// ";
impl PropertyKeyKind for PublicOrPrivate {
- fn parse_ident(
+ fn parse_identifier(
first: Token,
reader: &mut impl TokenReader,
) -> ParseResult<(String, Span, Self)> {
@@ -95,7 +95,7 @@ impl PropertyKeyKind for PublicOrPrivate {
#[derive(Debug, PartialEq, Eq, Clone, get_field_by_type::GetFieldByType)]
#[get_field_by_type_target(Span)]
pub enum PropertyKey<T: PropertyKeyKind> {
- Ident(String, Span, T),
+ Identifier(String, Span, T),
StringLiteral(String, Quoted, Span),
NumberLiteral(NumberRepresentation, Span),
/// Includes anything in the `[...]` maybe a symbol
@@ -105,7 +105,7 @@ pub enum PropertyKey {
impl<U: PropertyKeyKind> PropertyKey<U> {
pub fn is_private(&self) -> bool {
match self {
- PropertyKey::Ident(_, _, p) => U::is_private(p),
+ PropertyKey::Identifier(_, _, p) => U::is_private(p),
_ => false,
}
}
@@ -114,7 +114,7 @@ impl PropertyKey {
impl<U: PropertyKeyKind> PartialEq<str> for PropertyKey<U> {
fn eq(&self, other: &str) -> bool {
match self {
- PropertyKey::Ident(name, _, _) | PropertyKey::StringLiteral(name, _, _) => {
+ PropertyKey::Identifier(name, _, _) | PropertyKey::StringLiteral(name, _, _) => {
name == other
}
PropertyKey::NumberLiteral(_, _) | PropertyKey::Computed(_, _) => false,
@@ -151,8 +151,8 @@ impl ASTNode for PropertyKey {
// TODO could add marker?
Self::from_reader(reader, state, options)
} else {
- let (name, position, private) = U::parse_ident(token, reader)?;
- Ok(Self::Ident(name, position, private))
+ let (name, position, private) = U::parse_identifier(token, reader)?;
+ Ok(Self::Identifier(name, position, private))
}
}
}
@@ -165,7 +165,7 @@ impl ASTNode for PropertyKey {
local: crate::LocalToStringInformation,
) {
match self {
- Self::Ident(ident, _pos, _) => buf.push_str(ident.as_str()),
+ Self::Identifier(ident, _pos, _) => buf.push_str(ident.as_str()),
Self::NumberLiteral(number, _) => buf.push_str(&number.to_string()),
Self::StringLiteral(string, quoted, _) => {
buf.push(quoted.as_char());
diff --git a/parser/src/statements/for_statement.rs b/parser/src/statements/for_statement.rs
index 93969c6e..8d39b91c 100644
--- a/parser/src/statements/for_statement.rs
+++ b/parser/src/statements/for_statement.rs
@@ -68,7 +68,7 @@ impl ASTNode for ForLoopStatement {
#[derive(Debug, Clone, PartialEq, Visitable)]
#[apply(derive_ASTNode)]
-pub enum ForLoopStatementinitialiser {
+pub enum ForLoopStatementInitialiser {
VariableDeclaration(VariableDeclaration),
VarStatement(VarVariableStatement),
Expression(MultipleExpression),
@@ -92,7 +92,7 @@ pub enum ForLoopCondition {
position: Span,
},
Statements {
- initialiser: Option<ForLoopStatementinitialiser>,
+ initialiser: Option<ForLoopStatementInitialiser>,
condition: Option,
afterthought: Option,
position: Span,
@@ -165,26 +165,26 @@ impl ASTNode for ForLoopCondition {
peek
{
let declaration = VariableDeclaration::from_reader(reader, state, options)?;
- Some(ForLoopStatementinitialiser::VariableDeclaration(declaration))
+ Some(ForLoopStatementInitialiser::VariableDeclaration(declaration))
} else if let Some(Token(TSXToken::Keyword(TSXKeyword::Var), _)) = peek {
let stmt = VarVariableStatement::from_reader(reader, state, options)?;
- Some(ForLoopStatementinitialiser::VarStatement(stmt))
+ Some(ForLoopStatementInitialiser::VarStatement(stmt))
} else if let Some(Token(TSXToken::SemiColon, _)) = peek {
None
} else {
let expr = MultipleExpression::from_reader(reader, state, options)?;
- Some(ForLoopStatementinitialiser::Expression(expr))
+ Some(ForLoopStatementInitialiser::Expression(expr))
};
let semi_colon_one = reader.expect_next(TSXToken::SemiColon)?;
let start = initialiser.as_ref().map_or(semi_colon_one, |init| match init {
- ForLoopStatementinitialiser::VariableDeclaration(item) => {
+ ForLoopStatementInitialiser::VariableDeclaration(item) => {
item.get_position().get_start()
}
- ForLoopStatementinitialiser::VarStatement(item) => {
+ ForLoopStatementInitialiser::VarStatement(item) => {
item.get_position().get_start()
}
- ForLoopStatementinitialiser::Expression(item) => {
+ ForLoopStatementInitialiser::Expression(item) => {
item.get_position().get_start()
}
});
@@ -314,19 +314,19 @@ impl ASTNode for ForLoopCondition {
}
fn initialiser_to_string(
- initialiser: &ForLoopStatementinitialiser,
+ initialiser: &ForLoopStatementInitialiser,
buf: &mut T,
options: &crate::ToStringOptions,
local: crate::LocalToStringInformation,
) {
match initialiser {
- ForLoopStatementinitialiser::VariableDeclaration(stmt) => {
+ ForLoopStatementInitialiser::VariableDeclaration(stmt) => {
stmt.to_string_from_buffer(buf, options, local);
}
- ForLoopStatementinitialiser::Expression(expr) => {
+ ForLoopStatementInitialiser::Expression(expr) => {
expr.to_string_from_buffer(buf, options, local);
}
- ForLoopStatementinitialiser::VarStatement(stmt) => {
+ ForLoopStatementInitialiser::VarStatement(stmt) => {
stmt.to_string_from_buffer(buf, options, local);
}
}
diff --git a/parser/src/statements/mod.rs b/parser/src/statements/mod.rs
index d8ade1d0..4bfd06a6 100644
--- a/parser/src/statements/mod.rs
+++ b/parser/src/statements/mod.rs
@@ -8,6 +8,7 @@ use crate::{
declarations::variable::{declarations_to_string, VariableDeclarationItem},
derive_ASTNode,
tokens::token_as_identifier,
+ ParseError, ParseErrors,
};
use derive_enum_from_into::{EnumFrom, EnumTryInto};
use derive_partial_eq_extras::PartialEqExtras;
@@ -19,7 +20,7 @@ use super::{
TSXKeyword, TSXToken, Token, TokenReader,
};
use crate::errors::parse_lexing_error;
-pub use for_statement::{ForLoopCondition, ForLoopStatement, ForLoopStatementinitialiser};
+pub use for_statement::{ForLoopCondition, ForLoopStatement, ForLoopStatementInitialiser};
pub use if_statement::*;
pub use switch_statement::{SwitchBranch, SwitchStatement};
pub use try_catch_statement::TryCatchStatement;
@@ -362,10 +363,19 @@ impl ASTNode for VarVariableStatement {
break;
}
}
- Ok(VarVariableStatement {
- position: start.union(declarations.last().unwrap().get_position()),
- declarations,
- })
+
+ let position = if let Some(last) = declarations.last() {
+ start.union(last.get_position())
+ } else {
+ let position = start.with_length(3);
+ if options.partial_syntax {
+ position
+ } else {
+ return Err(ParseError::new(ParseErrors::ExpectedDeclaration, position));
+ }
+ };
+
+ Ok(VarVariableStatement { declarations, position })
}
fn to_string_from_buffer(
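// Shape of the new fallback above, on toy types: with at least one declarator
// the statement spans from the keyword to the last declarator; with none, a
// three-character span (just `var`) is used under partial syntax, otherwise an
// "expected declaration" error is produced. `Span` here is illustrative.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span {
    start: u32,
    end: u32,
}

fn var_statement_span(
    keyword_start: u32,
    declaration_spans: &[Span],
    partial_syntax: bool,
) -> Result<Span, &'static str> {
    if let Some(last) = declaration_spans.last() {
        Ok(Span { start: keyword_start, end: last.end })
    } else if partial_syntax {
        // Recoverable in partial-syntax mode: point at the `var` keyword itself
        Ok(Span { start: keyword_start, end: keyword_start + 3 })
    } else {
        Err("expected identifier after variable declaration keyword")
    }
}

fn main() {
    assert_eq!(var_statement_span(0, &[], true), Ok(Span { start: 0, end: 3 }));
    assert!(var_statement_span(0, &[], false).is_err());
    assert_eq!(
        var_statement_span(0, &[Span { start: 4, end: 9 }], false),
        Ok(Span { start: 0, end: 9 })
    );
}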
diff --git a/parser/src/tokens.rs b/parser/src/tokens.rs
index 6dc7427d..7eb883a0 100644
--- a/parser/src/tokens.rs
+++ b/parser/src/tokens.rs
@@ -175,6 +175,7 @@ impl tokenizer_lib::TokenTrait for TSXToken {
}
impl tokenizer_lib::sized_tokens::SizedToken for TSXToken {
+ #[allow(clippy::cast_possible_truncation)]
fn length(&self) -> u32 {
match self {
TSXToken::Keyword(kw) => kw.length(),
@@ -355,6 +356,7 @@ impl TSXKeyword {
matches!(self, TSXKeyword::Function | TSXKeyword::Async)
}
+ #[allow(clippy::cast_possible_truncation)]
pub(crate) fn length(self) -> u32 {
self.to_str().len() as u32
}
@@ -405,7 +407,7 @@ impl TSXToken {
pub fn is_expression_prefix(&self) -> bool {
matches!(
self,
- TSXToken::Keyword(TSXKeyword::Return | TSXKeyword::Case | TSXKeyword::Yield | TSXKeyword::Throw | TSXKeyword::TypeOf | TSXKeyword::Await)
+ TSXToken::Keyword(TSXKeyword::Return | TSXKeyword::Case | TSXKeyword::Yield | TSXKeyword::Throw | TSXKeyword::TypeOf | TSXKeyword::In | TSXKeyword::Of | TSXKeyword::Await)
| TSXToken::Arrow
// for `const x = 2; /something/g`
| TSXToken::SemiColon
@@ -417,6 +419,9 @@ impl TSXToken {
| TSXToken::LogicalNot
| TSXToken::LogicalAnd
| TSXToken::LogicalOr
+ | TSXToken::BitwiseNot
+ | TSXToken::BitwiseAnd
+ | TSXToken::BitwiseOr
| TSXToken::Multiply
| TSXToken::Add
| TSXToken::Subtract
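// The extra `is_expression_prefix` tokens above feed the lexer's `/`
// disambiguation: after a token that can only precede an expression, a `/`
// starts a regular-expression literal; after a value it is division. A toy
// version of that decision with a hypothetical stand-in for the previous
// token kind (not the real `TSXToken`):
#[derive(Debug, Clone, Copy, PartialEq)]
enum PreviousToken {
    Identifier,
    CloseParenthesis,
    KeywordReturn,
    KeywordIn,
    BitwiseOr,
    SemiColon,
}

fn slash_starts_regex(previous: PreviousToken) -> bool {
    matches!(
        previous,
        PreviousToken::KeywordReturn
            | PreviousToken::KeywordIn
            | PreviousToken::BitwiseOr
            | PreviousToken::SemiColon
    )
}

fn main() {
    assert!(slash_starts_regex(PreviousToken::KeywordReturn)); // `return /ab+c/;`
    assert!(slash_starts_regex(PreviousToken::SemiColon)); // `const x = 2; /thing/g`
    assert!(!slash_starts_regex(PreviousToken::Identifier)); // `total / count`
    assert!(!slash_starts_regex(PreviousToken::CloseParenthesis)); // `(a + b) / c`
}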
diff --git a/parser/src/types/interface.rs b/parser/src/types/interface.rs
index 502da208..a27c2ef7 100644
--- a/parser/src/types/interface.rs
+++ b/parser/src/types/interface.rs
@@ -55,7 +55,7 @@ impl ASTNode for InterfaceDeclaration {
.is_some()
.then(|| {
crate::parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)
- .map(|(params, _)| params)
+ .map(|(params, _, _)| params)
})
.transpose()?;
@@ -256,7 +256,7 @@ impl ASTNode for InterfaceMember {
}
// Caller self with generic parameters
TSXToken::OpenChevron => {
- let (type_parameters, _start_pos) =
+ let (type_parameters, _, _start_pos) =
parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)?;
let parameters =
TypeAnnotationFunctionParameters::from_reader(reader, state, options)?;
@@ -285,7 +285,7 @@ impl ASTNode for InterfaceMember {
.is_some()
.then(|| parse_bracketed(reader, state, options, None, TSXToken::CloseChevron))
.transpose()?
- .map(|(tp, _)| tp);
+ .map(|(tp, _, _)| tp);
let parameters =
TypeAnnotationFunctionParameters::from_reader(reader, state, options)?;
@@ -480,7 +480,7 @@ impl ASTNode for InterfaceMember {
})
.transpose()?;
- (property_key, type_parameters.map(|(tp, _)| tp))
+ (property_key, type_parameters.map(|(tp, _, _)| tp))
};
let start = readonly_position.unwrap_or_else(|| name.get_position());
diff --git a/parser/src/types/type_alias.rs b/parser/src/types/type_alias.rs
index 65ef0836..7f5c3ae3 100644
--- a/parser/src/types/type_alias.rs
+++ b/parser/src/types/type_alias.rs
@@ -29,7 +29,7 @@ impl ASTNode for TypeAlias {
.is_some()
.then(|| {
crate::parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)
- .map(|(params, _)| params)
+ .map(|(params, _, _)| params)
})
.transpose()?;
diff --git a/parser/src/types/type_annotations.rs b/parser/src/types/type_annotations.rs
index e90e3068..c322f806 100644
--- a/parser/src/types/type_annotations.rs
+++ b/parser/src/types/type_annotations.rs
@@ -78,7 +78,11 @@ pub enum TypeAnnotation {
/// KeyOf
KeyOf(Box<TypeAnnotation>, Span),
TypeOf(Box<TypeAnnotation>, Span),
- Infer(String, Span),
+ Infer {
+ name: String,
+ extends: Option<Box<TypeAnnotation>>,
+ position: Span,
+ },
/// This is technically a special return type in TypeScript but we can make a superset behavior here
Asserts(Box<TypeAnnotation>, Span),
Extends {
@@ -115,7 +119,9 @@ pub enum TypeAnnotation {
Marker(Marker, Span),
}
-impl ListItem for TypeAnnotation {}
+impl ListItem for TypeAnnotation {
+ type LAST = ();
+}
#[derive(Debug, Clone, PartialEq)]
#[apply(derive_ASTNode)]
@@ -263,11 +269,19 @@ impl ASTNode for TypeAnnotation {
buf.push_str("typeof ");
on.to_string_from_buffer(buf, options, local);
}
- Self::Infer(name, _pos) => {
+ Self::Infer { name, extends, position: _ } => {
buf.push_str("infer ");
buf.push_str(name.as_str());
+ if let Some(ref extends) = extends {
+ buf.push_str(" extends ");
+ extends.to_string_from_buffer(buf, options, local);
+ }
+ }
+ Self::NamespacedName(from, to, _) => {
+ buf.push_str(from);
+ buf.push('.');
+ buf.push_str(to);
}
- Self::NamespacedName(..) => todo!(),
Self::ObjectLiteral(members, _) => {
to_string_bracketed(members, ('{', '}'), buf, options, local);
}
@@ -350,7 +364,12 @@ impl ASTNode for TypeAnnotation {
}
buf.push('`');
}
- Self::Symbol { .. } => buf.push_str("symbol"),
+ Self::Symbol { unique, .. } => {
+ if *unique {
+ buf.push_str("unique ");
+ }
+ buf.push_str("symbol");
+ }
Self::Extends { item, extends, .. } => {
item.to_string_from_buffer(buf, options, local);
buf.push_str(" extends ");
@@ -442,8 +461,22 @@ impl TypeAnnotation {
Token(TSXToken::Keyword(TSXKeyword::Infer), start) => {
let token = reader.next().ok_or_else(parse_lexing_error)?;
let (name, position) = token_as_identifier(token, "infer name")?;
- let position = start.union(position);
- Self::Infer(name, position)
+ let (position, extends) = if reader
+ .conditional_next(|t| matches!(t, TSXToken::Keyword(TSXKeyword::Extends)))
+ .is_some()
+ {
+ let extends = TypeAnnotation::from_reader_with_config(
+ reader,
+ state,
+ options,
+ Some(TypeOperatorKind::Query),
+ None,
+ )?;
+ (start.union(extends.get_position()), Some(Box::new(extends)))
+ } else {
+ (start.union(position), None)
+ };
+ Self::Infer { name, extends, position }
}
Token(TSXToken::Keyword(TSXKeyword::Asserts), start) => {
let predicate = TypeAnnotation::from_reader_with_config(
@@ -482,8 +515,8 @@ impl TypeAnnotation {
}
}
t @ Token(TSXToken::Keyword(TSXKeyword::Unique), _) => {
- let sym_pos = reader.expect_next(TSXToken::Keyword(TSXKeyword::Symbol))?;
- let position = t.get_span().union(sym_pos.with_length("symbol".len()));
+ let kw_pos = reader.expect_next(TSXToken::Keyword(TSXKeyword::Symbol))?;
+ let position = t.get_span().union(kw_pos.with_length("symbol".len()));
#[cfg(feature = "extras")]
let name =
reader.conditional_next(|t| matches!(t, TSXToken::StringLiteral(..))).map(
@@ -496,7 +529,7 @@ impl TypeAnnotation {
},
);
Self::Symbol {
- unique: false,
+ unique: true,
position,
#[cfg(feature = "extras")]
name,
@@ -575,7 +608,7 @@ impl TypeAnnotation {
}
}
Token(TSXToken::OpenChevron, start) => {
- let (type_parameters, _) =
+ let (type_parameters, _, _) =
parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)?;
let parameters =
TypeAnnotationFunctionParameters::from_reader(reader, state, options)?;
@@ -596,7 +629,7 @@ impl TypeAnnotation {
}
// Tuple literal type
Token(TSXToken::OpenBracket, start) => {
- let (members, end) =
+ let (members, _, end) =
parse_bracketed(reader, state, options, None, TSXToken::CloseBracket)?;
let position = start.union(end);
Self::TupleLiteral(members, position)
@@ -642,7 +675,7 @@ impl TypeAnnotation {
.is_some()
.then(|| {
parse_bracketed(reader, state, options, None, TSXToken::CloseChevron)
- .map(|(params, _items)| params)
+ .map(|(params, _, _)| params)
})
.transpose()?;
let parameters =
@@ -1121,9 +1154,7 @@ impl ASTNode for TupleLiteralElement {
impl ListItem for TupleLiteralElement {
const EMPTY: Option<Self> = None;
- fn allow_comma_after(&self) -> bool {
- true
- }
+ type LAST = ();
}
#[cfg(test)]
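// Toy sketch of the optional `extends` clause now captured by the `Infer`
// variant above: after `infer <name>`, a constraint is consumed only when the
// `extends` keyword follows. Token handling is reduced to a string slice;
// `InferType` is illustrative, not the parser's AST node.
#[derive(Debug, PartialEq)]
struct InferType {
    name: String,
    extends: Option<String>,
}

fn parse_infer(tokens: &[&str]) -> Option<InferType> {
    let mut iter = tokens.iter().copied();
    if iter.next()? != "infer" {
        return None;
    }
    let name = iter.next()?.to_owned();
    // Peek (via clone) for the optional keyword before consuming a constraint
    let extends = if iter.clone().next() == Some("extends") {
        iter.next();
        Some(iter.next()?.to_owned())
    } else {
        None
    };
    Some(InferType { name, extends })
}

fn main() {
    assert_eq!(
        parse_infer(&["infer", "T", "extends", "string"]),
        Some(InferType { name: "T".into(), extends: Some("string".into()) })
    );
    assert_eq!(
        parse_infer(&["infer", "T"]),
        Some(InferType { name: "T".into(), extends: None })
    );
}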
diff --git a/parser/src/types/type_declarations.rs b/parser/src/types/type_declarations.rs
index 2bc068cf..bc6987a3 100644
--- a/parser/src/types/type_declarations.rs
+++ b/parser/src/types/type_declarations.rs
@@ -18,7 +18,9 @@ pub struct TypeParameter {
pub is_constant: bool,
}
-impl ListItem for TypeParameter {}
+impl ListItem for TypeParameter {
+ type LAST = ();
+}
impl ASTNode for TypeParameter {
fn from_reader(
diff --git a/parser/src/variable_fields.rs b/parser/src/variable_fields.rs
index da74b0f0..a53a2807 100644
--- a/parser/src/variable_fields.rs
+++ b/parser/src/variable_fields.rs
@@ -88,11 +88,17 @@ pub enum VariableField {
/// `x`
Name(VariableIdentifier),
/// `[x, y, z]`
- /// TODO spread last
- Array(Vec<WithComment<ArrayDestructuringField<VariableField>>>, Span),
+ Array {
+ members: Vec<WithComment<ArrayDestructuringField<VariableField>>>,
+ spread: Option<SpreadDestructuringField<VariableField>>,
+ position: Span,
+ },
/// `{ x, y: z }`.
- /// TODO spread last
- Object(Vec<WithComment<ObjectDestructuringField<VariableField>>>, Span),
+ Object {
+ members: Vec<WithComment<ObjectDestructuringField<VariableField>>>,
+ spread: Option<SpreadDestructuringField<VariableField>>,
+ position: Span,
+ },
}
impl ASTNode for VariableField {
@@ -104,15 +110,15 @@ impl ASTNode for VariableField {
match reader.peek().ok_or_else(parse_lexing_error)?.0 {
TSXToken::OpenBrace => {
let Token(_, start_pos) = reader.next().unwrap();
- let (members, last_pos) =
+ let (members, spread, last_pos) =
parse_bracketed(reader, state, options, None, TSXToken::CloseBrace)?;
- Ok(Self::Object(members, start_pos.union(last_pos)))
+ Ok(Self::Object { members, spread, position: start_pos.union(last_pos) })
}
TSXToken::OpenBracket => {
let Token(_, start_pos) = reader.next().unwrap();
- let (items, end) =
+ let (members, spread, end) =
parse_bracketed(reader, state, options, None, TSXToken::CloseBracket)?;
- Ok(Self::Array(items, start_pos.union(end)))
+ Ok(Self::Array { members, spread, position: start_pos.union(end) })
}
_ => Ok(Self::Name(VariableIdentifier::from_reader(reader, state, options)?)),
}
@@ -129,7 +135,7 @@ impl ASTNode for VariableField {
buf.add_mapping(&identifier.get_position().with_source(local.under));
identifier.to_string_from_buffer(buf, options, local);
}
- Self::Array(members, _) => {
+ Self::Array { members, spread, position: _ } => {
buf.push('[');
for (at_end, member) in members.iter().endiate() {
member.to_string_from_buffer(buf, options, local);
@@ -138,9 +144,17 @@ impl ASTNode for VariableField {
options.push_gap_optionally(buf);
}
}
+ if let Some(ref spread) = spread {
+ if !members.is_empty() {
+ buf.push(',');
+ options.push_gap_optionally(buf);
+ }
+ buf.push_str("...");
+ spread.0.to_string_from_buffer(buf, options, local);
+ }
buf.push(']');
}
- Self::Object(members, _) => {
+ Self::Object { members, spread, position: _ } => {
buf.push('{');
options.push_gap_optionally(buf);
for (at_end, member) in members.iter().endiate() {
@@ -150,6 +164,14 @@ impl ASTNode for VariableField {
options.push_gap_optionally(buf);
}
}
+ if let Some(ref spread) = spread {
+ if !members.is_empty() {
+ buf.push(',');
+ options.push_gap_optionally(buf);
+ }
+ buf.push_str("...");
+ spread.0.to_string_from_buffer(buf, options, local);
+ }
options.push_gap_optionally(buf);
buf.push('}');
}
@@ -158,7 +180,9 @@ impl ASTNode for VariableField {
fn get_position(&self) -> Span {
match self {
- VariableField::Array(_, position) | VariableField::Object(_, position) => *position,
+ VariableField::Array { position, .. } | VariableField::Object { position, .. } => {
+ *position
+ }
VariableField::Name(id) => id.get_position(),
}
}
@@ -212,17 +236,32 @@ impl DestructuringFieldInto for crate::ast::LHSOfAssignment {
#[derive(Debug, Clone, PartialEq, Eq)]
#[apply(derive_ASTNode)]
pub enum ArrayDestructuringField<T: DestructuringFieldInto> {
- Spread(T, Span),
Name(T, T::TypeAnnotation, Option<Box<Expression>>),
Comment { content: String, is_multiline: bool, position: Span },
None,
}
+/// Covers [`ArrayDestructuring`] AND [`ObjectDestructuringField`]
+#[derive(Debug, Clone, PartialEq, Eq, visitable_derive::Visitable)]
+#[apply(derive_ASTNode)]
+pub struct SpreadDestructuringField<T: DestructuringFieldInto>(pub Box<T>, pub Span);
+
impl<T: DestructuringFieldInto> ListItem for WithComment<ArrayDestructuringField<T>> {
const EMPTY: Option<Self> = Some(WithComment::None(ArrayDestructuringField::None));
- fn allow_comma_after(&self) -> bool {
- !matches!(self.get_ast_ref(), ArrayDestructuringField::Spread(..))
+ const LAST_PREFIX: Option<TSXToken> = Some(TSXToken::Spread);
+
+ type LAST = SpreadDestructuringField<T>;
+
+ fn parse_last_item(
+ reader: &mut impl TokenReader,
+ state: &mut crate::ParsingState,
+ options: &ParseOptions,
+ ) -> ParseResult<Self::LAST> {
+ let start = reader.expect_next(TSXToken::Spread)?;
+ let node = T::from_reader(reader, state, options)?;
+ let position = start.union(node.get_position());
+ Ok(SpreadDestructuringField(Box::new(node), position))
}
}
@@ -233,12 +272,7 @@ impl ASTNode for ArrayDestructuringField {
options: &ParseOptions,
) -> ParseResult {
let Token(token, _start) = reader.peek().ok_or_else(parse_lexing_error)?;
- if let TSXToken::Spread = token {
- let token = reader.next().unwrap();
- let to = T::from_reader(reader, state, options)?;
- let position = token.get_span().union(to.get_position());
- Ok(Self::Spread(to, position))
- } else if matches!(token, TSXToken::Comma | TSXToken::CloseBracket) {
+ if matches!(token, TSXToken::Comma | TSXToken::CloseBracket) {
Ok(Self::None)
} else {
let name = T::from_reader(reader, state, options)?;
@@ -266,10 +300,6 @@ impl ASTNode for ArrayDestructuringField {
local: crate::LocalToStringInformation,
) {
match self {
- Self::Spread(name, _) => {
- buf.push_str("...");
- name.to_string_from_buffer(buf, options, local);
- }
Self::Name(name, _annotation, default_value) => {
name.to_string_from_buffer(buf, options, local);
if let Some(default_value) = default_value {
@@ -292,8 +322,7 @@ impl ASTNode for ArrayDestructuringField {
fn get_position(&self) -> Span {
match self {
- ArrayDestructuringField::Comment { position, .. }
- | ArrayDestructuringField::Spread(_, position) => *position,
+ ArrayDestructuringField::Comment { position, .. } => *position,
// TODO misses out optional expression
ArrayDestructuringField::Name(vf, ..) => vf.get_position(),
ArrayDestructuringField::None => source_map::Nullable::NULL,
@@ -311,8 +340,6 @@ impl ASTNode for ArrayDestructuringField {
pub enum ObjectDestructuringField<T: DestructuringFieldInto> {
/// `{ x }` and (annoyingly) `{ x = 2 }`
Name(VariableIdentifier, T::TypeAnnotation, Option<Box<Expression>>, Span),
- /// `{ ...x }`
- Spread(T, Span),
/// `{ x: y }`
Map {
from: PropertyKey,
@@ -323,7 +350,22 @@ pub enum ObjectDestructuringField {
},
}
-impl<T: DestructuringFieldInto> ListItem for WithComment<ObjectDestructuringField<T>> {}
+impl<T: DestructuringFieldInto> ListItem for WithComment<ObjectDestructuringField<T>> {
+ const LAST_PREFIX: Option<TSXToken> = Some(TSXToken::Spread);
+
+ type LAST = SpreadDestructuringField<T>;
+
+ fn parse_last_item(
+ reader: &mut impl TokenReader,
+ state: &mut crate::ParsingState,
+ options: &ParseOptions,
+ ) -> ParseResult<Self::LAST> {
+ let start = reader.expect_next(TSXToken::Spread)?;
+ let node = T::from_reader(reader, state, options)?;
+ let position = start.union(node.get_position());
+ Ok(SpreadDestructuringField(Box::new(node), position))
+ }
+}
impl ASTNode for ObjectDestructuringField {
fn from_reader(
@@ -331,51 +373,44 @@ impl ASTNode for ObjectDestructuringField {
state: &mut crate::ParsingState,
options: &ParseOptions,
) -> ParseResult {
- if let Token(TSXToken::Spread, _) = reader.peek().ok_or_else(parse_lexing_error)? {
- let token = reader.next().unwrap();
- let name = T::from_reader(reader, state, options)?;
- let position = token.get_span().union(name.get_position());
- Ok(Self::Spread(name, position))
- } else {
- let key = PropertyKey::from_reader(reader, state, options)?;
- if reader.peek().is_some_and(|Token(t, _)| is_destructuring_into_marker(t, options)) {
- reader.next();
- let name = WithComment::<T>::from_reader(reader, state, options)?;
- let annotation = T::type_annotation_from_reader(reader, state, options)?;
-
- let default_value = reader
- .conditional_next(|t| matches!(t, TSXToken::Assign))
- .is_some()
- .then(|| Expression::from_reader(reader, state, options).map(Box::new))
- .transpose()?;
-
- let position = if let Some(ref dv) = default_value {
- key.get_position().union(dv.get_position())
- } else {
- key.get_position()
- };
-
- Ok(Self::Map { from: key, annotation, name, default_value, position })
- } else if let PropertyKey::Ident(name, key_pos, _) = key {
- let default_value = reader
- .conditional_next(|t| matches!(t, TSXToken::Assign))
- .is_some()
- .then(|| Expression::from_reader(reader, state, options).map(Box::new))
- .transpose()?;
-
- let standard = VariableIdentifier::Standard(name, key_pos);
- let annotation = T::type_annotation_from_reader(reader, state, options)?;
- let position = if let Some(ref dv) = default_value {
- key_pos.union(dv.get_position())
- } else {
- key_pos
- };
-
- Ok(Self::Name(standard, annotation, default_value, position))
+ let key = PropertyKey::from_reader(reader, state, options)?;
+ if reader.peek().is_some_and(|Token(t, _)| is_destructuring_into_marker(t, options)) {
+ reader.next();
+ let name = WithComment::<T>::from_reader(reader, state, options)?;
+ let annotation = T::type_annotation_from_reader(reader, state, options)?;
+
+ let default_value = reader
+ .conditional_next(|t| matches!(t, TSXToken::Assign))
+ .is_some()
+ .then(|| Expression::from_reader(reader, state, options).map(Box::new))
+ .transpose()?;
+
+ let position = if let Some(ref dv) = default_value {
+ key.get_position().union(dv.get_position())
} else {
- let token = reader.next().ok_or_else(parse_lexing_error)?;
- throw_unexpected_token_with_token(token, &[TSXToken::Colon])
- }
+ key.get_position()
+ };
+
+ Ok(Self::Map { from: key, annotation, name, default_value, position })
+ } else if let PropertyKey::Identifier(name, key_pos, _) = key {
+ let default_value = reader
+ .conditional_next(|t| matches!(t, TSXToken::Assign))
+ .is_some()
+ .then(|| Expression::from_reader(reader, state, options).map(Box::new))
+ .transpose()?;
+
+ let standard = VariableIdentifier::Standard(name, key_pos);
+ let annotation = T::type_annotation_from_reader(reader, state, options)?;
+ let position = if let Some(ref dv) = default_value {
+ key_pos.union(dv.get_position())
+ } else {
+ key_pos
+ };
+
+ Ok(Self::Name(standard, annotation, default_value, position))
+ } else {
+ let token = reader.next().ok_or_else(parse_lexing_error)?;
+ throw_unexpected_token_with_token(token, &[TSXToken::Colon])
}
}
@@ -386,10 +421,6 @@ impl ASTNode for ObjectDestructuringField {
local: crate::LocalToStringInformation,
) {
match self {
- Self::Spread(name, _) => {
- buf.push_str("...");
- name.to_string_from_buffer(buf, options, local);
- }
Self::Name(name, _annotation, default_value, ..) => {
name.to_string_from_buffer(buf, options, local);
if let Some(default_value) = default_value {
@@ -435,12 +466,12 @@ impl Visitable for VariableField {
visitors.visit_variable(&item, data, chain);
}
}
- VariableField::Array(array_destructuring_fields, _) => array_destructuring_fields
- .iter()
- .for_each(|f| f.visit(visitors, data, options, chain)),
- VariableField::Object(object_destructuring_fields, _) => object_destructuring_fields
- .iter()
- .for_each(|f| f.visit(visitors, data, options, chain)),
+ VariableField::Array { members, spread: _, .. } => {
+ members.iter().for_each(|f| f.visit(visitors, data, options, chain));
+ }
+ VariableField::Object { members, spread: _, .. } => {
+ members.iter().for_each(|f| f.visit(visitors, data, options, chain));
+ }
}
}
@@ -461,12 +492,12 @@ impl Visitable for VariableField {
);
}
}
- VariableField::Array(array_destructuring_fields, _) => array_destructuring_fields
- .iter_mut()
- .for_each(|f| f.visit_mut(visitors, data, options, chain)),
- VariableField::Object(object_destructuring_fields, _) => object_destructuring_fields
- .iter_mut()
- .for_each(|f| f.visit_mut(visitors, data, options, chain)),
+ VariableField::Array { members, spread: _, .. } => {
+ members.iter_mut().for_each(|f| f.visit_mut(visitors, data, options, chain));
+ }
+ VariableField::Object { members, spread: _, .. } => {
+ members.iter_mut().for_each(|f| f.visit_mut(visitors, data, options, chain));
+ }
}
}
}
@@ -485,9 +516,7 @@ impl Visitable for WithComment> {
visitors.visit_variable(&array_destructuring_member, data, chain);
match field {
// TODO should be okay, no nesting here
- ArrayDestructuringField::Comment { .. }
- | ArrayDestructuringField::Spread(..)
- | ArrayDestructuringField::None => {}
+ ArrayDestructuringField::Comment { .. } | ArrayDestructuringField::None => {}
ArrayDestructuringField::Name(variable_field, _, expression) => {
variable_field.visit(visitors, data, options, chain);
expression.visit(visitors, data, options, chain);
@@ -506,9 +535,6 @@ impl Visitable for WithComment> {
MutableVariableOrProperty::ArrayDestructuringMember(self.get_ast_mut());
visitors.visit_variable_mut(&mut array_destructuring_member, data, chain);
match self.get_ast_mut() {
- ArrayDestructuringField::Spread(_, _id) => {
- // TODO should be okay, no nesting here
- }
ArrayDestructuringField::Comment { .. } | ArrayDestructuringField::None => {}
ArrayDestructuringField::Name(variable_field, _, default_value) => {
variable_field.visit_mut(visitors, data, options, chain);
@@ -554,7 +580,6 @@ impl Visitable for WithComment> {
chain,
);
match self.get_ast_ref() {
- ObjectDestructuringField::Spread(_name, _) => {}
ObjectDestructuringField::Name(_name, _, default_value, _) => {
default_value.visit(visitors, data, options, chain);
}
@@ -583,7 +608,6 @@ impl Visitable for WithComment> {
chain,
);
match self.get_ast_mut() {
- ObjectDestructuringField::Spread(_id, _) => {}
ObjectDestructuringField::Name(_id, _, default_value, _) => {
default_value.visit_mut(visitors, data, options, chain);
}
@@ -655,8 +679,8 @@ mod tests {
fn array() {
assert_matches_ast!(
"[x, y, z]",
- VariableField::Array(
- Deref @ [WithComment::None(ArrayDestructuringField::Name(
+ VariableField::Array {
+ members: Deref @ [WithComment::None(ArrayDestructuringField::Name(
VariableField::Name(VariableIdentifier::Standard(Deref @ "x", span!(1, 2))),
None,
None,
@@ -669,13 +693,15 @@ mod tests {
None,
None,
))],
- _,
- )
+ spread: _,
+ position: _
+ }
);
assert_matches_ast!(
"[x,,z]",
- VariableField::Array(
+ VariableField::Array {
+ members:
Deref @ [WithComment::None(ArrayDestructuringField::Name(
VariableField::Name(VariableIdentifier::Standard(Deref @ "x", span!(1, 2))),
None,
@@ -685,8 +711,9 @@ mod tests {
None,
None,
))],
- span!(0, 6),
- )
+ spread: None,
+ position: span!(0, 6),
+ }
);
}
@@ -694,8 +721,8 @@ mod tests {
fn object() {
assert_matches_ast!(
"{x, y, z}",
- VariableField::Object(
- Deref @ [WithComment::None(ObjectDestructuringField::Name(
+ VariableField::Object {
+ members: Deref @ [WithComment::None(ObjectDestructuringField::Name(
VariableIdentifier::Standard(Deref @ "x", span!(1, 2)),
None,
None,
@@ -711,8 +738,9 @@ mod tests {
None,
span!(7, 8),
))],
- span!(0, 9),
- )
+ spread: None,
+ position: span!(0, 9),
+ }
);
}
@@ -720,7 +748,8 @@ mod tests {
fn name_with_default() {
assert_matches_ast!(
"{ x = 2 }",
- VariableField::Object(
+ VariableField::Object {
+ members:
Deref @ [WithComment::None(ObjectDestructuringField::Name(
VariableIdentifier::Standard(Deref @ "x", span!(2, 3)),
None,
@@ -732,8 +761,9 @@ mod tests {
),
span!(2, 7),
))],
- span!(0, 9),
- )
+ spread: None,
+ position: span!(0, 9),
+ }
);
}
@@ -741,17 +771,15 @@ mod tests {
fn array_spread() {
assert_matches_ast!(
"[x, ...y]",
- VariableField::Array(
- Deref @ [WithComment::None(ArrayDestructuringField::Name(
+ VariableField::Array {
+ members:Deref @ [WithComment::None(ArrayDestructuringField::Name(
VariableField::Name(VariableIdentifier::Standard(Deref @ "x", span!(1, 2))),
None,
None,
- )), WithComment::None(ArrayDestructuringField::Spread(
- VariableField::Name(VariableIdentifier::Standard(Deref @ "y", span!(7, 8))),
- span!(4, 8),
))],
- span!(0, 9),
- )
+ spread: Some(SpreadDestructuringField( Deref @ VariableField::Name(VariableIdentifier::Standard(Deref @ "y", span!(7, 8))), span!(4, 8))),
+ position: span!(0, 9)
+ }
);
}
}
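// Standalone version of the printing pattern used for the new struct-shaped
// destructuring variants: write the listed members, then the optional spread,
// inserting a separating comma only when members precede it. Plain strings
// stand in for AST nodes here.
fn destructuring_to_string(members: &[&str], spread: Option<&str>) -> String {
    let mut buf = String::from("[");
    for (index, member) in members.iter().enumerate() {
        buf.push_str(member);
        if index + 1 != members.len() {
            buf.push_str(", ");
        }
    }
    if let Some(spread) = spread {
        if !members.is_empty() {
            buf.push_str(", ");
        }
        buf.push_str("...");
        buf.push_str(spread);
    }
    buf.push(']');
    buf
}

fn main() {
    assert_eq!(destructuring_to_string(&["x"], Some("y")), "[x, ...y]");
    assert_eq!(destructuring_to_string(&["a", "b"], None), "[a, b]");
    assert_eq!(destructuring_to_string(&[], Some("rest")), "[...rest]");
}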
diff --git a/parser/src/visiting.rs b/parser/src/visiting.rs
index 5b9a6ae6..75407a50 100644
--- a/parser/src/visiting.rs
+++ b/parser/src/visiting.rs
@@ -369,11 +369,9 @@ mod structures {
ImmutableVariableOrProperty::ArrayDestructuringMember(_) => None,
ImmutableVariableOrProperty::ObjectDestructuringMember(o) => {
match o.get_ast_ref() {
- ObjectDestructuringField::Spread(
- VariableField::Name(VariableIdentifier::Standard(a, _)),
- _,
- )
- | ObjectDestructuringField::Name(VariableIdentifier::Standard(a, ..), ..) => Some(a.as_str()),
+ ObjectDestructuringField::Name(VariableIdentifier::Standard(a, ..), ..) => {
+ Some(a.as_str())
+ }
_ => None,
}
}
@@ -389,7 +387,7 @@ mod structures {
// Just want variable names
None
// match property.get_ast_ref() {
- // PropertyKey::Ident(ident, _, _)
+ // PropertyKey::Identifier(ident, _, _)
// | PropertyKey::StringLiteral(ident, _, _) => Some(ident.as_str()),
// PropertyKey::NumberLiteral(_, _) | PropertyKey::Computed(_, _) => None,
// }
@@ -398,7 +396,7 @@ mod structures {
// Just want variable names
None
// match property.get_ast_ref() {
- // PropertyKey::Ident(ident, _, _)
+ // PropertyKey::Identifier(ident, _, _)
// | PropertyKey::StringLiteral(ident, _, _) => Some(ident.as_str()),
// PropertyKey::NumberLiteral(_, _) | PropertyKey::Computed(_, _) => None,
// }