diff --git a/CHANGELOG.md b/CHANGELOG.md
index e2faaf8..a864be5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,6 +2,13 @@
 All notable changes to this project from version 1.2.0 upwards are documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
+## [Unreleased]
+
+### Changed
+- `Position`'s method `isEmpty` is deprecated in favour of `isFlat` to ensure consistency across the [StarLasu](https://github.com/Strumenta/StarLasu) libraries collection.
+- `Issue`'s messages are capitalized.
+- Parsing `Issue`s' position uses the token's length.
+
 ## [1.6.29] – 2024-07-09
 
 ### Fixed
diff --git a/src/model/position.ts b/src/model/position.ts
index 4d5d9d2..18bf10c 100644
--- a/src/model/position.ts
+++ b/src/model/position.ts
@@ -131,10 +131,18 @@ export class Position {
 
     /**
      * If start and end are the same,
-     * then this Position is considered empty.
+     * then this Position is considered flat.
+     */
+    isFlat(): boolean {
+        return this.start.equals(this.end);
+    }
+
+    /**
+     * @deprecated
+     * Use `this.isFlat()` instead.
      */
     isEmpty(): boolean {
-        return this.start.equals(this.end)
+        return this.isFlat();
     }
 
     /**
diff --git a/src/parsing/tylasu-parser.ts b/src/parsing/tylasu-parser.ts
index d3c6e69..acabfa0 100644
--- a/src/parsing/tylasu-parser.ts
+++ b/src/parsing/tylasu-parser.ts
@@ -11,7 +11,8 @@ import {
     Recognizer,
     TerminalNode,
     Token,
-    TokenStream
+    TokenStream,
+    CommonToken
 } from "antlr4ng";
 import {Issue, IssueSeverity} from "../validation";
 import {Point, Position, Source, StringSource} from "../model/position";
@@ -120,6 +121,12 @@ export abstract class TylasuANTLRLexer implements TylasuL
            reportAttemptingFullContext() {},
            reportContextSensitivity() {},
            syntaxError(recognizer: Recognizer, offendingSymbol: S | null, line: number, charPositionInLine: number, msg: string) {
+                const startPoint = new Point(line, charPositionInLine);
+                let endPoint = new Point(line, charPositionInLine);
+                if (offendingSymbol instanceof CommonToken) {
+                    const tokenLength = offendingSymbol.stop - offendingSymbol.start + 1;
+                    endPoint = new Point(offendingSymbol.line, offendingSymbol.column + tokenLength);
+                };
                const regex = /token recognition error at: '(.+)'/
                if (regex.test(msg)){
                    const match = msg.match(regex) as string[];
@@ -127,7 +134,7 @@ export abstract class TylasuANTLRLexer implements TylasuL
                        Issue.lexical(
                            msg || "unspecified",
                            IssueSeverity.ERROR,
-                            Position.ofPoint(new Point(line, charPositionInLine)),
+                            new Position(startPoint, endPoint),
                            undefined,
                            TOKEN_RECOGNITION_ERROR,
                            [
@@ -141,7 +148,7 @@ export abstract class TylasuANTLRLexer implements TylasuL
                        Issue.lexical(
                            msg || "unspecified",
                            IssueSeverity.ERROR,
-                            Position.ofPoint(new Point(line, charPositionInLine)),
+                            new Position(startPoint, endPoint),
                            undefined,
                            SYNTAX_ERROR));
            }
@@ -301,6 +308,12 @@ export abstract class TylasuParser<
            reportAttemptingFullContext() {},
            reportContextSensitivity() {},
            syntaxError(recognizer: Recognizer, offendingSymbol: S | null, line: number, charPositionInLine: number, msg: string) {
+                const startPoint = new Point(line, charPositionInLine);
+                let endPoint = new Point(line, charPositionInLine);
+                if (offendingSymbol instanceof CommonToken) {
+                    const tokenLength = offendingSymbol.stop - offendingSymbol.start + 1;
+                    endPoint = new Point(offendingSymbol.line, offendingSymbol.column + tokenLength);
+                };
                const mismatchedRegex = /^mismatched input '(|.+)' expecting {([a-zA-Z_]+(, [a-zA-Z_]+)*)}$/
                if (mismatchedRegex.test(msg)) {
                    const match = msg.match(mismatchedRegex) as string[];
@@ -316,7 +329,7 @@ export abstract class TylasuParser<
                        Issue.syntactic(
                            msg,
                            IssueSeverity.ERROR,
-                            Position.ofPoint(new Point(line, charPositionInLine)),
+                            new Position(startPoint, endPoint),
                            undefined,
                            MISMATCHED_INPUT,
                            args));
@@ -325,7 +338,7 @@ export abstract class TylasuParser<
                        Issue.syntactic(
                            msg || "unspecified",
                            IssueSeverity.ERROR,
-                            Position.ofPoint(new Point(line, charPositionInLine)),
+                            new Position(startPoint, endPoint),
                            undefined,
                            SYNTAX_ERROR));
            }
diff --git a/src/utils/capitalize.ts b/src/utils/capitalize.ts
new file mode 100644
index 0000000..8c72dfd
--- /dev/null
+++ b/src/utils/capitalize.ts
@@ -0,0 +1,6 @@
+/**
+* Capitalize the first letter of a string
+*/
+export function capitalize(str: string) {
+    return str.charAt(0).toUpperCase() + str.slice(1);
+}
diff --git a/src/validation.ts b/src/validation.ts
index 15bbd3d..17fb234 100644
--- a/src/validation.ts
+++ b/src/validation.ts
@@ -1,5 +1,6 @@
 import {Node} from "./model/model";
 import {Position} from "./model/position";
+import { capitalize } from "./utils/capitalize";
 
 export enum IssueType { LEXICAL, SYNTACTIC, SEMANTIC}
 
@@ -21,6 +22,8 @@ export class Issue {
        public readonly code?: string,
        public readonly args: IssueArg[] = []
    ) {
+        this.message = capitalize(message);
+
        if (!position) {
            this.position = node?.position;
        }
diff --git a/tests/issues.test.ts b/tests/issues.test.ts
index a9f4b09..3829c62 100644
--- a/tests/issues.test.ts
+++ b/tests/issues.test.ts
@@ -30,4 +30,10 @@ describe('Issues', function() {
            SOURCE_NODE_NOT_MAPPED, [{ name: "nodeType", value: "SomeNode" }]);
        expect(i18next.t(issue.code!, { type: issue.args[0].value })).to.equal("Source node not mapped: SomeNode");
    });
+
+    it("has capitalized messages",
+        function () {
+            let issue = Issue.syntactic("unexpected token: foo", IssueSeverity.ERROR, undefined, undefined, SYNTAX_ERROR);
+            expect(issue.message).to.equal("Unexpected token: foo");
+        });
 });
diff --git a/tests/parsing.test.ts b/tests/parsing.test.ts
index a5ca533..890cdf3 100644
--- a/tests/parsing.test.ts
+++ b/tests/parsing.test.ts
@@ -82,7 +82,7 @@ describe('Parsing', function() {
                IssueType.SYNTACTIC,
                "mismatched input '+' expecting {INT_LIT, DEC_LIT, STRING_LIT, BOOLEAN_LIT}",
                IssueSeverity.ERROR,
-                new Position(new Point(1, 11), new Point(1, 11)),
+                new Position(new Point(1, 11), new Point(1, 12)),
                undefined,
                "parser.mismatchedinput",
                [
@@ -225,4 +225,22 @@ describe('Parsing', function() {
                ]
            )])
    })
+    it("produces issues with non-flat positions",
+        function() {
+            const code =
+                "set set a = 10\n" +
+                "|display c\n";
+            const parser = new SLParser(new ANTLRTokenFactory());
+            const result = parser.parse(code);
+
+            expect(result.issues.length).to.not.eq(0)
+
+            const extraneousInput = result.issues.find(issue => issue.message.startsWith("Extraneous input 'set'"))
+            expect(!(extraneousInput?.position?.isFlat()))
+            expect(extraneousInput?.position).to.eql(new Position(new Point(1, 4), new Point(1, 7)))
+
+            const mismatchedInput = result.issues.find(issue => issue.message.startsWith("Mismatched input 'c'"))
+            expect(!(mismatchedInput?.position?.isFlat()))
+            expect(mismatchedInput?.position).to.eql(new Position(new Point(2, 9), new Point(2, 10)))
+        })
 });
diff --git a/tests/transformation/transformation.test.ts b/tests/transformation/transformation.test.ts
index 5c3d085..c2efa32 100644
--- a/tests/transformation/transformation.test.ts
+++ b/tests/transformation/transformation.test.ts
@@ -101,9 +101,9 @@ describe("AST Transformers", function () {
        transformer.addIssue("warning", IssueSeverity.WARNING);
        transformer.addIssue("info", IssueSeverity.INFO, pos(1, 0, 1, 2));
 
-        expect(transformer.issues[0].message).to.be.equal("error");
-        expect(transformer.issues[1].message).to.be.equal("warning");
-        expect(transformer.issues[2].message).to.be.equal("info");
+        expect(transformer.issues[0].message).to.be.equal("Error");
+        expect(transformer.issues[1].message).to.be.equal("Warning");
+        expect(transformer.issues[2].message).to.be.equal("Info");
    });
    it("transform function does not accept collections as source", function () {
        const transformer = new ASTTransformer();
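For reviewers, a minimal usage sketch of the behaviour this change set introduces: the new `Position.isFlat()`, the deprecated `isEmpty()` alias, and the message capitalization performed by the `Issue` constructor. This is not part of the patch; the relative import paths are an assumption based on the repository layout shown above, and package consumers would import from the library's public entry point instead.

```typescript
// Assumed in-repository import paths; adjust to the package entry point when
// consuming the published library.
import {Point, Position} from "./src/model/position";
import {Issue, IssueSeverity} from "./src/validation";

// A Position whose start and end coincide is now described as "flat".
// isEmpty() still works, but is deprecated and simply delegates to isFlat().
const flat = new Position(new Point(1, 11), new Point(1, 11));
console.log(flat.isFlat());   // true
console.log(flat.isEmpty());  // true (deprecated alias)

// Issue messages are capitalized on construction.
// position, node and code are omitted here (passed as undefined).
const issue = Issue.syntactic("unexpected token: foo", IssueSeverity.ERROR, undefined, undefined, undefined);
console.log(issue.message);   // "Unexpected token: foo"

// Lexer and parser issues now span the whole offending token, so their
// positions are generally no longer flat, e.g. a one-character token such as
// '+' at line 1, column 11 yields a position ending at column 12.
const tokenSpan = new Position(new Point(1, 11), new Point(1, 12));
console.log(tokenSpan.isFlat());  // false
```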