Modernize lexers #130

Closed · wants to merge 6 commits
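Every file in this PR gets the same mechanical update: the require('lexer') boilerplate is replaced by the lexer global, the lexer is constructed with lexer.new(...) instead of a hard-coded name, and the hand-written whitespace rule is dropped. Below is a minimal sketch of the resulting skeleton, assuming (as the diffs imply) that the host provides lexer as a global, forwards the lexer's name through the chunk's '...' arguments, and installs the default whitespace rule itself; the keyword and comment rules are illustrative, not taken from this PR.

-- Modernized lexer skeleton (sketch, not an exact file from this PR).
local lexer = lexer -- the lexer module is provided as a global; no require('lexer')
local token, word_match = lexer.token, lexer.word_match
local P, S = lpeg.P, lpeg.S

local lex = lexer.new(...) -- the lexer's name arrives via the chunk's '...' arguments

-- Rules are declared as before; only the explicit whitespace rule is gone.
lex:add_rule('keyword', token(lexer.KEYWORD, word_match('and break do else end')))
lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('--')))

return lex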
7 changes: 2 additions & 5 deletions lexers/actionscript.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Actionscript LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('actionscript')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
7 changes: 2 additions & 5 deletions lexers/ada.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Ada LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('ada')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match({
7 changes: 2 additions & 5 deletions lexers/antlr.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- ANTLR LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('antlr')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
5 changes: 1 addition & 4 deletions lexers/apdl.lua
@@ -1,15 +1,12 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- APDL LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
 local lex = lexer.new('apdl', {case_insensitive_fold_points = true})
 
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
-
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match({
 '*abbr', '*abb', '*afun', '*afu', '*ask', '*cfclos', '*cfc', '*cfopen', '*cfo', '*cfwrite',
7 changes: 2 additions & 5 deletions lexers/apl.lua
@@ -1,14 +1,11 @@
 -- Copyright 2015-2025 David B. Lamkins <[email protected]>. See LICENSE.
 -- APL LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('apl')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Comments.
 lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol(P('⍝') + '#')))
7 changes: 2 additions & 5 deletions lexers/applescript.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Applescript LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('applescript')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match({
5 changes: 1 addition & 4 deletions lexers/batch.lua
@@ -1,15 +1,12 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Batch LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
 local lex = lexer.new('batch', {case_insensitive_fold_points = true})
 
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
-
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match({
 'cd', 'chdir', 'md', 'mkdir', 'cls', 'for', 'if', 'echo', 'echo.', 'move', 'copy', 'ren', 'del',
7 changes: 2 additions & 5 deletions lexers/boo.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Boo LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('boo')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
7 changes: 2 additions & 5 deletions lexers/caml.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- OCaml LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('caml')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
7 changes: 2 additions & 5 deletions lexers/clojure.lua
@@ -2,14 +2,11 @@
 -- Clojure LPeg lexer.
 -- Contributed by Christos Chatzifountas.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('clojure')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
5 changes: 1 addition & 4 deletions lexers/coffeescript.lua
@@ -1,15 +1,12 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- CoffeeScript LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local word_match = lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
 local lex = lexer.new('coffeescript', {fold_by_indentation = true})
 
--- Whitespace.
-lex:add_rule('whitespace', lex:tag(lexer.WHITESPACE, lexer.space^1))
-
 -- Keywords.
 lex:add_rule('keyword', lex:tag(lexer.KEYWORD, word_match{
 'all', 'and', 'bind', 'break', 'by', 'case', 'catch', 'class', 'const', 'continue', 'default',
7 changes: 2 additions & 5 deletions lexers/context.lua
@@ -1,19 +1,16 @@
 -- Copyright 2006-2025 Robert Gieseke, Lars Otter. See LICENSE.
 -- ConTeXt LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('context')
+local lex = lexer.new(...)
 
 -- TeX and ConTeXt mkiv environment definitions.
 local beginend = (P('begin') + 'end')
 local startstop = (P('start') + 'stop')
 
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
-
 -- Comments.
 lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('%')))
 
7 changes: 2 additions & 5 deletions lexers/crystal.lua
@@ -2,14 +2,11 @@
 -- Copyright 2017 Michel Martens.
 -- Crystal LPeg lexer (based on Ruby).
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('crystal')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
7 changes: 2 additions & 5 deletions lexers/csharp.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- C# LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('csharp')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
7 changes: 2 additions & 5 deletions lexers/dart.lua
@@ -2,14 +2,11 @@
 -- Dart LPeg lexer.
 -- Written by Brian Schott (@Hackerpilot on Github).
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('dart')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
7 changes: 2 additions & 5 deletions lexers/dot.lua
@@ -2,14 +2,11 @@
 -- Dot LPeg lexer.
 -- Based off of lexer code by Mitchell.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('dot')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
7 changes: 2 additions & 5 deletions lexers/eiffel.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Eiffel LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('eiffel')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
5 changes: 1 addition & 4 deletions lexers/elixir.lua
@@ -2,15 +2,12 @@
 -- Contributed by Richard Philips.
 -- Elixir LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local B, P, S = lpeg.B, lpeg.P, lpeg.S
 
 local lex = lexer.new('elixir', {fold_by_indentation = true})
 
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
-
 -- Sigils.
 local sigil11 = '~' * S('CRSW') * lexer.range('<', '>')
 local sigil12 = '~' * S('CRSW') * lexer.range('{', '}')
5 changes: 1 addition & 4 deletions lexers/elm.lua
@@ -2,15 +2,12 @@
 -- Elm LPeg lexer
 -- Adapted from Haskell LPeg lexer by Karl Schultheisz.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
 local lex = lexer.new('elm', {fold_by_indentation = true})
 
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
-
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match(
 'if then else case of let in module import as exposing type alias port')))
7 changes: 2 additions & 5 deletions lexers/erlang.lua
@@ -1,14 +1,11 @@
 -- Copyright 2006-2025 Mitchell. See LICENSE.
 -- Erlang LPeg lexer.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('erlang')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
8 changes: 2 additions & 6 deletions lexers/fantom.lua
@@ -2,15 +2,11 @@
 -- Fantom LPeg lexer.
 -- Based on Java LPeg lexer by Mitchell and Vim's Fantom syntax.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('fantom')
-
--- Whitespace.
-local ws = token(lexer.WHITESPACE, lexer.space^1)
-lex:add_rule('whitespace', ws)
+local lex = lexer.new(...)
 
 -- Classes.
 local type = token(lexer.TYPE, lexer.word)
7 changes: 2 additions & 5 deletions lexers/faust.lua
@@ -1,14 +1,11 @@
 -- Copyright 2015-2025 David B. Lamkins <[email protected]>. See LICENSE.
 -- Faust LPeg lexer, see http://faust.grame.fr/
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
-local lex = lexer.new('faust')
-
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
+local lex = lexer.new(...)
 
 -- Keywords.
 lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
5 changes: 1 addition & 4 deletions lexers/fennel.lua
@@ -2,15 +2,12 @@
 -- Fennel LPeg lexer.
 -- Contributed by Momohime Honda.
 
-local lexer = require('lexer')
+local lexer = lexer
 local token, word_match = lexer.token, lexer.word_match
 local P, S = lpeg.P, lpeg.S
 
 local lex = lexer.new('fennel', {inherit = lexer.load('lua')})
 
--- Whitespace.
-lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))
-
 -- Keywords.
 lex:modify_rule('keyword', token(lexer.KEYWORD, word_match{
 '#', '%', '*', '+', '-', '->>', '->', '-?>>', '-?>', '..', '.', '//', '/', ':', '<=', '<', '=',