Skip to content

Commit

Permalink
Removed auto-defined user word lists.
Browse files Browse the repository at this point in the history
Ideally, user word lists would come after lexer word lists, but early enough in rule order to be
effective. Otherwise SciTE cannot use them as intended.
At the same time, any more than 2 per lexer runs into an LPeg rule limit for some lexers with
embedded languages like Markdown and HTML. While that limit can be lifted with a compile option,
the standalone Lua module will still have this limitation.
  • Loading branch information
orbitalquark committed Sep 26, 2022
1 parent 71a8b42 commit 347a0c7
Show file tree
Hide file tree
Showing 4 changed files with 0 additions and 26 deletions.
7 changes: 0 additions & 7 deletions docs/api.md
Original file line number Diff line number Diff line change
Expand Up @@ -916,13 +916,6 @@ A pattern that matches a sequence of end of line characters.

A pattern that matches any single, non-newline character.

<a id="lexer.num_user_word_lists"></a>
#### `lexer.num_user_word_lists` (number)

The number of word lists to add as rules to every lexer created by `lexer.new()`. These
word lists are intended to be set by users outside the lexer. Each word in a list is tagged
with the name `userlistN`, where N is the index of the list. The default value is `0`.

<a id="lexer.number"></a>
#### `lexer.number` (pattern)

Expand Down
1 change: 0 additions & 1 deletion docs/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,6 @@ Changes:
`lexer.WHITESPACE`.
* Child lexers can extend their parent's keyword lists.
* Added more builtin tag/token names.
* All lexers created with `lexer.new()` have a default set of user word lists.
* Updated Perl lexer to recognize more numbers.
* Updated Fennel lexer.
* Updated Python lexer to highlight class definitions.
Expand Down
14 changes: 0 additions & 14 deletions lexers/lexer.lua
Original file line number Diff line number Diff line change
Expand Up @@ -786,10 +786,6 @@ local M = {}
-- Flag indicating that the line is blank.
-- @field FOLD_HEADER (number)
-- Flag indicating that the line is a fold point.
-- @field num_user_word_lists (number)
-- The number of word lists to add as rules to every lexer created by `lexer.new()`. These
-- word lists are intended to be set by users outside the lexer. Each word in a list is tagged
-- with the name `userlistN`, where N is the index of the list. The default value is `0`.
module('lexer')]=]

local lpeg = _G.lpeg or require('lpeg') -- Scintillua's Lua environment defines _G.lpeg
Expand All @@ -812,8 +808,6 @@ local predefined = {
}
for _, name in ipairs(predefined) do M[name:upper():gsub('%.', '_')] = name end

M.num_user_word_lists = 0 -- TODO: make configurable

---
-- Creates and returns a pattern that tags pattern *patt* with name *name* in lexer *lexer*.
-- If *name* is not a predefined tag name, its Scintilla style will likely need to be defined
Expand Down Expand Up @@ -1474,14 +1468,6 @@ function M.new(name, opts)
-- Use a unique whitespace tag name since embedded lexing relies on these unique names.
lexer:add_rule('whitespace', lexer:tag('whitespace.' .. name, M.space^1))

-- Add placeholders for user-defined word lists.
if not lexer._lexer and not lexer._no_user_word_lists then
for i = 1, M.num_user_word_lists do
local name = 'userlist' .. i
lexer:add_rule(name, lexer:tag(name, lexer:word_match(name)))
end
end

return lexer
end

Expand Down
4 changes: 0 additions & 4 deletions tests.lua
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,6 @@ end
-- @param rules The ordered list of rule names the lexer should have.
function assert_rules(lex, rules)
if rules[1] ~= 'whitespace' then table.insert(rules, 1, 'whitespace') end -- auto-added
if not lex._lexer and not lex._no_user_word_lists then
for i = 1, lexer.num_user_word_lists do table.insert(rules, i + 1, 'userlist' .. i) end -- auto-added
end
local j = 1
for i = 1, #lex._rules do
assert(lex._rules[rules[j]], string.format("rule '%s' does not exist", rules[j]))
Expand Down Expand Up @@ -1652,7 +1649,6 @@ function test_legacy()
local lex = lexer.new('test')
local ws = lexer.token(lexer.WHITESPACE, lexer.space^1)
lex:add_rule('whitespace', ws) -- should call lex:modify_rule()
assert(#lex._rules == 1 + lexer.num_user_word_lists)
assert(lex._rules['whitespace'] == ws)
lex:add_rule('keyword', lexer.token(KEYWORD, lexer.word_match('foo bar baz')))
lex:add_rule('number', lexer.token(NUMBER, lexer.number))
Expand Down

0 comments on commit 347a0c7

Please sign in to comment.