~yerinalexey/dotfiles

6bc10da8042c88b0f4a60b9ae66e2933a71ace97 — Alexey Yerin 1 year, 2 months ago 76c7359
vis: update hare lexer once again
1 files changed, 13 insertions(+), 11 deletions(-)

M vis/lexers/hare.lua
M vis/lexers/hare.lua => vis/lexers/hare.lua +13 -11
@@ 14,22 14,22 @@ lex:add_rule('whitespace', token(lexer.WHITESPACE, lexer.space^1))

-- Keywords.
lex:add_rule('keyword', token(lexer.KEYWORD, word_match{
-  'as', 'break', 'case', 'const', 'continue', 'def', 'defer', 'else', 'export', 'false', 'fn', 'for',
-  'if', 'is', 'let', 'match', 'null', 'nullable', 'return', 'static', 'struct', 'switch', 'true',
-  'type', 'use', 'yield'
+  'as', 'break', 'case', 'const', 'continue', 'def', 'defer', 'else', 'export', 'false', 'fn',
+  'for', 'if', 'is', 'let', 'match', 'null', 'nullable', 'return', 'static', 'struct', 'switch',
+  'true', 'type', 'union', 'use', 'yield'
}))

-- Functions.
local size_builtin = 'size' * #(lexer.space^0 * '(')
lex:add_rule('function', token(lexer.FUNCTION, word_match{
-  'abort', 'align', 'alloc', 'append', 'assert', 'cap', 'delete', 'free', 'insert', 'len', 'offset',
+  'abort', 'align', 'alloc', 'append', 'assert', 'delete', 'free', 'insert', 'len', 'offset',
  'vaarg', 'vaend', 'vastart'
} + size_builtin))

-- Types.
lex:add_rule('type', token(lexer.TYPE, word_match{
-  'bool', 'enum', 'f32', 'f64', 'i16', 'i32', 'i64', 'i8', 'int', 'rune', 'size', 'str', 'u16',
-  'u32', 'u64', 'u8', 'uint', 'uintptr', 'union', 'valist', 'void',
+  'bool', 'enum', 'f32', 'f64', 'i16', 'i32', 'i64', 'i8', 'int', 'never', 'opaque', 'rune', 'size',
+  'str', 'u16', 'u32', 'u64', 'u8', 'uint', 'uintptr', 'valist', 'void'
}))

-- Identifiers.


@@ 38,7 38,7 @@ lex:add_rule('identifier', token(lexer.IDENTIFIER, lexer.word))
-- Strings.
local sq_str = lexer.range("'", true)
local dq_str = lexer.range('"')
-local raw_str = lexer.range('`')
+local raw_str = lexer.range('`', false, false)
lex:add_rule('string', token(lexer.STRING, sq_str + dq_str + raw_str))

-- Comments.


@@ 46,15 46,16 @@ lex:add_rule('comment', token(lexer.COMMENT, lexer.to_eol('//')))

-- Numbers.
local integer_suffix = word_match{
-	"i", "u", "z", "i8", "i16", "i32", "i64", "u8", "u16", "u32", "u64",
+  "i", "u", "z", "i8", "i16", "i32", "i64", "u8", "u16", "u32", "u64"
}
-local float_suffix = word_match{ "f32", "f64" }
+local float_suffix = word_match{"f32", "f64"}
local suffix = integer_suffix + float_suffix

local bin_num = '0b' * R('01')^1 * -lexer.xdigit
local oct_num = '0o' * R('07')^1 * -lexer.xdigit
local hex_num = '0x' * lexer.xdigit^1
-local integer_literal = S('+-')^-1 * ((hex_num + oct_num + bin_num) * integer_suffix^-1 + lexer.dec_num * suffix^-1)
+local integer_literal = S('+-')^-1 *
+  ((hex_num + oct_num + bin_num) * integer_suffix^-1 + lexer.dec_num * suffix^-1)
local float_literal = lexer.float * float_suffix^-1
lex:add_rule('number', token(lexer.NUMBER, integer_literal + float_literal))



@@ 70,6 71,7 @@ lex:add_rule('attribute', token(lexer.PREPROCESSOR, '@' * lexer.word))

-- Fold points.
lex:add_fold_point(lexer.OPERATOR, '{', '}')
+lex:add_fold_point(lexer.COMMENT, lexer.fold_consecutive_lines('//'))

lexer.property['scintillua.comment'] = '//'

return lex