-- * Please read technotes.txt for more technical details.
----------------------------------------------------------------------]]

local base = _G
local string = require "string"
-- NOTE(review): module() is deprecated (removed in Lua 5.2). A later
-- revision of this file disables the next line; it is kept active here
-- because the rest of the file (global-style function definitions and
-- the "externally visible thru' module" comments) relies on the Lua 5.1
-- module interface -- confirm against the revision actually in use.
module "llex"

-- localize frequently-used string functions (globals -> locals is the
-- standard Lua 5.1 speed idiom for hot lexer code)
local find = string.find
local match = string.match
local sub = string.sub
-- Module-level lexer state. Entries marked (*) are re-created in init()
-- and are externally visible through the module table.
local z        = ''   -- source text being lexed
local sourceid = ''   -- name of source (used in messages)
local I        = 1    -- lexer's current position in z
local ln       = 1    -- current line number
local tok      = {}   -- lexed token list*
local seminfo  = {}   -- lexed semantic information list*
local tokln    = {}   -- line numbers for messages*
local buff     = ''   -- buffer for string scanning
-- NOTE: see init() for module variables (externally visible):
--       tok, seminfo, tokln
35 |
46 |
----------------------------------------------------------------------
-- initialize keyword list, variables
----------------------------------------------------------------------

-- Build the keyword lookup set: kw[word] == true for every Lua keyword.
-- NOTE(review): the loop header was lost in this capture; it is
-- reconstructed from the visible closing `]], "%S+") do` -- confirm
-- against the upstream file.
local kw = {}
for v in string.gmatch([[
and break do else elseif end false for function if in
local nil not or repeat return then true until while]], "%S+") do
  kw[v] = true
end
-- NOTE: see init() for module variables (externally visible):
--       tok, seminfo, tokln

-- NOTE(review): these forward declarations come from an older revision
-- and shadow the initialized declarations earlier in this file -- this
-- duplication is a merge artifact; keep only one set once the intended
-- revision is confirmed. Harmless at runtime: init() assigns the
-- innermost (these) locals, and all later code resolves to the same.
local z,                -- source stream
      sourceid,         -- name of source
      I,                -- position of lexer
      buff,             -- buffer for strings
      ln                -- line number
58 |
----------------------------------------------------------------------
-- add information to token listing
----------------------------------------------------------------------

-- NOTE(review): the body of the token-listing helper that belongs under
-- the banner above (original lines ~60-86) is missing from this capture
-- -- restore it from the upstream llex.lua before use.

----------------------------------------------------------------------
-- initialize lexer for given source _z and source name _sourceid
----------------------------------------------------------------------
90 function init(_z, _sourceid) |
93 function init(_z, _sourceid) |
91 z = _z -- source |
94 z = _z -- source |
92 sourceid = _sourceid -- name of source |
95 sourceid = _sourceid -- name of source |
93 I = 1 -- lexer's position in source |
|
94 ln = 1 -- line number |
|
95 tok = {} -- lexed token list* |
|
96 seminfo = {} -- lexed semantic information list* |
|
97 tokln = {} -- line numbers for messages* |
|
98 -- (*) externally visible thru' module |
|
99 -------------------------------------------------------------------- |
96 -------------------------------------------------------------------- |
100 -- initial processing (shbang handling) |
97 -- initial processing (shbang handling) |
101 -------------------------------------------------------------------- |
98 -------------------------------------------------------------------- |
102 local p, _, q, r = find(z, "^(#[^\r\n]*)(\r?\n?)") |
99 local p, _, q, r = find(z, "^(#[^\r\n]*)(\r?\n?)") |
103 if p then -- skip first line |
100 if p then -- skip first line |