--[[
    This file is part of parser.lua - table based parsing
    Copyright (C) 2019  Soni L.

    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU Affero General Public License as published
    by the Free Software Foundation, either version 3 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU Affero General Public License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with this program.  If not, see <https://www.gnu.org/licenses/>.
--]]

-- Test suite for parser.lua / luatokens.lua.
-- Each `do ... end` block is one independent test case.  On a failed parse
-- the case prints a diagnostic dump instead of asserting, labelled with a
-- sequential case number handed out by case().

local parser = require "parser"

local caseno = 0

--- Returns the next sequential test-case number (1, 2, 3, ...).
local function case()
    caseno = caseno + 1
    return caseno
end

do -- basic check: parse from a generator function that yields nothing
    local case = case()
    local defs = {}
    local count = 0
    -- the source callback must be invoked exactly once before returning nil
    local state, err = parser.parse(defs, function()
        assert(count == 0, "should be called only once")
        count = count + 1
        return nil
    end)
    assert(state)
end -- basic check

do -- trim left spaces: whitespace before the first real char is discarded
    local defs = {}
    defs.self = defs
    defs[' '] = "whitespace"
    defs['\n'] = "whitespace"
    defs['\r'] = "whitespace"
    defs['\t'] = "whitespace"
    defs['\f'] = "whitespace"
    defs['\v'] = "whitespace"
    defs.whitespace = "self"
    -- first non-whitespace char: record it and switch to the "start" state
    defs[''] = function(state, token)
        state[#state + 1] = token
        -- keep the accumulator small by periodically collapsing it
        if #state > 20 then
            state[1] = table.concat(state)
            for i = #state, 2, -1 do
                state[i] = nil
            end
        end
        return "start"
    end
    defs.start = {}
    defs.start.self = defs.start
    -- after the first char, every char (whitespace included) is kept
    defs.start[''] = function(state, token)
        state[#state + 1] = token
        if #state > 20 then
            state[1] = table.concat(state)
            for i = #state, 2, -1 do
                state[i] = nil
            end
        end
        return "self"
    end
    for _, v in ipairs({"hello", " hello", "\t \v \n\r hello"}) do
        local state, err = parser.parse(defs, v)
        local case = case()
        if not state then
            print(case, err)
        else
            assert(table.concat(state) == "hello")
        end
    end
end -- trim left spaces

do -- lua tokens: plain short string
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens, [["hello world"]])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(state[1] == luatokens.tokens.TK_STRING)
        assert(state[2] == "hello world")
        assert(state.line == 1 or not state.line)
    end
end -- lua tokens

do -- more lua tokens: every short-string escape sequence
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    -- NOTE(review): the final `\` is the backslash-newline escape, so the
    -- literal below must contain a real newline — the asserts expect a
    -- trailing "\10" and state.line == 2.
    local state, err, etoken, estate = parser.parse(tokens, [["\a\b\f\n\r\t\v\\\"\'\z
    \x41\65\
"]])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(state[1] == luatokens.tokens.TK_STRING)
        assert(state[2] == "\7\8\12\10\13\9\11\92\34\39\65\65\10")
        assert(state.line == 2)
    end
end -- lua tokens

do -- even more lua tokens: \u{} with absurdly many leading zeros
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens,
        [["\u{000000000000000000000000000000000000000000000000000000000000041}"]])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(state[1] == luatokens.tokens.TK_STRING)
        assert(state[2] == "A")
        assert(state.line == 1 or not state.line)
    end
end -- lua tokens

do -- even more lua tokens: \u{} encoding boundaries (1..6 byte sequences)
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens,
        [["\u{7F}""\u{80}""\u{7FF}""\u{800}""\u{FFFF}""\u{10000}""\u{1FFFFF}""\u{200000}""\u{3FFFFFF}""\u{4000000}""\u{7FFFFFFF}"]])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        -- each boundary codepoint encodes to the longest/shortest sequence
        -- of its byte length (extended UTF-8, up to 6 bytes)
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\127")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\194\128")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\223\191")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\224\160\128")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\239\191\191")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\240\144\128\128")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\247\191\191\191")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\248\136\128\128\128")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\251\191\191\191\191")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\252\132\128\128\128\128")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "\253\191\191\191\191\191")
        assert(state.line == 1 or not state.line)
    end
end -- lua tokens

do -- simple lua tokens: lone '[' followed by an empty short string
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens, [[[""]])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(table.remove(state, 1) == "[")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "")
        assert(state.line == 1 or not state.line)
    end
end -- lua tokens

do -- simple long string: [[]]
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens, [=[[[]]]=])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "")
        assert(state.line == 1 or not state.line)
    end
end -- long string

do -- long string with depth 1: [=[]=]
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens, [==[[=[]=]]==])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "")
        assert(state.line == 1 or not state.line)
    end
end -- long string

do -- long string with "nested" long string: [=[[[]]]=] keeps inner brackets
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens, [==[[=[[[]]]=]]==])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "[[]]")
        assert(state.line == 1 or not state.line)
    end
end -- long string

do -- long string edge cases: partial closers inside the body
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens, "[==[]=]==][==[]]==]")
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "]=")
        assert(table.remove(state, 1) == luatokens.tokens.TK_STRING)
        assert(table.remove(state, 1) == "]")
        assert(state.line == 1 or not state.line)
    end
end -- long string

do -- keywords: all 22 Lua reserved words tokenize to their TK_* values
    local luatokens = require "luatokens"
    local tokens = luatokens.defs
    -- NOTE(review): the input intentionally spans 4 lines so the final
    -- `state.line == 4` assert exercises the tokenizer's line counting.
    local state, err, etoken, estate = parser.parse(tokens, [[and break do else elseif end
false for function goto if in local
nil not or repeat return then
true until while]])
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    else
        assert(table.remove(state, 1) == luatokens.tokens.TK_AND)
        assert(table.remove(state, 1) == luatokens.tokens.TK_BREAK)
        assert(table.remove(state, 1) == luatokens.tokens.TK_DO)
        assert(table.remove(state, 1) == luatokens.tokens.TK_ELSE)
        assert(table.remove(state, 1) == luatokens.tokens.TK_ELSEIF)
        assert(table.remove(state, 1) == luatokens.tokens.TK_END)
        assert(table.remove(state, 1) == luatokens.tokens.TK_FALSE)
        assert(table.remove(state, 1) == luatokens.tokens.TK_FOR)
        assert(table.remove(state, 1) == luatokens.tokens.TK_FUNCTION)
        assert(table.remove(state, 1) == luatokens.tokens.TK_GOTO)
        assert(table.remove(state, 1) == luatokens.tokens.TK_IF)
        assert(table.remove(state, 1) == luatokens.tokens.TK_IN)
        assert(table.remove(state, 1) == luatokens.tokens.TK_LOCAL)
        assert(table.remove(state, 1) == luatokens.tokens.TK_NIL)
        assert(table.remove(state, 1) == luatokens.tokens.TK_NOT)
        assert(table.remove(state, 1) == luatokens.tokens.TK_OR)
        assert(table.remove(state, 1) == luatokens.tokens.TK_REPEAT)
        assert(table.remove(state, 1) == luatokens.tokens.TK_RETURN)
        assert(table.remove(state, 1) == luatokens.tokens.TK_THEN)
        assert(table.remove(state, 1) == luatokens.tokens.TK_TRUE)
        assert(table.remove(state, 1) == luatokens.tokens.TK_UNTIL)
        assert(table.remove(state, 1) == luatokens.tokens.TK_WHILE)
        assert(state.line == 4)
    end
end -- keywords

do -- FUCK: smoke-test the tokenizer against its own source file
    local luatokens = require "luatokens"
    -- FIX: check io.open's result (it returns nil, err on failure — the old
    -- code would then crash with "attempt to index a nil value") and close
    -- the handle instead of leaking it.
    local file = assert(io.open("./luatokens.lua", "r"))
    -- Lua 5.1/5.2 spell the read-all format "*a"; 5.3+ accept plain "a"
    local read_format =
        (_VERSION == "Lua 5.1" or _VERSION == "Lua 5.2") and "*a" or "a"
    local luatokens_file = file:read(read_format)
    file:close()
    local tokens = luatokens.defs
    local state, err, etoken, estate = parser.parse(tokens, luatokens_file)
    local case = case()
    if not state then
        print(case, "---- IN TOKENS ----")
        print(case, err, etoken)
        for i, v in pairs(estate) do
            print(case, i, v)
        end
        print(case, "---- OUT TOKENS ----")
    end
end -- FUCK