Some cleanups and fixes, made the parser more permissive of prematurely
terminated files.
parent be1df7ccd3
commit 23abab4f80
@@ -186,6 +186,13 @@ local _list_mt = {
     end
     return false
   end,
+  remove_1 = function(self, item)
+    for i, x in ipairs(self) do
+      if x == item then
+        remove(self, i)
+      end
+    end
+  end,
   index_of_1 = function(self, item)
     for i, x in ipairs(self) do
       if x == item then
@@ -73,6 +73,10 @@ _list_mt =
            if x == item
                return true
        return false
+   remove_1: (item)=>
+       for i,x in ipairs @
+           if x == item
+               remove(@, i)
    index_of_1: (item)=>
        for i,x in ipairs @
            if x == item
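The new remove_1 method walks the list and deletes matching entries as it goes. A self-contained Lua sketch of the same behaviour, assuming the "remove" in the hunk is Lua's table.remove:

    -- Self-contained sketch of the remove_1 behaviour added above;
    -- `remove` is assumed to be table.remove, as in standard Lua.
    local remove = table.remove

    local function remove_1(list, item)
      for i, x in ipairs(list) do
        if x == item then
          remove(list, i)
        end
      end
    end

    local l = {"x", "y", "x"}
    remove_1(l, "x")
    print(table.concat(l, ",")) --> y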
@@ -53,7 +53,7 @@ compile [..]
     %when_true_expr if %condition otherwise %when_false_expr
     %when_false_expr unless %condition else %when_true_expr
     %when_false_expr unless %condition then %when_true_expr
-..to (..)
+..to:
     # If %when_true_expr is guaranteed to be truthy, we can use Lua's idiomatic
         equivalent of a conditional expression: (cond and if_true or if_false)
     if {Text:yes, List:yes, Dict:yes, Number:yes}.(%when_true_expr.type):
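The comment in this hunk refers to Lua's "cond and a or b" idiom, which is only safe when the true-branch value is guaranteed truthy; that is why the compiler checks the expression type first. A plain-Lua illustration of the pitfall:

    -- Lua's and/or "conditional expression" only works when the true-branch
    -- value can never be false or nil:
    local cond = true
    print(cond and 5 or 10)      --> 5   (fine: 5 is truthy)
    print(cond and false or 10)  --> 10  (wrong: the intended result was false)

    -- With a possibly-falsy branch, a real if/else (or a wrapping function) is needed:
    local result
    if cond then result = false else result = 10 end
    print(result)                --> false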
@@ -541,7 +541,7 @@ test:
     assume (sorted %flat) == [1, 2, 3, 4, 5, 6]

 # Recurion control flow
-compile [for %var in recursive %structure %body] to (..)
+compile [for %var in recursive %structure %body] to:
     with local compile actions:
         define mangler
         compile [recurse %v on %x] to (..)
@@ -231,7 +231,7 @@ compile [%tree as nomsu] to (..)
     Lua value "nomsu:tree_to_nomsu(\(%tree as lua expr))"

 compile [%tree as inline nomsu] to (..)
-    Lua value "nomsu:tree_to_nomsu(\(%tree as lua expr), true)"
+    Lua value "nomsu:tree_to_inline_nomsu(\(%tree as lua expr), true)"

 action [%var as lua identifier, %var as lua id] (..)
     lua> "\
@@ -157,7 +157,7 @@ compile [with %assignments %body] to:
 test:
     assume ((5 wrapped around 2) == 1) or barf "mod not working"
 compile [%x wrapped around %y, %x mod %y] to (..)
-    Lua value "(\(%x as lua expr) % \(%y as lua expr))"
+    Lua value "((\(%x as lua expr)) % (\(%y as lua expr)))"

 # 3-part chained comparisons
 # (uses a lambda to avoid re-evaluating middle value, while still being an expression)
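The extra parentheses around each operand matter because % binds tighter than several operators that can appear inside the interpolated expressions. For instance, with a hypothetical %x that compiles to "a - b":

    -- Hypothetical operand expressions showing why "(x) % (y)" is safer than "x % y":
    local a, b = 10, 3
    print(a - b % 2)    --> 9  (parses as a - (b % 2), not what "wrapped around" means)
    print((a - b) % 2)  --> 1  (the grouping the compiled code now guarantees)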
@@ -2,6 +2,15 @@
 #
     This file contains the implementation of an Object-Oriented programming system.

+%globals.METAMETHOD_MAP = {..}
+    "as text": "__tostring", "clean up": "__gc",
+    "+ 1": "__add", "- 1": "__sub", "* 1": "__mul", "/ 1": "__div",
+    "-": "__unm", "// 1": "__idiv", "mod 1": "__mod", "^ 1": "__pow",
+    "& 1": "__band", "| 1": "__bor", "~ 1": "__bxor", "~": "__bnot",
+    "<< 1": "__bshl", ">> 1": "__bshr", "== 1": "__eq", "< 1": "__lt",
+    "<= 1": "__le", "set 1 = 2": "__newindex", "size": "__len",
+    "iterate": "__ipairs", "iterate all": "__pairs",
+
 test:
     object (Dog):
         (Dog).genus = "Canus"
@@ -86,9 +95,7 @@ compile [object %classname extends %parent %class_body] to:
            __tostring=function(cls) return cls.name end,
            __call=function(cls, inst)
                inst = setmetatable(inst or {}, cls)
-               if inst.set_up then
-                   inst:set_up()
-               end
+               if inst.set_up then inst:set_up() end
                return inst
            end,
        })
@@ -100,15 +107,7 @@ compile [object %classname extends %parent %class_body] to:
                return inst.name..getmetatable(_Dict{}).__tostring(inst)
            end
            \(%class_body as lua statements)
-           local metamethod_map = {["as text"]="__tostring", ["clean up"]="__gc",
-               ["+ 1"]="__add", ["- 1"]="__sub", ["* 1"]="__mul", ["/ 1"]="__div",
-               ["-"]="__unm", ["// 1"]="__idiv", ["mod 1"]="__mod", ["^ 1"]="__pow",
-               ["& 1"]="__band", ["| 1"]="__bor", ["~ 1"]="__bxor", ["~"]="__bnot",
-               ["<< 1"]="__bshl", [">> 1"]="__bshr", ["== 1"]="__eq", ["< 1"]="__lt",
-               ["<= 1"]="__le", ["set 1 = 2"]="__newindex", ["size"]="__len",
-               ["iterate"]="__ipairs", ["iterate all"]="__pairs",
-           }
-           for stub,metamethod in pairs(metamethod_map) do
+           for stub,metamethod in pairs(globals.METAMETHOD_MAP) do
                class[metamethod] = class[stub:as_lua_id()]
            end
        end"
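With the map moved into %globals.METAMETHOD_MAP, the generated class code copies any action whose stub appears in the map onto the matching Lua metamethod, so defining an "as text" action makes tostring work on instances. A rough standalone Lua sketch of that wiring (the Vec class and the hand-written "mangled" table are made up; the real code uses stub:as_lua_id()):

    -- Illustrative stub -> metamethod wiring; the METAMETHOD_MAP entries are
    -- taken from the hunk above, everything else is an invented example.
    local METAMETHOD_MAP = { ["as text"] = "__tostring", ["size"] = "__len" }

    local Vec = {}
    Vec.__index = Vec
    Vec.as_text = function(self) return "Vec(" .. self.x .. ")" end

    local mangled = { ["as text"] = "as_text", ["size"] = "size" } -- stand-in for as_lua_id()
    for stub, metamethod in pairs(METAMETHOD_MAP) do
      Vec[metamethod] = Vec[mangled[stub]]
    end

    local v = setmetatable({ x = 3 }, Vec)
    print(tostring(v)) --> Vec(3)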
@@ -52,7 +52,7 @@ action [%tree decompiled inline]:
            return %nomsu

        "Text":
-           %nomsu = (Nomsu Code from %tree ["\""])
+           %nomsu = (Nomsu Code from %tree [])
            for %text in recursive %tree:
                for %bit in %text at %i:
                    if (%bit is text):
@@ -87,14 +87,14 @@ action [%tree decompiled inline]:
        "DictEntry":
            set {%key:%tree.1, %value:%tree.2}
            if (all of [%key.type == "Text", (size of %key) == 1, %key.1 is a nomsu identifier]):
-               %nomsu = (Nomsu Code from %key [key.1])
+               %nomsu = (Nomsu Code from %key [%key.1])
            ..else:
                %nomsu = (%key decompiled inline)

            if (%key.type == "Action"):
                %nomsu::parenthesize
-           %nomsu::add ":"
            if %value:
+               %nomsu::add ":"
                %nomsu::add (%value decompiled inline)
            return %nomsu

nomsu.4.peg
@@ -8,6 +8,8 @@ file:

 shebang: "#!" (!"nomsu" [^%nl])* "nomsu" ws+ "-V" ws* {:version: [0-9.]+ :} [^%nl]*

+eof: !.
+
 file_chunks (FileChunks):
     {:curr_indent: ' '* :}
     shebang? comment? blank_lines?
@@ -71,7 +73,7 @@ tab_error (Error):
 section_division: ("~")^+3 eol

 inline_block:
-    "(" ws* inline_block ws* ")" / raw_inline_block
+    "(" ws* inline_block ws* (eof / ")") / raw_inline_block
 raw_inline_block (Block):
     (!"::") ":" ws* ((inline_statement (ws* ";" ws* inline_statement)*) / !(eol nl_indent))
 indented_block (Block):
@@ -89,7 +91,7 @@ noindex_inline_expression:
    / ( "("
        ws* (inline_action / inline_expression) ws*
        (ws* ',' ws* (inline_action / inline_expression) ws*)*
-       (")" / missing_paren_err / unexpected_code)
+       (")" / eof / missing_paren_err / unexpected_code)
      )
 inline_expression: index_chain / noindex_inline_expression
 indented_expression:
@@ -130,7 +132,7 @@ text_word (Text): word

 inline_text (Text):
    !(indented_text)
-   '"' _inline_text* ('"' / missing_quote_err / unexpected_code)
+   '"' _inline_text* ('"' / eof / missing_quote_err / unexpected_code)
 _inline_text:
    {~ (('\"' -> '"') / ('\\' -> '\') / escaped_char / text_char+)+ ~}
    / inline_text_interpolation / illegal_char
@@ -140,7 +142,7 @@ inline_text_interpolation:
    / ("("
        ws* (inline_action / inline_expression) ws*
        (ws* ',' ws* (inline_action / inline_expression) ws*)*
-       (")" / missing_paren_err / unexpected_code))
+       (")" / eof / missing_paren_err / unexpected_code))
    )

 text_char: %utf8_char / !["\] %print / %tab
@@ -156,7 +158,7 @@ indented_text (Text):
    (('\' %nl+ {:curr_indent: indent :} ('..')?)
     / disallowed_interpolation? {%nl+} {:curr_indent: indent :})
    (indented_plain_text / text_interpolation / illegal_char / {~ %nl+ (=curr_indent -> "") ~})*
-   ('"' eol / missing_quote_err)
+   ('"' eol / eof / missing_quote_err)
    {:curr_indent: %nil :}
 -- Tracking text-lines-within-indented-text as separate objects allows for better debugging line info
 indented_plain_text (Text):
@@ -180,7 +182,7 @@ inline_list (List):
    !('[..]')
    "[" ws*
    (inline_list_item (ws* ',' ws* inline_list_item)* (ws* ',')?)? ws*
-   ("]" / (","? (missing_bracket_error / unexpected_code)))
+   ("]" / eof / (","? (missing_bracket_error / unexpected_code)))
 indented_list (List):
    "[..]" eol nl_indent
    list_line (nl_nodent list_line)*
@@ -195,7 +197,7 @@ inline_dict (Dict):
    !('{..}')
    "{" ws*
    (inline_dict_entry (ws* ',' ws* inline_dict_entry)*)? ws*
-   ("}" / (","? (missing_brace_error / unexpected_code)))
+   ("}" / eof / (","? (missing_brace_error / unexpected_code)))
 indented_dict (Dict):
    "{..}" eol nl_indent
    dict_line (nl_nodent dict_line)*
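All of the grammar changes in nomsu.4.peg follow one pattern: eof (defined as !.) becomes an acceptable stand-in for a closing paren, quote, bracket, or brace, which is what lets a prematurely terminated file still parse. A minimal sketch of the same idea using LPeg's re module (standalone, not the actual Nomsu grammar):

    -- Minimal LPeg 're' sketch: a missing ")" is tolerated at end of input.
    local re = require("re")
    local grammar = re.compile([[
      expr  <- "(" inner (")" / eof)
      inner <- [a-z]*
      eof   <- !.
    ]])
    print(grammar:match("(abc)")) --> 6  (well-terminated input matches)
    print(grammar:match("(abc"))  --> 5  (unterminated input still matches, ending at eof)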
@@ -129,17 +129,21 @@ make_tree = function(tree, userdata)
   return tree
 end
 local Parsers = { }
-local max_parser_version = 0
-for version = 1, 999 do
-  do
-    local peg_contents = Files.read("nomsu." .. tostring(version) .. ".peg")
-    if peg_contents then
-      max_parser_version = version
-      Parsers[version] = make_parser(peg_contents, make_tree)
-    else
-      break
+local max_parser_version = 4
+for version = 1, max_parser_version do
+  local peg_file = io.open("nomsu." .. tostring(version) .. ".peg")
+  if not peg_file and package.nomsupath then
+    for path in package.nomsupath:gmatch("[^;]+") do
+      peg_file = io.open(path .. "/nomsu." .. tostring(version) .. ".peg")
+      if peg_file then
+        break
+      end
     end
   end
+  assert(peg_file, "could not find nomsu .peg file")
+  local peg_contents = peg_file:read('*a')
+  peg_file:close()
+  Parsers[version] = make_parser(peg_contents, make_tree)
 end
 local MAX_LINE = 80
 local NomsuCompiler = setmetatable({ }, {
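The loader now opens each versioned grammar from the working directory first and then from every ';'-separated entry of package.nomsupath, failing loudly if none is found. A condensed sketch of that search pattern (find_file is a hypothetical helper, not part of the codebase):

    -- Hypothetical helper showing the same search order as the loop above.
    local function find_file(name, search_path)
      local f = io.open(name)                       -- current directory first
      if not f and search_path then
        for dir in search_path:gmatch("[^;]+") do   -- then each ';'-separated entry
          f = io.open(dir .. "/" .. name)
          if f then break end
        end
      end
      return f
    end

    local f = find_file("nomsu.4.peg", package.nomsupath)
    print(f and "found grammar" or "could not find nomsu .peg file")
    if f then f:close() end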
@@ -199,9 +203,8 @@ do
     re = re,
     Files = Files,
     AST = AST,
-    TESTS = Dict({ }, {
-      globals = Dict({ })
-    }),
+    TESTS = Dict({ }),
+    globals = Dict({ }),
     LuaCode = LuaCode,
     NomsuCode = NomsuCode,
     Source = Source,
@@ -209,6 +212,7 @@ do
     __imported = Dict({ }),
     __parent = nil
   }
+  assert(NomsuCompiler.environment.globals)
   setmetatable(NomsuCompiler.environment, {
     __index = function(self, key)
       do
@@ -77,12 +77,17 @@ make_tree = (tree, userdata)->
     return tree

 Parsers = {}
-max_parser_version = 0
-for version=1,999
-    if peg_contents = Files.read("nomsu.#{version}.peg")
-        max_parser_version = version
-        Parsers[version] = make_parser(peg_contents, make_tree)
-    else break
+max_parser_version = 4
+for version=1,max_parser_version
+    peg_file = io.open("nomsu.#{version}.peg")
+    if not peg_file and package.nomsupath
+        for path in package.nomsupath\gmatch("[^;]+")
+            peg_file = io.open(path.."/nomsu.#{version}.peg")
+            break if peg_file
+    assert(peg_file, "could not find nomsu .peg file")
+    peg_contents = peg_file\read('*a')
+    peg_file\close!
+    Parsers[version] = make_parser(peg_contents, make_tree)

 MAX_LINE = 80 -- For beautification purposes, try not to make lines much longer than this value
 NomsuCompiler = setmetatable {}, {__tostring: => "Nomsu"}
@@ -105,12 +110,13 @@ with NomsuCompiler
         _List:List, _Dict:Dict,
         -- Utilities and misc.
         stringify:stringify, utils:utils, lpeg:lpeg, re:re, Files:Files,
-        :AST, TESTS: Dict{}, globals: Dict{}
+        :AST, TESTS: Dict({}), globals: Dict({}),
         :LuaCode, :NomsuCode, :Source
         nomsu:NomsuCompiler
         __imported: Dict{}
         __parent: nil
     }
+    assert .environment.globals
     setmetatable(.environment, {
         __index: (key)=>
             if imported = rawget(@, "__imported")
@@ -23,8 +23,8 @@ for %path in %files:
         %results::add {..}
             line: %line_num
             text: "\
                 ..\(blue "\%filename:\%line_num:")
                 \(yellow (source lines of %t))"

         if (%t is syntax tree):
             for %sub in %t: recurse %t on %sub