more shit

commit 65eeb7d25d
parent 0a5738e8cd

4 changed files with 56 additions and 26 deletions
aggen.py: 48 changes

@@ -24,13 +24,20 @@ class NodeDesc:
     def __init__(self, node: Node):
         self.node = node
         self.name = node.name
-        self.nonterminal = gensym(node.name.lower())
+        self.nonterminal = node.name.lower()


+class ParseEquation:
+    def __init__(self, name: str, syms: List[str], ty: str):
+        self.name = name
+        self.syms = syms
+        self.ty = ty
+
+
 class GenResult:
     def __init__(self, pd: str = "", ex: str = ""):
         self.literals: Dict[str, str] = dict()
-        self.parse_rules: defaultdict[str, List[str]] = defaultdict(list)
+        self.parse_rules: defaultdict[str, List[ParseEquation]] = defaultdict(list)
         self.starts: Set[str] = set()
         self.extra = ex
         self.trans: List[str] = list()
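For orientation, the new ParseEquation is a plain record: a generated rule name, the symbol sequence of the production, and the name of the Python class that production constructs. GenResult.parse_rules now maps a nonterminal to a list of these objects instead of raw rule strings. A self-contained sketch; the identifiers below are illustrative, not taken from the repository:

from typing import List

class ParseEquation:
    # Minimal mirror of the class added above, for illustration only.
    def __init__(self, name: str, syms: List[str], ty: str):
        self.name = name
        self.syms = syms
        self.ty = ty

# Hypothetical equation for an addition production of an "expr" nonterminal.
eq = ParseEquation("expr_add0", ["expr", "PLUS", "expr"], "ExprAdd")
print(eq.name, eq.syms, eq.ty)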
@@ -38,6 +45,15 @@ class GenResult:
     @property
     def transdef(self) -> str:
         s = self.trans
+        for name, rules in self.parse_rules.items():
+            n = name.lstrip("?")
+            for equation in rules:
+                code = f"""
+                def {equation.name}(self, items: Any) -> Thunk[{equation.ty}]:
+                    return Thunk(lambda: {equation.ty}())
+                """.strip().replace("\n", "")
+                code = re.sub(r"\s+", " ", code)
+                s.append(code)
         if not s:
             s = ["pass"]
         return "\n" + "\n".join(map(lambda c: f" {c}", s))
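transdef now emits one transformer method stub per equation by collapsing a small f-string template into a single line. A standalone sketch of that string manipulation, using hypothetical values for equation.name and equation.ty (Any and Thunk only appear inside the generated text, so nothing here needs them):

import re

# Hypothetical stand-ins for equation.name and equation.ty.
name, ty = "expr_add0", "ExprAdd"

code = f"""
def {name}(self, items: Any) -> Thunk[{ty}]:
    return Thunk(lambda: {ty}())
""".strip().replace("\n", "")
code = re.sub(r"\s+", " ", code)

print(code)
# def expr_add0(self, items: Any) -> Thunk[ExprAdd]: return Thunk(lambda: ExprAdd())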
@@ -48,8 +64,11 @@ class GenResult:
         for sym, pat in self.literals.items():
             s.append(f"{sym}: {pat}")
         for name, rules in self.parse_rules.items():
-            srules = " | ".join(rules)
-            s.append(f"{name}: {srules}")
+            names = []
+            for rule in rules:
+                names.append(rule.name)
+                s.append(f"{rule.name}: {' '.join(rule.syms)}")
+            s.append(f"{name}: {' | '.join(names)}")
         s.append("%import common.WS")
         s.append("%ignore WS")
         return "\n".join(s)
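The effect on the emitted grammar: instead of joining raw production strings with " | ", each equation now becomes its own named rule, and the nonterminal is an alternation over those names. A self-contained sketch of that loop with invented rule names:

from collections import defaultdict
from typing import List

class Eq:
    # Stand-in for ParseEquation, illustration only.
    def __init__(self, name: str, syms: List[str]):
        self.name, self.syms = name, syms

parse_rules = defaultdict(list)
parse_rules["expr"] = [Eq("expr_add0", ["expr", "PLUS", "expr"]),
                       Eq("expr_num0", ["NUMBER"])]

s = []
for name, rules in parse_rules.items():
    names = []
    for rule in rules:
        names.append(rule.name)
        s.append(f"{rule.name}: {' '.join(rule.syms)}")
    s.append(f"{name}: {' | '.join(names)}")

print("\n".join(s))
# expr_add0: expr PLUS expr
# expr_num0: NUMBER
# expr: expr_add0 | expr_num0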
@@ -135,7 +154,11 @@ def gen(program: List[Decl]) -> GenResult:
         assert isinstance(node_desc, NodeDesc)

         res.starts.add(node_desc.name.lower())
-        res.parse_rules[f"?{node_desc.name.lower()}"].append(node_desc.nonterminal)
+        # res.parse_rules[f"?{node_desc.name.lower()}"].append(
+        #     ParseEquation(
+        #         node_desc.name.lower(), [node_desc.nonterminal], node_desc.nonterminal
+        #     )
+        # )

         class_decl = textwrap.dedent(
             f"""

@@ -158,7 +181,6 @@ def gen(program: List[Decl]) -> GenResult:
         res.extra += class_decl

         prod_name = gensym(node_desc.nonterminal + "_")
-        res.parse_rules[node_desc.nonterminal].append(prod_name)
         print("PRODUCTION", prod_name, variant.prod)

         # resolving a production just means checking to make sure it's a

@@ -188,7 +210,7 @@ def gen(program: List[Decl]) -> GenResult:
         for sym in variant.prod:
             n = resolve_production(sym)
             seq.append(n)
-        res.parse_rules[prod_name].append(" ".join(seq))
+        res.parse_rules[node_desc.nonterminal].append(ParseEquation(prod_name, seq, v_class_name))

         # create an environment for checking the equations based on
         # the production

@@ -208,16 +230,4 @@ def gen(program: List[Decl]) -> GenResult:
             print("RHS", eq.rhs, eq.rhs.id)
             collect_required_thunks(copy.deepcopy(env), eq.rhs)
-
-            func_impl = textwrap.dedent(
-                f"""
-                def {eq_name}() -> None:
-                    ''' {repr(eq)} '''
-                    pass
-                def {thunk_name}() -> Thunk[None]:
-                    return Thunk({eq_name})
-                """
-            )
-            print(f"```py\n{func_impl}\n```")
-            res.extra += func_impl

     return res
agmain.py: 16 changes

@@ -16,16 +16,17 @@ if __name__ == "__main__":

     trans = Parser()
     ast = trans.transform(cst)
-    print("ast", ast)

     res = gen(ast)
+    print("Grammar:")
+    print(res.parser_data)

     if not os.path.exists("gen"):
         os.makedirs("gen")
     with open("gen/arith.py", "w") as f:
         fmt_str = textwrap.dedent(
             """
-            # This documented generated by agtest.
+            # This document is generated by agtest.

             __all__ = ["parse"]
             from typing import Generic, TypeVar, Optional, Callable, Dict, Any
@@ -34,6 +35,7 @@ if __name__ == "__main__":
             builtins: Dict[str, Any] = {{
                 "parseInt": lambda s: int(s)
             }}
+
             class Thunk(Generic[T]):
                 ''' A thunk represents a value that may be computed lazily. '''
                 value: Optional[T]
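The Thunk class in the generated template (visible partly here and in the next hunk) is the usual lazy-cell pattern: hold a zero-argument function, run it once on first access, cache the result. A self-contained equivalent follows; the constructor signature and the accessor name get() are assumptions, since the diff only shows the accessor's body and the Thunk(lambda: ...) call sites:

from typing import Callable, Generic, Optional, TypeVar

T = TypeVar("T")

class Thunk(Generic[T]):
    ''' A thunk represents a value that may be computed lazily. '''

    def __init__(self, func: Callable[[], T]) -> None:
        # Assumed constructor: Thunk(lambda: ...) calls imply a zero-argument callable.
        self.func = func
        self.value: Optional[T] = None

    def get(self) -> T:  # accessor name assumed; only its body appears in the diff
        if self.value is None:
            self.value = self.func()
        return self.value

t = Thunk(lambda: 2 + 3)
print(t.get())  # 5, computed on first access and then cached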
@@ -44,9 +46,15 @@ if __name__ == "__main__":
                     if self.value is None:
                         self.value = self.func()
                     return self.value
-            parser = Lark('''{pd}''', parser='lalr', start={starts}, debug=True)
-            class Trans(Transformer[None]): {transdef}
+            parser = Lark('''
+            {pd}
+            ''', parser='lalr', start={starts}, debug=True)
+
             {ex}
+
+            class Trans(Transformer[None]): {transdef}
+
             def parse(input: str, start: Optional[str] = None) -> Any:
                 tree = parser.parse(input, start)
                 trans = Trans()
+
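Once agmain.py writes this template to gen/arith.py, the generated module exposes parse(input, start). A hypothetical usage sketch; the start symbol "expr" is a guess based on res.starts.add(node_desc.name.lower()) and the Expr node in the test grammar, not something this diff pins down:

# Hypothetical usage of the generated module (run agmain.py first).
from gen.arith import parse

result = parse("1 + 2 * 3", start="expr")  # "expr" is a guessed start symbol
print(result)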
test/agtest.ag: 9 changes (new file)

@@ -0,0 +1,9 @@
+iface Pycode {
+    pycode: str,
+}
+
+node Program : Pycode {
+}
+
+node Decl : Pycode {
+}
@@ -4,10 +4,13 @@ iface HasValue {

 node Expr : HasValue {
     <l:Expr> "+" <r:Expr> => {
-        self.val = l.val + r.val * l.val;
+        self.val = l.val + r.val;
     }
+
     <l:Expr> "*" <r:Expr> => {
         self.val = l.val * r.val;
     }
+
     <n:"[0-9]+"> => { self.val = parseInt(n); }
 }
+
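The change to the "+" production fixes its attribute equation: the old rule computed l.val + r.val * l.val, so for l.val = 2 and r.val = 3 it produced 8 where plain addition gives 5. A trivial check in plain Python (not the .ag DSL):

l_val, r_val = 2, 3
print(l_val + r_val * l_val)  # 8, the old (buggy) equation
print(l_val + r_val)          # 5, the corrected equation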