added some more shit

Michael Zhang 2021-10-01 14:35:42 -05:00
parent 7d1795cd89
commit e43175c53b
Signed by: michael
GPG key ID: BDA47A31A3C8EE6B
5 changed files with 71 additions and 36 deletions

View file

@@ -12,14 +12,15 @@
#
import os
import sys
-sys.path.insert(0, os.path.abspath('..'))
+sys.path.insert(0, os.path.abspath(".."))

# -- Project information -----------------------------------------------------

-project = 'agtest'
-copyright = '2021, Michael Zhang'
-author = 'Michael Zhang <mail@mzhang.io>'
+project = "agtest"
+copyright = "2021, Michael Zhang"
+author = "Michael Zhang <mail@mzhang.io>"

# -- General configuration ---------------------------------------------------
@@ -27,15 +28,15 @@ author = 'Michael Zhang <mail@mzhang.io>'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
-extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon']
+extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"]

# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
+templates_path = ["_templates"]

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]

# -- Options for HTML output -------------------------------------------------
@@ -43,9 +44,9 @@ exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
-html_theme = 'furo'
+html_theme = "furo"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+html_static_path = ["_static"]
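
The conf.py changes above are pure quote normalization, consistent with running Black over the repo (Black is also added to the dev shell in flake.nix below). As a minimal illustration of that behavior, not part of the commit, using Black's Python API (black.format_str / black.Mode):

    import black

    src = "project = 'agtest'\n"
    print(black.format_str(src, mode=black.Mode()), end="")
    # prints: project = "agtest"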

View file

@@ -17,5 +17,11 @@
    in
    {
      packages = flake-utils.lib.flattenTree myPkgs;
+
+      devShell = pkgs.mkShell {
+        packages = with pythonPkgs; [
+          black
+        ];
+      };
    });
}

View file

@@ -16,6 +16,7 @@ grammar_path = path.join(src_dir, "grammar.lark")
runtime_path = path.join(src_dir, "runtime.tmpl.py")

p = lark.Lark(open(grammar_path).read(), start="program", parser="lalr")

+
@click.command()
@click.option("--show-only", is_flag=True)
@click.argument("input", type=click.File("r"))

@@ -53,8 +54,10 @@ def run(input: TextIO, show_only: bool) -> None:
    print("Call parse(str) to parse something.")
    import imp
    mod = imp.new_module("mod")
    exec(s.getvalue(), mod.__dict__)
    import code
    code.InteractiveConsole(locals=mod.__dict__).interact()
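
A note on the REPL path kept above: the CLI exec()s the generated runtime source into a fresh module object and then drops into an interactive console whose namespace is that module. A standalone sketch of the same pattern (using types.ModuleType rather than the deprecated imp module; the generated_src string here is invented for illustration):

    import code
    import types

    # stand-in for the generated parser source that the CLI builds up in a StringIO
    generated_src = "def parse(s):\n    return s.upper()\n"

    mod = types.ModuleType("mod")      # fresh, empty module object
    exec(generated_src, mod.__dict__)  # populate it with the generated definitions
    print("Call parse(str) to parse something.")
    code.InteractiveConsole(locals=mod.__dict__).interact()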

View file

@@ -3,10 +3,16 @@ import textwrap
import re
import copy
import json
+import sys

from collections import defaultdict
from agtest.ast import *

+
+def eprint(*args, **kwargs):
+    print(*args, file=sys.stderr, **kwargs)
+
+
global i
i = 0
@@ -27,10 +33,11 @@ class NodeDesc:

class ParseEquation:
-    def __init__(self, name: str, syms: List[str], pyty: str):
+    def __init__(self, name: str, syms: List[str], pyty: str, pycode: str):
        self.name = name
        self.syms = syms
-        self.ty = pyty
+        self.pyty = pyty
+        self.pycode = pycode


class GenResult:
@@ -55,13 +62,23 @@ class GenResult:
    def trans_def(self) -> str:
        s = []
        for name, rules in self.parse_rules.items():
+            possible_returns = ", ".join(map(lambda e: e.pyty, rules))
            n = name.lstrip("?")
+            code = textwrap.dedent(
+                f"""
+                def {n}(self, items: List[Union[{possible_returns}]]) -> {n}:
+                    return items[0]
+                """
+            )
+            s.append(code)
+
            for equation in rules:
                code = textwrap.dedent(
                    f"""
-                    def {equation.name}(self, items: Any) -> Thunk[{equation.ty}]:
-                        def inner() -> {equation.ty}:
-                            res = {equation.ty}()
+                    def {equation.name}(self, items: Any) -> Thunk[{equation.pyty}]:
+                        def inner() -> {equation.pyty}:
+                            res = {equation.pyty}()
                            return res
                        return Thunk(inner)
                    """
@@ -86,9 +103,8 @@ class GenResult:
        s.append("%ignore WS")
        return "\n".join(s)

    def _collect_ifaces(self) -> None:
-        """ collect a list of name -> iface declarations"""
+        """collect a list of name -> iface declarations"""
        self.ifaces = dict(
            map(
                lambda c: (c.name, cast(Iface, c)),
@@ -97,7 +113,7 @@
        )

    def _create_iface_mappings(self) -> None:
-        """ list of node -> iface mappings """
+        """list of node -> iface mappings"""
        self.what_ifaces = dict()
        self.what_fields = dict()
        for node in filter(lambda c: isinstance(c, Node), self.program):
@@ -137,25 +153,23 @@ class GenResult:
            return dict()
        raise Exception(f"unhandled {expr.__class__}")

-    def _resolve_production(self, sym: Sym) -> str:
-        """ resolving a production just means checking to make sure it's a type that exists or it's a regex"""
+    def _resolve_production(self, sym: Sym) -> Tuple[bool, str]:
+        """resolving a production just means checking to make sure it's a type that exists or it's a regex"""
        if isinstance(sym, SymRename):
            if isinstance(sym.ty, NodeRefByName):
                if sym.ty.name in self.node_map:
-                    return self.node_map[sym.ty.name].nonterminal
+                    return True, self.node_map[sym.ty.name].nonterminal
                else:
-                    raise Exception(
-                        f"unresolved name {sym.ty.name} in production"
-                    )
+                    raise Exception(f"unresolved name {sym.ty.name} in production")
            elif isinstance(sym.ty, NodeRegex):
                sym_name = gensym("sym")
                self.literals[sym_name] = f"/{sym.ty.pat.pattern}/"
-                return sym_name
+                return True, sym_name
        elif isinstance(sym, SymLit):
            sym_name = gensym("lit")
            # hack to make repr have double quotes
            self.literals[sym_name] = json.dumps(sym.lit)
-            return sym_name
+            return False, sym_name

        raise Exception(f"unhandled {sym.__class__}")

    def _build_node_map(self) -> None:
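
Two small notes on the hunk above. The new boolean in _resolve_production's return distinguishes value-carrying symbols (node references and regexes) from literal tokens, which the calling code further down uses to collect input positions. And the "hack to make repr have double quotes" comment refers to the fact that json.dumps always emits double-quoted strings, matching lark's grammar syntax for literals, whereas repr prefers single quotes:

    import json

    print(repr("+"))        # '+'   (single quotes)
    print(json.dumps("+"))  # "+"   (double quotes, usable as a lark literal)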
@@ -170,6 +184,9 @@
        self._collect_ifaces()
        self._create_iface_mappings()
+        self._build_node_map()
+
+        eprint("IFACE MAPS", self.what_fields, self.what_ifaces)

        # a high-level dictionary of productions; this has sub-productions that
        # should be further expanded at a later step before converting into lark
@@ -186,12 +203,12 @@
            g = textwrap.indent("\n".join(class_fields), " ")
            class_decl = textwrap.dedent(
-                f"""
-                class {node_desc.nonterminal}:
+                """
+                class {nonterminal}:
                {g}
                    pass
                """
-            )
+            ).format(nonterminal=node_desc.nonterminal, g=g)
            self.extra += class_decl

            # print(node_desc.name, node_desc.node.ifaces)
@@ -199,22 +216,31 @@
            for variant in node_desc.node.variants:
                v_class_name = gensym(f"{node_desc.nonterminal}_var")
                class_decl = textwrap.dedent(
-                    f"""
-                    class {v_class_name}({node_desc.nonterminal}): pass
-                    """
-                )
+                    """
+                    class {v_class_name}({nonterminal}):
+                    {g}
+                        pass
+                    """
+                ).format(
+                    v_class_name=v_class_name, nonterminal=node_desc.nonterminal, g=g
+                )
                self.extra += class_decl

                prod_name = gensym(node_desc.nonterminal + "_")
                # print("PRODUCTION", prod_name, variant.prod)
                seq = []
-                for sym in variant.prod:
-                    n = self._resolve_production(sym)
+                inputs = []
+                for i, sym in enumerate(variant.prod):
+                    isInput, n = self._resolve_production(sym)
+                    if isInput:
+                        inputs.append((i, n))
                    seq.append(n)
+                eprint("INPUTS", node_desc.nonterminal, inputs)
+                pycode = ""
                self.parse_rules[node_desc.nonterminal].append(
-                    ParseEquation(prod_name, seq, v_class_name)
+                    ParseEquation(prod_name, seq, v_class_name, pycode)
                )

                # create an environment for checking the equations based on the
@@ -229,4 +255,3 @@
                # value.
                for eq in variant.equations:
                    self._collect_required_thunks(copy.deepcopy(env), eq.rhs)
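
One detail worth spelling out from the class-template hunks above: the f-strings were replaced by plain templates formatted with .format(...), with the field block pre-indented via textwrap.indent and spliced in through {g}. A standalone illustration of that mechanism, with invented values for nonterminal and g:

    import textwrap

    nonterminal = "expr_0"  # invented example values, not from the commit
    g = textwrap.indent("left: int\nright: int", "    ")

    class_decl = textwrap.dedent(
        """
        class {nonterminal}:
        {g}
            pass
        """
    ).format(nonterminal=nonterminal, g=g)

    print(class_decl)
    # class expr_0:
    #     left: int
    #     right: int
    #     pass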

View file

@@ -3,7 +3,7 @@
__all__ = ["parse"]

import re
-from typing import Generic, TypeVar, Optional, Callable, Dict, Any
+from typing import Generic, TypeVar, Optional, Callable, Dict, Any, Union, List
from lark import Lark, Transformer

T = TypeVar("T")
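
The Union and List imports added here back the generated signatures shown in gen.py above (List[Union[...]] parameters and Thunk[...] returns). The runtime's actual Thunk class is not part of this diff; purely as a rough mental model, and explicitly an assumption rather than the template's real code, it behaves like a memoized deferred value along these lines:

    from typing import Callable, Generic, Optional, TypeVar

    T = TypeVar("T")

    class Thunk(Generic[T]):
        """Sketch only; the real runtime.tmpl.py may differ."""

        def __init__(self, f: Callable[[], T]) -> None:
            self.f = f
            self._value: Optional[T] = None

        def get(self) -> T:
            # compute lazily on first access, then cache the result
            if self._value is None:
                self._value = self.f()
            return self._value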