Remove the old LALR parser
parent 0334b5cf33
commit bf13304f9e
10 changed files with 1 addition and 430 deletions

@@ -1,17 +0,0 @@
[package]
name = "enterprise-macros"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"

[lib]
proc-macro = true

[build-dependencies]
lalrpop = "0.17.2"

[dependencies]
proc-macro2 = { version = "1.0.7", features = ["span-locations"] }
quote = "1.0.2"
thiserror = "1.0.9"
lalrpop-util = "0.17.2"

@@ -1,3 +0,0 @@
fn main() {
    lalrpop::process_root().unwrap();
}

@@ -1,34 +0,0 @@
use std::collections::HashMap;

#[derive(Debug)]
pub enum Toplevel {
    Use(Use),
    Component(Component),
}

#[derive(Debug)]
pub struct Use(pub Vec<String>);

#[derive(Debug)]
pub struct Component {
    pub name: String,
    pub body: Vec<ComponentBody>,
}

#[derive(Debug)]
pub enum ComponentBody {
    Constructor(),
    View(Rsx),
    Fn(),
}

#[derive(Debug)]
pub enum Rsx {
    Tag {
        tag: String,
        attrs: HashMap<String, String>,
        inner: Vec<Rsx>,
    },
    CodeSegment(String),
    Text(String),
}

@@ -1,128 +0,0 @@
use std::fmt;

use lalrpop_util::ParseError;
use proc_macro2::{
    Delimiter, Group, Ident, LineColumn, Literal, Punct, Spacing, Span as Span2, TokenStream,
    TokenTree,
};

use crate::parser::parser::__ToTriple;

macro_rules! generate_token {
    ([$($keyword_name:ident: $keyword:ident),* $(,)?]) => {
        #[derive(Debug, Clone)]
        pub enum TokenType {
            Ident(Ident),
            Punct(char, Punct),
            Literal(Literal),

            $($keyword_name(Span),)*
        }

        impl fmt::Display for TokenType {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                use TokenType::*;
                match self {
                    Ident(ident) => ident.fmt(f),
                    Punct(_, punct) => punct.fmt(f),
                    Literal(literal) => literal.fmt(f),
                    $($keyword_name(_) => f.write_str(stringify!($keyword)),)*
                }
            }
        }

        impl TokenType {
            pub fn span(&self) -> Span {
                use TokenType::*;
                match self {
                    Ident(ident) => Span(ident.span()),
                    Punct(_, punct) => Span(punct.span()),
                    Literal(literal) => Span(literal.span()),

                    $($keyword_name(span) => *span,)*
                }
            }
        }

        fn flatten_tree(token_tree: TokenTree) -> Vec<Token> {
            match token_tree {
                TokenTree::Group(group) => {
                    use Delimiter::*;
                    let mut result = flatten(group.stream());
                    let surround = match group.delimiter() {
                        Brace => Some(construct_group_tokens('{', '}', group)),
                        Parenthesis => Some(construct_group_tokens('(', ')', group)),
                        Bracket => Some(construct_group_tokens('[', ']', group)),
                        None => Option::None,
                    };
                    if let Some((start, end)) = surround {
                        result.insert(0, start);
                        result.push(end);
                    }
                    result
                },
                TokenTree::Ident(ident) => {
                    let token = match ident.to_string().as_ref() {
                        $(stringify!($keyword) => TokenType::$keyword_name(Span(ident.span())),)*
                        _ => TokenType::Ident(ident),
                    };
                    vec![construct_token(token)]
                }
                TokenTree::Punct(punct) => {
                    let token = TokenType::Punct(punct.as_char(), punct);
                    vec![construct_token(token)]
                }
                TokenTree::Literal(literal) => {
                    let token = TokenType::Literal(literal);
                    vec![construct_token(token)]
                }
            }
        }
    }
}

generate_token!([
    Component: component,
    Constructor: constructor,
    Fn: fn,
    Use: use,
    View: view,
]);

pub type Token = Result<(Span, TokenType, Span), ()>;

pub fn flatten(token_stream: TokenStream) -> Vec<Token> {
    token_stream.into_iter().flat_map(flatten_tree).collect()
}

fn construct_group_tokens(left: char, right: char, group: Group) -> (Token, Token) {
    let mut left_punct = Punct::new(left, Spacing::Alone);
    left_punct.set_span(group.span_open());
    let mut right_punct = Punct::new(right, Spacing::Alone);
    right_punct.set_span(group.span_close());
    (
        construct_token(TokenType::Punct(left, left_punct)),
        construct_token(TokenType::Punct(right, right_punct)),
    )
}

fn construct_token(token: TokenType) -> Token {
    let span = token.span();
    Ok((span, token, span))
}

#[derive(Copy, Clone, Debug)]
pub struct Span(pub Span2);

impl fmt::Display for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{:?}", self)
    }
}

impl Default for Span {
    fn default() -> Self {
        Span(Span2::call_site())
    }
}

@@ -1,80 +0,0 @@
extern crate proc_macro;
extern crate thiserror;
#[macro_use]
extern crate lalrpop_util;

mod ast;
mod flatten;

mod parser {
    #![allow(dead_code, unused_variables, unknown_lints, non_snake_case)]
    lalrpop_mod!(pub parser);
}

use proc_macro2::TokenStream;
use quote::quote;

use crate::flatten::{Span, TokenType};
use crate::parser::parser::*;

#[proc_macro]
pub fn component(input_tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input_tokens: TokenStream = input_tokens.into();
    let tokens = flatten::flatten(input_tokens);

    eprintln!("Tokens:");
    for token in tokens.iter() {
        eprintln!("- {:?}", token);
    }
    eprintln!();

    let parser = ProgramParser::new();
    let result = parser.parse(tokens);
    match result {
        Ok(result) => eprintln!("success: {:?}", result),
        Err(err) => {
            use lalrpop_util::ParseError::*;
            match err {
                User { ref error } => print!("user error: {:?}", error),
                InvalidToken { ref location } => print!("Invalid token at {}", location),
                UnrecognizedEOF {
                    ref location,
                    ref expected,
                } => {
                    print!("Unrecognized EOF found at {}", location);
                    fmt_expected(expected)
                }
                UnrecognizedToken {
                    token: (ref start, ref token, ref end),
                    ref expected,
                } => {
                    print!("Unrecognized token `{}` found at {}:{}", token, start, end);
                    fmt_expected(expected)
                }
                ExtraToken {
                    token: (ref start, ref token, ref end),
                } => print!("Extra token {} found at {}:{}", token, start, end),
            }
        }
    }
    panic!();

    let result = quote! {};
    result.into()
}

/// Format a list of expected tokens.
fn fmt_expected(expected: &[String]) {
    if !expected.is_empty() {
        println!();
        for (i, e) in expected.iter().enumerate() {
            let sep = match i {
                0 => "Expected one of",
                _ if i < expected.len() - 1 => ",",
                // Last expected message to be written
                _ => " or",
            };
            print!("{} {}", sep, e);
        }
    }
}

@@ -1,162 +0,0 @@
use std::iter::FromIterator;
use std::collections::HashMap;

use proc_macro2::{Ident, Punct, TokenTree, Delimiter, TokenStream, Group, Spacing, Literal};

use crate::{TokenType, Span};
use crate::ast::*;

grammar;

pub Program: Vec<Toplevel> = Toplevel*;

pub Toplevel: Toplevel = {
    Component => Toplevel::Component(<>),
    Use => Toplevel::Use(<>),
};

Use: Use = {
    "use" <path:Delim<Ident, (":" ":")>> ";" => Use(path.into_iter().map(|ident| ident.to_string()).collect()),
};

Component: Component = {
    "component" <name:Ident> <body:Body<ComponentBody*>> => {
        Component {
            name: name.to_string(),
            body,
        }
    }
};

ComponentBody: ComponentBody = {
    "constructor" "(" ")" BraceGrouper => {
        ComponentBody::Constructor()
    },
    "view" <rsx:Body<Rsx>> => {
        ComponentBody::View(rsx)
    },
    "fn" Ident "(" Delim<Arg, ","> ")" BraceGrouper => {
        ComponentBody::Fn()
    },
};

// TODO: finish this
Arg: () = {
    Punct Ident Ident => {},
    Ident ":" Ident => {},
};

Rsx: Rsx = {
    "<" <tag:Ident> <attrs:Attrs> "/" ">" => { Rsx::Tag { tag: tag.to_string(), attrs, inner: Vec::new(), } },
    "<" <tag:Ident> <attrs:Attrs> ">" <inner:Rsx*> "<" "/" <closeTag:Ident> ">" => {
        assert_eq!(tag, closeTag, "Tags {} and {} do not match.", tag, closeTag);
        Rsx::Tag { tag: tag.to_string(), attrs, inner }
    },
    BraceGrouper => { Rsx::CodeSegment(<>.to_string()) },
    AnyText => { Rsx::Text(<>) },
};

Attrs: HashMap<String, String> = {
    (AttrLhs "=" BraceGrouper)* => {
        <>.into_iter()
            .map(|(lhs, _, rhs)| (lhs.to_string(), rhs.to_string()))
            .collect()
    }
};

AttrLhs: String = {
    <a:Ident> ":" <b:Ident> => format!("{}:{}", a, b),
    Ident => <>.to_string(),
};

//

pub ArbitraryBlocks: Vec<TokenTree> = AnyToken*;

Grouper: TokenTree = {
    BraceGrouper => <>,
    "(" <b:AnyToken*> ")" => TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::from_iter(b.into_iter()))),
    "[" <b:AnyToken*> "]" => TokenTree::Group(Group::new(Delimiter::Bracket, TokenStream::from_iter(b.into_iter()))),
};

BraceGrouper: TokenTree = {
    "{" <b:AnyToken*> "}" => TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::from_iter(b.into_iter()))),
};

AnyText: String = {
    Ident => <>.to_string(),
    Punct => <>.to_string(),
    Literal => <>.to_string(),
};

AnyToken: TokenTree = {
    Grouper => <>,

    "component" => TokenTree::Ident(Ident::new("component", <>.0)),
    "constructor" => TokenTree::Ident(Ident::new("constructor", <>.0)),
    "fn" => TokenTree::Ident(Ident::new("fn", <>.0)),
    "use" => TokenTree::Ident(Ident::new("use", <>.0)),
    "view" => TokenTree::Ident(Ident::new("view", <>.0)),

    ":" => TokenTree::Punct(Punct::new(':', Spacing::Alone)),
    ";" => TokenTree::Punct(Punct::new(';', Spacing::Alone)),
    "," => TokenTree::Punct(Punct::new(',', Spacing::Alone)),
    "<" => TokenTree::Punct(Punct::new('<', Spacing::Alone)),
    ">" => TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    "/" => TokenTree::Punct(Punct::new('/', Spacing::Alone)),
    "=" => TokenTree::Punct(Punct::new('=', Spacing::Alone)),

    Ident => TokenTree::Ident(<>),
    Punct => TokenTree::Punct(<>),
    Literal => TokenTree::Literal(<>),
};

Body<T>: T = {
    "{" <body:T> "}" => body,
};

Delim<T, Sep>: Vec<T> = {
    <all:(T ((Sep T)+ Sep?)?)?> => {
        let mut vec = Vec::new();
        if let Some((initial, rest)) = all {
            vec.push(initial);
            if let Some((rest, _)) = rest {
                for (_, item) in rest {
                    vec.push(item);
                }
            }
        }
        vec
    },
};

extern {
    type Location = Span;
    type Error = ();

    enum TokenType {
        "component" => TokenType::Component(<Span>),
        "constructor" => TokenType::Constructor(<Span>),
        "fn" => TokenType::Fn(<Span>),
        "use" => TokenType::Use(<Span>),
        "view" => TokenType::View(<Span>),

        ":" => TokenType::Punct(':', _),
        ";" => TokenType::Punct(';', _),
        "," => TokenType::Punct(',', _),
        "{" => TokenType::Punct('{', _),
        "}" => TokenType::Punct('}', _),
        "(" => TokenType::Punct('(', _),
        ")" => TokenType::Punct(')', _),
        "[" => TokenType::Punct('[', _),
        "]" => TokenType::Punct(']', _),
        "<" => TokenType::Punct('<', _),
        ">" => TokenType::Punct('>', _),
        "/" => TokenType::Punct('/', _),
        "=" => TokenType::Punct('=', _),

        Punct => TokenType::Punct(_, <Punct>),
        Ident => TokenType::Ident(<Ident>),
        Literal => TokenType::Literal(<Literal>),
    }
}

@@ -96,7 +96,6 @@ impl Visitor {
            "model" => {
                let next_group = self.consume_group(Delimiter::Brace)?;
                let mut model_visitor = Visitor::from_tokens(next_group.stream());
                println!("SHIET");
                let model_map = model_visitor.consume_model_map()?;
                Ok(Some(ComponentBlock::Model(model_map)))
            }

@@ -1,4 +0,0 @@
//! Compiler-related procedures.

/// Processes a set of component definitions.
pub fn process() {}

@@ -9,7 +9,6 @@ pub extern crate parking_lot;
pub extern crate stdweb;

mod backend;
pub mod compiler;
pub mod std;

pub use crate::backend::{Backend, Web};

@@ -163,6 +163,7 @@ impl<T> List<T> {
        if let Some(mut prev) = prev {
            (*unsafe { prev.as_mut() }).next = next;
        } else if let Some(_) = self.head {
            // if node.prev is None that means node is the head
            self.head = next;
        } else {
            // shouldn't be any other cases