hoooooly shit parsing works, will make the code prettier tomorrow

commit 51be5dcf55 (parent 7c2febb4ed)
18 changed files with 566 additions and 121 deletions
.tokeignore (new file, +2)
@@ -0,0 +1,2 @@
+syn-serde
+symbol

Cargo.lock (generated, 29)
@@ -67,9 +67,11 @@ dependencies = [
 "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "symbol 0.1.0",
 "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
+"syn-serde 0.2.0",
 ]

 [[package]]
@@ -79,8 +81,10 @@ dependencies = [
 "enterprise-compiler 0.1.0",
 "proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
 "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "symbol 0.1.0",
 "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
+"syn-serde 0.2.0",
 "thiserror 1.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -230,6 +234,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "serde"
 version = "1.0.104"
 source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
+]

 [[package]]
 name = "serde_derive"
@@ -243,7 +250,7 @@ dependencies = [

 [[package]]
 name = "serde_json"
-version = "1.0.46"
+version = "1.0.48"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
 "itoa 0.4.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -274,7 +281,7 @@ dependencies = [
 "discard 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
 "rustc_version 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
-"serde_json 1.0.46 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "stdweb-derive 0.5.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "stdweb-internal-macros 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)",
 "stdweb-internal-runtime 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -303,7 +310,7 @@ dependencies = [
 "quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
 "serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
-"serde_json 1.0.46 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
 "sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
 "syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -318,6 +325,8 @@ name = "symbol"
 version = "0.1.0"
 dependencies = [
 "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
 "spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

@@ -331,6 +340,18 @@ dependencies = [
 "unicode-xid 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

+[[package]]
+name = "syn-serde"
+version = "0.2.0"
+dependencies = [
+"proc-macro2 1.0.8 (registry+https://github.com/rust-lang/crates.io-index)",
+"quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)",
+"serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)",
+"syn 1.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "thiserror"
 version = "1.0.11"
@@ -452,7 +473,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
 "checksum serde 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "414115f25f818d7dfccec8ee535d76949ae78584fc4f79a6f45a904bf8ab4449"
 "checksum serde_derive 1.0.104 (registry+https://github.com/rust-lang/crates.io-index)" = "128f9e303a5a29922045a830221b8f78ec74a5f544944f3d5984f8ec3895ef64"
-"checksum serde_json 1.0.46 (registry+https://github.com/rust-lang/crates.io-index)" = "21b01d7f0288608a01dca632cf1df859df6fd6ffa885300fc275ce2ba6221953"
+"checksum serde_json 1.0.48 (registry+https://github.com/rust-lang/crates.io-index)" = "9371ade75d4c2d6cb154141b9752cf3781ec9c05e0e5cf35060e1e70ee7b9c25"
 "checksum sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "2579985fda508104f7587689507983eadd6a6e84dd35d6d115361f530916fa0d"
 "checksum smallvec 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5c2fb2ec9bcd216a5b0d0ccf31ab17b5ed1d627960edff65bbe95d3ce221cefc"
 "checksum spin 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d"

@@ -8,7 +8,9 @@ edition = "2018"
 members = [
     "enterprise-compiler",
     "enterprise-macros",
+
     "symbol",
+    "syn-serde",

     "examples/helloworld",
 ]

@@ -15,4 +15,6 @@ serde = "1.0.104"
 serde_derive = "1.0.104"
 spin = "0.5.2"
 symbol = { path = "../symbol" }
-syn = { version = "1.0.14", features = ["extra-traits", "full"] }
+syn-serde = { path = "../syn-serde" }
+syn = { version = "1.0.14", features = ["extra-traits", "full"] }
+serde_json = "1.0.48"

@@ -7,26 +7,32 @@ extern crate serde_derive;

 pub mod model;
 mod visitor;
+mod tuple_map;

 use std::collections::HashMap;
+use std::env;
+use std::fs::File;
+use std::io::Write;
+use std::path::PathBuf;

-use crate::model::{Elem, Rsx, TagLhs};
+use crate::model::{Component, Elem, Rsx, TagLhs};
 use crate::visitor::Visitor;
 use proc_macro2::TokenStream;
 use symbol::Symbol;
 use syn::Expr;

-fn process(
-    name: impl AsRef<str>,
-    datamodel: &HashMap<String, String>,
-    datainit: &HashMap<String, String>,
-    dom: &[Rsx],
+pub fn build(
+    // name: impl AsRef<str>,
+    // datamodel: &HashMap<String, String>,
+    // datainit: &HashMap<String, String>,
+    // dom: &[Rsx],
+    component: &Component,
 ) -> TokenStream {
-    let name = name.as_ref();
+    let name = &component.name;

     let mut visitor = Visitor::new();
-    visitor.load_model(&datamodel);
-    let new_dom = visitor.make_graph(&dom);
+    visitor.load_model(&component.model);
+    let new_dom = visitor.make_graph(&component.view);
     let toplevel_names = visitor.gen_code(&new_dom);

     // let graph: Graph<_, _, _> = visitor.deps.clone().into_graph();
@@ -35,17 +41,13 @@ fn process(
     let name = format_ident!("{}", name);
     let mut model = TokenStream::new();
     let mut init = TokenStream::new();
-    for (name, ty) in datamodel {
-        let name = format_ident!("{}", name);
-        // TODO: parse this into an actual expression tree for Vec<T>
-        let ty = format_ident!("{}", ty);
+    for (name, (ty, value)) in component.model.iter() {
+        let name = format_ident!("{}", name.as_str());
+        let ty: syn::Type = ty.into();
+        let value: syn::Expr = value.into();
         model.extend(quote! { #name : std::sync::Arc<enterprise::parking_lot::Mutex<#ty>> , });
-    }
-    for (name, value) in datainit {
-        let name = format_ident!("{}", name);
-        let value = syn::parse_str::<Expr>(&value).unwrap();
         init.extend(
-            quote! { #name : std::sync::Arc::new(enterprise::parking_lot::Mutex::new(#value)) , },
+            quote! { #name : std::sync::Arc::new(enterprise::parking_lot::Mutex::new(#value .into())) , },
         );
     }

@@ -63,13 +65,13 @@ fn process(
     }

     quote! {
-        struct #name<B> {
+        pub struct #name<B> {
            _b: std::marker::PhantomData<B>,
            #model
         }

         impl<B> #name<B> {
-            fn new(_: &B) -> Self {
+            pub fn new(_: &B) -> Self {
                #name {
                    _b: std::marker::PhantomData::default(),
                    #init
@@ -87,6 +89,14 @@ fn process(
     }
 }

+pub fn process(mod_name: impl AsRef<str>, code: impl AsRef<str>) {
+    let component: Component = serde_json::from_str(code.as_ref()).unwrap();
+    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
+    let mut out_file = File::create(out_dir.join(format!("{}.rs", mod_name.as_ref()))).unwrap();
+    let tokens = build(&component);
+    write!(out_file, "{}", tokens);
+}
+
 // #[proc_macro]
 // pub fn example(_input_tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
 //     let helloworld_datamodel: HashMap<String, String> = hashmap! {

@@ -1,39 +1,71 @@
 use std::collections::HashMap;

+use proc_macro2::TokenStream;
 use symbol::Symbol;
-use syn::{Expr, Type};
+use syn_serde::{Expr, Syn, Type};

 pub type Id = Symbol;

 pub type ModelMap = HashMap<Symbol, (Type, Expr)>;

-#[derive(Debug)]
+pub fn convert_map(map: HashMap<Symbol, (syn::Type, syn::Expr)>) -> ModelMap {
+    map.into_iter()
+        .map(|(name, (ty, expr))| {
+            let ty = ty.to_adapter();
+            let expr = expr.to_adapter();
+            (name, (ty, expr))
+        })
+        .collect()
+}
+
+#[derive(Debug, Serialize, Deserialize)]
 pub struct Component {
     pub name: String,
+    #[serde(with = "crate::tuple_map")]
     pub model: ModelMap,
-    pub view: Rsx,
+    pub view: Vec<Rsx>,
 }

 #[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
 pub enum TagLhs {
     Bind(String),
-    // On(String),
-    // Plain(String),
+    Plain(String),
+    On(String),

     #[doc(hidden)]
     _Nonexhaustive,
 }

-#[derive(Debug, Default)]
+#[derive(Debug, Serialize, Deserialize)]
+pub enum TagRhs {
+    Code(Expr),
+    Text(String),
+}
+
+impl Clone for TagRhs {
+    fn clone(&self) -> Self {
+        match self {
+            TagRhs::Code(expr) => {
+                let expr: syn::Expr = Syn::from_adapter(&*expr);
+                TagRhs::Code(expr.clone().to_adapter())
+            }
+            TagRhs::Text(string) => TagRhs::Text(string.clone()),
+        }
+    }
+}
+
+#[derive(Debug, Default, Serialize, Deserialize)]
 pub struct Elem<T> {
     pub tag: String,
-    pub attrs: HashMap<TagLhs, String>,
+    #[serde(with = "crate::tuple_map")]
+    pub attrs: HashMap<TagLhs, TagRhs>,
     pub inner: Vec<T>,
 }

-#[derive(Debug)]
+#[derive(Debug, Serialize, Deserialize)]
 pub enum Rsx {
     Elem(Elem<Rsx>),
-    Code(Box<Expr>),
+    Code(Expr),
     Text(String),

     #[doc(hidden)]
@@ -54,7 +86,7 @@ impl TaggedRsx {
     pub fn get_id(&self) -> Id {
         match self {
             TaggedRsx::Elem(id, _) | TaggedRsx::Code(id, _) | TaggedRsx::Text(id, _) => *id,
-            _ => unimplemented!(),
+            _ => unimplemented!("tagged rsx"),
         }
     }
 }

enterprise-compiler/src/tuple_map.rs (new file, +82)
@@ -0,0 +1,82 @@
+// https://github.com/daboross/serde-tuple-vec-map/blob/master/src/lib.rs
+
+use std::hash::Hash;
+use std::marker::PhantomData;
+use std::fmt;
+use std::cmp;
+use std::collections::HashMap;
+
+use serde::{de::{Visitor, Deserialize, Deserializer, SeqAccess}, ser::{Serialize, Serializer}};
+
+struct TupleVecMapVisitor<K, V> {
+    marker: PhantomData<HashMap<K, V>>,
+}
+
+impl<K, V> TupleVecMapVisitor<K, V> {
+    pub fn new() -> Self {
+        TupleVecMapVisitor {
+            marker: PhantomData,
+        }
+    }
+}
+
+impl<'de, K: Eq + Hash, V> Visitor<'de> for TupleVecMapVisitor<K, V>
+where
+    K: Deserialize<'de>,
+    V: Deserialize<'de>,
+{
+    type Value = HashMap<K, V>;
+
+    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+        formatter.write_str("a map")
+    }
+
+    #[inline]
+    fn visit_unit<E>(self) -> Result<Self::Value, E> {
+        Ok(HashMap::new())
+    }
+
+    #[inline]
+    fn visit_seq<T>(self, mut seq: T) -> Result<Self::Value, T::Error>
+    where
+        T: SeqAccess<'de>,
+    {
+        let mut values = HashMap::new();
+
+        while let Some((key, value)) = seq.next_element()? {
+            values.insert(key, value);
+        }
+
+        Ok(values)
+    }
+}
+
+/// Serialize an array of `(K, V)` pairs as if it were a `HashMap<K, V>`.
+///
+/// In formats where dictionaries are ordered, this maintains the input data's order. Each pair is treated as a single
+/// entry into the dictionary.
+///
+/// Behavior when duplicate keys are present in the data is unspecified and serializer-dependent. This function does
+/// not check for duplicate keys and will not warn the serializer.
+pub fn serialize<K: Eq + Hash, V, S>(data: &HashMap<K, V>, serializer: S) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+    K: Serialize,
+    V: Serialize,
+{
+    serializer.collect_seq(data.iter().map(|x| (x.0, x.1)))
+}
+
+/// Deserialize to a `Vec<(K, V)>` as if it were a `HashMap<K, V>`.
+///
+/// This directly deserializes into the returned vec with no intermediate allocation.
+///
+/// In formats where dictionaries are ordered, this maintains the input data's order.
+pub fn deserialize<'de, K: Eq + Hash, V, D>(deserializer: D) -> Result<HashMap<K, V>, D::Error>
+where
+    D: Deserializer<'de>,
+    K: Deserialize<'de>,
+    V: Deserialize<'de>,
+{
+    deserializer.deserialize_seq(TupleVecMapVisitor::new())
+}

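Note on the new tuple_map module: serde_json only accepts maps whose keys serialize as strings, but Component::model and Elem::attrs are keyed by Symbol and TagLhs, so the #[serde(with = "crate::tuple_map")] attribute used in model.rs routes them through this module, which writes the map out as a sequence of (key, value) pairs instead. A minimal sketch of the same pattern, assuming the module above is mounted at crate::tuple_map and serde/serde_derive/serde_json are dependencies; the Coord and Grid types are made up for illustration:

#[macro_use]
extern crate serde_derive;

use std::collections::HashMap;

mod tuple_map; // the module added in this commit

// A key type that is not a string, so serde_json's normal map
// serialization would fail at runtime ("key must be a string").
#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
struct Coord(i32, i32);

#[derive(Debug, Serialize, Deserialize)]
struct Grid {
    // Stored as a JSON array of [key, value] pairs, e.g. [[[0,1],"hello"]].
    #[serde(with = "crate::tuple_map")]
    cells: HashMap<Coord, String>,
}

fn main() {
    let mut cells = HashMap::new();
    cells.insert(Coord(0, 1), "hello".to_string());

    let json = serde_json::to_string(&Grid { cells }).unwrap();
    let back: Grid = serde_json::from_str(&json).unwrap();
    assert_eq!(back.cells[&Coord(0, 1)], "hello");
}
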
@@ -6,9 +6,10 @@ use petgraph::graphmap::DiGraphMap;
 use petgraph::visit::Dfs;
 use proc_macro2::{TokenStream, TokenTree};
 use quote::ToTokens;
-use syn::Expr;
+use syn::{Expr, Type};
+use syn_serde::Syn;

-use crate::model::{Elem, Id, Rsx, TagLhs, TaggedRsx};
+use crate::model::{Elem, Id, ModelMap, Rsx, TagLhs, TagRhs, TaggedRsx};
 use crate::Symbol;

 #[derive(Copy, Clone, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
@@ -26,13 +27,13 @@ pub enum DepNode {
 impl DepNode {
     fn gen_update_code(
         &self,
-        model_bimap: &BiHashMap<Id, String>,
+        // model_bimap: &BiHashMap<Id, String>,
         updates: &mut TokenStream,
         update_func: &mut TokenStream,
     ) {
         match self {
             DepNode::ModelValue(sym) => {
-                let sym_name = format_ident!("{}", model_bimap.get_by_left(&sym).unwrap());
+                let sym_name = format_ident!("{}", sym.to_string());
                 let inner_lock = format_ident!("inner_lock_{}", Symbol::gensym().as_str());
                 updates.extend(quote! {
                     let #inner_lock = self.#sym_name.clone();
@@ -66,12 +67,11 @@ type DependencyGraph = DiGraphMap<DepNode, ()>;
 pub struct Visitor {
     idx: u32,
     pub(crate) deps: DependencyGraph,
-    model: HashMap<Id, String>,
+    model: HashMap<Id, (Type, Expr)>,
     pub(crate) impl_code: TokenStream,
     elem_attr_map: HashMap<Id, HashSet<Id>>,

     // symbol maps
-    model_bimap: BiHashMap<Id, String>,
+    // model_bimap: BiHashMap<Id, String>,
 }

 impl Visitor {
@@ -81,11 +81,13 @@ impl Visitor {
         }
     }

-    pub fn load_model(&mut self, model: &HashMap<String, String>) {
-        for (key, value) in model {
+    pub fn load_model(&mut self, model: &ModelMap) {
+        for (key, (ty, init)) in model {
             let id = Symbol::gensym();
-            self.model_bimap.insert(id, key.clone());
-            self.model.insert(id, value.clone());
+            // self.model_bimap.insert(id, key.clone());
+            let ty = Syn::from_adapter(&*ty);
+            let init = Syn::from_adapter(&*init);
+            self.model.insert(key.clone(), (ty, init));
         }
         // self.model.extend(model.clone());
     }
@@ -99,16 +101,19 @@ impl Visitor {
                 let tag_inner = self.make_graph(&inner);
                 for (lhs, rhs) in attrs {
                     if let TagLhs::Bind(attr) = lhs {
-                        if let Some(id) = self.model_bimap.get_by_right(rhs) {
-                            let from = DepNode::RsxAttr(node_id, Symbol::from(attr));
-                            let to = DepNode::ModelValue(*id);
-                            self.deps.add_edge(from, to, ());
-                            if let Some(set) = self.elem_attr_map.get_mut(&node_id) {
-                                set.insert(Symbol::from(attr));
-                            } else {
-                                let mut set = HashSet::new();
-                                set.insert(Symbol::from(attr));
-                                self.elem_attr_map.insert(node_id, set);
+                        if let TagRhs::Text(text) = rhs {
+                            let text_sym = Symbol::from(text);
+                            if self.model.contains_key(&text_sym) {
+                                let from = DepNode::RsxAttr(node_id, Symbol::from(attr));
+                                let to = DepNode::ModelValue(text_sym);
+                                self.deps.add_edge(from, to, ());
+                                if let Some(set) = self.elem_attr_map.get_mut(&node_id) {
+                                    set.insert(Symbol::from(attr));
+                                } else {
+                                    let mut set = HashSet::new();
+                                    set.insert(Symbol::from(attr));
+                                    self.elem_attr_map.insert(node_id, set);
+                                }
                             }
                         }
                     }
@@ -123,14 +128,15 @@ impl Visitor {
                 )
             }
             Rsx::Code(expr) => {
-                let deps = self.extract_model_dependencies(expr);
+                let syn_expr = Syn::from_adapter(&*expr);
+                let deps = self.extract_model_dependencies(&syn_expr);
                 for dep in deps {
                     let from = DepNode::ModelValue(dep);
                     let to = DepNode::RsxSpan(node_id);
                     self.deps.add_edge(from, to, ());
                 }

-                TaggedRsx::Code(node_id, expr.clone())
+                TaggedRsx::Code(node_id, Box::new(syn_expr.clone().to_adapter()))
             }
             Rsx::Text(literal) => TaggedRsx::Text(node_id, literal.clone()),
             _ => unimplemented!(),
@@ -156,7 +162,7 @@ impl Visitor {
         while let Some(nx) = dfs.next(&self.deps) {
             if nx != starting {
                 nx.gen_update_code(
-                    &self.model_bimap,
+                    // &self.model_bimap,
                     &mut updates,
                     &mut update_func,
                 );
@@ -204,7 +210,7 @@ impl Visitor {
                     }
                 });
             }
-            _ => unimplemented!(),
+            _ => unimplemented!("gen_code tagged rsx"),
         }
         names.push(format!("{}", make_node_id));
     }
@@ -218,8 +224,10 @@ impl Visitor {

         for token in tokens.into_iter() {
             if let TokenTree::Ident(ident) = token {
-                if let Some(id) = self.model_bimap.get_by_right(&ident.to_string()) {
-                    result.insert(*id);
+                // if let Some(id) = self.model_bimap.get_by_right(&ident.to_string()) {
+                let sym = Symbol::from(ident.to_string());
+                if self.model.contains_key(&sym) {
+                    result.insert(sym);
                 }
                 // result.insert(format!("{}", ident));
             }

@@ -13,4 +13,6 @@ quote = "1.0.2"
 thiserror = "1.0.9"
 symbol = { path = "../symbol" }
 enterprise-compiler = { path = "../enterprise-compiler" }
+syn-serde = { path = "../syn-serde" }
 syn = { version = "1.0.14", features = ["extra-traits", "full"] }
+serde_json = "1.0.48"

@@ -2,20 +2,26 @@ extern crate proc_macro;
 #[macro_use]
 extern crate quote;

+mod rsx;
+
 use std::collections::HashMap;
 use std::iter::FromIterator;
 use std::iter::Peekable;

-use enterprise_compiler::model::{Component, ModelMap, Rsx};
+use quote::ToTokens;
+use enterprise_compiler::model::{Component, Elem, ModelMap, Rsx};
+use syn_serde::Syn;
 use proc_macro2::{
     token_stream::IntoIter, Delimiter, Group, Ident, Punct, Spacing, TokenStream, TokenTree,
 };
 use symbol::Symbol;
 use syn::{
     parse::{Parse, ParseStream},
-    Error as SynError, Expr, Result as SynResult, Token, Type,
+    Error as SynError, Expr, Lit, Result as SynResult, Token, Type,
 };

+use crate::rsx::{RsxParser, RsxToken};
+
 #[derive(Debug)]
 enum ParseError {
     ExpectedKeyword(Symbol, Ident),
@@ -23,12 +29,14 @@ enum ParseError {
     ExpectedGroup(TokenTree),
     ExpectedPunct(TokenTree),
     WrongDelimiter(Delimiter, Delimiter),
-    WrongPunct(Punct, Punct),
+    WrongPunct(char, Punct),
     Syn(SynError),
     UnexpectedEOF,
     UnexpectedKeyword,
     MissingModel,
     MissingView,
+    InvalidRsx(TokenTree),
+    UnmatchedOpenTag(TokenTree),
 }

 impl From<SynError> for ParseError {
@@ -39,7 +47,7 @@ impl From<SynError> for ParseError {

 enum ComponentBlock {
     Model(ModelMap),
-    View(Rsx),
+    View(Vec<Rsx>),
 }

 struct Visitor(Peekable<IntoIter>);
@@ -55,7 +63,7 @@ impl Visitor {
     }

         self.consume_keyword("component")?;
-        let name = self.consume_ident()?.to_string();
+        let name = consume_ident(&mut self.0)?.to_string();
         let def = self.consume_group(Delimiter::Brace)?;
         let mut def_visitor = Visitor::from_tokens(def.stream());
         let mut model_map = None;
@@ -84,7 +92,7 @@ impl Visitor {
             return Ok(None);
         }

-        let next_ident = self.consume_ident()?;
+        let next_ident = consume_ident(&mut self.0)?;
         match next_ident.to_string().as_ref() {
             "model" => {
                 let next_group = self.consume_group(Delimiter::Brace)?;
@@ -156,7 +164,7 @@ impl Visitor {

         let stream = TokenStream::from_iter(buf);
         let item = syn::parse2::<ModelEntry>(stream)?;
-        println!("ITEM: {:?}", item);
+        // println!("ITEM: {:?}", item);

         Ok(Some((
             Symbol::from(item.name.to_string()),
@@ -173,18 +181,40 @@ impl Visitor {
                 break;
             }
         }
-        Ok(map)
+        Ok(enterprise_compiler::model::convert_map(map))
     }

-    fn consume_view(&mut self) -> Result<Rsx, ParseError> {
-        let lt = self.consume_punct(Some(Punct::new('<', Spacing::Alone)))?;
-        let gt = self.consume_punct(Some(Punct::new('>', Spacing::Alone)))?;
-        Ok(Rsx::Text(String::new()))
+    fn consume_view(&mut self) -> Result<Vec<Rsx>, ParseError> {
+        let mut rsx_parser = RsxParser::new(self.0.clone());
+        let mut result = Vec::new();
+
+        while let Some(next_token) = rsx_parser.next() {
+            match next_token? {
+                RsxToken::EmptyTag(name, attrs) => {
+                    let elem = Elem {
+                        tag: name.to_string(),
+                        attrs,
+                        inner: vec![],
+                    };
+                    let el = Rsx::Elem(elem);
+                    result.push(el);
+                }
+                RsxToken::Code(expr) => {
+                    result.push(Rsx::Code(expr.to_adapter()));
+                }
+                RsxToken::Str(string) => {
+                    result.push(Rsx::Text(string));
+                }
+                _ => (),
+            }
+        }
+
+        Ok(result)
     }

     fn consume_keyword(&mut self, keyword: impl AsRef<str>) -> Result<(), ParseError> {
         let keyword = keyword.as_ref();
-        let ident = self.consume_ident()?;
+        let ident = consume_ident(&mut self.0)?;
         let ident_str = ident.to_string();

         if keyword == &ident_str {
@@ -194,42 +224,6 @@ impl Visitor {
         }
     }

-    fn consume_punct(&mut self, equals: Option<Punct>) -> Result<Punct, ParseError> {
-        let next_token = self.0.peek();
-        if next_token.is_none() {
-            return Err(ParseError::UnexpectedEOF);
-        }
-
-        let next_token = self.0.next().expect("unreachable");
-        if let TokenTree::Punct(punct) = next_token {
-            if let Some(equals) = equals {
-                if punct.as_char() == equals.as_char() && punct.spacing() == equals.spacing() {
-                    Ok(punct)
-                } else {
-                    Err(ParseError::WrongPunct(equals, punct))
-                }
-            } else {
-                Ok(punct)
-            }
-        } else {
-            Err(ParseError::ExpectedPunct(next_token))
-        }
-    }
-
-    fn consume_ident(&mut self) -> Result<Ident, ParseError> {
-        let next_token = self.0.peek();
-        if next_token.is_none() {
-            return Err(ParseError::UnexpectedEOF);
-        }
-
-        let next_token = self.0.next().expect("unreachable");
-        if let TokenTree::Ident(ident) = next_token {
-            Ok(ident)
-        } else {
-            Err(ParseError::ExpectedIdent(next_token))
-        }
-    }
-
     fn consume_group(&mut self, delimiter: Delimiter) -> Result<Group, ParseError> {
         let next_token = self.0.peek();
         if next_token.is_none() {
@@ -249,6 +243,47 @@ impl Visitor {
         }
     }

+fn consume_punct(
+    iter: &mut Peekable<impl Iterator<Item = TokenTree>>,
+    equals: Option<char>,
+) -> Result<Punct, ParseError> {
+    let next_token = iter.peek();
+    if next_token.is_none() {
+        return Err(ParseError::UnexpectedEOF);
+    }
+
+    let next_token = iter.next().expect("unreachable");
+    if let TokenTree::Punct(punct) = next_token {
+        if let Some(equals) = equals {
+            if punct.as_char() == equals {
+                Ok(punct)
+            } else {
+                Err(ParseError::WrongPunct(equals, punct))
+            }
+        } else {
+            Ok(punct)
+        }
+    } else {
+        Err(ParseError::ExpectedPunct(next_token))
+    }
+}
+
+fn consume_ident(
+    iter: &mut Peekable<impl Iterator<Item = TokenTree>>,
+) -> Result<Ident, ParseError> {
+    let next_token = iter.peek();
+    if next_token.is_none() {
+        return Err(ParseError::UnexpectedEOF);
+    }
+
+    let next_token = iter.next().expect("unreachable");
+    if let TokenTree::Ident(ident) = next_token {
+        Ok(ident)
+    } else {
+        Err(ParseError::ExpectedIdent(next_token))
+    }
+}
+
 impl Iterator for Visitor {
     type Item = Result<Component, ParseError>;

@@ -264,11 +299,19 @@ impl Iterator for Visitor {
 #[proc_macro]
 pub fn component(input_tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
     let input_tokens: TokenStream = input_tokens.into();
-    println!("TOKENS: {:?}", input_tokens);
+    // println!("TOKENS: {:?}", input_tokens);
     let visitor = Visitor::from_tokens(input_tokens);

+    // TODO: allow importing and stuff
+    let mut output = TokenStream::new();
     for component in visitor {
-        println!("- {:?}", component);
+        println!("- {:#?}", component);
+        let component = component.expect("holy shiet");
+        let name = format_ident!("{}", component.name);
+        let serialized = serde_json::to_string(&component).expect("fucking json");
+        output.extend(quote! {
+            const #name: &'static str = #serialized;
+        });
     }
-    panic!();
-    (quote! {}).into()
+    output.into()
 }

enterprise-macros/src/rsx.rs (new file, +191)
@@ -0,0 +1,191 @@
+use std::collections::HashMap;
+use std::iter::FromIterator;
+use std::iter::Peekable;
+
+use enterprise_compiler::model::{TagLhs, TagRhs};
+use proc_macro2::{token_stream::IntoIter, Delimiter, Ident, Spacing, TokenStream, TokenTree};
+use symbol::Symbol;
+use syn::{Expr, Lit};
+use syn_serde::Syn;
+
+use crate::ParseError;
+use crate::{consume_ident, consume_punct};
+
+pub(crate) struct RsxParser(Peekable<IntoIter>);
+
+impl RsxParser {
+    pub fn new(tokens: impl Iterator<Item = TokenTree>) -> Self {
+        let tokens = TokenStream::from_iter(tokens);
+        RsxParser(tokens.into_iter().peekable())
+    }
+
+    pub fn next_token(&mut self) -> Result<Option<RsxToken>, ParseError> {
+        let token = self.0.peek();
+        if token.is_none() {
+            return Ok(None);
+        }
+
+        let token = self.0.next().expect("unreachable");
+        match token {
+            TokenTree::Punct(ref punct) if punct.as_char() == '<' => {
+                let next_token = self.0.peek();
+                if next_token.is_none() {
+                    return Err(ParseError::UnmatchedOpenTag(token));
+                }
+
+                let next_token = next_token.expect("unreachable");
+                let is_closing = if let TokenTree::Punct(punct2) = next_token {
+                    if punct2.as_char() == '/' {
+                        self.0.next();
+                        true
+                    } else {
+                        false
+                    }
+                } else {
+                    false
+                };
+
+                let name = self.consume_ident()?;
+                if is_closing {
+                    return Ok(Some(RsxToken::ClosingTag(Symbol::from(name.to_string()))));
+                }
+
+                // read until closing tag
+                let mut buf = Vec::new();
+                let mut prev_tag = None;
+                let mut is_empty = false;
+                loop {
+                    let next_token = self.0.peek();
+                    if next_token.is_none() {
+                        // probably wrong error?
+                        return Err(ParseError::UnexpectedEOF);
+                    }
+
+                    let next_token = self.0.next().expect("unreachable");
+                    if let TokenTree::Punct(ref punct) = next_token {
+                        if punct.as_char() == '>' {
+                            if let Some(TokenTree::Punct(ref punct2)) = prev_tag {
+                                if punct2.as_char() == '/' {
+                                    buf.truncate(buf.len() - 1);
+                                    is_empty = true;
+                                }
+                            }
+                            break;
+                        }
+                    }
+                    prev_tag = Some(next_token.clone());
+                    buf.push(next_token);
+                }
+
+                let mut attrs = HashMap::new();
+                let mut iter = buf.into_iter().peekable();
+                loop {
+                    // consume a single attr
+                    let next_token = iter.peek();
+                    if next_token.is_none() {
+                        break;
+                    }
+
+                    let name_or_prefix = consume_ident(&mut iter)?.to_string();
+                    let lhs = if let Some(TokenTree::Punct(ref punct)) = iter.peek() {
+                        if punct.as_char() == ':' {
+                            iter.next();
+                            let name = consume_ident(&mut iter)?.to_string();
+                            if name_or_prefix == "bind" {
+                                TagLhs::Bind(name)
+                            } else if name_or_prefix == "on" {
+                                TagLhs::On(name)
+                            } else {
+                                unimplemented!("these are wrong states")
+                            }
+                        } else if punct.as_char() == '=' {
+                            TagLhs::Plain(name_or_prefix.to_string())
+                        } else {
+                            unimplemented!("these are wrong states")
+                        }
+                    } else {
+                        unimplemented!("these are wrong states")
+                    };
+                    consume_punct(&mut iter, Some('='));
+
+                    let next_token = iter.next();
+                    let rhs = match next_token {
+                        Some(TokenTree::Literal(lit)) => {
+                            let mut stream = TokenStream::from(TokenTree::Literal(lit));
+                            let lit = syn::parse2::<Lit>(stream)?;
+                            if let Lit::Str(string) = lit {
+                                TagRhs::Text(string.value())
+                            } else {
+                                unimplemented!("grrr")
+                            }
+                        }
+                        Some(TokenTree::Group(group)) if group.delimiter() == Delimiter::Brace => {
+                            let expr = syn::parse2::<Expr>(group.stream())?;
+                            TagRhs::Code(expr.to_adapter())
+                        }
+                        _ => unimplemented!("wrong state: {:?}", next_token),
+                    };
+
+                    attrs.insert(lhs, rhs);
+                }
+
+                let variant = if is_empty {
+                    RsxToken::EmptyTag
+                } else {
+                    RsxToken::OpeningTag
+                };
+                return Ok(Some(variant(Symbol::from(name.to_string()), attrs)));
+            }
+            TokenTree::Literal(lit) => {
+                let mut stream = TokenStream::from(TokenTree::Literal(lit));
+                let lit = syn::parse2::<Lit>(stream)?;
+
+                if let Lit::Str(string) = lit {
+                    return Ok(Some(RsxToken::Str(string.value())));
+                }
+            }
+            TokenTree::Group(group) if group.delimiter() == Delimiter::Brace => {
+                let expr = syn::parse2::<Expr>(group.stream())?;
+                return Ok(Some(RsxToken::Code(expr)));
+            }
+            _ => unimplemented!("TOKEN: {:?}", token),
+        };
+
+        unimplemented!("the fuck")
+    }
+
+    fn consume_ident(&mut self) -> Result<Ident, ParseError> {
+        let next_token = self.0.peek();
+        if next_token.is_none() {
+            return Err(ParseError::UnexpectedEOF);
+        }
+
+        let next_token = self.0.next().expect("unreachable");
+        if let TokenTree::Ident(ident) = next_token {
+            Ok(ident)
+        } else {
+            Err(ParseError::ExpectedIdent(next_token))
+        }
+    }
+}
+
+#[derive(Debug)]
+pub(crate) enum RsxToken {
+    OpeningTag(Symbol, HashMap<TagLhs, TagRhs>),
+    EmptyTag(Symbol, HashMap<TagLhs, TagRhs>),
+    ClosingTag(Symbol),
+    Str(String),
+    Code(Expr),
+}
+
+impl Iterator for RsxParser {
+    type Item = Result<RsxToken, ParseError>;
+
+    fn next(&mut self) -> Option<Self::Item> {
+        match self.next_token() {
+            Ok(Some(token)) => Some(Ok(token)),
+            Ok(None) => None,
+            Err(err) => Some(Err(err)),
+        }
+    }
+}

@@ -8,7 +8,9 @@ build = "src/build.rs"
 [build-dependencies]
 enterprise-compiler = { path = "../../enterprise-compiler" }
 enterprise-macros = { path = "../../enterprise-macros" }
+enterprise = { path = "../.." }

 [dependencies]
 stdweb = "0.4.20"
+enterprise-macros = { path = "../../enterprise-macros" }
 enterprise = { path = "../.." }

@@ -8,10 +8,12 @@ component! {
         }

         view {
-            <TextBox bind:value="name" />
-            Hello, {name}!
+            <input bind:value="name" />
+            "Hello, " {name} "!"
         }
     }
 }

-fn main() {}
+fn main() {
+    enterprise_compiler::process("helloworld", HelloWorld);
+}

@@ -1,13 +1,13 @@
 #[macro_use]
 extern crate enterprise;

-extern crate stdweb;
+enterprise_mod!(helloworld);

 use std::sync::Arc;

 use enterprise::{Backend, Component, Web};

-example!();
+use crate::helloworld::HelloWorld;

 fn main() {
     stdweb::initialize();

src/lib.rs (10)
@@ -18,3 +18,13 @@ pub trait Component<B: Backend> {
     /// TODO: replace this with a real init function.
     fn create(&self, el: &crate::stdweb::web::Element);
 }
+
+/// Declares a mod
+#[macro_export]
+macro_rules! enterprise_mod {
+    ($vis:vis $name:ident) => {
+        $vis mod $name {
+            include!(concat!(env!("OUT_DIR"), "/", stringify!($name), ".rs"));
+        }
+    }
+}

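Taken together with the new enterprise_compiler::process added above, the intended flow in this commit seems to be: component! serializes each parsed Component to JSON and emits it as a string constant, the example's build script hands that constant to process, which writes the generated module into OUT_DIR, and enterprise_mod! includes it back into the crate. A rough sketch of the wiring, lifted from the helloworld example above (not a standalone program, since the HelloWorld constant only exists after the component! macro has expanded):

// build.rs (sketch): component! has already expanded to something like
//     const HelloWorld: &'static str = "...serialized component json...";
// so the build script can feed it to the compiler, producing $OUT_DIR/helloworld.rs.
fn main() {
    enterprise_compiler::process("helloworld", HelloWorld);
}

// src/main.rs (sketch): pull the generated module back in and use it.
enterprise_mod!(helloworld); // expands to `mod helloworld { include!(/* $OUT_DIR/helloworld.rs */); }`

use crate::helloworld::HelloWorld;
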
@@ -9,3 +9,5 @@ edition = "2018"
 [dependencies]
 lazy_static = "1.4.0"
 spin = "0.5.2"
+serde_derive = "1.0.104"
+serde = "1.0.104"

@@ -1,13 +1,20 @@
 // cribbed from https://github.com/remexre/symbol-rs

+#[macro_use]
+extern crate serde_derive;
+
 use std::cmp::Ordering;
 use std::collections::BTreeSet;
-use std::fmt::{Debug, Display, Formatter, Result as FmtResult};
+use std::fmt::{self, Debug, Display, Formatter, Result as FmtResult};
 use std::mem::{forget, transmute};
 use std::ops::Deref;
 use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering};

 use lazy_static::lazy_static;
+use serde::{
+    de::{Deserialize, Deserializer, Visitor},
+    ser::{Serialize, Serializer},
+};
 use spin::Mutex;

 lazy_static! {
@@ -111,3 +118,29 @@ fn leak_string(s: String) -> &'static str {
     forget(s);
     out
 }
+
+// SERDE
+
+impl Serialize for Symbol {
+    fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
+        s.serialize_str(self.s)
+    }
+}
+
+impl<'d> Deserialize<'d> for Symbol {
+    fn deserialize<D: Deserializer<'d>>(d: D) -> Result<Self, D::Error> {
+        d.deserialize_str(SymVisitor)
+    }
+}
+
+struct SymVisitor;
+
+impl<'d> Visitor<'d> for SymVisitor {
+    type Value = Symbol;
+    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "symbol")
+    }
+    fn visit_str<E>(self, string: &str) -> Result<Self::Value, E> {
+        Ok(Symbol::from(string))
+    }
+}

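With these impls a Symbol round-trips through serde as a bare string, which is what lets it key the ModelMap that the macro serializes to JSON. A quick check of the behaviour, assuming serde_json is available alongside the symbol crate:

use symbol::Symbol;

fn main() {
    // Serialize goes through serialize_str, so a Symbol is just a JSON string.
    let sym = Symbol::from("name");
    let json = serde_json::to_string(&sym).unwrap();
    assert_eq!(json, "\"name\"");

    // Deserialize goes through SymVisitor::visit_str, re-interning the text.
    let back: Symbol = serde_json::from_str(&json).unwrap();
    assert_eq!(back, sym);
}
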
syn-serde (new submodule, +1)
@@ -0,0 +1 @@
+Subproject commit dff506bb8a83702e2dc82b17177dda43e6de0f3a