From 96df945dbc3e0f760e6fc91ae0cf01845a99eab2 Mon Sep 17 00:00:00 2001
From: Michael Zhang
Date: Fri, 21 Feb 2020 02:37:03 -0600
Subject: [PATCH] fix code according to proptest

---
 enterprise-compiler/src/model/mod.rs    | 49 ++++++++++----
 enterprise-compiler/src/model/props.rs  | 12 ++--
 enterprise-compiler/src/tuple_map.rs    | 24 +++----
 .../{lib.txt => parser.txt}             |  2 +-
 enterprise-macros/src/parser.rs         | 64 ++++++++++++++++---
 enterprise-macros/src/rsx.rs            | 55 ++++++++++++----
 examples/todomvc/src/build.rs           |  2 +-
 output.txt                              |  0
 syn-serde/Cargo.toml                    |  2 +-
 9 files changed, 158 insertions(+), 52 deletions(-)
 rename enterprise-macros/proptest-regressions/{lib.txt => parser.txt} (70%)
 create mode 100644 output.txt

diff --git a/enterprise-compiler/src/model/mod.rs b/enterprise-compiler/src/model/mod.rs
index 1f4f263..9c963fa 100644
--- a/enterprise-compiler/src/model/mod.rs
+++ b/enterprise-compiler/src/model/mod.rs
@@ -1,7 +1,6 @@
 mod props;

-use std::collections::HashMap;
-use std::hash::{BuildHasher, Hash};
+use std::collections::BTreeMap;
 use std::iter::{self, FromIterator};

 use proc_macro2::{Span, TokenStream, TokenTree};
@@ -13,11 +12,9 @@ pub use self::props::*;

 pub type Id = Symbol;

-pub type ModelMap = HashMap<Id, (Type, Expr)>;
+pub type ModelMap = BTreeMap<Id, (Type, Expr)>;

-pub fn convert_map(
-    map: HashMap<Symbol, (syn::Type, syn::Expr)>,
-) -> HashMap<Symbol, (Type, Expr)> {
+pub fn convert_map(map: BTreeMap<Symbol, (syn::Type, syn::Expr)>) -> BTreeMap<Symbol, (Type, Expr)> {
     map.into_iter()
         .map(|(left, (ty, expr))| {
             let ty = ty.to_adapter();
@@ -37,6 +34,7 @@ pub struct Component {

 impl ToTokens for Component {
     fn to_tokens(&self, stream: &mut TokenStream) {
+        let name = format_ident!("{}", self.name);
         let model = TokenStream::from_iter(self.model.iter().map(|(name, (ty, init))| {
             let name = format_ident!("{}", name.as_str());
             let ty = syn::Type::from_adapter(ty);
@@ -45,7 +43,7 @@ impl ToTokens for Component {
         }));
         let view = TokenStream::from_iter(self.view.iter().map(|rsx| rsx.to_token_stream()));
         stream.extend(quote! {
-            component {
+            component #name {
                 model { #model }
                 view { #view }
             }
@@ -53,7 +51,7 @@
     }
 }

-#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialOrd, Ord, PartialEq, Eq)]
 pub enum TagLhs {
     Bind(String),
     Plain(String),
@@ -89,6 +87,20 @@ pub enum TagRhs {
     Text(String),
 }

+impl PartialEq for TagRhs {
+    fn eq(&self, other: &TagRhs) -> bool {
+        match (self, other) {
+            (TagRhs::Code(expr), TagRhs::Code(other)) => {
+                syn::Expr::from_adapter(expr) == syn::Expr::from_adapter(other)
+            }
+            (TagRhs::Text(string), TagRhs::Text(other)) => string == other,
+            _ => false,
+        }
+    }
+}
+
+impl Eq for TagRhs {}
+
 impl Clone for TagRhs {
     fn clone(&self) -> Self {
         match self {
@@ -116,11 +128,11 @@ impl ToTokens for TagRhs {
     }
 }

-#[derive(Clone, Debug, Default, Serialize, Deserialize)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
 pub struct Elem {
     pub tag: String,
     #[serde(with = "crate::tuple_map")]
-    pub attrs: HashMap<TagLhs, TagRhs>,
+    pub attrs: BTreeMap<TagLhs, TagRhs>,
     pub inner: Option<Vec<Rsx>>,
 }

@@ -153,6 +165,21 @@ pub enum Rsx {
     _Nonexhaustive,
 }

+impl PartialEq for Rsx {
+    fn eq(&self, other: &Rsx) -> bool {
+        match (self, other) {
+            (Rsx::Elem(this), Rsx::Elem(other)) => this == other,
+            (Rsx::Code(expr), Rsx::Code(other)) => {
+                syn::Expr::from_adapter(expr) == syn::Expr::from_adapter(other)
+            }
+            (Rsx::Text(this), Rsx::Text(other)) => this == other,
+            _ => false,
+        }
+    }
+}
+
+impl Eq for Rsx {}
+
 impl ToTokens for Rsx {
     fn to_tokens(&self, stream: &mut TokenStream) {
         match self {
@@ -161,7 +188,7 @@ impl ToTokens for Rsx {
             }
             Rsx::Code(expr) => {
                 let expr = syn::Expr::from_adapter(expr);
-                stream.extend(quote! { #expr });
+                stream.extend(quote! { { #expr } });
             }
             Rsx::Text(string) => {
                 let string = syn::Lit::Str(syn::LitStr::new(string.as_ref(), Span::call_site()));
diff --git a/enterprise-compiler/src/model/props.rs b/enterprise-compiler/src/model/props.rs
index 5b71983..906e604 100644
--- a/enterprise-compiler/src/model/props.rs
+++ b/enterprise-compiler/src/model/props.rs
@@ -1,7 +1,7 @@
-use std::collections::HashMap;
+use std::collections::BTreeMap;

 use proptest::{
-    collection::{hash_map, vec, SizeRange},
+    collection::{btree_map, vec, SizeRange},
     option::{self, Probability},
     prelude::*,
     string::string_regex,
@@ -15,7 +15,7 @@ use super::{Component, Elem, Rsx};
 prop_compose! {
     pub fn arbitrary_component()
     (
         name in ident_strategy(),
-        model in hash_map(
+        model in btree_map(
             ident_strategy().prop_map(|ident| Symbol::from(ident)),
             // TODO: maybe actually have tests for syn?
             (Just(syn::parse_str::<syn::Type>("()").unwrap().to_adapter()), Just(syn::parse_str::<syn::Expr>("()").unwrap().to_adapter())),
@@ -35,9 +35,9 @@ pub fn arbitrary_view() -> impl Strategy<Value = Rsx> {
         Just(Rsx::Code(
             syn::parse_str::<syn::Expr>("()").unwrap().to_adapter()
         )),
-        any::<String>().prop_map(Rsx::Text),
+        string_regex(r"[:print:]+").unwrap().prop_map(Rsx::Text),
     ];
-    leaf.prop_recursive(4, 16, 5, |inner| {
+    leaf.prop_recursive(2, 4, 5, |inner| {
         prop_oneof![(
             ident_strategy(),
             option::weighted(Probability::new(0.9), vec(inner, SizeRange::default())),
         )
             .prop_map(|(tag, inner)| Rsx::Elem(Elem {
                 tag,
                 // TODO: ouais
-                attrs: HashMap::new(),
+                attrs: BTreeMap::new(),
                 inner,
             }))]
     })
diff --git a/enterprise-compiler/src/tuple_map.rs b/enterprise-compiler/src/tuple_map.rs
index 8099090..fbb6b7e 100644
--- a/enterprise-compiler/src/tuple_map.rs
+++ b/enterprise-compiler/src/tuple_map.rs
@@ -1,18 +1,20 @@
 // https://github.com/daboross/serde-tuple-vec-map/blob/master/src/lib.rs

 use std::fmt;
-use std::hash::Hash;
+
 use std::marker::PhantomData;

-use std::collections::HashMap;
+use std::collections::BTreeMap;

 use serde::{
     de::{Deserialize, Deserializer, SeqAccess, Visitor},
     ser::{Serialize, Serializer},
 };

+trait Delegate: Ord {}
+
 struct TupleVecMapVisitor<K, V> {
-    marker: PhantomData<HashMap<K, V>>,
+    marker: PhantomData<BTreeMap<K, V>>,
 }

 impl<K, V> TupleVecMapVisitor<K, V> {
@@ -23,12 +25,12 @@ impl<K, V> TupleVecMapVisitor<K, V> {
     }
 }

-impl<'de, K: Eq + Hash, V> Visitor<'de> for TupleVecMapVisitor<K, V>
+impl<'de, K: Ord, V> Visitor<'de> for TupleVecMapVisitor<K, V>
 where
     K: Deserialize<'de>,
     V: Deserialize<'de>,
 {
-    type Value = HashMap<K, V>;
+    type Value = BTreeMap<K, V>;

     fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         formatter.write_str("a map")
@@ -36,7 +38,7 @@ where

     #[inline]
     fn visit_unit<E>(self) -> Result<Self::Value, E> {
-        Ok(HashMap::new())
+        Ok(BTreeMap::new())
     }

     #[inline]
@@ -44,7 +46,7 @@ where
     where
         T: SeqAccess<'de>,
     {
-        let mut values = HashMap::new();
+        let mut values = BTreeMap::new();

         while let Some((key, value)) = seq.next_element()? {
             values.insert(key, value);
@@ -54,14 +56,14 @@ where
     }
 }

-/// Serialize an array of `(K, V)` pairs as if it were a `HashMap`.
+/// Serialize an array of `(K, V)` pairs as if it were a `BTreeMap`.
 ///
 /// In formats where dictionaries are ordered, this maintains the input data's order. Each pair is treated as a single
 /// entry into the dictionary.
 ///
 /// Behavior when duplicate keys are present in the data is unspecified and serializer-dependent. This function does
 /// not check for duplicate keys and will not warn the serializer.
-pub fn serialize<K, V, S>(data: &HashMap<K, V>, serializer: S) -> Result<S::Ok, S::Error>
+pub fn serialize<K, V, S>(data: &BTreeMap<K, V>, serializer: S) -> Result<S::Ok, S::Error>
 where
     S: Serializer,
     K: Serialize,
@@ -70,12 +72,12 @@ where
     serializer.collect_seq(data.iter().map(|x| (x.0, x.1)))
 }

-/// Deserialize to a `Vec<(K, V)>` as if it were a `HashMap`.
+/// Deserialize to a `Vec<(K, V)>` as if it were a `BTreeMap`.
 ///
 /// This directly deserializes into the returned vec with no intermediate allocation.
 ///
 /// In formats where dictionaries are ordered, this maintains the input data's order.
-pub fn deserialize<'de, K: Eq + Hash, V, D>(deserializer: D) -> Result<HashMap<K, V>, D::Error>
+pub fn deserialize<'de, K: Ord, V, D>(deserializer: D) -> Result<BTreeMap<K, V>, D::Error>
 where
     D: Deserializer<'de>,
     K: Deserialize<'de>,
diff --git a/enterprise-macros/proptest-regressions/lib.txt b/enterprise-macros/proptest-regressions/parser.txt
similarity index 70%
rename from enterprise-macros/proptest-regressions/lib.txt
rename to enterprise-macros/proptest-regressions/parser.txt
index ace6147..3441e8b 100644
--- a/enterprise-macros/proptest-regressions/lib.txt
+++ b/enterprise-macros/proptest-regressions/parser.txt
@@ -4,4 +4,4 @@
 #
 # It is recommended to check this file in to source control so that
 # everyone who runs the test benefits from these saved cases.
-cc 2a51d2b2a9a6442d273f2835de2bc13bc778cab5d1333e190cc6fb90c5d5e50a # shrinks to tree = Component { name: "A", model: {}, view: [] }
+cc c87725a641776defa0a321d11950acba1b25c60d510345f7a487df38d08795bc # shrinks to tree = Component { name: "a", model: {}, view: [] }
diff --git a/enterprise-macros/src/parser.rs b/enterprise-macros/src/parser.rs
index 3e6174d..b02af61 100644
--- a/enterprise-macros/src/parser.rs
+++ b/enterprise-macros/src/parser.rs
@@ -1,4 +1,4 @@
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 use std::iter::FromIterator;
 use std::iter::Peekable;

@@ -9,7 +9,7 @@ use proc_macro2::{
 use symbol::Symbol;
 use syn::{
     parse::{Parse, ParseStream},
-    Error as SynError, Expr, Result as SynResult, Token, Type,
+    Error as SynError, Expr, Lit, Result as SynResult, Token, Type,
 };
 use syn_serde::Syn;

@@ -26,8 +26,12 @@ pub(crate) enum ParseError {
     Syn(SynError),
     UnexpectedEOF,
     UnexpectedKeyword,
+    UnexpectedToken(TokenTree),
     MissingModel,
     MissingView,
+    ClosedTooFar,
+    WrongClosingTag(String, String),
+    UnrecognizedLiteral(Lit),
     // InvalidRsx(TokenTree),
     UnmatchedOpenTag(TokenTree),
 }
@@ -148,6 +152,7 @@ impl Visitor {
             }
             buf.push(next_token);
         }
+        println!("model buf: {:?}", buf);

         // probably shouldn't happen?
         if buf.len() == 0 {
@@ -166,10 +171,11 @@ impl Visitor {
             )))
         };

-        let mut map = HashMap::new();
+        let mut map = BTreeMap::new();
         while let Some((name, ty, init, comma)) = single_def()? {
+            println!("single_def => ({}, {:?}, {:?}, {})", name, ty, init, comma);
             map.insert(name, (ty, init));
-            if comma {
+            if !comma {
                 break;
             }
         }
@@ -179,18 +185,42 @@ impl Visitor {
     fn consume_view(&mut self) -> Result<Vec<Rsx>, ParseError> {
         let mut rsx_parser = RsxParser::new(self.0.clone());
         let mut result = Vec::new();
+        let mut tag_stack = Vec::new();
         while let Some(next_token) = rsx_parser.next() {
             match next_token? {
-                RsxToken::EmptyTag(name, attrs) => {
+                RsxToken::EmptyTag(tag, attrs) => {
                     let elem = Elem {
-                        tag: name.to_string(),
+                        tag: tag.to_string(),
                         attrs,
                         inner: None,
                     };
                     let el = Rsx::Elem(elem);
                     result.push(el);
                 }
+                RsxToken::OpeningTag(tag, attrs) => {
+                    tag_stack.push((tag, attrs, result.clone()));
+                    result.clear();
+                }
+                RsxToken::ClosingTag(tag) => {
+                    if let Some((last_tag, attrs, mut last_result)) = tag_stack.pop() {
+                        if tag.as_str() == last_tag.as_str() {
+                            last_result.push(Rsx::Elem(Elem {
+                                tag: tag.to_string(),
+                                attrs: attrs.clone(),
+                                inner: Some(result),
+                            }));
+                            result = last_result;
+                        } else {
+                            return Err(ParseError::WrongClosingTag(
+                                last_tag.to_string(),
+                                tag.to_string(),
+                            ));
+                        }
+                    } else {
+                        return Err(ParseError::ClosedTooFar);
+                    }
+                }
                 RsxToken::Code(expr) => {
                     result.push(Rsx::Code(expr.to_adapter()));
                 }
@@ -290,20 +320,38 @@ impl Iterator for Visitor {

 #[cfg(test)]
 mod tests {
+    use std::collections::BTreeMap;
+
     use enterprise_compiler::model::*;
     use proptest::prelude::*;
     use quote::ToTokens;
+    use syn_serde::Syn;

     use super::Visitor;

+    fn convert(
+        map: &BTreeMap<Symbol, (Type, Expr)>,
+    ) -> BTreeMap<Symbol, (syn::Type, syn::Expr)> {
+        map.iter()
+            .map(|(name, (ty, expr))| {
+                let ty = syn::Type::from_adapter(ty);
+                let expr = syn::Expr::from_adapter(expr);
+                (name.clone(), (ty, expr))
+            })
+            .collect()
+    }
+
     proptest! {
         #[test]
         fn tokens_parse_compatibility(tree in arbitrary_component()) {
             let tokens = tree.to_token_stream();
-
             let mut visitor = Visitor::from_tokens(tokens.clone());
             let tree2 = visitor.next().unwrap().unwrap();
-            prop_assert_eq!(format!("{}", tokens), format!("{}", tree2.to_token_stream()));
+            // compare the trees
+            prop_assert_eq!(tree.name, tree2.name, "name");
+            prop_assert_eq!(convert(&tree.model), convert(&tree2.model), "model");
+            prop_assert_eq!(tree.view, tree2.view, "view");
         }
     }
 }
diff --git a/enterprise-macros/src/rsx.rs b/enterprise-macros/src/rsx.rs
index 3e92212..3b6874d 100644
--- a/enterprise-macros/src/rsx.rs
+++ b/enterprise-macros/src/rsx.rs
@@ -1,4 +1,4 @@
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 use std::iter::FromIterator;
 use std::iter::Peekable;

@@ -8,7 +8,7 @@ use symbol::Symbol;
 use syn::{Expr, Lit};
 use syn_serde::Syn;

-use crate::parser::{ParseError, consume_ident, consume_punct};
+use crate::parser::{consume_ident, consume_punct, ParseError};

 pub(crate) struct RsxParser(Peekable);

@@ -46,6 +46,8 @@ impl RsxParser {
         let name = self.consume_ident()?;

         if is_closing {
+            // TODO: assert next is >
+            self.0.next();
             return Ok(Some(RsxToken::ClosingTag(Symbol::from(name.to_string()))));
         }

@@ -76,7 +78,7 @@ impl RsxParser {
                     buf.push(next_token);
                 }

-                let mut attrs = HashMap::new();
+                let mut attrs = BTreeMap::new();
                 let mut iter = buf.into_iter().peekable();
                 loop {
                     // consume a single attr
@@ -133,24 +135,50 @@ impl RsxParser {
                 } else {
                     RsxToken::OpeningTag
                 };
-                return Ok(Some(variant(Symbol::from(name.to_string()), attrs)));
+                Ok(Some(variant(Symbol::from(name.to_string()), attrs)))
             }
             TokenTree::Literal(lit) => {
                 let stream = TokenStream::from(TokenTree::Literal(lit));
                 let lit = syn::parse2::<Lit>(stream)?;
                 if let Lit::Str(string) = lit {
-                    return Ok(Some(RsxToken::Str(string.value())));
+                    Ok(Some(RsxToken::Str(string.value())))
+                } else {
+                    Err(ParseError::UnrecognizedLiteral(lit))
                 }
             }
             TokenTree::Group(group) if group.delimiter() == Delimiter::Brace => {
-                let expr = syn::parse2::<Expr>(group.stream())?;
-                return Ok(Some(RsxToken::Code(expr)));
-            }
-            _ => unimplemented!("TOKEN: {:?}", token),
-        };
-
-        unimplemented!("the fuck")
+                let mut stream = group.stream().into_iter();
+
+                enum GroupType {
+                    Pound,
+                    Slash,
+                    None,
+                }
+
+                // based on svelte for now, we'll probably change up the syntax a bit later
+                let is_special = match stream.next() {
+                    Some(TokenTree::Punct(punct)) if punct.as_char() == '#' => GroupType::Pound,
+                    Some(TokenTree::Punct(punct)) if punct.as_char() == '/' => GroupType::Slash,
+                    _ => GroupType::None,
+                };
+
+                if let GroupType::None = is_special {
+                    let expr = syn::parse2::<Expr>(group.stream())?;
+                    Ok(Some(RsxToken::Code(expr)))
+                } else {
+                    match stream.next() {
+                        Some(TokenTree::Ident(ident)) if ident == "for" => {
+                            // syntax: {#for pattern in expr} {/for}
+                            Ok(Some(RsxToken::StartFor()))
+                        }
+                        Some(other) => Err(ParseError::ExpectedIdent(other)),
+                        None => Err(ParseError::UnexpectedEOF),
+                    }
+                }
+            }
+            token => Err(ParseError::UnexpectedToken(token)),
+        }
     }

     fn consume_ident(&mut self) -> Result<Ident, ParseError> {
@@ -170,11 +198,12 @@ impl RsxParser {

 #[derive(Debug)]
 pub(crate) enum RsxToken {
-    OpeningTag(Symbol, HashMap<TagLhs, TagRhs>),
-    EmptyTag(Symbol, HashMap<TagLhs, TagRhs>),
+    OpeningTag(Symbol, BTreeMap<TagLhs, TagRhs>),
+    EmptyTag(Symbol, BTreeMap<TagLhs, TagRhs>),
     ClosingTag(Symbol),
     Str(String),
     Code(Expr),
+    StartFor(),
 }

 impl Iterator for RsxParser {
diff --git a/examples/todomvc/src/build.rs b/examples/todomvc/src/build.rs
index 4554650..7798bc1 100644
--- a/examples/todomvc/src/build.rs
+++ b/examples/todomvc/src/build.rs
@@ -10,7 +10,7 @@ component! {
     view {
         <div>
             <ul>
-                {#for (key, todo) in todos}
+                {#for (key, todo) in todos }
                    <li>{todo} [x]</li>
                {/for}
            </ul>
diff --git a/output.txt b/output.txt
new file mode 100644
index 0000000..e69de29
diff --git a/syn-serde/Cargo.toml b/syn-serde/Cargo.toml
index 48b06fc..e88c789 100644
--- a/syn-serde/Cargo.toml
+++ b/syn-serde/Cargo.toml
@@ -22,7 +22,7 @@ proc-macro2 = { version = "1.0", default-features = false }
 serde = { version = "1.0.99", features = ["derive"] }
 serde_derive = "1.0.99" # This is necessary to make `-Z minimal-versions` build successful.
 serde_json = { version = "1.0", optional = true }
-syn = { version = "1.0.5", default-features = false, features = ["full"] }
+syn = { version = "1.0.5", default-features = false, features = ["extra-traits", "full"] }

[dev-dependencies]
quote = "1.0"
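
Note: the reworked tokens_parse_compatibility test above boils down to a print/parse round trip checked with proptest: generate an arbitrary tree, render it to tokens, parse it back, and compare the two trees structurally instead of comparing formatted token strings. The sketch below shows that shape in a self-contained form under proptest (the only dependency assumed, and one this patch already uses); Pair, print, and parse are made-up stand-ins for Component, to_token_stream(), and Visitor::from_tokens, not part of this patch.

// Illustrative only: a round-trip property in the same shape as
// `tokens_parse_compatibility`; all names here are hypothetical stand-ins.
use proptest::prelude::*;

#[derive(Debug, Clone, PartialEq)]
struct Pair {
    key: String,
    value: u32,
}

// Stand-in for rendering a tree to tokens.
fn print(pair: &Pair) -> String {
    format!("{}={}", pair.key, pair.value)
}

// Stand-in for parsing the rendered form back into a tree.
fn parse(s: &str) -> Option<Pair> {
    let (key, value) = s.split_once('=')?;
    Some(Pair {
        key: key.to_string(),
        value: value.parse().ok()?,
    })
}

proptest! {
    #[test]
    fn print_parse_roundtrip(
        pair in ("[a-z]+", any::<u32>()).prop_map(|(key, value)| Pair { key, value })
    ) {
        let printed = print(&pair);
        let reparsed = parse(&printed).unwrap();
        // Structural, field-aware comparison rather than comparing printed strings.
        prop_assert_eq!(pair, reparsed);
    }
}

Comparing the re-parsed value field by field, as the patch now does for name, model, and view, tolerates purely textual differences in the emitted tokens (such as the new quote! { { #expr } } wrapping) while still catching structural regressions.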