fix code according to proptest

Michael Zhang 2020-02-21 02:37:03 -06:00
parent f9c478276d
commit 96df945dbc
Signed by: michael
GPG key ID: BDA47A31A3C8EE6B
9 changed files with 158 additions and 52 deletions

@@ -1,7 +1,6 @@
 mod props;
-use std::collections::HashMap;
-use std::hash::{BuildHasher, Hash};
+use std::collections::BTreeMap;
 use std::iter::{self, FromIterator};
 use proc_macro2::{Span, TokenStream, TokenTree};
@@ -13,11 +12,9 @@ pub use self::props::*;
 pub type Id = Symbol;
-pub type ModelMap = HashMap<Symbol, (Type, Expr)>;
+pub type ModelMap = BTreeMap<Symbol, (Type, Expr)>;
-pub fn convert_map<T: Hash + Eq, S: BuildHasher>(
-    map: HashMap<T, (syn::Type, syn::Expr), S>,
-) -> HashMap<T, (Type, Expr)> {
+pub fn convert_map<T: Ord>(map: BTreeMap<T, (syn::Type, syn::Expr)>) -> BTreeMap<T, (Type, Expr)> {
     map.into_iter()
         .map(|(left, (ty, expr))| {
             let ty = ty.to_adapter();
@@ -37,6 +34,7 @@ pub struct Component {
 impl ToTokens for Component {
     fn to_tokens(&self, stream: &mut TokenStream) {
+        let name = format_ident!("{}", self.name);
         let model = TokenStream::from_iter(self.model.iter().map(|(name, (ty, init))| {
             let name = format_ident!("{}", name.as_str());
             let ty = syn::Type::from_adapter(ty);
@@ -45,7 +43,7 @@ impl ToTokens for Component {
         }));
         let view = TokenStream::from_iter(self.view.iter().map(|rsx| rsx.to_token_stream()));
         stream.extend(quote! {
-            component {
+            component #name {
                 model { #model }
                 view { #view }
             }
@@ -53,7 +51,7 @@ impl ToTokens for Component {
     }
 }
-#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
+#[derive(Clone, Debug, Serialize, Deserialize, PartialOrd, Ord, PartialEq, Eq)]
 pub enum TagLhs {
     Bind(String),
     Plain(String),
@@ -89,6 +87,20 @@ pub enum TagRhs {
     Text(String),
 }
+impl PartialEq<TagRhs> for TagRhs {
+    fn eq(&self, other: &TagRhs) -> bool {
+        match (self, other) {
+            (TagRhs::Code(expr), TagRhs::Code(other)) => {
+                syn::Expr::from_adapter(expr) == syn::Expr::from_adapter(other)
+            }
+            (TagRhs::Text(string), TagRhs::Text(other)) => string == other,
+            _ => false,
+        }
+    }
+}
+impl Eq for TagRhs {}
 impl Clone for TagRhs {
     fn clone(&self) -> Self {
         match self {
@@ -116,11 +128,11 @@ impl ToTokens for TagRhs {
     }
 }
-#[derive(Clone, Debug, Default, Serialize, Deserialize)]
+#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq)]
 pub struct Elem<T> {
     pub tag: String,
     #[serde(with = "crate::tuple_map")]
-    pub attrs: HashMap<TagLhs, TagRhs>,
+    pub attrs: BTreeMap<TagLhs, TagRhs>,
     pub inner: Option<Vec<T>>,
 }
@@ -153,6 +165,21 @@ pub enum Rsx {
     _Nonexhaustive,
 }
+impl PartialEq<Rsx> for Rsx {
+    fn eq(&self, other: &Rsx) -> bool {
+        match (self, other) {
+            (Rsx::Elem(this), Rsx::Elem(other)) => this == other,
+            (Rsx::Code(expr), Rsx::Code(other)) => {
+                syn::Expr::from_adapter(expr) == syn::Expr::from_adapter(other)
+            }
+            (Rsx::Text(this), Rsx::Text(other)) => this == other,
+            _ => false,
+        }
+    }
+}
+impl Eq for Rsx {}
 impl ToTokens for Rsx {
     fn to_tokens(&self, stream: &mut TokenStream) {
         match self {
@@ -161,7 +188,7 @@ impl ToTokens for Rsx {
             }
             Rsx::Code(expr) => {
                 let expr = syn::Expr::from_adapter(expr);
-                stream.extend(quote! { #expr });
+                stream.extend(quote! { { #expr } });
             }
             Rsx::Text(string) => {
                 let string = syn::Lit::Str(syn::LitStr::new(string.as_ref(), Span::call_site()));

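The hand-rolled PartialEq impls above exist because equality for the syn_serde adapter types has to go through syn's own AST: each side is converted back with from_adapter and compared there, which relies on syn's "extra-traits" feature (enabled in the Cargo.toml change at the end of this diff). A minimal sketch of the pattern, with illustrative names:

use syn_serde::Syn;

// Compare two serialized-AST adapter values semantically by converting
// them back into syn's AST types, whose PartialEq comes from the
// "extra-traits" feature. `exprs_equal` is an illustrative helper,
// not part of this codebase.
fn exprs_equal(a: &syn_serde::Expr, b: &syn_serde::Expr) -> bool {
    syn::Expr::from_adapter(a) == syn::Expr::from_adapter(b)
}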
@@ -1,7 +1,7 @@
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 use proptest::{
-    collection::{hash_map, vec, SizeRange},
+    collection::{btree_map, vec, SizeRange},
     option::{self, Probability},
     prelude::*,
     string::string_regex,
@@ -15,7 +15,7 @@ use super::{Component, Elem, Rsx};
 prop_compose! {
     pub fn arbitrary_component() (
         name in ident_strategy(),
-        model in hash_map(
+        model in btree_map(
             ident_strategy().prop_map(|ident| Symbol::from(ident)),
             // TODO: maybe actually have tests for syn?
             (Just(syn::parse_str::<Type>("()").unwrap().to_adapter()), Just(syn::parse_str::<Expr>("()").unwrap().to_adapter())),
@@ -35,9 +35,9 @@ pub fn arbitrary_view() -> impl Strategy<Value = Rsx> {
         Just(Rsx::Code(
             syn::parse_str::<Expr>("()").unwrap().to_adapter()
         )),
-        any::<String>().prop_map(Rsx::Text),
+        string_regex(r"[:print:]+").unwrap().prop_map(Rsx::Text),
     ];
-    leaf.prop_recursive(4, 16, 5, |inner| {
+    leaf.prop_recursive(2, 4, 5, |inner| {
         prop_oneof![(
             ident_strategy(),
             option::weighted(Probability::new(0.9), vec(inner, SizeRange::default())),
@@ -45,7 +45,7 @@ pub fn arbitrary_view() -> impl Strategy<Value = Rsx> {
         .prop_map(|(tag, inner)| Rsx::Elem(Elem {
             tag,
             // TODO: ouais
-            attrs: HashMap::new(),
+            attrs: BTreeMap::new(),
             inner,
         }))]
     })

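In proptest, Strategy::prop_recursive takes (depth, desired_size, expected_branch_size, recurse), so dropping from (4, 16, 5) to (2, 4, 5) caps generated view trees at two levels of recursion and roughly four total nodes, keeping failing cases small and quick to shrink. A self-contained sketch of the same shape (Node and the regex are stand-ins, not this crate's types):

use proptest::prelude::*;

#[derive(Clone, Debug)]
enum Node {
    Leaf(String),
    Branch(Vec<Node>),
}

fn arb_node() -> impl Strategy<Value = Node> {
    // Leaves come from a regex strategy, mirroring string_regex above.
    let leaf = "[a-z]+".prop_map(Node::Leaf);
    // prop_recursive(depth, desired_size, expected_branch_size, recurse):
    // at most 2 levels deep, aiming for ~4 nodes, ~5 children per branch.
    leaf.prop_recursive(2, 4, 5, |inner| {
        prop::collection::vec(inner, 0..5).prop_map(Node::Branch)
    })
}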
@@ -1,18 +1,20 @@
 // https://github.com/daboross/serde-tuple-vec-map/blob/master/src/lib.rs
 use std::fmt;
-use std::hash::Hash;
 use std::marker::PhantomData;
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 use serde::{
     de::{Deserialize, Deserializer, SeqAccess, Visitor},
     ser::{Serialize, Serializer},
 };
+trait Delegate: Ord {}
 struct TupleVecMapVisitor<K, V> {
-    marker: PhantomData<HashMap<K, V>>,
+    marker: PhantomData<BTreeMap<K, V>>,
 }
 impl<K, V> TupleVecMapVisitor<K, V> {
@@ -23,12 +25,12 @@ impl<K, V> TupleVecMapVisitor<K, V> {
     }
 }
-impl<'de, K: Eq + Hash, V> Visitor<'de> for TupleVecMapVisitor<K, V>
+impl<'de, K: Ord, V> Visitor<'de> for TupleVecMapVisitor<K, V>
 where
     K: Deserialize<'de>,
     V: Deserialize<'de>,
 {
-    type Value = HashMap<K, V>;
+    type Value = BTreeMap<K, V>;
     fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
         formatter.write_str("a map")
@@ -36,7 +38,7 @@ where
     #[inline]
     fn visit_unit<E>(self) -> Result<Self::Value, E> {
-        Ok(HashMap::new())
+        Ok(BTreeMap::new())
     }
     #[inline]
@@ -44,7 +46,7 @@ where
     where
         T: SeqAccess<'de>,
     {
-        let mut values = HashMap::new();
+        let mut values = BTreeMap::new();
         while let Some((key, value)) = seq.next_element()? {
             values.insert(key, value);
@@ -54,14 +56,14 @@ where
     }
 }
-/// Serialize an array of `(K, V)` pairs as if it were a `HashMap<K, V>`.
+/// Serialize an array of `(K, V)` pairs as if it were a `BTreeMap<K, V>`.
 ///
 /// In formats where dictionaries are ordered, this maintains the input data's order. Each pair is treated as a single
 /// entry into the dictionary.
 ///
 /// Behavior when duplicate keys are present in the data is unspecified and serializer-dependent. This function does
 /// not check for duplicate keys and will not warn the serializer.
-pub fn serialize<K: Eq + Hash, V, S>(data: &HashMap<K, V>, serializer: S) -> Result<S::Ok, S::Error>
+pub fn serialize<K: Ord, V, S>(data: &BTreeMap<K, V>, serializer: S) -> Result<S::Ok, S::Error>
 where
     S: Serializer,
     K: Serialize,
@@ -70,12 +72,12 @@ where
     serializer.collect_seq(data.iter().map(|x| (x.0, x.1)))
 }
-/// Deserialize to a `Vec<(K, V)>` as if it were a `HashMap<K, V>`.
+/// Deserialize to a `Vec<(K, V)>` as if it were a `BTreeMap<K, V>`.
 ///
 /// This directly deserializes into the returned vec with no intermediate allocation.
 ///
 /// In formats where dictionaries are ordered, this maintains the input data's order.
-pub fn deserialize<'de, K: Eq + Hash, V, D>(deserializer: D) -> Result<HashMap<K, V>, D::Error>
+pub fn deserialize<'de, K: Ord, V, D>(deserializer: D) -> Result<BTreeMap<K, V>, D::Error>
 where
     D: Deserializer<'de>,
     K: Deserialize<'de>,

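This module (adapted from serde-tuple-vec-map, linked above) exists because formats like JSON only allow string keys in maps, while attrs is keyed by TagLhs; emitting the map as a sequence of (key, value) pairs sidesteps that. A usage sketch, assuming it sits in the same crate so the crate::tuple_map path resolves (the Attrs struct is illustrative):

use std::collections::BTreeMap;
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize)]
struct Attrs {
    // A tuple key would be rejected as a JSON object key, but works
    // fine when the map is serialized as a sequence of pairs.
    #[serde(with = "crate::tuple_map")]
    attrs: BTreeMap<(String, u32), String>,
}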
@@ -4,4 +4,4 @@
 #
 # It is recommended to check this file in to source control so that
 # everyone who runs the test benefits from these saved cases.
-cc 2a51d2b2a9a6442d273f2835de2bc13bc778cab5d1333e190cc6fb90c5d5e50a # shrinks to tree = Component { name: "A", model: {}, view: [] }
+cc c87725a641776defa0a321d11950acba1b25c60d510345f7a487df38d08795bc # shrinks to tree = Component { name: "a", model: {}, view: [] }

@@ -1,4 +1,4 @@
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 use std::iter::FromIterator;
 use std::iter::Peekable;
@@ -9,7 +9,7 @@ use proc_macro2::{
 use symbol::Symbol;
 use syn::{
     parse::{Parse, ParseStream},
-    Error as SynError, Expr, Result as SynResult, Token, Type,
+    Error as SynError, Expr, Lit, Result as SynResult, Token, Type,
 };
 use syn_serde::Syn;
@@ -26,8 +26,12 @@ pub(crate) enum ParseError {
     Syn(SynError),
     UnexpectedEOF,
     UnexpectedKeyword,
+    UnexpectedToken(TokenTree),
     MissingModel,
     MissingView,
+    ClosedTooFar,
+    WrongClosingTag(String, String),
+    UnrecognizedLiteral(Lit),
     // InvalidRsx(TokenTree),
     UnmatchedOpenTag(TokenTree),
 }
@@ -148,6 +152,7 @@ impl Visitor {
             }
             buf.push(next_token);
         }
+        println!("model buf: {:?}", buf);
         // probably shouldn't happen?
         if buf.len() == 0 {
@@ -166,10 +171,11 @@ impl Visitor {
             )))
         };
-        let mut map = HashMap::new();
+        let mut map = BTreeMap::new();
         while let Some((name, ty, init, comma)) = single_def()? {
+            println!("single_def => ({}, {:?}, {:?}, {})", name, ty, init, comma);
             map.insert(name, (ty, init));
-            if comma {
+            if !comma {
                 break;
             }
         }
@@ -179,18 +185,42 @@ impl Visitor {
     fn consume_view(&mut self) -> Result<Vec<Rsx>, ParseError> {
         let mut rsx_parser = RsxParser::new(self.0.clone());
         let mut result = Vec::new();
+        let mut tag_stack = Vec::new();
         while let Some(next_token) = rsx_parser.next() {
             match next_token? {
-                RsxToken::EmptyTag(name, attrs) => {
+                RsxToken::EmptyTag(tag, attrs) => {
                     let elem = Elem {
-                        tag: name.to_string(),
+                        tag: tag.to_string(),
                         attrs,
                         inner: None,
                     };
                     let el = Rsx::Elem(elem);
                     result.push(el);
                 }
+                RsxToken::OpeningTag(tag, attrs) => {
+                    tag_stack.push((tag, attrs, result.clone()));
+                    result.clear();
+                }
+                RsxToken::ClosingTag(tag) => {
+                    if let Some((last_tag, attrs, mut last_result)) = tag_stack.pop() {
+                        if tag.as_str() == last_tag.as_str() {
+                            last_result.push(Rsx::Elem(Elem {
+                                tag: tag.to_string(),
+                                attrs: attrs.clone(),
+                                inner: Some(result),
+                            }));
+                            result = last_result;
+                        } else {
+                            return Err(ParseError::WrongClosingTag(
+                                last_tag.to_string(),
+                                tag.to_string(),
+                            ));
+                        }
+                    } else {
+                        return Err(ParseError::ClosedTooFar);
+                    }
+                }
                 RsxToken::Code(expr) => {
                     result.push(Rsx::Code(expr.to_adapter()));
                 }
@@ -290,20 +320,38 @@ impl Iterator for Visitor {
 #[cfg(test)]
 mod tests {
+    use std::collections::BTreeMap;
     use enterprise_compiler::model::*;
     use proptest::prelude::*;
     use quote::ToTokens;
+    use syn_serde::Syn;
     use super::Visitor;
+    fn convert<K: Clone + Ord>(
+        map: &BTreeMap<K, (syn_serde::Type, syn_serde::Expr)>,
+    ) -> BTreeMap<K, (syn::Type, syn::Expr)> {
+        map.iter()
+            .map(|(name, (ty, expr))| {
+                let ty = syn::Type::from_adapter(ty);
+                let expr = syn::Expr::from_adapter(expr);
+                (name.clone(), (ty, expr))
+            })
+            .collect()
+    }
     proptest! {
         #[test]
         fn tokens_parse_compatibility(tree in arbitrary_component()) {
            let tokens = tree.to_token_stream();
            let mut visitor = Visitor::from_tokens(tokens.clone());
            let tree2 = visitor.next().unwrap().unwrap();
-           prop_assert_eq!(format!("{}", tokens), format!("{}", tree2.to_token_stream()));
+           // compare the trees
+           prop_assert_eq!(tree.name, tree2.name, "name");
+           prop_assert_eq!(convert(&tree.model), convert(&tree2.model), "model");
+           prop_assert_eq!(tree.view, tree2.view, "view");
         }
     }
 }

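The OpeningTag/ClosingTag arms added to consume_view implement a standard stack-based tag matcher: an opening tag pushes the siblings collected so far and starts a fresh list; the matching closing tag wraps that list as children and restores the saved siblings; WrongClosingTag and ClosedTooFar are the two failure modes. A standalone sketch with simplified stand-ins for RsxToken/Rsx:

#[derive(Debug, PartialEq)]
enum Tree {
    Elem { tag: String, inner: Vec<Tree> },
    Text(String),
}

enum Tok {
    Open(String),
    Close(String),
    Text(String),
}

fn match_tags(tokens: Vec<Tok>) -> Result<Vec<Tree>, String> {
    let mut result = Vec::new();
    let mut stack = Vec::new();
    for tok in tokens {
        match tok {
            Tok::Open(tag) => {
                // Save the siblings built so far; children go into a fresh Vec.
                stack.push((tag, std::mem::take(&mut result)));
            }
            Tok::Close(tag) => match stack.pop() {
                Some((open, prev)) if open == tag => {
                    // Wrap the children and restore the saved siblings.
                    let inner = std::mem::replace(&mut result, prev);
                    result.push(Tree::Elem { tag, inner });
                }
                Some((open, _)) => {
                    return Err(format!("expected </{}>, found </{}>", open, tag))
                }
                None => return Err("closing tag without an opener".into()),
            },
            Tok::Text(s) => result.push(Tree::Text(s)),
        }
    }
    if stack.is_empty() {
        Ok(result)
    } else {
        Err("unclosed tag at end of input".into())
    }
}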
@@ -1,4 +1,4 @@
-use std::collections::HashMap;
+use std::collections::BTreeMap;
 use std::iter::FromIterator;
 use std::iter::Peekable;
@@ -8,7 +8,7 @@ use symbol::Symbol;
 use syn::{Expr, Lit};
 use syn_serde::Syn;
-use crate::parser::{ParseError, consume_ident, consume_punct};
+use crate::parser::{consume_ident, consume_punct, ParseError};
 pub(crate) struct RsxParser(Peekable<IntoIter>);
@@ -46,6 +46,8 @@ impl RsxParser {
         let name = self.consume_ident()?;
         if is_closing {
+            // TODO: assert next is >
+            self.0.next();
             return Ok(Some(RsxToken::ClosingTag(Symbol::from(name.to_string()))));
         }
@@ -76,7 +78,7 @@ impl RsxParser {
             buf.push(next_token);
         }
-        let mut attrs = HashMap::new();
+        let mut attrs = BTreeMap::new();
         let mut iter = buf.into_iter().peekable();
         loop {
             // consume a single attr
@@ -133,24 +135,50 @@ impl RsxParser {
                 } else {
                     RsxToken::OpeningTag
                 };
-                return Ok(Some(variant(Symbol::from(name.to_string()), attrs)));
+                Ok(Some(variant(Symbol::from(name.to_string()), attrs)))
             }
             TokenTree::Literal(lit) => {
                 let stream = TokenStream::from(TokenTree::Literal(lit));
                 let lit = syn::parse2::<Lit>(stream)?;
                 if let Lit::Str(string) = lit {
-                    return Ok(Some(RsxToken::Str(string.value())));
+                    Ok(Some(RsxToken::Str(string.value())))
+                } else {
+                    Err(ParseError::UnrecognizedLiteral(lit))
                 }
             }
             TokenTree::Group(group) if group.delimiter() == Delimiter::Brace => {
-                let expr = syn::parse2::<Expr>(group.stream())?;
-                return Ok(Some(RsxToken::Code(expr)));
-            }
-            _ => unimplemented!("TOKEN: {:?}", token),
-        };
-        unimplemented!("the fuck")
+                let mut stream = group.stream().into_iter();
+                enum GroupType {
+                    Pound,
+                    Slash,
+                    None,
+                }
+                // based on svelte for now, we'll probably change up the syntax a bit later
+                let is_special = match stream.next() {
+                    Some(TokenTree::Punct(punct)) if punct.as_char() == '#' => GroupType::Pound,
+                    Some(TokenTree::Punct(punct)) if punct.as_char() == '/' => GroupType::Slash,
+                    _ => GroupType::None,
+                };
+                if let GroupType::None = is_special {
+                    let expr = syn::parse2::<Expr>(group.stream())?;
+                    Ok(Some(RsxToken::Code(expr)))
+                } else {
+                    match stream.next() {
+                        Some(TokenTree::Ident(ident)) if ident == "for" => {
+                            // syntax: {#for pattern in expr} {/for}
+                            Ok(Some(RsxToken::StartFor()))
+                        }
+                        Some(other) => Err(ParseError::ExpectedIdent(other)),
+                        None => Err(ParseError::UnexpectedEOF),
+                    }
+                }
+            }
+            token => Err(ParseError::UnexpectedToken(token)),
+        }
     }
     fn consume_ident(&mut self) -> Result<Ident, ParseError> {
@@ -170,11 +198,12 @@ impl RsxParser {
 #[derive(Debug)]
 pub(crate) enum RsxToken {
-    OpeningTag(Symbol, HashMap<TagLhs, TagRhs>),
-    EmptyTag(Symbol, HashMap<TagLhs, TagRhs>),
+    OpeningTag(Symbol, BTreeMap<TagLhs, TagRhs>),
+    EmptyTag(Symbol, BTreeMap<TagLhs, TagRhs>),
     ClosingTag(Symbol),
     Str(String),
     Code(Expr),
+    StartFor(),
 }
 impl Iterator for RsxParser {

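The GroupType match above tells svelte-style control blocks ({#for ...}, {/for}) apart from plain embedded expressions by peeking at the first token inside the brace group. A standalone sketch of that check using proc_macro2 directly (classify is an illustrative name):

use proc_macro2::{Group, TokenStream, TokenTree};

fn classify(group: &Group) -> &'static str {
    match group.stream().into_iter().next() {
        Some(TokenTree::Punct(p)) if p.as_char() == '#' => "block start, e.g. {#for ...}",
        Some(TokenTree::Punct(p)) if p.as_char() == '/' => "block end, e.g. {/for}",
        _ => "embedded expression",
    }
}

fn main() {
    let ts: TokenStream = "{ #for x in xs }".parse().unwrap();
    if let Some(TokenTree::Group(g)) = ts.into_iter().next() {
        println!("{}", classify(&g)); // prints: block start, e.g. {#for ...}
    }
}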
@@ -10,7 +10,7 @@ component! {
     view {
         <input on:submit={|evt| { todos.push(evt.name); }} />
         <ul>
-            {#for (key, todo) in todos}
+            {#for (key, todo) in todos }
             <li>{todo} <a on:click={|_| { todos.remove(key); }}>[x]</a></li>
             {/for}
         </ul>

output.txt: new empty file (0 lines)
@@ -22,7 +22,7 @@ proc-macro2 = { version = "1.0", default-features = false }
 serde = { version = "1.0.99", features = ["derive"] }
 serde_derive = "1.0.99" # This is necessary to make `-Z minimal-versions` build successful.
 serde_json = { version = "1.0", optional = true }
-syn = { version = "1.0.5", default-features = false, features = ["full"] }
+syn = { version = "1.0.5", default-features = false, features = ["extra-traits", "full"] }

 [dev-dependencies]
 quote = "1.0"
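
The added "extra-traits" feature derives Debug, Eq, PartialEq, and Hash for syn's AST types, which is what the manual PartialEq impls and the new prop_assert_eq! comparisons in this commit depend on. For example:

// Requires syn with features = ["extra-traits", "full"].
fn main() {
    let a: syn::Expr = syn::parse_str("1 + 2").unwrap();
    let b: syn::Expr = syn::parse_str("1 + 2").unwrap();
    assert_eq!(a, b); // PartialEq from "extra-traits"
    println!("{:?}", a); // Debug from "extra-traits"
}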