Compare commits

...

No commits in common. "master" and "dev" have entirely different histories.
master ... dev

83 changed files with 2548 additions and 19274 deletions

1
.gitignore vendored
View file

@ -1,2 +1 @@
/target
**/*.rs.bk

View file

@ -1,2 +0,0 @@
syn-serde
symbol

2628
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -1,26 +1,4 @@
[package]
name = "enterprise"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
[workspace]
members = [
"enterprise-compiler",
"enterprise-macros",
"symbol",
"syn-serde",
"examples/helloworld",
"examples/todomvc",
"backend",
]
[features]
default = ["web"]
web = ["stdweb"]
[dependencies]
enterprise-compiler = { path = "enterprise-compiler" }
stdweb = { version = "0.4.20", optional = true }
parking_lot = "0.10.0"

16
backend/Cargo.toml Normal file
View file

@ -0,0 +1,16 @@
[package]
name = "backend"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
[dependencies]
gluon = { version = "0.14.1", features = ["serialization"] }
serde_derive = "1.0.106"
serde = "1.0.106"
toml = "0.5.6"
anyhow = "1.0.28"
serde_json = "1.0.51"
sqlx = { version = "0.3.4", features = ["sqlite"] }
warp = "0.2.2"
tokio = { version = "0.2.18", features = ["full"] }

95
backend/src/lib.rs Normal file
View file

@ -0,0 +1,95 @@
use std::convert::Infallible;
use std::collections::HashMap;
use std::fs::File;
use std::io::Read;
use std::path::Path;
use gluon::ThreadExt;
use anyhow::Result;
use toml::Value;
use warp::Filter;
#[macro_use]
extern crate serde_derive;
/// Configuration parsed from a `_front.toml` file.
#[derive(Debug, Serialize, Deserialize)]
pub struct FrontConfig {
    // Base name of an optional Gluon script holding the data model
    // (resolved as `<models>.glu` next to the config file).
    models: Option<String>,
    // Route name -> arbitrary TOML value describing the route
    // (a table, or an array of tables).
    routes: Option<HashMap<String, Value>>,
}

/// A route target: either a nested sub-application or a handler function.
#[derive(Debug)]
enum Handler {
    App(App),
    // Placeholder: function handlers carry no payload yet.
    Func(),
}

/// An application: a tree of named routes built from nested `_front.toml` files.
#[derive(Debug)]
pub struct App {
    routes: HashMap<String, Handler>,
}
pub fn build_app(path: impl AsRef<Path>) -> Result<App> {
let path = path.as_ref();
let mut contents = String::new();
let mut file = File::open(path)?;
file.read_to_string(&mut contents)?;
let parsed_config = toml::from_str::<FrontConfig>(&contents)?;
println!("Config: {:?}", parsed_config);
let mut routes = HashMap::new();
let mut process_path = |route: &str, value: &Value| -> Result<()> {
if let Some(path_str) = value
.as_table()
.and_then(|table| table.get("path"))
.and_then(|value| value.as_str())
{
let dir = path.parent().unwrap();
let new_path = dir.join(path_str).join("_front.toml");
if new_path.exists() {
println!("new_path: {:?}", new_path);
let app = build_app(new_path)?;
routes.insert(route.to_string(), app);
}
}
Ok(())
};
if let Some(route_cfg) = parsed_config.routes {
for (route, config) in route_cfg {
if let Some(list) = config.as_array() {
for value in list {
process_path(&route, value)?;
}
} else {
process_path(&route, &config)?;
}
}
}
if let Some(models) = parsed_config.models {
let dir = path.parent().unwrap();
let models_path = dir.join(models + ".glu");
if models_path.exists() {
let mut contents = String::new();
let mut file = File::open(models_path)?;
file.read_to_string(&mut contents)?;
let vm = gluon::new_vm();
// vm.load_file(models_path.to_str().unwrap())?;
let (result, _) = vm.run_expr::<String>("models", &contents)?;
println!("OUAIS {:?}", result);
}
}
Ok(App { routes })
}
/// Entry point: builds the application tree rooted at `path`.
/// Currently a thin alias for [`build_app`].
pub fn build_config_from_root(path: impl AsRef<Path>) -> Result<App> {
    build_app(path)
}

/// Converts a built [`App`] into a warp filter.
// NOTE(review): `app` is currently ignored and every request answers "hello" —
// the route tree is not wired into the filter yet.
pub fn create_filter(app: App) -> impl Filter<Extract = (&'static str,), Error = Infallible> + Clone {
    warp::any().map(|| "hello")
}

12
backend/src/main.rs Normal file
View file

@ -0,0 +1,12 @@
use anyhow::Result;
use warp::Filter;
// Builds the app from a hard-coded config path and serves it on 127.0.0.1:3000.
// NOTE(review): the absolute path is machine-specific; consider taking it from
// argv or an environment variable.
#[tokio::main]
async fn main() -> Result<()> {
    let app = backend::build_config_from_root("/home/michael/Projects/enterprise/examples/realworld/_front.toml")?;
    let filter = backend::create_filter(app);
    warp::serve(filter).run(([127u8, 0, 0, 1], 3000)).await;
    Ok(())
}

View file

@ -1,20 +0,0 @@
[package]
name = "enterprise-compiler"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
[dependencies]
bimap = "0.4.0"
lazy_static = "1.4.0"
maplit = "1.0.2"
petgraph = "0.5.0"
proc-macro2 = "1.0.8"
quote = "1.0.2"
serde = "1.0.104"
serde_derive = "1.0.104"
spin = "0.5.2"
symbol = { path = "../symbol" }
syn-serde = { path = "../syn-serde" }
syn = { version = "1.0.14", features = ["extra-traits", "full"] }
serde_json = "1.0.48"

View file

@ -1,95 +0,0 @@
#[macro_use]
extern crate quote;
extern crate maplit;
#[macro_use]
extern crate serde_derive;
pub mod model;
mod tuple_map;
mod visitor;
use std::env;
use std::fs::File;
use std::io::Write;
use std::path::PathBuf;
use crate::model::Component;
use crate::visitor::Visitor;
use proc_macro2::TokenStream;
use symbol::Symbol;
/// Generates the Rust implementation of a component from its parsed
/// [`Component`] description (name, data model, and view tree).
pub fn build(
    // name: impl AsRef<str>,
    // datamodel: &HashMap<String, String>,
    // datainit: &HashMap<String, String>,
    // dom: &[Rsx],
    component: &Component,
) -> TokenStream {
    let name = &component.name;
    let mut visitor = Visitor::new();
    visitor.load_model(&component.model);
    // Build the model/view dependency graph, then emit the per-node
    // constructor methods into the visitor's `impl_code`.
    let new_dom = visitor.make_graph(&component.view);
    let toplevel_names = visitor.gen_code(&new_dom);
    // let graph: Graph<_, _, _> = visitor.deps.clone().into_graph();
    // println!("{:?}", Dot::new(&graph));
    let name = format_ident!("{}", name);
    // Struct fields (`model`) and their initializers (`init`): each model
    // value is stored behind Arc<Mutex<..>> so generated event handlers can
    // share and mutate it.
    let mut model = TokenStream::new();
    let mut init = TokenStream::new();
    for (name, (ty, value)) in component.model.iter() {
        let name = format_ident!("{}", name.as_str());
        let ty: syn::Type = ty.into();
        let value: syn::Expr = value.into();
        model.extend(quote! { #name : std::sync::Arc<enterprise::parking_lot::Mutex<#ty>> , });
        init.extend(
            quote! { #name : std::sync::Arc::new(enterprise::parking_lot::Mutex::new(#value .into())) , },
        );
    }
    let impl_code = &visitor.impl_code;
    // `create` appends each top-level view node to the mount element.
    let mut init_el_code = TokenStream::new();
    for fn_name in toplevel_names.iter() {
        let fn_name = format_ident!("{}", fn_name);
        init_el_code.extend(quote! {
            {
                use enterprise::stdweb::web::INode;
                let sub = self.#fn_name();
                el.append_child(&sub);
            }
        });
    }
    quote! {
        pub struct #name<B> {
            _b: std::marker::PhantomData<B>,
            #model
        }
        impl<B> #name<B> {
            pub fn new(_: &B) -> Self {
                #name {
                    _b: std::marker::PhantomData::default(),
                    #init
                }
            }
            #impl_code
        }
        impl<B: enterprise::Backend> enterprise::Component<B> for #name<B> {
            fn create(&self, el: &enterprise::stdweb::web::Element) {
                #init_el_code
            }
        }
    }
}
/// Deserializes a JSON-encoded [`Component`] and writes its generated Rust
/// code to `$OUT_DIR/<mod_name>.rs` (intended to be called from build scripts).
// NOTE(review): unwraps on bad JSON, missing OUT_DIR, or I/O failure — in a
// build script a panic simply fails the build, which is acceptable here.
pub fn process(mod_name: impl AsRef<str>, code: impl AsRef<str>) {
    let component: Component = serde_json::from_str(code.as_ref()).unwrap();
    let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap());
    let mut out_file = File::create(out_dir.join(format!("{}.rs", mod_name.as_ref()))).unwrap();
    let tokens = build(&component);
    write!(out_file, "{}", tokens).unwrap();
}

View file

@ -1,94 +0,0 @@
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
use symbol::Symbol;
use syn_serde::{Expr, Syn, Type};
pub type Id = Symbol;
pub type ModelMap = HashMap<Symbol, (Type, Expr)>;
/// Converts a map of syn `(Type, Expr)` pairs into their serializable
/// syn_serde adapter forms, preserving the keys.
pub fn convert_map<T: Hash + Eq, S: BuildHasher>(
    map: HashMap<T, (syn::Type, syn::Expr), S>,
) -> HashMap<T, (Type, Expr)> {
    let mut converted = HashMap::with_capacity(map.len());
    for (key, (ty, expr)) in map {
        converted.insert(key, (ty.to_adapter(), expr.to_adapter()));
    }
    converted
}
/// A parsed component: its name, data model, and view (RSX) tree.
#[derive(Debug, Serialize, Deserialize)]
pub struct Component {
    pub name: String,
    // Serialized as a sequence of pairs (see `tuple_map`) because the keys
    // are not plain strings.
    #[serde(with = "crate::tuple_map")]
    pub model: ModelMap,
    pub view: Vec<Rsx>,
}

/// Left-hand side of an attribute in an RSX tag.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum TagLhs {
    // `bind:attr` — two-way binding to a model value.
    Bind(String),
    // A plain HTML attribute.
    Plain(String),
    // `on:event` — event listener.
    On(String),
    #[doc(hidden)]
    _Nonexhaustive,
}

/// Right-hand side of an attribute: a code expression or literal text.
#[derive(Debug, Serialize, Deserialize)]
pub enum TagRhs {
    Code(Expr),
    Text(String),
}

// Manual impl — presumably the syn_serde `Expr` adapter lacks `Clone`
// (TODO confirm), so the expression is cloned by round-tripping through
// its syn form.
impl Clone for TagRhs {
    fn clone(&self) -> Self {
        match self {
            TagRhs::Code(expr) => {
                let expr: syn::Expr = Syn::from_adapter(&*expr);
                TagRhs::Code(expr.to_adapter())
            }
            TagRhs::Text(string) => TagRhs::Text(string.clone()),
        }
    }
}
/// An element node: tag name, attributes, and child nodes.
#[derive(Debug, Default, Serialize, Deserialize)]
pub struct Elem<T> {
    pub tag: String,
    #[serde(with = "crate::tuple_map")]
    pub attrs: HashMap<TagLhs, TagRhs>,
    pub inner: Vec<T>,
}

/// A node in the untagged view tree, as parsed from the macro input.
#[derive(Debug, Serialize, Deserialize)]
pub enum Rsx {
    Elem(Elem<Rsx>),
    Code(Expr),
    Text(String),
    #[doc(hidden)]
    _Nonexhaustive,
}

/// An RSX node after graph construction: same shape as [`Rsx`] but with a
/// generated unique [`Id`] attached to every node.
#[derive(Debug)]
pub enum TaggedRsx {
    Elem(Id, Elem<TaggedRsx>),
    Code(Id, Box<Expr>),
    Text(Id, String),
    #[doc(hidden)]
    _Nonexhaustive,
}

impl TaggedRsx {
    /// Returns the node's generated id.
    pub fn get_id(&self) -> Id {
        match self {
            TaggedRsx::Elem(id, _) | TaggedRsx::Code(id, _) | TaggedRsx::Text(id, _) => *id,
            _ => unimplemented!("tagged rsx"),
        }
    }
}

View file

@ -1,85 +0,0 @@
// https://github.com/daboross/serde-tuple-vec-map/blob/master/src/lib.rs
use std::fmt;
use std::hash::Hash;
use std::marker::PhantomData;
use std::collections::HashMap;
use serde::{
de::{Deserialize, Deserializer, SeqAccess, Visitor},
ser::{Serialize, Serializer},
};
// Visitor that deserializes a sequence of (K, V) pairs into a HashMap.
// Ported from serde-tuple-vec-map (see link at top of file), with the target
// collection changed from Vec to HashMap.
struct TupleVecMapVisitor<K, V> {
    // Zero-sized; pins down K and V for the Visitor impl.
    marker: PhantomData<HashMap<K, V>>,
}

impl<K, V> TupleVecMapVisitor<K, V> {
    pub fn new() -> Self {
        TupleVecMapVisitor {
            marker: PhantomData,
        }
    }
}
impl<'de, K: Eq + Hash, V> Visitor<'de> for TupleVecMapVisitor<K, V>
where
    K: Deserialize<'de>,
    V: Deserialize<'de>,
{
    type Value = HashMap<K, V>;

    fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str("a map")
    }

    // An absent/unit value deserializes to an empty map.
    #[inline]
    fn visit_unit<E>(self) -> Result<Self::Value, E> {
        Ok(HashMap::new())
    }

    // Collects a sequence of (K, V) pairs into a map; a duplicate key keeps
    // its last value (standard `HashMap::insert` behavior).
    #[inline]
    fn visit_seq<T>(self, mut seq: T) -> Result<Self::Value, T::Error>
    where
        T: SeqAccess<'de>,
    {
        let mut values = HashMap::new();
        while let Some((key, value)) = seq.next_element()? {
            values.insert(key, value);
        }
        Ok(values)
    }
}
/// Serialize a `HashMap<K, V>` as a sequence of `(K, V)` pairs.
///
/// (Adapted from serde-tuple-vec-map, which serialized a `Vec<(K, V)>`; here
/// the source is a `HashMap`, so output order is unspecified.)
///
/// Behavior when duplicate keys are present in the data is unspecified and
/// serializer-dependent. This function does not check for duplicate keys and
/// will not warn the serializer.
pub fn serialize<K: Eq + Hash, V, S>(data: &HashMap<K, V>, serializer: S) -> Result<S::Ok, S::Error>
where
    S: Serializer,
    K: Serialize,
    V: Serialize,
{
    serializer.collect_seq(data.iter().map(|x| (x.0, x.1)))
}

/// Deserialize a sequence of `(K, V)` pairs into a `HashMap<K, V>`.
///
/// Accepts the format produced by [`serialize`] above; pairs are inserted in
/// input order, so a duplicate key keeps its last value.
pub fn deserialize<'de, K: Eq + Hash, V, D>(deserializer: D) -> Result<HashMap<K, V>, D::Error>
where
    D: Deserializer<'de>,
    K: Deserialize<'de>,
    V: Deserialize<'de>,
{
    deserializer.deserialize_seq(TupleVecMapVisitor::new())
}

View file

@ -1,234 +0,0 @@
use std::collections::HashMap;
use std::collections::HashSet;
use petgraph::graphmap::DiGraphMap;
use petgraph::visit::Dfs;
use proc_macro2::{TokenStream, TokenTree};
use quote::ToTokens;
use syn::{Expr, Type};
use syn_serde::Syn;
use crate::model::{Elem, Id, ModelMap, Rsx, TagLhs, TagRhs, TaggedRsx};
use crate::Symbol;
/// A node in the dependency graph between model values and view nodes.
#[derive(Copy, Clone, Debug, Hash, PartialOrd, Ord, PartialEq, Eq)]
pub enum DepNode {
    // This is an attribute on an element (element id, attribute name).
    // Not read-only.
    RsxAttr(Symbol, Symbol),
    // This is a text node (innertext).
    // These are read-only.
    RsxSpan(Symbol),
    // This is something in the model.
    ModelValue(Symbol),
}

impl DepNode {
    // Emits the code that reacts to a changed `new_value` for this node:
    // `updates` receives setup code run when the listener is installed;
    // `update_func` receives the body of the change handler itself.
    // `RsxAttr` nodes are change *sources*, not sinks, so they emit nothing.
    fn gen_update_code(
        &self,
        // model_bimap: &BiHashMap<Id, String>,
        updates: &mut TokenStream,
        update_func: &mut TokenStream,
    ) {
        match self {
            DepNode::ModelValue(sym) => {
                let sym_name = format_ident!("{}", sym.to_string());
                // Gensym so multiple listeners over the same field don't collide.
                let inner_lock = format_ident!("inner_lock_{}", Symbol::gensym().as_str());
                updates.extend(quote! {
                    let #inner_lock = self.#sym_name.clone();
                });
                update_func.extend(quote! {
                    {
                        let mut locked = #inner_lock.lock();
                        *locked = new_value.clone();
                    }
                });
            }
            DepNode::RsxSpan(id) => {
                // Look the span's element up by its generated DOM id and
                // replace its text content.
                let id_str = id.as_str();
                update_func.extend(quote! {
                    {
                        use enterprise::stdweb::web::{INonElementParentNode, INode};
                        if let Some(target) = enterprise::stdweb::web::document().get_element_by_id(#id_str) {
                            target.set_text_content(&new_value.clone());
                        }
                    }
                });
            }
            _ => (),
        }
    }
}
// Directed graph: an edge A -> B means "a change to A must update B".
type DependencyGraph = DiGraphMap<DepNode, ()>;

/// Walks a component's view tree, building the model/view dependency graph
/// and accumulating the generated methods in `impl_code`.
#[derive(Default, Debug)]
pub struct Visitor {
    // Not referenced by any visible method — presumably reserved for id
    // generation. TODO confirm before removing.
    idx: u32,
    pub(crate) deps: DependencyGraph,
    // Model fields: name -> (type, initializer expression).
    model: HashMap<Id, (Type, Expr)>,
    // Generated methods, spliced into the component impl by `build`.
    pub(crate) impl_code: TokenStream,
    // Element id -> attributes on it that participate in two-way binding.
    elem_attr_map: HashMap<Id, HashSet<Id>>,
    // symbol maps
    // model_bimap: BiHashMap<Id, String>,
}
impl Visitor {
    pub fn new() -> Visitor {
        Visitor {
            ..Default::default()
        }
    }

    /// Copies the component's model into the visitor, converting each type
    /// and initializer from its syn_serde adapter back to the syn form.
    pub fn load_model(&mut self, model: &ModelMap) {
        for (key, (ty, init)) in model {
            let ty = Syn::from_adapter(&*ty);
            let init = Syn::from_adapter(&*init);
            self.model.insert(key.clone(), (ty, init));
        }
        // self.model.extend(model.clone());
    }

    /// Assigns a gensym id to every node and records model/view dependency
    /// edges; returns the tagged mirror of the input tree.
    pub fn make_graph(&mut self, nodes: &[Rsx]) -> Vec<TaggedRsx> {
        let mut new_nodes = Vec::new();
        for node in nodes {
            let node_id = Symbol::gensym();
            let new_node = match node {
                Rsx::Elem(Elem { tag, attrs, inner }) => {
                    let tag_inner = self.make_graph(&inner);
                    for (lhs, rhs) in attrs {
                        // `bind:attr="field"`: if `field` names a model value,
                        // record the attr -> model edge and remember the attr.
                        if let TagLhs::Bind(attr) = lhs {
                            if let TagRhs::Text(text) = rhs {
                                let text_sym = Symbol::from(text);
                                if self.model.contains_key(&text_sym) {
                                    let from = DepNode::RsxAttr(node_id, Symbol::from(attr));
                                    let to = DepNode::ModelValue(text_sym);
                                    self.deps.add_edge(from, to, ());
                                    if let Some(set) = self.elem_attr_map.get_mut(&node_id) {
                                        set.insert(Symbol::from(attr));
                                    } else {
                                        let mut set = HashSet::new();
                                        set.insert(Symbol::from(attr));
                                        self.elem_attr_map.insert(node_id, set);
                                    }
                                }
                            }
                        }
                    }
                    TaggedRsx::Elem(
                        node_id,
                        Elem {
                            tag: tag.to_string(),
                            attrs: attrs.clone(),
                            inner: tag_inner,
                        },
                    )
                }
                Rsx::Code(expr) => {
                    // A code span depends on every model value it mentions.
                    let syn_expr = Syn::from_adapter(&*expr);
                    let deps = self.extract_model_dependencies(&syn_expr);
                    for dep in deps {
                        let from = DepNode::ModelValue(dep);
                        let to = DepNode::RsxSpan(node_id);
                        self.deps.add_edge(from, to, ());
                    }
                    TaggedRsx::Code(node_id, Box::new(syn_expr.clone().to_adapter()))
                }
                Rsx::Text(literal) => TaggedRsx::Text(node_id, literal.clone()),
                _ => unimplemented!(),
            };
            new_nodes.push(new_node);
        }
        new_nodes
    }

    /// Emits one `make_<id>` constructor method per node into `impl_code`
    /// and returns the method names for the nodes passed in at this level.
    pub fn gen_code(&mut self, nodes: &[TaggedRsx]) -> Vec<String> {
        let mut names = Vec::new();
        for node in nodes {
            let node_str = node.get_id().as_str();
            let make_node_id = format_ident!("make_{}", node_str);
            match node {
                TaggedRsx::Elem(node_id, Elem { tag, inner, .. }) => {
                    let mut updates = TokenStream::new();
                    if let Some(this_attrs) = self.elem_attr_map.get(node_id) {
                        for attr in this_attrs {
                            // Everything reachable from a bound attribute in
                            // the dependency graph must react to its input
                            // events; collect their update code via DFS.
                            let starting = DepNode::RsxAttr(*node_id, *attr);
                            let mut dfs = Dfs::new(&self.deps, starting);
                            let mut update_func = TokenStream::new();
                            while let Some(nx) = dfs.next(&self.deps) {
                                if nx != starting {
                                    nx.gen_update_code(
                                        // &self.model_bimap,
                                        &mut updates,
                                        &mut update_func,
                                    );
                                }
                            }
                            updates.extend(quote! {
                                {
                                    use enterprise::stdweb::{web::IEventTarget, unstable::TryFrom};
                                    let inner_el = el.clone();
                                    el.add_event_listener(move |evt: enterprise::stdweb::web::event::InputEvent| {
                                        let new_value = enterprise::stdweb::web::html_element::InputElement::try_from(inner_el.clone()).unwrap().raw_value();
                                        #update_func
                                    });
                                }
                            });
                        }
                    }
                    self.impl_code.extend(quote! {
                        fn #make_node_id(&self) -> impl enterprise::stdweb::web::INode {
                            use enterprise::stdweb::web::IElement;
                            let el = enterprise::stdweb::web::document().create_element(#tag).unwrap();
                            el.set_attribute("id", #node_str).unwrap();
                            #updates
                            el
                        }
                    });
                    // NOTE(review): child method names are discarded here —
                    // only top-level names are returned to the caller.
                    self.gen_code(&inner);
                }
                TaggedRsx::Code(_, _) => {
                    self.impl_code.extend(quote! {
                        #[inline]
                        fn #make_node_id(&self) -> impl enterprise::stdweb::web::INode {
                            use enterprise::stdweb::web::IElement;
                            let el = enterprise::stdweb::web::document().create_element("span").expect("shouldn't fail");
                            el.set_attribute("id", #node_str).unwrap();
                            el
                        }
                    });
                }
                TaggedRsx::Text(_, literal) => {
                    self.impl_code.extend(quote! {
                        #[inline]
                        fn #make_node_id(&self) -> impl enterprise::stdweb::web::INode {
                            enterprise::stdweb::web::document().create_text_node(#literal)
                        }
                    });
                }
                _ => unimplemented!("gen_code tagged rsx"),
            }
            names.push(format!("{}", make_node_id));
        }
        names
    }

    /// This is using a really dumb heuristic
    // Any identifier token in the expression that names a model field counts
    // as a dependency — no scoping or shadowing analysis is performed.
    fn extract_model_dependencies(&self, expr: &Expr) -> HashSet<Symbol> {
        let tokens = expr.to_token_stream();
        let mut result = HashSet::new();
        for token in tokens.into_iter() {
            if let TokenTree::Ident(ident) = token {
                // if let Some(id) = self.model_bimap.get_by_right(&ident.to_string()) {
                let sym = Symbol::from(ident.to_string());
                if self.model.contains_key(&sym) {
                    result.insert(sym);
                }
                // result.insert(format!("{}", ident));
            }
        }
        result
    }
}

View file

@ -1,17 +0,0 @@
[package]
name = "enterprise-macros"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
[lib]
proc-macro = true
[build-dependencies]
lalrpop = "0.17.2"
[dependencies]
proc-macro2 = { version = "1.0.7", features = ["span-locations"] }
quote = "1.0.2"
thiserror = "1.0.9"
lalrpop-util = "0.17.2"

View file

@ -1,3 +0,0 @@
// Build script: generates the Rust parser from the `.lalrpop` grammar files
// under `src/`.
fn main() {
    lalrpop::process_root().unwrap();
}

View file

@ -1,34 +0,0 @@
use std::collections::HashMap;
/// A top-level item in a component file: a `use` declaration or a component.
#[derive(Debug)]
pub enum Toplevel {
    Use(Use),
    Component(Component),
}

/// A `use` path, stored as its `::`-separated segments.
#[derive(Debug)]
pub struct Use(pub Vec<String>);

/// A `component NAME { ... }` definition.
#[derive(Debug)]
pub struct Component {
    pub name: String,
    pub body: Vec<ComponentBody>,
}

/// One item inside a component body.
#[derive(Debug)]
pub enum ComponentBody {
    // Constructor/fn payloads are not modeled yet; the parser discards their
    // contents (see the grammar).
    Constructor(),
    View(Rsx),
    Fn(),
}

/// A node of the view markup.
#[derive(Debug)]
pub enum Rsx {
    Tag {
        tag: String,
        attrs: HashMap<String, String>,
        inner: Vec<Rsx>,
    },
    // A `{ ... }` code block, kept as raw source text.
    CodeSegment(String),
    Text(String),
}

View file

@ -1,128 +0,0 @@
use std::fmt;
use lalrpop_util::ParseError;
use proc_macro2::{
Delimiter, Group, Ident, LineColumn, Literal, Punct, Spacing, Span as Span2, TokenStream,
TokenTree,
};
use crate::parser::parser::__ToTriple;
// Generates the lexer's `TokenType` plus the `flatten_tree` helper. The macro
// takes the keyword list so the enum variants, the Display impl, the span
// accessor, and the keyword-recognition match all stay in sync from a single
// declaration.
macro_rules! generate_token {
    ([$($keyword_name:ident: $keyword:ident),* $(,)?]) => {
        // A single lexical token fed to the LALRPOP parser.
        #[derive(Debug, Clone)]
        pub enum TokenType {
            Ident(Ident),
            // The char duplicates `Punct::as_char()` so the grammar can
            // pattern-match on it.
            Punct(char, Punct),
            Literal(Literal),
            $($keyword_name(Span),)*
        }
        impl fmt::Display for TokenType {
            fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
                use TokenType::*;
                match self {
                    Ident(ident) => ident.fmt(f),
                    Punct(_, punct) => punct.fmt(f),
                    Literal(literal) => literal.fmt(f),
                    $($keyword_name(_) => f.write_str(stringify!($keyword)),)*
                }
            }
        }
        impl TokenType {
            // The source span this token came from.
            pub fn span(&self) -> Span {
                use TokenType::*;
                match self {
                    Ident(ident) => Span(ident.span()),
                    Punct(_, punct) => Span(punct.span()),
                    Literal(literal) => Span(literal.span()),
                    $($keyword_name(span) => *span,)*
                }
            }
        }
        // Recursively flattens one proc-macro token tree into a linear token
        // list, turning each group's delimiters into explicit punct tokens.
        fn flatten_tree(token_tree: TokenTree) -> Vec<Token> {
            match token_tree {
                TokenTree::Group(group) => {
                    use Delimiter::*;
                    let mut result = flatten(group.stream());
                    let surround = match group.delimiter() {
                        Brace => Some(construct_group_tokens('{', '}', group)),
                        Parenthesis => Some(construct_group_tokens('(', ')', group)),
                        Bracket => Some(construct_group_tokens('[', ']', group)),
                        // Invisible delimiter: nothing to surround with.
                        None => Option::None,
                    };
                    if let Some((start, end)) = surround {
                        result.insert(0, start);
                        result.push(end);
                    }
                    result
                },
                TokenTree::Ident(ident) => {
                    // Promote recognized keywords to their dedicated variants.
                    let token = match ident.to_string().as_ref() {
                        $(stringify!($keyword) => TokenType::$keyword_name(Span(ident.span())),)*
                        _ => TokenType::Ident(ident),
                    };
                    vec![construct_token(token)]
                }
                TokenTree::Punct(punct) => {
                    let token = TokenType::Punct(punct.as_char(), punct);
                    vec![construct_token(token)]
                }
                TokenTree::Literal(literal) => {
                    let token = TokenType::Literal(literal);
                    vec![construct_token(token)]
                }
            }
        }
    }
}
generate_token!([
    Component: component,
    Constructor: constructor,
    Fn: fn,
    Use: use,
    View: view,
]);
// A spanned token in the `(Location, Token, Location)` triple form LALRPOP
// expects; the error side (`()`) is never produced here.
pub type Token = Result<(Span, TokenType, Span), ()>;

/// Flattens a whole token stream into the linear token list for the parser.
pub fn flatten(token_stream: TokenStream) -> Vec<Token> {
    token_stream.into_iter().flat_map(flatten_tree).collect()
}

// Builds the open/close punct tokens for a delimited group, spanned to the
// group's opening and closing delimiters respectively.
fn construct_group_tokens(left: char, right: char, group: Group) -> (Token, Token) {
    let mut left_punct = Punct::new(left, Spacing::Alone);
    left_punct.set_span(group.span_open());
    let mut right_punct = Punct::new(right, Spacing::Alone);
    right_punct.set_span(group.span_close());
    (
        construct_token(TokenType::Punct(left, left_punct)),
        construct_token(TokenType::Punct(right, right_punct)),
    )
}

// Wraps a token in the LALRPOP triple, using its own span on both sides.
fn construct_token(token: TokenType) -> Token {
    let span = token.span();
    Ok((span, token, span))
}
/// Newtype over `proc_macro2::Span` so Display and Default can be
/// implemented for it (required for use as the parser's Location type).
#[derive(Copy, Clone, Debug)]
pub struct Span(pub Span2);

impl fmt::Display for Span {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegates to the derived Debug output (not recursive).
        write!(f, "{:?}", self)
    }
}

impl Default for Span {
    fn default() -> Self {
        Span(Span2::call_site())
    }
}

View file

@ -1,80 +0,0 @@
extern crate proc_macro;
extern crate thiserror;
#[macro_use]
extern crate lalrpop_util;
mod ast;
mod flatten;
mod parser {
#![allow(dead_code, unused_variables, unknown_lints, non_snake_case)]
lalrpop_mod!(pub parser);
}
use proc_macro2::TokenStream;
use quote::quote;
use crate::flatten::{Span, TokenType};
use crate::parser::parser::*;
// `component!` proc macro (work in progress): lexes and parses its input and
// prints diagnostics.
// NOTE(review): always panics at the end, so the macro never expands to
// anything — debugging scaffolding; the trailing `quote!{}` is unreachable.
#[proc_macro]
pub fn component(input_tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input_tokens: TokenStream = input_tokens.into();
    let tokens = flatten::flatten(input_tokens);
    eprintln!("Tokens:");
    for token in tokens.iter() {
        eprintln!("- {:?}", token);
    }
    eprintln!();
    let parser = ProgramParser::new();
    let result = parser.parse(tokens);
    match result {
        Ok(result) => eprintln!("success: {:?}", result),
        Err(err) => {
            use lalrpop_util::ParseError::*;
            // Pretty-print each LALRPOP error variant.
            match err {
                User { ref error } => print!("user error: {:?}", error),
                InvalidToken { ref location } => print!("Invalid token at {}", location),
                UnrecognizedEOF {
                    ref location,
                    ref expected,
                } => {
                    print!("Unrecognized EOF found at {}", location);
                    fmt_expected(expected)
                }
                UnrecognizedToken {
                    token: (ref start, ref token, ref end),
                    ref expected,
                } => {
                    print!("Unrecognized token `{}` found at {}:{}", token, start, end);
                    fmt_expected(expected)
                }
                ExtraToken {
                    token: (ref start, ref token, ref end),
                } => print!("Extra token {} found at {}:{}", token, start, end),
            }
        }
    }
    panic!();
    let result = quote! {};
    result.into()
}
/// Format a list of expected tokens.
///
/// Prints nothing for an empty list; otherwise emits a newline followed by
/// `Expected one of a, b or c` style output on stdout.
fn fmt_expected(expected: &[String]) {
    if expected.is_empty() {
        return;
    }
    println!();
    let last = expected.len() - 1;
    for (idx, token) in expected.iter().enumerate() {
        let lead = if idx == 0 {
            "Expected one of"
        } else if idx < last {
            ","
        } else {
            // Last expected message to be written
            " or"
        };
        print!("{} {}", lead, token);
    }
}

View file

@ -1,162 +0,0 @@
use std::iter::FromIterator;
use std::collections::HashMap;
use proc_macro2::{Ident, Punct, TokenTree, Delimiter, TokenStream, Group, Spacing, Literal};
use crate::{TokenType, Span};
use crate::ast::*;
// Grammar for the `component!` macro language.
//
// Input is the flattened token list produced by `flatten.rs`; the terminals
// are declared in the `extern` block at the bottom.
grammar;

// A program is any number of top-level items.
pub Program: Vec<Toplevel> = Toplevel*;

pub Toplevel: Toplevel = {
    Component => Toplevel::Component(<>),
    Use => Toplevel::Use(<>),
};

// `use a::b::c;` — stored as its path segments.
Use: Use = {
    "use" <path:Delim<Ident, (":" ":")>> ";" => Use(path.into_iter().map(|ident| ident.to_string()).collect()),
};

// `component Name { ... }`
Component: Component = {
    "component" <name:Ident> <body:Body<ComponentBody*>> => {
        Component {
            name: name.to_string(),
            body,
        }
    }
};

ComponentBody: ComponentBody = {
    // Constructor and fn bodies are recognized but their contents discarded
    // (the AST variants carry no payload yet).
    "constructor" "(" ")" BraceGrouper => {
        ComponentBody::Constructor()
    },
    "view" <rsx:Body<Rsx>> => {
        ComponentBody::View(rsx)
    },
    "fn" Ident "(" Delim<Arg, ","> ")" BraceGrouper => {
        ComponentBody::Fn()
    },
};

// TODO: finish this
Arg: () = {
    Punct Ident Ident => {},
    Ident ":" Ident => {},
};

Rsx: Rsx = {
    // Self-closing tag: `<tag attrs />`.
    "<" <tag:Ident> <attrs:Attrs> "/" ">" => { Rsx::Tag { tag: tag.to_string(), attrs, inner: Vec::new(), } },
    // Paired tags; open/close names must match (asserted at parse time).
    "<" <tag:Ident> <attrs:Attrs> ">" <inner:Rsx*> "<" "/" <closeTag:Ident> ">" => {
        assert_eq!(tag, closeTag, "Tags {} and {} do not match.", tag, closeTag);
        Rsx::Tag { tag: tag.to_string(), attrs, inner }
    },
    BraceGrouper => { Rsx::CodeSegment(<>.to_string()) },
    AnyText => { Rsx::Text(<>) },
};

Attrs: HashMap<String, String> = {
    (AttrLhs "=" BraceGrouper)* => {
        <>.into_iter()
            .map(|(lhs, _, rhs)| (lhs.to_string(), rhs.to_string()))
            .collect()
    }
};

AttrLhs: String = {
    // Namespaced form, e.g. `on:click` / `bind:value`.
    <a:Ident> ":" <b:Ident> => format!("{}:{}", a, b),
    Ident => <>.to_string(),
};

//

// The rules below reassemble raw token trees so arbitrary Rust code can be
// skipped over without being understood by this grammar.
pub ArbitraryBlocks: Vec<TokenTree> = AnyToken*;

Grouper: TokenTree = {
    BraceGrouper => <>,
    "(" <b:AnyToken*> ")" => TokenTree::Group(Group::new(Delimiter::Parenthesis, TokenStream::from_iter(b.into_iter()))),
    "[" <b:AnyToken*> "]" => TokenTree::Group(Group::new(Delimiter::Bracket, TokenStream::from_iter(b.into_iter()))),
};

BraceGrouper: TokenTree = {
    "{" <b:AnyToken*> "}" => TokenTree::Group(Group::new(Delimiter::Brace, TokenStream::from_iter(b.into_iter()))),
};

AnyText: String = {
    Ident => <>.to_string(),
    Punct => <>.to_string(),
    Literal => <>.to_string(),
};

// Any single token, including keywords demoted back to plain idents and
// named punctuation rebuilt as plain puncts.
AnyToken: TokenTree = {
    Grouper => <>,
    "component" => TokenTree::Ident(Ident::new("component", <>.0)),
    "constructor" => TokenTree::Ident(Ident::new("constructor", <>.0)),
    "fn" => TokenTree::Ident(Ident::new("fn", <>.0)),
    "use" => TokenTree::Ident(Ident::new("use", <>.0)),
    "view" => TokenTree::Ident(Ident::new("view", <>.0)),
    ":" => TokenTree::Punct(Punct::new(':', Spacing::Alone)),
    ";" => TokenTree::Punct(Punct::new(';', Spacing::Alone)),
    "," => TokenTree::Punct(Punct::new(',', Spacing::Alone)),
    "<" => TokenTree::Punct(Punct::new('<', Spacing::Alone)),
    ">" => TokenTree::Punct(Punct::new('>', Spacing::Alone)),
    "/" => TokenTree::Punct(Punct::new('/', Spacing::Alone)),
    "=" => TokenTree::Punct(Punct::new('=', Spacing::Alone)),
    Ident => TokenTree::Ident(<>),
    Punct => TokenTree::Punct(<>),
    Literal => TokenTree::Literal(<>),
};

// `{ <body> }`
Body<T>: T = {
    "{" <body:T> "}" => body,
};

// Possibly-empty `Sep`-separated list with an optional trailing separator.
Delim<T, Sep>: Vec<T> = {
    <all:(T ((Sep T)+ Sep?)?)?> => {
        let mut vec = Vec::new();
        if let Some((initial, rest)) = all {
            vec.push(initial);
            if let Some((rest, _)) = rest {
                for (_, item) in rest {
                    vec.push(item);
                }
            }
        }
        vec
    },
};

// Terminal declarations: maps grammar literals onto the `TokenType` values
// produced by the custom lexer in `flatten.rs`.
extern {
    type Location = Span;
    type Error = ();
    enum TokenType {
        "component" => TokenType::Component(<Span>),
        "constructor" => TokenType::Constructor(<Span>),
        "fn" => TokenType::Fn(<Span>),
        "use" => TokenType::Use(<Span>),
        "view" => TokenType::View(<Span>),
        ":" => TokenType::Punct(':', _),
        ";" => TokenType::Punct(';', _),
        "," => TokenType::Punct(',', _),
        "{" => TokenType::Punct('{', _),
        "}" => TokenType::Punct('}', _),
        "(" => TokenType::Punct('(', _),
        ")" => TokenType::Punct(')', _),
        "[" => TokenType::Punct('[', _),
        "]" => TokenType::Punct(']', _),
        "<" => TokenType::Punct('<', _),
        ">" => TokenType::Punct('>', _),
        "/" => TokenType::Punct('/', _),
        "=" => TokenType::Punct('=', _),
        Punct => TokenType::Punct(_, <Punct>),
        Ident => TokenType::Ident(<Ident>),
        Literal => TokenType::Literal(<Literal>),
    }
}

View file

@ -1,18 +0,0 @@
[package]
name = "enterprise-macros"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
[lib]
proc-macro = true
[dependencies]
proc-macro2 = { version = "1.0.7", features = ["span-locations"] }
quote = "1.0.2"
thiserror = "1.0.9"
symbol = { path = "../symbol" }
enterprise-compiler = { path = "../enterprise-compiler" }
syn-serde = { path = "../syn-serde" }
syn = { version = "1.0.14", features = ["extra-traits", "full"] }
serde_json = "1.0.48"

View file

@ -1,316 +0,0 @@
extern crate proc_macro;
#[macro_use]
extern crate quote;
mod rsx;
use std::collections::HashMap;
use std::iter::FromIterator;
use std::iter::Peekable;
use enterprise_compiler::model::{Component, Elem, ModelMap, Rsx};
use proc_macro2::{
token_stream::IntoIter, Delimiter, Group, Ident, Punct, Spacing, TokenStream, TokenTree,
};
use symbol::Symbol;
use syn::{
parse::{Parse, ParseStream},
Error as SynError, Expr, Result as SynResult, Token, Type,
};
use syn_serde::Syn;
use crate::rsx::{RsxParser, RsxToken};
/// Everything that can go wrong while parsing `component!` input.
#[derive(Debug)]
enum ParseError {
    // Expected the keyword (first), found the identifier (second).
    ExpectedKeyword(Symbol, Ident),
    ExpectedIdent(TokenTree),
    ExpectedGroup(TokenTree),
    ExpectedPunct(TokenTree),
    // (expected, found) group delimiters.
    WrongDelimiter(Delimiter, Delimiter),
    // (expected char, found punct).
    WrongPunct(char, Punct),
    // An error bubbled up from syn while parsing a model entry.
    Syn(SynError),
    UnexpectedEOF,
    UnexpectedKeyword,
    MissingModel,
    MissingView,
    // InvalidRsx(TokenTree),
    UnmatchedOpenTag(TokenTree),
}

impl From<SynError> for ParseError {
    fn from(err: SynError) -> Self {
        ParseError::Syn(err)
    }
}

// One parsed `model { ... }` or `view { ... }` block of a component body.
enum ComponentBlock {
    Model(ModelMap),
    View(Vec<Rsx>),
}
// Cursor over a token stream; every `consume_*` method advances it.
struct Visitor(Peekable<IntoIter>);

impl Visitor {
    fn from_tokens(stream: TokenStream) -> Self {
        Visitor(stream.into_iter().peekable())
    }

    // Parses the next `component NAME { ... }` definition, or returns
    // Ok(None) at the end of the stream.
    fn consume_component(&mut self) -> Result<Option<Component>, ParseError> {
        if let None = self.0.peek() {
            return Ok(None);
        }
        self.consume_keyword("component")?;
        let name = consume_ident(&mut self.0)?.to_string();
        let def = self.consume_group(Delimiter::Brace)?;
        // Parse the body with a fresh cursor over the group's contents.
        let mut def_visitor = Visitor::from_tokens(def.stream());
        let mut model_map = None;
        let mut view = None;
        while let Some(block) = def_visitor.next_inner_block()? {
            match block {
                ComponentBlock::Model(inner) => model_map = Some(inner),
                ComponentBlock::View(inner) => view = Some(inner),
            }
        }
        // Both a model and a view block are mandatory.
        let model = match model_map {
            Some(model_map) => model_map,
            None => return Err(ParseError::MissingModel),
        };
        let view = match view {
            Some(view) => view,
            None => return Err(ParseError::MissingView),
        };
        Ok(Some(Component { name, model, view }))
    }

    // Parses the next `model { ... }` or `view { ... }` block inside a
    // component body, or returns Ok(None) at the end.
    fn next_inner_block(&mut self) -> Result<Option<ComponentBlock>, ParseError> {
        let next_token = self.0.peek();
        if next_token.is_none() {
            return Ok(None);
        }
        let next_ident = consume_ident(&mut self.0)?;
        match next_ident.to_string().as_ref() {
            "model" => {
                let next_group = self.consume_group(Delimiter::Brace)?;
                let mut model_visitor = Visitor::from_tokens(next_group.stream());
                println!("SHIET");
                let model_map = model_visitor.consume_model_map()?;
                Ok(Some(ComponentBlock::Model(model_map)))
            }
            "view" => {
                let next_group = self.consume_group(Delimiter::Brace)?;
                let mut view_visitor = Visitor::from_tokens(next_group.stream());
                let view = view_visitor.consume_view()?;
                Ok(Some(ComponentBlock::View(view)))
            }
            _ => Err(ParseError::UnexpectedKeyword),
        }
    }

    // Parses the body of a `model { ... }` block: a comma-separated list of
    // `name: Type = init` entries, each parsed via syn.
    fn consume_model_map(&mut self) -> Result<ModelMap, ParseError> {
        // One `name: Type = init` entry, parsed by syn.
        #[derive(Debug)]
        struct ModelEntry {
            name: Ident,
            colon: Token![:],
            ty: Type,
            eq: Token![=],
            init: Expr,
        }
        impl Parse for ModelEntry {
            fn parse(input: ParseStream) -> SynResult<Self> {
                Ok(ModelEntry {
                    name: input.parse()?,
                    colon: input.parse()?,
                    ty: input.parse()?,
                    eq: input.parse()?,
                    init: input.parse()?,
                })
            }
        }
        // Reads one entry up to a top-level `,` or end of stream; the bool
        // reports whether a comma was consumed.
        let mut single_def = || -> Result<Option<(Symbol, Type, Expr, bool)>, ParseError> {
            let next_token = self.0.peek();
            if next_token.is_none() {
                return Ok(None);
            }
            // read until comma or end
            let mut buf = Vec::new();
            let mut hit_comma = false;
            loop {
                let next_token = self.0.peek();
                if next_token.is_none() {
                    break;
                }
                let next_token = self.0.next().expect("unreachable");
                if let TokenTree::Punct(ref punct) = next_token {
                    if punct.as_char() == ',' && punct.spacing() == Spacing::Alone {
                        hit_comma = true;
                        break;
                    }
                }
                buf.push(next_token);
            }
            // probably shouldn't happen?
            if buf.len() == 0 {
                return Ok(None);
            }
            let stream = TokenStream::from_iter(buf);
            let item = syn::parse2::<ModelEntry>(stream)?;
            // println!("ITEM: {:?}", item);
            Ok(Some((
                Symbol::from(item.name.to_string()),
                item.ty,
                item.init,
                hit_comma,
            )))
        };
        let mut map = HashMap::new();
        while let Some((name, ty, init, comma)) = single_def()? {
            map.insert(name, (ty, init));
            // NOTE(review): breaking when a comma WAS consumed looks
            // inverted — as written this stops after the first entry of a
            // comma-separated model. Likely should be `if !comma { break; }`;
            // confirm against a multi-field model before changing.
            if comma {
                break;
            }
        }
        Ok(enterprise_compiler::model::convert_map(map))
    }

    // Parses the body of a `view { ... }` block into RSX nodes.
    // NOTE(review): only empty tags, code blocks, and text are materialized;
    // other RsxToken variants are silently dropped by the `_ => ()` arm.
    fn consume_view(&mut self) -> Result<Vec<Rsx>, ParseError> {
        let mut rsx_parser = RsxParser::new(self.0.clone());
        let mut result = Vec::new();
        while let Some(next_token) = rsx_parser.next() {
            match next_token? {
                RsxToken::EmptyTag(name, attrs) => {
                    let elem = Elem {
                        tag: name.to_string(),
                        attrs,
                        inner: vec![],
                    };
                    let el = Rsx::Elem(elem);
                    result.push(el);
                }
                RsxToken::Code(expr) => {
                    result.push(Rsx::Code(expr.to_adapter()));
                }
                RsxToken::Str(string) => {
                    result.push(Rsx::Text(string));
                }
                _ => (),
            }
        }
        Ok(result)
    }

    // Consumes one identifier and requires it to equal `keyword`.
    fn consume_keyword(&mut self, keyword: impl AsRef<str>) -> Result<(), ParseError> {
        let keyword = keyword.as_ref();
        let ident = consume_ident(&mut self.0)?;
        let ident_str = ident.to_string();
        if keyword == &ident_str {
            Ok(())
        } else {
            Err(ParseError::ExpectedKeyword(Symbol::from(keyword), ident))
        }
    }

    // Consumes one group token and requires the given delimiter.
    fn consume_group(&mut self, delimiter: Delimiter) -> Result<Group, ParseError> {
        let next_token = self.0.peek();
        if next_token.is_none() {
            return Err(ParseError::UnexpectedEOF);
        }
        let next_token = self.0.next().expect("unreachable");
        if let TokenTree::Group(group) = next_token {
            if delimiter == group.delimiter() {
                Ok(group)
            } else {
                Err(ParseError::WrongDelimiter(delimiter, group.delimiter()))
            }
        } else {
            Err(ParseError::ExpectedGroup(next_token))
        }
    }
}
/// Consumes the next token, requiring it to be punctuation.
///
/// When `equals` is `Some(c)`, the punctuation character must additionally be
/// `c`; otherwise any punctuation is accepted.
fn consume_punct(
    iter: &mut Peekable<impl Iterator<Item = TokenTree>>,
    equals: Option<char>,
) -> Result<Punct, ParseError> {
    let token = iter.next().ok_or(ParseError::UnexpectedEOF)?;
    let punct = match token {
        TokenTree::Punct(punct) => punct,
        other => return Err(ParseError::ExpectedPunct(other)),
    };
    match equals {
        Some(expected) if punct.as_char() != expected => {
            Err(ParseError::WrongPunct(expected, punct))
        }
        _ => Ok(punct),
    }
}
/// Consumes the next token, requiring it to be an identifier.
fn consume_ident(
    iter: &mut Peekable<impl Iterator<Item = TokenTree>>,
) -> Result<Ident, ParseError> {
    match iter.next() {
        None => Err(ParseError::UnexpectedEOF),
        Some(TokenTree::Ident(ident)) => Ok(ident),
        Some(other) => Err(ParseError::ExpectedIdent(other)),
    }
}
/// Yields successive parsed components until the token stream is exhausted;
/// a parse failure is surfaced as an `Err` item.
impl Iterator for Visitor {
    type Item = Result<Component, ParseError>;

    fn next(&mut self) -> Option<Self::Item> {
        // `consume_component` returns `Result<Option<_>, _>`; `transpose`
        // flips it into the `Option<Result<_, _>>` shape an iterator needs.
        self.consume_component().transpose()
    }
}
/// Proc-macro entry point: `component! { component Name { model { .. } view { .. } } }`.
///
/// Parses each component definition out of the input token stream and, for
/// every one, emits a `const NAME: &'static str` holding the JSON-serialized
/// component description for downstream build-time processing.
///
/// # Panics
///
/// Panics at macro-expansion time if a component definition fails to parse or
/// if the parsed component cannot be serialized to JSON.
#[proc_macro]
pub fn component(input_tokens: proc_macro::TokenStream) -> proc_macro::TokenStream {
    let input_tokens: TokenStream = input_tokens.into();
    let visitor = Visitor::from_tokens(input_tokens);
    // TODO: allow importing and stuff
    let mut output = TokenStream::new();
    for component in visitor {
        // Expansion-time failure: surface a clear diagnostic instead of the
        // previous joke messages.
        let component = component.expect("failed to parse component definition");
        let name = format_ident!("{}", component.name);
        let serialized =
            serde_json::to_string(&component).expect("failed to serialize component to JSON");
        output.extend(quote! {
            const #name: &'static str = #serialized;
        });
    }
    output.into()
}

View file

@ -1,191 +0,0 @@
use std::collections::HashMap;
use std::iter::FromIterator;
use std::iter::Peekable;
use enterprise_compiler::model::{TagLhs, TagRhs};
use proc_macro2::{token_stream::IntoIter, Delimiter, Ident, TokenStream, TokenTree};
use symbol::Symbol;
use syn::{Expr, Lit};
use syn_serde::Syn;
use crate::ParseError;
use crate::{consume_ident, consume_punct};
/// Streaming tokenizer for the RSX (HTML-like) syntax inside `view { ... }`.
///
/// Wraps a peekable proc-macro token stream and yields `RsxToken`s on demand.
pub(crate) struct RsxParser(Peekable<IntoIter>);

impl RsxParser {
    /// Re-collects the given tokens into a fresh `TokenStream` and wraps it
    /// in a peekable iterator.
    pub fn new(tokens: impl Iterator<Item = TokenTree>) -> Self {
        let tokens = TokenStream::from_iter(tokens);
        RsxParser(tokens.into_iter().peekable())
    }

    /// Produces the next `RsxToken`, or `Ok(None)` at end of input.
    ///
    /// Recognizes: `<tag attr=...>` / `<tag ... />` / `</tag>`, bare string
    /// literals (text), and `{ expr }` groups (interpolated code). Any other
    /// input currently aborts via `unimplemented!`.
    pub fn next_token(&mut self) -> Result<Option<RsxToken>, ParseError> {
        let token = self.0.peek();
        if token.is_none() {
            return Ok(None);
        }
        let token = self.0.next().expect("unreachable");
        match token {
            // '<' starts an opening tag, a self-closing tag, or (when
            // immediately followed by '/') a closing tag.
            TokenTree::Punct(ref punct) if punct.as_char() == '<' => {
                let next_token = self.0.peek();
                if next_token.is_none() {
                    return Err(ParseError::UnmatchedOpenTag(token));
                }
                let next_token = next_token.expect("unreachable");
                // A '/' right after '<' marks a closing tag; consume it.
                let is_closing = if let TokenTree::Punct(punct2) = next_token {
                    if punct2.as_char() == '/' {
                        self.0.next();
                        true
                    } else {
                        false
                    }
                } else {
                    false
                };
                let name = self.consume_ident()?;
                if is_closing {
                    return Ok(Some(RsxToken::ClosingTag(Symbol::from(name.to_string()))));
                }
                // Buffer every token up to the '>' that ends this tag.
                // `prev_tag` remembers the previous token so a trailing "/>"
                // (self-closing tag) can be detected.
                let mut buf = Vec::new();
                let mut prev_tag = None;
                let mut is_empty = false;
                loop {
                    let next_token = self.0.peek();
                    if next_token.is_none() {
                        // probably wrong error?
                        return Err(ParseError::UnexpectedEOF);
                    }
                    let next_token = self.0.next().expect("unreachable");
                    if let TokenTree::Punct(ref punct) = next_token {
                        if punct.as_char() == '>' {
                            if let Some(TokenTree::Punct(ref punct2)) = prev_tag {
                                if punct2.as_char() == '/' {
                                    // Drop the '/' that was already buffered.
                                    buf.truncate(buf.len() - 1);
                                    is_empty = true;
                                }
                            }
                            break;
                        }
                    }
                    prev_tag = Some(next_token.clone());
                    buf.push(next_token);
                }
                // Parse the buffered attribute tokens. Each attribute has the
                // shape `name = rhs`, `bind:name = rhs`, or `on:name = rhs`,
                // where rhs is a string literal or a brace-delimited expr.
                let mut attrs = HashMap::new();
                let mut iter = buf.into_iter().peekable();
                loop {
                    // consume a single attr
                    let next_token = iter.peek();
                    if next_token.is_none() {
                        break;
                    }
                    let name_or_prefix = consume_ident(&mut iter)?.to_string();
                    let lhs = if let Some(TokenTree::Punct(ref punct)) = iter.peek() {
                        if punct.as_char() == ':' {
                            // Prefixed form: `bind:attr` or `on:event`.
                            iter.next();
                            let name = consume_ident(&mut iter)?.to_string();
                            if name_or_prefix == "bind" {
                                TagLhs::Bind(name)
                            } else if name_or_prefix == "on" {
                                TagLhs::On(name)
                            } else {
                                unimplemented!("these are wrong states")
                            }
                        } else if punct.as_char() == '=' {
                            // Plain attribute; '=' is consumed just below.
                            TagLhs::Plain(name_or_prefix.to_string())
                        } else {
                            unimplemented!("these are wrong states")
                        }
                    } else {
                        unimplemented!("these are wrong states")
                    };
                    consume_punct(&mut iter, Some('='))?;
                    let next_token = iter.next();
                    let rhs = match next_token {
                        Some(TokenTree::Literal(lit)) => {
                            // Only string literals are accepted as attr values.
                            let stream = TokenStream::from(TokenTree::Literal(lit));
                            let lit = syn::parse2::<Lit>(stream)?;
                            if let Lit::Str(string) = lit {
                                TagRhs::Text(string.value())
                            } else {
                                unimplemented!("grrr")
                            }
                        }
                        Some(TokenTree::Group(group)) if group.delimiter() == Delimiter::Brace => {
                            let expr = syn::parse2::<Expr>(group.stream())?;
                            TagRhs::Code(expr.to_adapter())
                        }
                        _ => unimplemented!("wrong state: {:?}", next_token),
                    };
                    attrs.insert(lhs, rhs);
                }
                let variant = if is_empty {
                    RsxToken::EmptyTag
                } else {
                    RsxToken::OpeningTag
                };
                return Ok(Some(variant(Symbol::from(name.to_string()), attrs)));
            }
            // Bare string literal: plain text content.
            TokenTree::Literal(lit) => {
                let stream = TokenStream::from(TokenTree::Literal(lit));
                let lit = syn::parse2::<Lit>(stream)?;
                if let Lit::Str(string) = lit {
                    return Ok(Some(RsxToken::Str(string.value())));
                }
            }
            // `{ expr }`: interpolated Rust expression.
            TokenTree::Group(group) if group.delimiter() == Delimiter::Brace => {
                let expr = syn::parse2::<Expr>(group.stream())?;
                return Ok(Some(RsxToken::Code(expr)));
            }
            _ => unimplemented!("TOKEN: {:?}", token),
        };
        // Reached only when a literal was not a string literal.
        unimplemented!("the fuck")
    }

    /// Consumes the next token, requiring it to be an identifier.
    fn consume_ident(&mut self) -> Result<Ident, ParseError> {
        let next_token = self.0.peek();
        if next_token.is_none() {
            return Err(ParseError::UnexpectedEOF);
        }
        let next_token = self.0.next().expect("unreachable");
        if let TokenTree::Ident(ident) = next_token {
            Ok(ident)
        } else {
            Err(ParseError::ExpectedIdent(next_token))
        }
    }
}
/// A single lexical unit of RSX markup.
#[derive(Debug)]
pub(crate) enum RsxToken {
    /// `<tag attr=...>` — opening tag plus its parsed attributes.
    OpeningTag(Symbol, HashMap<TagLhs, TagRhs>),
    /// `<tag attr=... />` — self-closing tag plus its parsed attributes.
    EmptyTag(Symbol, HashMap<TagLhs, TagRhs>),
    /// `</tag>` — closing tag.
    ClosingTag(Symbol),
    /// Bare string literal (text content).
    Str(String),
    /// `{ expr }` — an interpolated Rust expression.
    Code(Expr),
}
/// Yields RSX tokens until the underlying stream is exhausted; a lexing
/// failure is surfaced as an `Err` item.
impl Iterator for RsxParser {
    type Item = Result<RsxToken, ParseError>;

    fn next(&mut self) -> Option<Self::Item> {
        // `next_token` returns `Result<Option<_>, _>`; `transpose` flips it
        // into the `Option<Result<_, _>>` shape an iterator needs.
        self.next_token().transpose()
    }
}

View file

@ -1,2 +0,0 @@
/target
**/*.rs.bk

View file

@ -1,16 +0,0 @@
[package]
name = "helloworld"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
build = "src/build.rs"
[build-dependencies]
enterprise-compiler = { path = "../../enterprise-compiler" }
enterprise-macros = { path = "../../enterprise-macros" }
enterprise = { path = "../.." }
[dependencies]
stdweb = "0.4.20"
enterprise-macros = { path = "../../enterprise-macros" }
enterprise = { path = "../.." }

View file

@ -1,19 +0,0 @@
#[macro_use]
extern crate enterprise_macros;
// Build-time component definition: a single text input two-way bound to the
// `name` model field, rendered next to a greeting that tracks it.
component! {
    component HelloWorld {
        model {
            name: String = "hello",
        }
        view {
            <input bind:value="name" />
            "Hello, " {name} "!"
        }
    }
}

// Build-script entry point: hands the `HelloWorld` component definition to
// the compiler, which generates the Rust the crate later includes via
// `enterprise_mod!`.
fn main() {
    enterprise_compiler::process("helloworld", HelloWorld);
}

View file

@ -1,20 +0,0 @@
#[macro_use]
extern crate enterprise;
enterprise_mod!(helloworld);
use std::sync::Arc;
use enterprise::{Backend, Component, Web};
use crate::helloworld::HelloWorld;
// Application entry point: boots stdweb, instantiates the generated
// `HelloWorld` component against the Web backend, mounts it into the DOM
// element with id "app", and hands control to the stdweb event loop.
fn main() {
    stdweb::initialize();
    let web = Web;
    let app = HelloWorld::new(&web);
    web.initialize(app, "app".into());
    stdweb::event_loop();
}

View file

@ -1,11 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>what the Hek</title>
</head>
<body>
<div id="app"></div>
<script src="helloworld.js"></script>
</body>
</html>

View file

@ -1,9 +0,0 @@
[package]
name = "todomvc"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]

View file

@ -1,3 +0,0 @@
// Placeholder entry point; the TodoMVC example is not implemented yet.
fn main() {
    println!("Hello, world!");
}

View file

@ -1,16 +0,0 @@
//! Everything related to backends and backend-specific actions.
mod web;
pub use self::web::Web;
use crate::Component;
/// Describes a possible compile backend.
///
/// A backend knows how to take a [`Component`] and attach it to a concrete
/// runtime (e.g. the browser DOM for the `Web` backend).
pub trait Backend: Sized {
    /// The parameters that are passed into the init function.
    type InitParams;
    /// Initializes the backend with the given component.
    fn initialize<C: Component<Self>>(&self, _: C, _: Self::InitParams);
}

View file

@ -1,18 +0,0 @@
use stdweb::web::{document, INonElementParentNode};
use crate::backend::Backend;
use crate::Component;
/// Compiling to a web application.
pub struct Web;

impl Backend for Web {
    /// The id of the DOM element to mount the component into.
    type InitParams = String;

    /// Looks up the element with the given id and, when present, asks the
    /// component to render itself into it; silently does nothing when the
    /// element is absent.
    fn initialize<C: Component<Self>>(&self, component: C, params: Self::InitParams) {
        let id = params.as_ref();
        if let Some(el) = document().get_element_by_id(id) {
            component.create(&el);
        }
    }
}

View file

@ -1,4 +0,0 @@
//! Compiler-related procedures.
/// Processes a set of component definitions.
///
/// NOTE(review): currently an empty stub — presumably superseded by the
/// `enterprise-compiler` crate's `process`; confirm before relying on it.
pub fn process() {}

View file

@ -1,30 +0,0 @@
//! Enterprise is a backend-agnostic framework for developing server-client GUI applications.
#![deny(missing_docs)]
pub extern crate enterprise_compiler;
// re-exports
pub extern crate parking_lot;
pub extern crate stdweb;
mod backend;
pub mod compiler;
pub use crate::backend::{Backend, Web};
/// Components are the building-blocks of enterprise applications.
pub trait Component<B: Backend> {
    /// TODO: replace this with a real init function.
    ///
    /// Renders this component into the given DOM element.
    fn create(&self, el: &crate::stdweb::web::Element);
}
/// Declares a mod.
///
/// Expands to a module whose body is the build-script-generated file
/// `$OUT_DIR/<name>.rs`. An optional visibility may precede the name,
/// e.g. `enterprise_mod!(pub helloworld);`.
#[macro_export]
macro_rules! enterprise_mod {
    ($vis:vis $name:ident) => {
        $vis mod $name {
            include!(concat!(env!("OUT_DIR"), "/", stringify!($name), ".rs"));
        }
    }
}

View file

@ -1,13 +0,0 @@
[package]
name = "symbol"
version = "0.1.0"
authors = ["Michael Zhang <iptq@protonmail.com>"]
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
lazy_static = "1.4.0"
spin = "0.5.2"
serde_derive = "1.0.104"
serde = "1.0.104"

View file

@ -1,143 +0,0 @@
// cribbed from https://github.com/remexre/symbol-rs
use std::cmp::Ordering;
use std::collections::BTreeSet;
use std::fmt::{self, Debug, Display, Formatter, Result as FmtResult};
use std::mem::{forget, transmute};
use std::ops::Deref;
use std::sync::atomic::{AtomicUsize, Ordering as AtomicOrdering};
use lazy_static::lazy_static;
use serde::{
de::{Deserialize, Deserializer, Visitor},
ser::{Serialize, Serializer},
};
use spin::Mutex;
lazy_static! {
    // Global intern table: every Symbol's backing &'static str lives here
    // exactly once; entries are never removed.
    static ref SYMBOL_HEAP: Mutex<BTreeSet<&'static str>> = Mutex::new(BTreeSet::new());
}

/// An interned string with O(1) equality.
///
/// Interning through `SYMBOL_HEAP` means two `Symbol`s with equal contents
/// share the same leaked backing string.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd)]
pub struct Symbol {
    // Canonical interned string for these contents.
    s: &'static str,
}
impl Symbol {
    /// Retrieves the address of the backing string.
    pub fn addr(self) -> usize {
        self.s.as_ptr() as usize
    }
    /// Retrieves the string from the Symbol.
    pub fn as_str(self) -> &'static str {
        self.s
    }
    /// Generates a fresh symbol named `sym_{n}`, where `n` comes from a
    /// process-global counter, retrying until the candidate is not already
    /// interned.
    ///
    /// Holds the intern-table lock across the retry loop so no other thread
    /// can claim the chosen name first. Each collision leaks one candidate
    /// string (acceptable for a gensym).
    pub fn gensym() -> Symbol {
        lazy_static! {
            static ref N: AtomicUsize = AtomicUsize::new(0);
        }
        let mut heap = SYMBOL_HEAP.lock();
        let n = loop {
            let n = leak_string(format!("sym_{}", N.fetch_add(1, AtomicOrdering::SeqCst)));
            if heap.insert(n) {
                break n;
            }
        };
        // Release before Symbol::from, which takes the same (non-reentrant)
        // spin lock.
        drop(heap);
        Symbol::from(n)
    }
}
impl Debug for Symbol {
    // Debug-formats as the quoted backing string.
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        Debug::fmt(self.s, fmt)
    }
}

impl Deref for Symbol {
    type Target = str;
    // A Symbol derefs transparently to its backing str.
    fn deref(&self) -> &str {
        self.s
    }
}

impl Display for Symbol {
    // Displays the raw string contents, unquoted.
    fn fmt(&self, fmt: &mut Formatter) -> FmtResult {
        fmt.write_str(self.s)
    }
}
impl<S: AsRef<str>> From<S> for Symbol {
    /// Interns `s` and returns the canonical `Symbol` for its contents.
    ///
    /// Takes the intern-table lock once and performs a single lookup,
    /// leaking and inserting the string only on a miss. (The previous
    /// version locked the table twice and looked the string up twice.)
    fn from(s: S) -> Symbol {
        let s = s.as_ref();
        let mut heap = SYMBOL_HEAP.lock();
        let interned = match heap.get(s) {
            Some(&existing) => existing,
            None => {
                // First time we see these contents: leak an owned copy so it
                // lives for the rest of the process, then register it.
                let leaked = leak_string(s.to_owned());
                heap.insert(leaked);
                leaked
            }
        };
        Symbol { s: interned }
    }
}
impl Ord for Symbol {
    /// Orders symbols lexicographically by their string contents.
    ///
    /// The previous implementation compared backing-pointer addresses, which
    /// disagreed with the derived `PartialOrd` (lexicographic on the string) —
    /// a violation of `Ord`'s contract that `cmp` must be consistent with
    /// `partial_cmp`, which can corrupt ordered collections of `Symbol`.
    fn cmp(&self, other: &Self) -> Ordering {
        self.s.cmp(other.s)
    }
}
impl<S: AsRef<str>> PartialEq<S> for Symbol {
fn eq(&self, other: &S) -> bool {
self.partial_cmp(&other.as_ref()) == Some(Ordering::Equal)
}
}
impl<S: AsRef<str>> PartialOrd<S> for Symbol {
    // Lexicographic comparison against any string-like value.
    fn partial_cmp(&self, other: &S) -> Option<Ordering> {
        self.s.partial_cmp(other.as_ref())
    }
}
/// Leaks `s`, yielding a `&'static str` that lives for the remainder of the
/// process (intentional: interned symbols are never freed).
///
/// Uses `Box::leak` instead of the previous `transmute` + `forget` pair —
/// identical effect, but entirely safe code with no lifetime transmutation.
fn leak_string(s: String) -> &'static str {
    Box::leak(s.into_boxed_str())
}
// SERDE

impl Serialize for Symbol {
    // Serializes as a plain string.
    fn serialize<S: Serializer>(&self, s: S) -> Result<S::Ok, S::Error> {
        s.serialize_str(self.s)
    }
}

impl<'d> Deserialize<'d> for Symbol {
    // Deserializes from a string, interning the contents.
    fn deserialize<D: Deserializer<'d>>(d: D) -> Result<Self, D::Error> {
        d.deserialize_str(SymVisitor)
    }
}

// Visitor that turns a deserialized string into an interned Symbol.
struct SymVisitor;

impl<'d> Visitor<'d> for SymVisitor {
    type Value = Symbol;
    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "symbol")
    }
    // Interning never fails, so any &str is accepted.
    fn visit_str<E>(self, string: &str) -> Result<Self::Value, E> {
        Ok(Symbol::from(string))
    }
}

View file

@ -1,22 +0,0 @@
# EditorConfig configuration
# https://editorconfig.org
# Top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file, utf-8 charset
[*]
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
charset = utf-8
# Match rust/toml, set 4 space indentation
[*.{rs,toml}]
indent_style = space
indent_size = 4
# Match json/yaml/markdown, set 2 space indentation
[*.{json,yml,md}]
indent_style = space
indent_size = 2

View file

@ -1,4 +0,0 @@
[attr]rust text eol=lf whitespace=tab-in-indent,trailing-space,tabwidth=4
* text=auto eol=lf
*.rs rust

View file

@ -1 +0,0 @@
* @taiki-e

View file

@ -1,10 +0,0 @@
status = [
"test (1.31.0)",
"test (stable)",
"test (beta)",
"test (nightly)",
"style (clippy)",
"style (rustfmt)",
"style (rustdoc)",
]
delete_merged_branches = true

View file

@ -1,95 +0,0 @@
name: ci
on:
pull_request:
push:
branches:
- master
- staging
- trying
schedule:
- cron: '00 01 * * *'
env:
RUSTFLAGS: -Dwarnings
jobs:
test:
name: test
runs-on: ubuntu-latest
strategy:
matrix:
rust:
# This is the minimum supported Rust version of this crate.
# When updating this, remember to update the minimum supported
# Rust version in README.md.
#
# Tests are not run as tests may require newer versions of Rust.
- 1.31.0
- stable
- beta
- nightly
steps:
- uses: actions/checkout@master
- name: Install Rust
shell: bash
run: |
. ./ci/install-rust.sh ${{ matrix.rust }}
- name: Install cargo-hack
if: matrix.rust != '1.31.0'
run: |
cargo install cargo-hack
- name: cargo check
if: matrix.rust == '1.31.0'
run: |
cargo check --all-features
- name: cargo test
if: matrix.rust != '1.31.0'
run: |
cargo test --all-features
- name: cargo hack check --each-feature
if: matrix.rust != '1.31.0'
run: |
cargo hack check --all --each-feature --no-dev-deps
# Refs: https://github.com/rust-lang/cargo/issues/5657
- name: cargo check -Zminimal-versions
if: matrix.rust == 'nightly'
run: |
cargo update -Zminimal-versions
cargo hack check --all --all-features --no-dev-deps --ignore-private
style:
name: style
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
component:
- clippy
- rustfmt
- rustdoc
steps:
- uses: actions/checkout@master
- name: Install Rust
shell: bash
run: |
. ./ci/install-rust.sh
- name: Install component
if: matrix.component != 'rustdoc'
shell: bash
run: |
. ./ci/install-component.sh ${{ matrix.component }}
- name: cargo clippy
if: matrix.component == 'clippy'
run: |
cargo clippy --all --all-features --all-targets
- name: cargo fmt -- --check
if: matrix.component == 'rustfmt'
run: |
cargo fmt --all -- --check
- name: cargo doc
if: matrix.component == 'rustdoc'
env:
RUSTDOCFLAGS: -Dwarnings
run: |
cargo doc --no-deps --all --all-features

View file

@ -1,7 +0,0 @@
target
**/*.rs.bk
Cargo.lock
# For platform and editor specific settings, it is recommended to add to
# a global .gitignore file.
# Refs: https://help.github.com/en/articles/ignoring-files#create-a-global-gitignore

View file

@ -1,21 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
This project adheres to [Semantic Versioning](https://semver.org).
## [Unreleased]
# [0.2.0] - 2019-09-16
* [Removed error from `to_string` / `to_vec`.][e949263]
[e949263]: https://github.com/taiki-e/syn-serde/commit/e9492636eb7d58565fc415e55fd824b06b37f3d3
# [0.1.0] - 2019-09-16
Initial release
[Unreleased]: https://github.com/taiki-e/syn-serde/compare/v0.2.0...HEAD
[0.2.0]: https://github.com/taiki-e/syn-serde/compare/v0.1.0...v0.2.0
[0.1.0]: https://github.com/taiki-e/syn-serde/releases/tag/v0.1.0

View file

@ -1,36 +0,0 @@
[package]
name = "syn-serde"
version = "0.2.0"
authors = ["David Tolnay <dtolnay@gmail.com>", "Taiki Endo <te316e89@gmail.com>"]
edition = "2018"
license = "Apache-2.0 OR MIT"
repository = "https://github.com/taiki-e/syn-serde"
homepage = "https://github.com/taiki-e/syn-serde"
documentation = "https://docs.rs/syn-serde"
keywords = ["serde", "serialization", "syn"]
categories = ["development-tools::procedural-macro-helpers", "encoding"]
readme = "README.md"
description = """
Library to serialize and deserialize Syn syntax trees.
"""
[features]
json = ["serde_json"]
[dependencies]
proc-macro2 = { version = "1.0", default-features = false }
serde = { version = "1.0.99", features = ["derive"] }
serde_derive = "1.0.99" # This is necessary to make `-Z minimal-versions` build successful.
serde_json = { version = "1.0", optional = true }
syn = { version = "1.0.5", default-features = false, features = ["full"] }
[dev-dependencies]
quote = "1.0"
serde_json = "1.0"
syn = { version = "1.0", features = ["full", "extra-traits"] }
[package.metadata.docs.rs]
all-features = true
[package.metadata.playground]
all-features = true

View file

@ -1,201 +0,0 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -1,23 +0,0 @@
Permission is hereby granted, free of charge, to any
person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the
Software without restriction, including without
limitation the rights to use, copy, modify, merge,
publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice
shall be included in all copies or substantial portions
of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

View file

@ -1,130 +0,0 @@
# syn-serde
[![crates-badge]][crates-url]
[![docs-badge]][docs-url]
[![license-badge]][license]
[![rustc-badge]][rustc-url]
[crates-badge]: https://img.shields.io/crates/v/syn-serde.svg
[crates-url]: https://crates.io/crates/syn-serde
[docs-badge]: https://docs.rs/syn-serde/badge.svg
[docs-url]: https://docs.rs/syn-serde
[license-badge]: https://img.shields.io/crates/l/syn-serde.svg
[license]: #license
[rustc-badge]: https://img.shields.io/badge/rustc-1.31+-lightgray.svg
[rustc-url]: https://blog.rust-lang.org/2018/12/06/Rust-1.31-and-rust-2018.html
Library to serialize and deserialize [Syn] syntax trees.
[**Documentation**][docs-url]
## Usage
Add this to your `Cargo.toml`:
```toml
[dependencies]
syn-serde = "0.2"
```
The current syn-serde requires Rust 1.31 or later.
## Examples
```toml
[dependencies]
syn-serde = { version = "0.2", features = ["json"] }
syn = { version = "1", features = ["full"] }
```
```rust
use syn_serde::json;
let syn_file: syn::File = syn::parse_quote! {
fn main() {
println!("Hello, world!");
}
};
println!("{}", json::to_string_pretty(&syn_file));
```
This prints the following JSON:
```json
{
"items": [
{
"fn": {
"ident": "main",
"inputs": [],
"output": null,
"stmts": [
{
"semi": {
"macro": {
"path": {
"segments": [
{
"ident": "println"
}
]
},
"delimiter": "paren",
"tokens": [
{
"lit": "\"Hello, world!\""
}
]
}
}
}
]
}
}
]
}
```
### Rust source file -> JSON representation of the syntax tree
The [`rust2json`] example parses a Rust source file into a `syn_serde::File`
and prints out a JSON representation of the syntax tree.
[`rust2json`]: examples/rust2json
### JSON file -> Rust syntax tree
The [`json2rust`] example parses a JSON file into a `syn_serde::File` and
prints out a Rust syntax tree.
[`json2rust`]: examples/json2rust
## Optional features
* **`json`** — Provides functions for JSON <-> Rust serializing and
deserializing.
## Relationship to Syn
syn-serde is a fork of [Syn], and syn-serde provides a set of data structures
similar but not identical to [Syn]. All data structures provided by syn-serde
can be converted to the data structures of [Syn] and [proc-macro2].
The data structures of syn-serde 0.2 is compatible with the data structures of [Syn] 1.0.
[Syn]: https://github.com/dtolnay/syn
[proc-macro2]: https://github.com/alexcrichton/proc-macro2
## License
Licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or <http://www.apache.org/licenses/LICENSE-2.0>)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or <http://opensource.org/licenses/MIT>)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.

View file

@ -1,29 +0,0 @@
#!/bin/bash
# Install a rustup component (clippy, rustfmt, miri, ...) named by $1.
# If the component is missing from the default toolchain, fall back to the
# newest nightly that still ships it.
set -euo pipefail
component="${1}"
if ! rustup component add "${component}" 2>/dev/null; then
# If the component is unavailable on the latest nightly,
# use the latest toolchain with the component available.
# Refs: https://github.com/rust-lang/rustup-components-history#the-web-part
target=$(curl -sSf "https://rust-lang.github.io/rustup-components-history/x86_64-unknown-linux-gnu/${component}")
echo "'${component}' is unavailable on the default toolchain, use the toolchain 'nightly-${target}' instead"
rustup update "nightly-${target}" --no-self-update
rustup default "nightly-${target}"
echo "Query rust and cargo versions:"
rustup -V
rustc -V
cargo -V
# Retry on the toolchain that is known to have the component.
rustup component add "${component}"
fi
echo "Query component versions:"
# Each component exposes its version through a different binary.
case "${component}" in
clippy | miri) cargo "${component}" -V ;;
rustfmt) "${component}" -V ;;
esac

View file

@ -1,20 +0,0 @@
#!/bin/bash
# Install (or update to) the Rust toolchain named by $1; defaults to nightly.
# Uses an existing rustup when available, otherwise bootstraps via rustup.rs.
set -euo pipefail
toolchain="${1:-nightly}"
if rustup -V 2>/dev/null; then
rustup set profile minimal
rustup update "${toolchain}" --no-self-update
rustup default "${toolchain}"
else
# No rustup yet: install it with the minimal profile and the requested
# toolchain as the default.
curl -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal --default-toolchain "${toolchain}"
export PATH=${PATH}:${HOME}/.cargo/bin
# Expose cargo to subsequent CI steps (legacy "add-path" logging command).
echo "##[add-path]${HOME}/.cargo/bin"
fi
echo "Query rust and cargo versions:"
rustup -V
rustc -V
cargo -V

View file

@ -1,17 +0,0 @@
[package]
name = "syn-serde-internal-codegen"
version = "0.0.0"
authors = ["David Tolnay <dtolnay@gmail.com>", "Nika Layzell <nika@thelayzells.com>", "Taiki Endo <te316e89@gmail.com>"]
edition = "2018"
publish = false # this is an internal crate which should never be published
[workspace]
# Prefer that `cargo clean` in syn-serde's directory does not require a rebuild of
# rustfmt in the codegen directory.
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
rustfmt = { package = "rustfmt-nightly", git = "https://github.com/rust-lang-nursery/rustfmt" }
serde_json = "1.0"
syn-codegen = { git = "https://github.com/dtolnay/syn" }

View file

@ -1,12 +0,0 @@
# syn_serde_codegen
This is an internal (not published on crates.io) crate which is used to generate
the files in the `gen/` directory of `syn-serde`. It is used to ensure that the
implementations for `Syn` remain in sync with the
actual AST.
To run this program, run `cargo run` in this directory, and the `gen/` folder
will be re-generated.
This program is slow, so to save on compile time it is not run as part of
`syn-serde`'s build script.

View file

@ -1,40 +0,0 @@
use crate::Result;
use proc_macro2::TokenStream;
use std::{fmt, fs, io::Write, path::Path};
/// Renders `content` through rustfmt and writes it to `path`, prepending a
/// "@generated" header. The file is left untouched when the formatted output
/// is identical to what is already on disk (avoids timestamp churn).
///
/// Returns an error if formatting fails or any I/O step fails.
pub(crate) fn write<P: AsRef<Path>>(path: P, content: TokenStream) -> Result<()> {
    let mut formatted = Vec::new();
    writeln!(
        formatted,
        "// This file is @generated by syn-serde-internal-codegen.\n\
        // It is not intended for manual editing.\n"
    )?;
    // Configure rustfmt to emit into the in-memory buffer rather than a file.
    let mut config = rustfmt::Config::default();
    config.set().emit_mode(rustfmt::EmitMode::Stdout);
    config.set().verbose(rustfmt::Verbosity::Quiet);
    config.set().format_macro_matchers(true);
    config.set().normalize_doc_attributes(true);
    let mut session = rustfmt::Session::new(config, Some(&mut formatted));
    session.format(rustfmt::Input::Text(content.to_string())).map_err(RustfmtError)?;
    // Drop the session to release its borrow of `formatted` before reading it.
    drop(session);
    // Skip the write when the on-disk contents are already up to date.
    if path.as_ref().is_file() && fs::read(&path)? == formatted {
        return Ok(());
    }
    fs::write(path, formatted)?;
    Ok(())
}
/// Adapter that lets a `rustfmt::ErrorKind` be returned as a boxed
/// `std::error::Error` from `write`.
#[derive(Debug)]
struct RustfmtError(rustfmt::ErrorKind);

impl fmt::Display for RustfmtError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the wrapped error's own Display implementation.
        write!(f, "{}", self.0)
    }
}

impl std::error::Error for RustfmtError {}

View file

@ -1,20 +0,0 @@
use proc_macro2::TokenStream;
use syn_codegen::{Definitions, Node};
/// Visits every type definition in `defs` in alphabetical order of ident,
/// invoking `node` to append generated code for each one, and returns the
/// combined token stream.
///
/// The internal `Reserved` marker type is skipped.
pub(crate) fn traverse(
    defs: &Definitions,
    node: fn(&mut TokenStream, &Node, &Definitions),
) -> TokenStream {
    // Sort references instead of cloning every `Node`: the previous version
    // cloned the entire `types` vector just to sort it. Idents are unique
    // type names, so the resulting visit order is unchanged.
    let mut types: Vec<&Node> = defs.types.iter().filter(|ty| ty.ident != "Reserved").collect();
    types.sort_by(|a, b| a.ident.cmp(&b.ident));
    let mut traits = TokenStream::new();
    for ty in types {
        node(&mut traits, ty, defs);
    }
    traits
}

View file

@ -1,32 +0,0 @@
// Based on https://github.com/dtolnay/syn/tree/1.0.5/codegen.
//
// This crate generates the Syn trait in syn-serde programmatically from
// the syntax tree description.
#![recursion_limit = "128"]
#![warn(rust_2018_idioms, unreachable_pub)]
const SYN_JSON: &str = "../syn.json";
mod file;
mod gen;
mod serde;
use std::fs;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// Entry point: delegates to `try_main` and reports any failure on stderr
/// with a nonzero exit status.
fn main() {
    match try_main() {
        Ok(()) => {}
        Err(e) => {
            eprintln!("error: {}", e);
            std::process::exit(1);
        }
    }
}
/// Loads the `syn.json` type definitions and regenerates the serde
/// conversion source from them.
fn try_main() -> Result<()> {
    let json = fs::read_to_string(SYN_JSON)?;
    let definitions = serde_json::from_str(&json)?;
    serde::generate(&definitions)?;
    Ok(())
}

View file

@ -1,293 +0,0 @@
use crate::{file, gen, Result};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn_codegen::{Data, Definitions, Node, Type};
/// Output path for the generated conversion impls, relative to the codegen
/// crate directory.
const SERDE_SRC: &str = "../src/gen/mod.rs";

/// Types for which no conversion impls are generated by this pass; each group
/// is annotated with the reason it is excluded.
const IGNORED_TYPES: &[&str] = &[
    /* we don't have them */
    "DeriveInput",
    "Data",
    "DataStruct",
    "DataEnum",
    "DataUnion",
    /* private */
    "LitByte",
    "LitByteStr",
    "LitChar",
    "LitFloat",
    "LitInt",
    "LitStr",
    /* cannot be implemented by codegen */
    "Type",
    "UseTree",
    "Visibility",
    "Receiver",
    /* optimize */
    "Generics",
    "ExprMatch",
    "Arm",
    "TraitItemMethod",
    "ItemStruct",
    "ReturnType",
];

/// Structs whose serde-side representation has no fields at all; `node`
/// asserts against this list before skipping impl generation for them.
const EMPTY_STRUCTS: &[&str] = &["TypeInfer", "TypeNever", "UseGlob", "VisCrate", "VisPublic"];
/// Builds the conversion expressions for one component (field or variant
/// payload) of type `ty`, where `name` is the token stream that accesses the
/// value.
///
/// Returns `(from, into)`:
/// - `from`: tokens converting syn -> syn-serde, or `None` when the component
///   is dropped on the serde side (tokens, spans, `Reserved`);
/// - `into`: tokens converting syn-serde -> syn, reconstructing dropped
///   components with `default()` / `Span::call_site()`.
#[allow(clippy::cognitive_complexity)]
fn visit(ty: &Type, name: &TokenStream) -> (Option<TokenStream>, TokenStream) {
    match ty {
        Type::Box(_) | Type::Vec(_) | Type::Punctuated(_) => {
            let from = Some(quote!(#name.map_into()));
            let into = quote!(#name.map_into());
            (from, into)
        }
        Type::Option(t) => match &**t {
            // An optional token/group is serialized as a plain bool
            // (presence flag) and reconstructed with a default token.
            Type::Token(_) | Type::Group(_) => {
                let from = Some(quote!(#name.is_some()));
                let into = quote!(default_or_none(#name));
                (from, into)
            }
            Type::Tuple(t) => {
                // Recurse into each tuple element; elements whose `from` is
                // None (dropped tokens) are omitted from the serde side.
                let mut from_expr = Vec::new();
                let mut from_pat = Vec::new();
                let mut into_expr = Vec::new();
                let mut into_pat = Vec::new();
                for (i, t) in t.iter().enumerate() {
                    let id = format_ident!("_{}", i);
                    let (from, into) = visit(t, &quote!((*#id)));
                    from_pat.push(id.clone());
                    into_expr.push(into);
                    if from.is_some() {
                        into_pat.push(id);
                        from_expr.push(from);
                    }
                }
                assert_eq!(from_pat.len(), into_expr.len());
                assert_eq!(into_pat.len(), from_expr.len());
                assert_ne!(into_pat.len(), 0);
                // A single surviving element is unwrapped from its tuple.
                if into_pat.len() == 1 {
                    let from = Some(quote!(#name.ref_map(|(#(#from_pat),*)| #(#from_expr),*)));
                    let into = quote!(#name.ref_map(|#(#into_pat),*| (#(#into_expr),*)));
                    (from, into)
                } else {
                    let from = Some(quote!(#name.ref_map(|(#(#from_pat),*)| (#(#from_expr),*))));
                    let into = quote!(#name.ref_map(|(#(#into_pat),*)| (#(#into_expr),*)));
                    (from, into)
                }
            }
            Type::Box(_) | Type::Vec(_) | Type::Punctuated(_) => {
                let from = Some(quote!(#name.ref_map(MapInto::map_into)));
                let into = quote!(#name.ref_map(MapInto::map_into));
                (from, into)
            }
            Type::Std(t) if t == "String" => {
                // `From<&String> for String` requires Rust 1.36 or later.
                // Refs: https://github.com/rust-lang/rust/pull/59825
                let from = Some(quote!(#name.ref_map(ToString::to_string)));
                let into = quote!(#name.ref_map(ToString::to_string));
                (from, into)
            }
            _ => {
                let from = Some(quote!(#name.map_into()));
                let into = quote!(#name.map_into());
                (from, into)
            }
        },
        // Bare tokens/groups carry no information: dropped going out,
        // defaulted coming back.
        Type::Token(_) | Type::Group(_) => {
            let from = None;
            let into = quote!(default());
            (from, into)
        }
        Type::Syn(t) if t == "Reserved" => {
            let from = None;
            let into = quote!(default());
            (from, into)
        }
        // Spans are not serialized; recreate them at the call site.
        Type::Ext(t) if t == "Span" => {
            let from = None;
            let into = quote!(proc_macro2::Span::call_site());
            (from, into)
        }
        Type::Syn(_) | Type::Ext(_) => {
            let from = Some(quote!(#name.ref_into()));
            let into = quote!(#name.ref_into());
            (from, into)
        }
        Type::Std(_) => {
            let from = Some(quote!(#name.into()));
            let into = quote!(#name.into());
            (from, into)
        }
        // Bare (non-Option) tuples do not occur in the syn definitions.
        Type::Tuple(t) => unreachable!("Type::Tuple: {:?}", t),
    }
}
/// Emits `From<&syn::T> for T` and `From<&T> for syn::T` impls for a single
/// type definition and appends them to `traits`.
///
/// Types listed in `IGNORED_TYPES` are skipped entirely; structs that end up
/// with no serializable fields must appear in `EMPTY_STRUCTS` and are also
/// skipped (asserted below).
#[allow(clippy::cognitive_complexity)]
fn node(traits: &mut TokenStream, node: &Node, _defs: &Definitions) {
    if IGNORED_TYPES.contains(&&*node.ident) {
        return;
    }
    let ty = format_ident!("{}", &node.ident);
    let mut from_impl = TokenStream::new();
    let mut into_impl = TokenStream::new();
    match &node.data {
        Data::Enum(variants) => {
            // Build one match arm per variant for each conversion direction.
            let mut from_variants = TokenStream::new();
            let mut into_variants = TokenStream::new();
            for (variant, fields) in variants {
                let variant_ident = format_ident!("{}", variant);
                if fields.is_empty() {
                    // Unit variant: maps 1:1 in both directions.
                    from_variants.extend(quote! {
                        syn::#ty::#variant_ident => {
                            #ty::#variant_ident
                        }
                    });
                    into_variants.extend(quote! {
                        #ty::#variant_ident => {
                            syn::#ty::#variant_ident
                        }
                    });
                } else {
                    // Tuple variant: convert each payload via `visit`;
                    // payloads dropped on the serde side (from == None) are
                    // ignored in patterns and defaulted on the way back.
                    let mut from_expr = Vec::new();
                    let mut from_pat = Vec::new();
                    let mut into_expr = Vec::new();
                    let mut into_pat = Vec::new();
                    for (i, t) in fields.iter().enumerate() {
                        let id = format_ident!("_{}", i);
                        let (from, into) = visit(t, &quote!((*#id)));
                        from_pat.push(id.clone());
                        into_expr.push(into);
                        if from.is_some() {
                            into_pat.push(id);
                            from_expr.push(from);
                        }
                    }
                    if from_expr.is_empty() {
                        // Every payload is token-only: the serde variant is a
                        // unit variant.
                        from_variants.extend(quote! {
                            syn::#ty::#variant_ident(..) => {
                                #ty::#variant_ident
                            }
                        });
                        into_variants.extend(quote! {
                            #ty::#variant_ident => {
                                syn::#ty::#variant_ident(#(#into_expr),*)
                            }
                        });
                    } else {
                        from_variants.extend(quote! {
                            syn::#ty::#variant_ident(#(#from_pat),*) => {
                                #ty::#variant_ident(#(#from_expr),*)
                            }
                        });
                        into_variants.extend(quote! {
                            #ty::#variant_ident(#(#into_pat),*) => {
                                syn::#ty::#variant_ident(#(#into_expr),*)
                            }
                        });
                    }
                }
            }
            // Non-exhaustive enums need a catch-all arm to satisfy the
            // compiler, but it should never be reached.
            let nonexhaustive =
                if node.exhaustive { None } else { Some(quote!(_ => unreachable!())) };
            from_impl.extend(quote! {
                match node {
                    #from_variants
                    #nonexhaustive
                }
            });
            into_impl.extend(quote! {
                match node {
                    #into_variants
                    #nonexhaustive
                }
            });
        }
        Data::Struct(fields) => {
            let mut from_fields = TokenStream::new();
            let mut into_fields = TokenStream::new();
            for (field, ty) in fields {
                let id = format_ident!("{}", field);
                let ref_toks = quote!(node.#id);
                let (from, into) = visit(&ty, &ref_toks);
                // Token-only fields (from == None) are omitted from the serde
                // struct but always reconstructed on the syn side.
                if from.is_some() {
                    from_fields.extend(quote! {
                        #id: #from,
                    });
                }
                into_fields.extend(quote! {
                    #id: #into,
                });
            }
            assert!(!fields.is_empty(), "fields.is_empty: {}", ty);
            if !from_fields.is_empty() {
                from_impl.extend(quote! {
                    #ty {
                        #from_fields
                    }
                });
                into_impl.extend(quote! {
                    syn::#ty {
                        #into_fields
                    }
                });
            } else {
                // Structs with no serializable fields must be whitelisted.
                assert!(EMPTY_STRUCTS.contains(&&*node.ident), "from_fields.is_empty(): {}", ty);
                return;
            }
        }
        // Private types are filtered out via IGNORED_TYPES above.
        Data::Private => unreachable!("Data::Private: {}", ty),
    }
    traits.extend(quote! {
        syn_trait_impl!(syn::#ty);
        impl From<&syn::#ty> for #ty {
            fn from(node: &syn::#ty) -> Self {
                #from_impl
            }
        }
        impl From<&#ty> for syn::#ty {
            fn from(node: &#ty) -> Self {
                #into_impl
            }
        }
    });
}
/// Generates the `From` conversion impls for every syn type definition and
/// writes the result to `src/gen/mod.rs` via `file::write`.
pub(crate) fn generate(defs: &Definitions) -> Result<()> {
    let traits = gen::traverse(defs, node);
    // Assemble the full module contents before writing, so the lint
    // allowances sit at the top of the generated file.
    let contents = quote! {
        // Unreachable code is generated sometimes without the full feature.
        #![allow(unreachable_code, unused_variables, unused_parens)]
        #![allow(
            clippy::double_parens,
            clippy::identity_conversion,
            clippy::just_underscores_and_digits,
        )]
        use crate::*;
        #traits
    };
    file::write(SERDE_SRC, contents)?;
    Ok(())
}

View file

@ -1,9 +0,0 @@
### [`rust2json`](rust2json)
**Rust -> JSON**.
Little utility to parse a Rust source file into a `syn_serde::File` and print out a JSON representation of the syntax tree.
### [`json2rust`](json2rust)
**JSON -> Rust**.
Little utility to parse a JSON file into a `syn_serde::File` and print out a Rust syntax tree.

View file

@ -1,12 +0,0 @@
[package]
name = "json2rust"
version = "0.0.0"
authors = ["Taiki Endo <te316e89@gmail.com>"]
edition = "2018"
publish = false
[dependencies]
quote = "1.0"
syn-serde = { path = "../..", features = ["json"] }
syn = { version = "1.0", features = ["full"] }
tempfile = "3.1"

View file

@ -1,7 +0,0 @@
Parse a JSON file into a `syn_serde::File` and print out a Rust syntax tree.
```text
cargo run -- json2rust_main.json
```
The output is the same as [src/main.rs](src/main.rs) with blank lines and comments removed.

File diff suppressed because it is too large Load diff

View file

@ -1,54 +0,0 @@
#![warn(rust_2018_idioms)]
use quote::ToTokens;
use std::{
env, fs,
io::{self, BufWriter, Write},
path::PathBuf,
process::{Command, Stdio},
};
use syn_serde::json;
use tempfile::Builder;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// Entry point: delegates to `try_main` and reports any failure on stderr
/// with a nonzero exit status.
fn main() {
    match try_main() {
        Ok(()) => {}
        Err(e) => {
            eprintln!("{}", e);
            std::process::exit(1);
        }
    }
}
/// Reads the JSON file named on the command line, deserializes it into a
/// `syn::File`, formats it with rustfmt (best effort), and prints the
/// resulting Rust source to stdout.
fn try_main() -> Result<()> {
    let mut args = env::args_os();
    let _ = args.next(); // executable name
    let filepath = match (args.next(), args.next()) {
        (Some(arg1), None) => PathBuf::from(arg1),
        _ => {
            // Fixed: this message previously advertised the sibling
            // `rust2json` example and a `.rs` input; this binary is
            // `json2rust` and takes a JSON file.
            println!("Usage: json2rust path/to/filename.json");
            return Ok(());
        }
    };
    let json = fs::read_to_string(&filepath)?;
    let syntax: syn::File = json::from_str(&json)?;
    // Write the regenerated source into a temp dir so rustfmt can run on it.
    let outdir = Builder::new().prefix("json2rust").tempdir()?;
    let outfile_path = outdir.path().join("expanded");
    fs::write(&outfile_path, syntax.into_token_stream().to_string())?;
    // Run rustfmt
    // https://github.com/dtolnay/cargo-expand/blob/0.4.9/src/main.rs#L181-L182
    let rustfmt_config_path = outdir.path().join("rustfmt.toml");
    fs::write(rustfmt_config_path, "normalize_doc_attributes = true\n")?;
    // Ignore any errors: rustfmt may be absent, and unformatted output is
    // still useful.
    let _status = Command::new("rustfmt").arg(&outfile_path).stderr(Stdio::null()).status();
    let writer = io::stdout();
    let mut writer = BufWriter::new(writer.lock());
    writer.write_all(fs::read_to_string(&outfile_path)?.as_bytes())?;
    writer.flush()?;
    Ok(())
}

View file

@ -1,10 +0,0 @@
[package]
name = "rust2json"
version = "0.0.0"
authors = ["Taiki Endo <te316e89@gmail.com>"]
edition = "2018"
publish = false
[dependencies]
syn-serde = { path = "../..", features = ["json"] }
syn = { version = "1.0", features = ["full"] }

View file

@ -1,7 +0,0 @@
Parse a Rust source file into a `syn_serde::File` and print out a JSON representation of the syntax tree.
```text
cargo run -- src/main.rs
```
The output is the same as [rust2json_main.json](rust2json_main.json).

View file

@ -1,782 +0,0 @@
{
"attrs": [
{
"style": "inner",
"path": {
"segments": [
{
"ident": "warn"
}
]
},
"tokens": [
{
"group": {
"delimiter": "parenthesis",
"stream": [
{
"ident": "rust_2018_idioms"
}
]
}
}
]
}
],
"items": [
{
"use": {
"tree": {
"path": {
"ident": "std",
"tree": {
"group": [
{
"ident": "env"
},
{
"ident": "fs"
},
{
"path": {
"ident": "io",
"tree": {
"group": [
{
"ident": "self"
},
{
"ident": "BufWriter"
},
{
"ident": "Write"
}
]
}
}
},
{
"path": {
"ident": "path",
"tree": {
"ident": "PathBuf"
}
}
}
]
}
}
}
}
},
{
"use": {
"tree": {
"path": {
"ident": "syn_serde",
"tree": {
"ident": "json"
}
}
}
}
},
{
"type": {
"ident": "Result",
"generics": {
"params": [
{
"type": {
"ident": "T"
}
}
]
},
"ty": {
"path": {
"segments": [
{
"ident": "std"
},
{
"ident": "result"
},
{
"ident": "Result",
"arguments": {
"angle_bracketed": {
"args": [
{
"type": {
"path": {
"segments": [
{
"ident": "T"
}
]
}
}
},
{
"type": {
"path": {
"segments": [
{
"ident": "Box",
"arguments": {
"angle_bracketed": {
"args": [
{
"type": {
"trait_object": {
"dyn": true,
"bounds": [
{
"trait": {
"path": {
"segments": [
{
"ident": "std"
},
{
"ident": "error"
},
{
"ident": "Error"
}
]
}
}
}
]
}
}
}
]
}
}
}
]
}
}
}
]
}
}
}
]
}
}
}
},
{
"fn": {
"ident": "main",
"inputs": [],
"output": null,
"stmts": [
{
"expr": {
"if": {
"cond": {
"let": {
"pat": {
"tuple_struct": {
"path": {
"segments": [
{
"ident": "Err"
}
]
},
"pat": {
"elems": [
{
"ident": {
"ident": "e"
}
}
]
}
}
},
"expr": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "try_main"
}
]
}
},
"args": []
}
}
}
},
"then_branch": [
{
"semi": {
"macro": {
"path": {
"segments": [
{
"ident": "eprintln"
}
]
},
"delimiter": "paren",
"tokens": [
{
"lit": "\"{}\""
},
{
"punct": {
"op": ",",
"spacing": "alone"
}
},
{
"ident": "e"
}
]
}
}
},
{
"semi": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "std"
},
{
"ident": "process"
},
{
"ident": "exit"
}
]
}
},
"args": [
{
"lit": {
"int": "1"
}
}
]
}
}
}
]
}
}
}
]
}
},
{
"fn": {
"ident": "try_main",
"inputs": [],
"output": {
"path": {
"segments": [
{
"ident": "Result",
"arguments": {
"angle_bracketed": {
"args": [
{
"type": {
"tuple": {
"elems": []
}
}
}
]
}
}
}
]
}
},
"stmts": [
{
"let": {
"pat": {
"ident": {
"mut": true,
"ident": "args"
}
},
"init": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "env"
},
{
"ident": "args_os"
}
]
}
},
"args": []
}
}
}
},
{
"let": {
"pat": {
"_": {}
},
"init": {
"method_call": {
"receiver": {
"path": {
"segments": [
{
"ident": "args"
}
]
}
},
"method": "next",
"args": []
}
}
}
},
{
"let": {
"pat": {
"ident": {
"ident": "filepath"
}
},
"init": {
"match": {
"expr": {
"tuple": {
"elems": [
{
"method_call": {
"receiver": {
"path": {
"segments": [
{
"ident": "args"
}
]
}
},
"method": "next",
"args": []
}
},
{
"method_call": {
"receiver": {
"path": {
"segments": [
{
"ident": "args"
}
]
}
},
"method": "next",
"args": []
}
}
]
}
},
"arms": [
{
"pat": {
"tuple": {
"elems": [
{
"tuple_struct": {
"path": {
"segments": [
{
"ident": "Some"
}
]
},
"pat": {
"elems": [
{
"ident": {
"ident": "arg"
}
}
]
}
}
},
{
"ident": {
"ident": "None"
}
}
]
}
},
"body": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "PathBuf"
},
{
"ident": "from"
}
]
}
},
"args": [
{
"path": {
"segments": [
{
"ident": "arg"
}
]
}
}
]
}
}
},
{
"pat": {
"_": {}
},
"body": {
"block": {
"stmts": [
{
"semi": {
"macro": {
"path": {
"segments": [
{
"ident": "println"
}
]
},
"delimiter": "paren",
"tokens": [
{
"lit": "\"Usage: rust2json path/to/filename.rs\""
}
]
}
}
},
{
"semi": {
"return": {
"expr": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "Ok"
}
]
}
},
"args": [
{
"tuple": {
"elems": []
}
}
]
}
}
}
}
}
]
}
}
}
]
}
}
}
},
{
"let": {
"pat": {
"ident": {
"ident": "code"
}
},
"init": {
"try": {
"expr": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "fs"
},
{
"ident": "read_to_string"
}
]
}
},
"args": [
{
"reference": {
"expr": {
"path": {
"segments": [
{
"ident": "filepath"
}
]
}
}
}
}
]
}
}
}
}
}
},
{
"let": {
"pat": {
"ident": {
"ident": "syntax"
}
},
"init": {
"try": {
"expr": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "syn"
},
{
"ident": "parse_file"
}
]
}
},
"args": [
{
"reference": {
"expr": {
"path": {
"segments": [
{
"ident": "code"
}
]
}
}
}
}
]
}
}
}
}
}
},
{
"let": {
"pat": {
"ident": {
"ident": "writer"
}
},
"init": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "io"
},
{
"ident": "stdout"
}
]
}
},
"args": []
}
}
}
},
{
"let": {
"pat": {
"ident": {
"mut": true,
"ident": "writer"
}
},
"init": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "BufWriter"
},
{
"ident": "new"
}
]
}
},
"args": [
{
"method_call": {
"receiver": {
"path": {
"segments": [
{
"ident": "writer"
}
]
}
},
"method": "lock",
"args": []
}
}
]
}
}
}
},
{
"semi": {
"try": {
"expr": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "json"
},
{
"ident": "to_writer_pretty"
}
]
}
},
"args": [
{
"reference": {
"mut": true,
"expr": {
"path": {
"segments": [
{
"ident": "writer"
}
]
}
}
}
},
{
"reference": {
"expr": {
"path": {
"segments": [
{
"ident": "syntax"
}
]
}
}
}
}
]
}
}
}
}
},
{
"semi": {
"try": {
"expr": {
"method_call": {
"receiver": {
"path": {
"segments": [
{
"ident": "writer"
}
]
}
},
"method": "flush",
"args": []
}
}
}
}
},
{
"expr": {
"call": {
"func": {
"path": {
"segments": [
{
"ident": "Ok"
}
]
}
},
"args": [
{
"tuple": {
"elems": []
}
}
]
}
}
}
]
}
}
]
}

View file

@ -1,39 +0,0 @@
#![warn(rust_2018_idioms)]
use std::{
env, fs,
io::{self, BufWriter, Write},
path::PathBuf,
};
use syn_serde::json;
type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;
/// Entry point: delegates to `try_main` and reports any failure on stderr
/// with a nonzero exit status.
fn main() {
    match try_main() {
        Ok(()) => {}
        Err(e) => {
            eprintln!("{}", e);
            std::process::exit(1);
        }
    }
}
/// Reads the Rust source file named on the command line, parses it with
/// `syn`, and pretty-prints the syntax tree as JSON on stdout.
fn try_main() -> Result<()> {
    let mut args = env::args_os();
    let _ = args.next(); // executable name
    let path = if let (Some(first), None) = (args.next(), args.next()) {
        PathBuf::from(first)
    } else {
        println!("Usage: rust2json path/to/filename.rs");
        return Ok(());
    };
    let source = fs::read_to_string(&path)?;
    let file = syn::parse_file(&source)?;
    // Lock stdout once and buffer the whole JSON document.
    let stdout = io::stdout();
    let mut out = BufWriter::new(stdout.lock());
    json::to_writer_pretty(&mut out, &file)?;
    out.flush()?;
    Ok(())
}

View file

@ -1,115 +0,0 @@
use super::*;
ast_struct! {
    /// An attribute like `#[repr(transparent)]`.
    ///
    /// # Syntax
    ///
    /// Rust has six types of attributes.
    ///
    /// - Outer attributes like `#[repr(transparent)]`. These appear outside or
    ///   in front of the item they describe.
    /// - Inner attributes like `#![feature(proc_macro)]`. These appear inside
    ///   of the item they describe, usually a module.
    /// - Outer doc comments like `/// # Example`.
    /// - Inner doc comments like `//! Please file an issue`.
    /// - Outer block comments `/** # Example */`.
    /// - Inner block comments `/*! Please file an issue */`.
    ///
    /// The `style` field of type `AttrStyle` distinguishes whether an attribute
    /// is outer or inner. Doc comments and block comments are promoted to
    /// attributes, as this is how they are processed by the compiler and by
    /// `macro_rules!` macros.
    ///
    /// The `path` field gives the possibly colon-delimited path against which
    /// the attribute is resolved. It is equal to `"doc"` for desugared doc
    /// comments. The `tokens` field contains the rest of the attribute body as
    /// tokens.
    ///
    /// ```text
    /// #[derive(Copy)] #[crate::precondition x < 5]
    /// ^^^^^^~~~~~~ ^^^^^^^^^^^^^^^^^^^ ~~~~~
    /// path tokens path tokens
    /// ```
    pub struct Attribute {
        pub(crate) style: AttrStyle,
        pub(crate) path: Path,
        // Omitted from serialized output when the attribute has no arguments.
        #[serde(default, skip_serializing_if = "TokenStream::is_empty")]
        pub(crate) tokens: TokenStream,
    }
}

ast_enum! {
    /// Distinguishes between attributes that decorate an item and attributes
    /// that are contained within an item.
    ///
    /// # Outer attributes
    ///
    /// - `#[repr(transparent)]`
    /// - `/// # Example`
    /// - `/** Please file an issue */`
    ///
    /// # Inner attributes
    ///
    /// - `#![feature(proc_macro)]`
    /// - `//! # Example`
    /// - `/*! Please file an issue */`
    pub enum AttrStyle {
        Outer,
        Inner,
    }
}

ast_enum! {
    /// Content of a compile-time structured attribute.
    ///
    /// ## Path
    ///
    /// A meta path is like the `test` in `#[test]`.
    ///
    /// ## List
    ///
    /// A meta list is like the `derive(Copy)` in `#[derive(Copy)]`.
    ///
    /// ## NameValue
    ///
    /// A name-value meta is like the `path = "..."` in `#[path =
    /// "sys/windows.rs"]`.
    pub enum Meta {
        Path(Path),
        /// A structured list within an attribute, like `derive(Copy, Clone)`.
        List(MetaList),
        /// A name-value pair within an attribute, like `feature = "nightly"`.
        NameValue(MetaNameValue),
    }
}

ast_struct! {
    /// A structured list within an attribute, like `derive(Copy, Clone)`.
    pub struct MetaList {
        pub(crate) path: Path,
        pub(crate) nested: Punctuated<NestedMeta>,
    }
}

ast_struct! {
    /// A name-value pair within an attribute, like `feature = "nightly"`.
    pub struct MetaNameValue {
        pub(crate) path: Path,
        pub(crate) lit: Lit,
    }
}

ast_enum! {
    /// Element of a compile-time attribute list.
    pub enum NestedMeta {
        /// A structured meta item, like the `Copy` in `#[derive(Copy)]` which
        /// would be a nested `Meta::Word`.
        Meta(Meta),
        /// A Rust literal, like the `"new_name"` in `#[rename("new_name")]`.
        Lit(Lit),
    }
}

View file

@ -1,185 +0,0 @@
use super::*;
ast_struct! {
    /// An enum variant.
    pub struct Variant {
        /// Attributes tagged on the variant.
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// Name of the variant.
        pub(crate) ident: Ident,
        /// Content stored in the variant.
        pub(crate) fields: Fields,
        /// Explicit discriminant: `Variant = 1`
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) discriminant: Option<Expr>,
    }
}

ast_enum! {
    /// Data stored within an enum variant or struct.
    pub enum Fields {
        /// Named fields of a struct or struct variant such as `Point { x: f64,
        /// y: f64 }`.
        Named(FieldsNamed),
        /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
        Unnamed(FieldsUnnamed),
        /// Unit struct or unit variant such as `None`.
        Unit,
    }
}

ast_struct! {
    /// Named fields of a struct or struct variant such as `Point { x: f64,
    /// y: f64 }`.
    ///
    /// Serialized transparently as the field list, with no wrapper object.
    #[serde(transparent)]
    pub struct FieldsNamed {
        pub(crate) named: Punctuated<Field>,
    }
}

ast_struct! {
    /// Unnamed fields of a tuple struct or tuple variant such as `Some(T)`.
    ///
    /// Serialized transparently as the field list, with no wrapper object.
    #[serde(transparent)]
    pub struct FieldsUnnamed {
        pub(crate) unnamed: Punctuated<Field>,
    }
}
impl Fields {
    /// Returns `true` when the fields are named (brace-style); `false` for
    /// tuple and unit fields.
    pub(crate) fn is_named(&self) -> bool {
        // `matches!` replaces the manual match; behavior is identical.
        matches!(self, Fields::Named(_))
    }
}
// assertions
// `syn::parse*` functions will detect these, but there is a possibility to
// generate incorrect code by subsequent operations.
/// Validates that the presence/absence of a trailing `;` is consistent with
/// the kind of struct fields; panics with a parse-style message otherwise.
pub(crate) fn assert_struct_semi(fields: &Fields, semi_token: bool) {
    match fields {
        // struct foo {};
        Fields::Named(_) => assert!(!semi_token, "unexpected token: `;`"),
        // struct foo ()
        Fields::Unnamed(_) => assert!(
            semi_token,
            "unexpected end of input, expected `where` or `;`"
        ),
        // struct foo
        Fields::Unit => assert!(
            semi_token,
            "unexpected end of input, expected one of: `where`, parentheses, curly braces, `;`"
        ),
    }
}
ast_struct! {
    /// A field of a struct or enum variant.
    pub struct Field {
        /// Attributes tagged on the field.
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// Visibility of the field.
        #[serde(default, skip_serializing_if = "Visibility::is_inherited")]
        pub(crate) vis: Visibility,
        /// Name of the field, if any.
        ///
        /// Fields of tuple structs have no names.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) ident: Option<Ident>,
        // syn's `colon_token` stored as a presence flag; omitted when false.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) colon_token: bool,
        /// Type of the field.
        pub(crate) ty: Type,
    }
}

ast_enum! {
    /// The visibility level of an item: inherited or `pub` or
    /// `pub(restricted)`.
    pub enum Visibility {
        /// A public visibility level: `pub`.
        #[serde(rename = "pub")]
        Public,
        /// A crate-level visibility: `crate`.
        Crate,
        /// A visibility level restricted to some path: `pub(self)` or
        /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
        Restricted(VisRestricted),
        /// An inherited visibility, which usually means private.
        Inherited,
    }
}
impl Visibility {
    /// Returns `true` for the default (inherited/private) visibility; used by
    /// serde's `skip_serializing_if` on `Field::vis`.
    pub(crate) fn is_inherited(&self) -> bool {
        // `matches!` replaces the manual match with a catch-all arm;
        // behavior is identical.
        matches!(self, Visibility::Inherited)
    }
}
impl Default for Visibility {
fn default() -> Self {
Visibility::Inherited
}
}
ast_struct! {
    /// A visibility level restricted to some path: `pub(self)` or
    /// `pub(super)` or `pub(crate)` or `pub(in some::module)`.
    pub struct VisRestricted {
        // Presence of the `in` keyword, as in `pub(in some::module)`;
        // omitted from serialized output when false.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) in_token: bool,
        pub(crate) path: Box<Path>,
    }
}
// Hand-written conversions for `Visibility`: the serde-side enum uses unit
// variants where syn's variants carry token fields (`pub_token`,
// `crate_token`), so this type is excluded from codegen.
mod convert {
    use super::*;

    // Visibility
    syn_trait_impl!(syn::Visibility);
    impl From<&syn::Visibility> for Visibility {
        fn from(other: &syn::Visibility) -> Self {
            use super::Visibility::*;
            use syn::Visibility;
            match other {
                Visibility::Public(_) => Public,
                Visibility::Crate(_) => Crate,
                Visibility::Restricted(x) => Restricted(x.ref_into()),
                Visibility::Inherited => Inherited,
            }
        }
    }
    impl From<&Visibility> for syn::Visibility {
        fn from(other: &Visibility) -> Self {
            use syn::Visibility::*;
            match other {
                // Tokens dropped during serialization are recreated with
                // `default()`.
                Visibility::Public => Public(syn::VisPublic {
                    pub_token: default(),
                }),
                Visibility::Crate => Crate(syn::VisCrate {
                    crate_token: default(),
                }),
                Visibility::Restricted(x) => Restricted(x.into()),
                Visibility::Inherited => Inherited,
            }
        }
    }
}

View file

@ -1,779 +0,0 @@
use super::*;
ast_enum! {
    /// A Rust expression.
    pub enum Expr {
        /// A slice literal expression: `[a, b, c, d]`.
        Array(ExprArray),
        /// An assignment expression: `a = compute()`.
        Assign(ExprAssign),
        /// A compound assignment expression: `counter += 1`.
        AssignOp(ExprAssignOp),
        /// An async block: `async { ... }`.
        Async(ExprAsync),
        /// An await expression: `fut.await`.
        Await(ExprAwait),
        /// A binary operation: `a + b`, `a * b`.
        Binary(ExprBinary),
        /// A blocked scope: `{ ... }`.
        Block(ExprBlock),
        /// A box expression: `box f`.
        Box(ExprBox),
        /// A `break`, with an optional label to break and an optional
        /// expression.
        Break(ExprBreak),
        /// A function call expression: `invoke(a, b)`.
        Call(ExprCall),
        /// A cast expression: `foo as f64`.
        Cast(ExprCast),
        /// A closure expression: `|a, b| a + b`.
        Closure(ExprClosure),
        /// A `continue`, with an optional label.
        Continue(ExprContinue),
        /// Access of a named struct field (`obj.k`) or unnamed tuple struct
        /// field (`obj.0`).
        Field(ExprField),
        /// A for loop: `for pat in expr { ... }`.
        ForLoop(ExprForLoop),
        /// An expression contained within invisible delimiters.
        ///
        /// This variant is important for faithfully representing the precedence
        /// of expressions and is related to `None`-delimited spans in a
        /// `TokenStream`.
        Group(ExprGroup),
        /// An `if` expression with an optional `else` block: `if expr { ... }
        /// else { ... }`.
        ///
        /// The `else` branch expression may only be an `If` or `Block`
        /// expression, not any of the other types of expression.
        If(ExprIf),
        /// A square bracketed indexing expression: `vector[2]`.
        Index(ExprIndex),
        /// A `let` guard: `let Some(x) = opt`.
        Let(ExprLet),
        /// A literal in place of an expression: `1`, `"foo"`.
        Lit(ExprLit),
        /// Conditionless loop: `loop { ... }`.
        Loop(ExprLoop),
        /// A macro invocation expression: `format!("{}", q)`.
        Macro(ExprMacro),
        /// A `match` expression: `match n { Some(n) => {}, None => {} }`.
        Match(ExprMatch),
        /// A method call expression: `x.foo::<T>(a, b)`.
        MethodCall(ExprMethodCall),
        /// A parenthesized expression: `(a + b)`.
        Paren(ExprParen),
        /// A path like `std::mem::replace` possibly containing generic
        /// parameters and a qualified self-type.
        ///
        /// A plain identifier like `x` is a path of length 1.
        Path(ExprPath),
        /// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
        Range(ExprRange),
        /// A referencing operation: `&a` or `&mut a`.
        Reference(ExprReference),
        /// An array literal constructed from one repeated element: `[0u8; N]`.
        Repeat(ExprRepeat),
        /// A `return`, with an optional value to be returned.
        Return(ExprReturn),
        /// A struct literal expression: `Point { x: 1, y: 1 }`.
        ///
        /// The `rest` provides the value of the remaining fields as in `S { a:
        /// 1, b: 1, ..rest }`.
        Struct(ExprStruct),
        /// A try-expression: `expr?`.
        Try(ExprTry),
        /// A try block: `try { ... }`.
        TryBlock(ExprTryBlock),
        /// A tuple expression: `(a, b, c, d)`.
        Tuple(ExprTuple),
        /// A type ascription expression: `foo: f64`.
        Type(ExprType),
        /// A unary operation: `!x`, `*x`.
        Unary(ExprUnary),
        /// An unsafe block: `unsafe { ... }`.
        Unsafe(ExprUnsafe),
        /// Tokens in expression position not interpreted by Syn.
        Verbatim(TokenStream),
        /// A while loop: `while expr { ... }`.
        While(ExprWhile),
        /// A yield expression: `yield expr`.
        Yield(ExprYield),
        // Placeholder variant so this enum can grow without a breaking
        // change; never constructed.
        #[doc(hidden)]
        __Nonexhaustive,
    }
}
// Per-variant payload structs for `Expr`. Throughout, `attrs` is omitted from
// serialized output when empty, and bool fields annotated with
// `skip_serializing_if = "not"` are omitted when false.
ast_struct! {
    /// A slice literal expression: `[a, b, c, d]`.
    pub struct ExprArray {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) elems: Punctuated<Expr>,
    }
}

ast_struct! {
    /// An assignment expression: `a = compute()`.
    pub struct ExprAssign {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) left: Box<Expr>,
        pub(crate) right: Box<Expr>,
    }
}

ast_struct! {
    /// A compound assignment expression: `counter += 1`.
    pub struct ExprAssignOp {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) left: Box<Expr>,
        pub(crate) op: BinOp,
        pub(crate) right: Box<Expr>,
    }
}

ast_struct! {
    /// An async block: `async { ... }`.
    pub struct ExprAsync {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(rename = "move")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) capture: bool,
        #[serde(rename = "stmts")]
        pub(crate) block: Block,
    }
}

ast_struct! {
    /// An await expression: `fut.await`.
    pub struct ExprAwait {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) base: Box<Expr>,
    }
}

ast_struct! {
    /// A binary operation: `a + b`, `a * b`.
    pub struct ExprBinary {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) left: Box<Expr>,
        pub(crate) op: BinOp,
        pub(crate) right: Box<Expr>,
    }
}

ast_struct! {
    /// A blocked scope: `{ ... }`.
    pub struct ExprBlock {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) label: Option<Label>,
        #[serde(rename = "stmts")]
        pub(crate) block: Block,
    }
}

ast_struct! {
    /// A box expression: `box f`.
    pub struct ExprBox {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) expr: Box<Expr>,
    }
}

ast_struct! {
    /// A `break`, with an optional label to break and an optional
    /// expression.
    pub struct ExprBreak {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) label: Option<Lifetime>,
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) expr: Option<Box<Expr>>,
    }
}

ast_struct! {
    /// A function call expression: `invoke(a, b)`.
    pub struct ExprCall {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) func: Box<Expr>,
        pub(crate) args: Punctuated<Expr>,
    }
}

ast_struct! {
    /// A cast expression: `foo as f64`.
    pub struct ExprCast {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) expr: Box<Expr>,
        pub(crate) ty: Box<Type>,
    }
}

ast_struct! {
    /// A closure expression: `|a, b| a + b`.
    pub struct ExprClosure {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(rename = "async")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) asyncness: bool,
        #[serde(rename = "static")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) movability: bool,
        #[serde(rename = "move")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) capture: bool,
        pub(crate) inputs: Punctuated<Pat>,
        #[serde(default)]
        pub(crate) output: ReturnType,
        pub(crate) body: Box<Expr>,
    }
}
ast_struct! {
/// A `continue`, with an optional label.
pub struct ExprContinue {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) label: Option<Lifetime>,
}
}
ast_struct! {
/// Access of a named struct field (`obj.k`) or unnamed tuple struct
/// field (`obj.0`).
pub struct ExprField {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) base: Box<Expr>,
#[serde(flatten)]
pub(crate) member: Member,
}
}
ast_struct! {
/// A for loop: `for pat in expr { ... }`.
pub struct ExprForLoop {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) label: Option<Label>,
pub(crate) pat: Pat,
pub(crate) expr: Box<Expr>,
pub(crate) body: Block,
}
}
ast_struct! {
/// An expression contained within invisible delimiters.
///
/// This variant is important for faithfully representing the precedence
/// of expressions and is related to `None`-delimited spans in a
/// `TokenStream`.
pub struct ExprGroup {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// An `if` expression with an optional `else` block: `if expr { ... }
/// else { ... }`.
///
/// The `else` branch expression may only be an `If` or `Block`
/// expression, not any of the other types of expression.
pub struct ExprIf {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) cond: Box<Expr>,
pub(crate) then_branch: Block,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) else_branch: Option<Box<Expr>>,
}
}
ast_struct! {
/// A square bracketed indexing expression: `vector[2]`.
pub struct ExprIndex {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) expr: Box<Expr>,
pub(crate) index: Box<Expr>,
}
}
ast_struct! {
/// A `let` guard: `let Some(x) = opt`.
pub struct ExprLet {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) pat: Pat,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// A literal in place of an expression: `1`, `"foo"`.
pub struct ExprLit {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(flatten)]
pub(crate) lit: Lit,
}
}
ast_struct! {
/// Conditionless loop: `loop { ... }`.
pub struct ExprLoop {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) label: Option<Label>,
pub(crate) body: Block,
}
}
ast_struct! {
/// A macro invocation expression: `format!("{}", q)`.
pub struct ExprMacro {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(flatten)]
pub(crate) mac: Macro,
}
}
ast_struct! {
/// A `match` expression: `match n { Some(n) => {}, None => {} }`.
pub struct ExprMatch {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) expr: Box<Expr>,
pub(crate) arms: Vec<Arm>,
}
}
ast_struct! {
/// A method call expression: `x.foo::<T>(a, b)`.
pub struct ExprMethodCall {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) receiver: Box<Expr>,
pub(crate) method: Ident,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) turbofish: Option<MethodTurbofish>,
pub(crate) args: Punctuated<Expr>,
}
}
ast_struct! {
/// A parenthesized expression: `(a + b)`.
pub struct ExprParen {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// A path like `std::mem::replace` possibly containing generic
/// parameters and a qualified self-type.
///
/// A plain identifier like `x` is a path of length 1.
pub struct ExprPath {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) qself: Option<QSelf>,
#[serde(flatten)]
pub(crate) path: Path,
}
}
ast_struct! {
/// A range expression: `1..2`, `1..`, `..2`, `1..=2`, `..=2`.
pub struct ExprRange {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) from: Option<Box<Expr>>,
pub(crate) limits: RangeLimits,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) to: Option<Box<Expr>>,
}
}
ast_struct! {
/// A referencing operation: `&a` or `&mut a`.
pub struct ExprReference {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
// #[serde(default, skip_serializing_if = "Reserved::is_default")]
// pub(crate) raw: Reserved,
#[serde(rename = "mut")]
#[serde(default, skip_serializing_if = "not")]
pub(crate) mutability: bool,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// An array literal constructed from one repeated element: `[0u8; N]`.
pub struct ExprRepeat {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) expr: Box<Expr>,
pub(crate) len: Box<Expr>,
}
}
ast_struct! {
/// A `return`, with an optional value to be returned.
pub struct ExprReturn {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) expr: Option<Box<Expr>>,
}
}
ast_struct! {
    /// A struct literal expression: `Point { x: 1, y: 1 }`.
    ///
    /// The `rest` provides the value of the remaining fields as in `S { a:
    /// 1, b: 1, ..rest }`.
    pub struct ExprStruct {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// Path of the struct being constructed, e.g. `Point`.
        pub(crate) path: Path,
        /// Explicitly-written field initializers.
        pub(crate) fields: Punctuated<FieldValue>,
        /// Whether the literal contains a `..` (functional-update syntax).
        /// `rest` may still be `None` when `..` appears with no trailing expr.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) dot2_token: bool,
        /// The base expression after `..`, as in `..rest`.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) rest: Option<Box<Expr>>,
    }
}
ast_struct! {
/// A try-expression: `expr?`.
pub struct ExprTry {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// A try block: `try { ... }`.
pub struct ExprTryBlock {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(rename = "stmts")]
pub(crate) block: Block,
}
}
ast_struct! {
/// A tuple expression: `(a, b, c, d)`.
pub struct ExprTuple {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) elems: Punctuated<Expr>,
}
}
ast_struct! {
/// A type ascription expression: `foo: f64`.
pub struct ExprType {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) expr: Box<Expr>,
pub(crate) ty: Box<Type>,
}
}
ast_struct! {
/// A unary operation: `!x`, `*x`.
pub struct ExprUnary {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) op: UnOp,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// An unsafe block: `unsafe { ... }`.
pub struct ExprUnsafe {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(rename = "stmts")]
pub(crate) block: Block,
}
}
ast_struct! {
/// A while loop: `while expr { ... }`.
pub struct ExprWhile {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) label: Option<Label>,
pub(crate) cond: Box<Expr>,
pub(crate) body: Block,
}
}
ast_struct! {
/// A yield expression: `yield expr`.
pub struct ExprYield {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) expr: Option<Box<Expr>>,
}
}
ast_enum! {
/// A struct or tuple struct field accessed in a struct literal or field
/// expression.
pub enum Member {
/// A named field like `self.x`.
#[serde(rename = "ident")]
Named(Ident),
/// An unnamed field like `self.0`.
#[serde(rename = "index")]
Unnamed(Index),
}
}
ast_struct! {
    /// The index of an unnamed tuple struct field.
    ///
    /// Serialized transparently: the JSON form is the bare integer rather
    /// than an object wrapping it.
    #[serde(transparent)]
    pub struct Index {
        pub(crate) index: u32,
    }
}
ast_struct! {
/// The `::<>` explicit type parameters passed to a method call:
/// `parse::<u64>()`.
pub struct MethodTurbofish {
pub(crate) args: Punctuated<GenericMethodArgument>,
}
}
ast_enum! {
/// An individual generic argument to a method, like `T`.
pub enum GenericMethodArgument {
/// A type argument.
Type(Type),
/// A const expression. Must be inside of a block.
///
/// NOTE: Identity expressions are represented as Type arguments, as
/// they are indistinguishable syntactically.
Const(Expr),
}
}
ast_struct! {
/// A field-value pair in a struct literal.
pub struct FieldValue {
/// Attributes tagged on the field.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
/// Name or index of the field.
#[serde(flatten)]
pub(crate) member: Member,
/// The colon in `Struct { x: x }`. If written in shorthand like
/// `Struct { x }`, there is no colon.
#[serde(default, skip_serializing_if = "not")]
pub(crate) colon_token: bool,
/// Value of the field.
pub(crate) expr: Expr,
}
}
ast_struct! {
/// A lifetime labeling a `for`, `while`, or `loop`.
#[serde(transparent)]
pub struct Label {
pub(crate) name: Lifetime,
}
}
ast_struct! {
/// One arm of a `match` expression: `0...10 => { return true; }`.
pub struct Arm {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) pat: Pat,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) guard: Option<Box<Expr>>,
pub(crate) body: Box<Expr>,
// #[serde(default, skip_serializing_if = "not")]
// pub(crate) comma: bool,
}
}
ast_enum! {
/// Limit types of a range, inclusive or exclusive.
pub enum RangeLimits {
/// Inclusive at the beginning, exclusive at the end.
#[serde(rename = "..")]
HalfOpen,
/// Inclusive at the beginning and end.
#[serde(rename = "..=")]
Closed,
}
}
/// Whether an expression used as a non-final `match` arm body must be
/// followed by a `,`.
///
/// Block-like expressions (those that end with `}`) may stand alone as an
/// arm body; every other expression form requires the trailing comma.
pub(crate) fn requires_terminator(expr: &Expr) -> bool {
    // see https://github.com/rust-lang/rust/blob/eb8f2586e/src/libsyntax/parse/classify.rs#L17-L37
    !matches!(
        expr,
        Expr::Unsafe(..)
            | Expr::Block(..)
            | Expr::If(..)
            | Expr::Match(..)
            | Expr::While(..)
            | Expr::Loop(..)
            | Expr::ForLoop(..)
            | Expr::Async(..)
            | Expr::TryBlock(..)
    )
}
mod convert {
    //! Conversions between this crate's serializable `ExprMatch`/`Arm` and
    //! the corresponding `syn` types.
    use super::*;
    // ExprMatch
    syn_trait_impl!(syn::ExprMatch);
    /// Converts a slice of `syn::Arm` into this crate's `Arm`s, asserting
    /// that every non-final arm whose body is not block-like carries the
    /// `,` that Rust's grammar requires.
    fn from_syn_arms(other: &[syn::Arm]) -> Vec<Arm> {
        // Index of the last arm; the final arm may legally omit its comma.
        let last = other.len().saturating_sub(1);
        other
            .iter()
            .enumerate()
            .map(|(i, other)| {
                let body = other.body.map_into();
                // Non-block bodies on non-final arms must end in `,`.
                if i < last && requires_terminator(&*body) {
                    assert!(other.comma.is_some(), "expected `,`");
                }
                Arm {
                    attrs: other.attrs.map_into(),
                    pat: other.pat.ref_into(),
                    // syn stores the guard as `(if_token, expr)`; drop the token.
                    guard: other.guard.ref_map(|(_, x)| x.map_into()),
                    body,
                }
            })
            .collect()
    }
    impl From<&syn::ExprMatch> for ExprMatch {
        fn from(other: &syn::ExprMatch) -> Self {
            Self {
                attrs: other.attrs.map_into(),
                expr: other.expr.map_into(),
                // Arms go through `from_syn_arms` to validate comma placement.
                arms: from_syn_arms(&other.arms),
            }
        }
    }
    impl From<&ExprMatch> for syn::ExprMatch {
        fn from(other: &ExprMatch) -> Self {
            Self {
                attrs: other.attrs.map_into(),
                // Tokens carry no serialized state; synthesize defaults.
                match_token: default(),
                expr: other.expr.map_into(),
                brace_token: default(),
                arms: other.arms.map_into(),
            }
        }
    }
    // Arm
    syn_trait_impl!(syn::Arm);
    impl From<&syn::Arm> for Arm {
        fn from(other: &syn::Arm) -> Self {
            let body = other.body.map_into();
            // NOTE(review): unlike `from_syn_arms`, a standalone conversion
            // cannot know whether this arm is the last one, so the comma is
            // required whenever the body is not block-like.
            if requires_terminator(&*body) {
                assert!(other.comma.is_some(), "expected `,`");
            }
            Self {
                attrs: other.attrs.map_into(),
                pat: other.pat.ref_into(),
                guard: other.guard.ref_map(|(_, x)| x.map_into()),
                body,
            }
        }
    }
    impl From<&Arm> for syn::Arm {
        fn from(other: &Arm) -> Self {
            Self {
                attrs: other.attrs.map_into(),
                pat: other.pat.ref_into(),
                // Reattach a synthesized `if` token alongside the guard expr.
                guard: other.guard.ref_map(|x| (default(), x.map_into())),
                fat_arrow_token: default(),
                body: other.body.map_into(),
                // Emit a comma only where the grammar demands one.
                comma: default_or_none(requires_terminator(&*other.body)),
            }
        }
    }
}

View file

@ -1,12 +0,0 @@
use super::*;
ast_struct! {
/// A complete file of Rust source code.
pub struct File {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) shebang: Option<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) items: Vec<Item>,
}
}

File diff suppressed because it is too large Load diff

View file

@ -1,223 +0,0 @@
use super::*;
ast_struct! {
/// Lifetimes and type parameters attached to a declaration of a function,
/// enum, trait, etc.
#[derive(Default)]
pub struct Generics {
// #[serde(default, skip_serializing_if = "not")]
// pub(crate) lt_token: bool,
#[serde(default, skip_serializing_if = "Punctuated::is_empty")]
pub(crate) params: Punctuated<GenericParam>,
// #[serde(default, skip_serializing_if = "not")]
// pub(crate) gt_token: bool,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) where_clause: Option<WhereClause>,
}
}
impl Generics {
    /// Returns `true` when the declaration carries no generic parameters
    /// and no `where` clause — i.e. nothing worth serializing.
    pub(crate) fn is_none(&self) -> bool {
        match &self.where_clause {
            Some(_) => false,
            None => self.params.is_empty(), // && !self.lt_token && !self.gt_token
        }
    }
}
ast_enum! {
/// A generic type parameter, lifetime, or const generic: `T: Into<String>`,
/// `'a: 'b`, `const LEN: usize`.
pub enum GenericParam {
/// A generic type parameter: `T: Into<String>`.
Type(TypeParam),
/// A lifetime definition: `'a: 'b + 'c + 'd`.
Lifetime(LifetimeDef),
/// A const generic parameter: `const LENGTH: usize`.
Const(ConstParam),
}
}
ast_struct! {
/// A generic type parameter: `T: Into<String>`.
pub struct TypeParam {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "not")]
pub(crate) colon_token: bool,
#[serde(default, skip_serializing_if = "Punctuated::is_empty")]
pub(crate) bounds: Punctuated<TypeParamBound>,
#[serde(default, skip_serializing_if = "not")]
pub(crate) eq_token: bool,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) default: Option<Type>,
}
}
ast_struct! {
/// A lifetime definition: `'a: 'b + 'c + 'd`.
pub struct LifetimeDef {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) lifetime: Lifetime,
#[serde(default, skip_serializing_if = "not")]
pub(crate) colon_token: bool,
#[serde(default, skip_serializing_if = "Punctuated::is_empty")]
pub(crate) bounds: Punctuated<Lifetime>,
}
}
ast_struct! {
/// A const generic parameter: `const LENGTH: usize`.
pub struct ConstParam {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) ident: Ident,
pub(crate) ty: Type,
#[serde(default, skip_serializing_if = "not")]
pub(crate) eq_token: bool,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) default: Option<Expr>,
}
}
ast_struct! {
/// A set of bound lifetimes: `for<'a, 'b, 'c>`.
#[derive(Default)]
#[serde(transparent)]
pub struct BoundLifetimes {
pub(crate) lifetimes: Punctuated<LifetimeDef>,
}
}
ast_enum! {
/// A trait or lifetime used as a bound on a type parameter.
pub enum TypeParamBound {
Trait(TraitBound),
Lifetime(Lifetime),
}
}
ast_struct! {
/// A trait used as a bound on a type parameter.
pub struct TraitBound {
#[serde(default, skip_serializing_if = "not")]
pub(crate) paren_token: bool,
#[serde(default, skip_serializing_if = "TraitBoundModifier::is_none")]
pub(crate) modifier: TraitBoundModifier,
/// The `for<'a>` in `for<'a> Foo<&'a T>`
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) lifetimes: Option<BoundLifetimes>,
/// The `Foo<&'a T>` in `for<'a> Foo<&'a T>`
pub(crate) path: Path,
}
}
ast_enum! {
/// A modifier on a trait bound, currently only used for the `?` in
/// `?Sized`.
pub enum TraitBoundModifier {
None,
Maybe,
}
}
impl TraitBoundModifier {
    /// Whether this is the default modifier (no `?` prefix), in which case
    /// serialization of the field is skipped.
    fn is_none(&self) -> bool {
        matches!(self, TraitBoundModifier::None)
    }
}
impl Default for TraitBoundModifier {
fn default() -> Self {
TraitBoundModifier::None
}
}
ast_struct! {
/// A `where` clause in a definition: `where T: Deserialize<'de>, D:
/// 'static`.
#[serde(transparent)]
pub struct WhereClause {
pub(crate) predicates: Punctuated<WherePredicate>,
}
}
ast_enum! {
/// A single predicate in a `where` clause: `T: Deserialize<'de>`.
pub enum WherePredicate {
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
Type(PredicateType),
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
Lifetime(PredicateLifetime),
/// An equality predicate in a `where` clause (unsupported).
Eq(PredicateEq),
}
}
ast_struct! {
/// A type predicate in a `where` clause: `for<'c> Foo<'c>: Trait<'c>`.
pub struct PredicateType {
/// Any lifetimes from a `for` binding
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) lifetimes: Option<BoundLifetimes>,
/// The type being bounded
pub(crate) bounded_ty: Type,
/// Trait and lifetime bounds (`Clone+Send+'static`)
pub(crate) bounds: Punctuated<TypeParamBound>,
}
}
ast_struct! {
/// A lifetime predicate in a `where` clause: `'a: 'b + 'c`.
pub struct PredicateLifetime {
pub(crate) lifetime: Lifetime,
pub(crate) bounds: Punctuated<Lifetime>,
}
}
ast_struct! {
/// An equality predicate in a `where` clause (unsupported).
pub struct PredicateEq {
pub(crate) lhs_ty: Type,
pub(crate) rhs_ty: Type,
}
}
mod convert {
    //! Conversions between this crate's serializable `Generics` and
    //! `syn::Generics`.
    use super::*;
    // Generics
    syn_trait_impl!(syn::Generics);
    impl From<&syn::Generics> for Generics {
        fn from(other: &syn::Generics) -> Self {
            // `<` and `>` must come as a pair; reject `ident ..>` or `ident <..`.
            assert_eq!(other.lt_token.is_some(), other.gt_token.is_some());
            // Parameters require surrounding angle brackets; reject `ident T`.
            assert!(
                other.params.is_empty() || other.lt_token.is_some(),
                "expected `<`"
            );
            // The tokens themselves are not stored; they are reconstructed
            // from `params` in the reverse conversion below.
            Self {
                params: other.params.map_into(),
                where_clause: other.where_clause.map_into(),
            }
        }
    }
    impl From<&Generics> for syn::Generics {
        fn from(other: &Generics) -> Self {
            Self {
                // Synthesize `<`/`>` only when there are parameters to wrap.
                lt_token: default_or_none(!other.params.is_empty()),
                params: other.params.map_into(),
                gt_token: default_or_none(!other.params.is_empty()),
                where_clause: other.where_clause.map_into(),
            }
        }
    }
}

View file

@ -1,770 +0,0 @@
use super::*;
ast_enum! {
/// Things that can appear directly inside of a module or scope.
pub enum Item {
/// A constant item: `const MAX: u16 = 65535`.
Const(ItemConst),
/// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
Enum(ItemEnum),
/// An `extern crate` item: `extern crate serde`.
ExternCrate(ItemExternCrate),
/// A free-standing function: `fn process(n: usize) -> Result<()> { ...
/// }`.
Fn(ItemFn),
/// A block of foreign items: `extern "C" { ... }`.
ForeignMod(ItemForeignMod),
/// An impl block providing trait or associated items: `impl<A> Trait
/// for Data<A> { ... }`.
Impl(ItemImpl),
/// A macro invocation, which includes `macro_rules!` definitions.
Macro(ItemMacro),
/// A 2.0-style declarative macro introduced by the `macro` keyword.
Macro2(ItemMacro2),
/// A module or module declaration: `mod m` or `mod m { ... }`.
Mod(ItemMod),
/// A static item: `static BIKE: Shed = Shed(42)`.
Static(ItemStatic),
/// A struct definition: `struct Foo<A> { x: A }`.
Struct(ItemStruct),
/// A trait definition: `pub trait Iterator { ... }`.
Trait(ItemTrait),
/// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
TraitAlias(ItemTraitAlias),
/// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
Type(ItemType),
/// A union definition: `union Foo<A, B> { x: A, y: B }`.
Union(ItemUnion),
/// A use declaration: `use std::collections::HashMap`.
Use(ItemUse),
/// Tokens forming an item not interpreted by Syn.
Verbatim(TokenStream),
#[doc(hidden)]
__Nonexhaustive,
}
}
ast_struct! {
/// A constant item: `const MAX: u16 = 65535`.
pub struct ItemConst {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
pub(crate) ty: Box<Type>,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// An enum definition: `enum Foo<A, B> { A(A), B(B) }`.
pub struct ItemEnum {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Generics::is_none")]
pub(crate) generics: Generics,
pub(crate) variants: Punctuated<Variant>,
}
}
ast_struct! {
/// An `extern crate` item: `extern crate serde`.
pub struct ItemExternCrate {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) rename: Option<Ident>,
}
}
ast_struct! {
/// A free-standing function: `fn process(n: usize) -> Result<()> { ...
/// }`.
pub struct ItemFn {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
#[serde(flatten)]
pub(crate) sig: Signature,
#[serde(rename = "stmts")]
pub(crate) block: Box<Block>,
}
}
ast_struct! {
/// A block of foreign items: `extern "C" { ... }`.
pub struct ItemForeignMod {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
pub(crate) abi: Abi,
pub(crate) items: Vec<ForeignItem>,
}
}
ast_struct! {
    /// An impl block providing trait or associated items: `impl<A> Trait
    /// for Data<A> { ... }`.
    pub struct ItemImpl {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(rename = "default")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) defaultness: bool,
        #[serde(rename = "unsafe")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) unsafety: bool,
        #[serde(default, skip_serializing_if = "Generics::is_none")]
        pub(crate) generics: Generics,
        /// Trait this impl implements.
        ///
        /// NOTE(review): the `bool` presumably flags a leading `!`
        /// (negative impl, `impl !Trait for T`) — confirm against the
        /// syn-side conversion, which is outside this view.
        #[serde(rename = "trait")]
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) trait_: Option<(bool, Path)>,
        /// The Self type of the impl.
        pub(crate) self_ty: Box<Type>,
        pub(crate) items: Vec<ImplItem>,
    }
}
ast_struct! {
/// A macro invocation, which includes `macro_rules!` definitions.
pub struct ItemMacro {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
/// The `example` in `macro_rules! example { ... }`.
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) ident: Option<Ident>,
#[serde(flatten)]
pub(crate) mac: Macro,
#[serde(default, skip_serializing_if = "not")]
pub(crate) semi_token: bool,
}
}
ast_struct! {
/// A 2.0-style declarative macro introduced by the `macro` keyword.
pub struct ItemMacro2 {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
pub(crate) rules: TokenStream,
}
}
ast_struct! {
/// A module or module declaration: `mod m` or `mod m { ... }`.
pub struct ItemMod {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub(crate) content: Option<Vec<Item>>,
#[serde(default, skip_serializing_if = "not")]
pub(crate) semi: bool,
}
}
ast_struct! {
/// A static item: `static BIKE: Shed = Shed(42)`.
pub struct ItemStatic {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
#[serde(rename = "mut")]
#[serde(default, skip_serializing_if = "not")]
pub(crate) mutability: bool,
pub(crate) ident: Ident,
pub(crate) ty: Box<Type>,
pub(crate) expr: Box<Expr>,
}
}
ast_struct! {
/// A struct definition: `struct Foo<A> { x: A }`.
pub struct ItemStruct {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Generics::is_none")]
pub(crate) generics: Generics,
pub(crate) fields: Fields,
// #[serde(default, skip_serializing_if = "not")]
// pub(crate) semi_token: bool,
}
}
ast_struct! {
/// A trait definition: `pub trait Iterator { ... }`.
pub struct ItemTrait {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
#[serde(rename = "unsafe")]
#[serde(default, skip_serializing_if = "not")]
pub(crate) unsafety: bool,
#[serde(rename = "auto")]
#[serde(default, skip_serializing_if = "not")]
pub(crate) auto_token: bool,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Generics::is_none")]
pub(crate) generics: Generics,
#[serde(default, skip_serializing_if = "not")]
pub(crate) colon_token: bool,
#[serde(default, skip_serializing_if = "Punctuated::is_empty")]
pub(crate) supertraits: Punctuated<TypeParamBound>,
pub(crate) items: Vec<TraitItem>,
}
}
ast_struct! {
/// A trait alias: `pub trait SharableIterator = Iterator + Sync`.
pub struct ItemTraitAlias {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Generics::is_none")]
pub(crate) generics: Generics,
pub(crate) bounds: Punctuated<TypeParamBound>,
}
}
ast_struct! {
/// A type alias: `type Result<T> = std::result::Result<T, MyError>`.
pub struct ItemType {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Generics::is_none")]
pub(crate) generics: Generics,
pub(crate) ty: Box<Type>,
}
}
ast_struct! {
/// A union definition: `union Foo<A, B> { x: A, y: B }`.
pub struct ItemUnion {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
#[serde(default, skip_serializing_if = "Generics::is_none")]
pub(crate) generics: Generics,
pub(crate) fields: FieldsNamed,
}
}
ast_struct! {
/// A use declaration: `use std::collections::HashMap`.
pub struct ItemUse {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
#[serde(default, skip_serializing_if = "not")]
pub(crate) leading_colon: bool,
pub(crate) tree: UseTree,
}
}
ast_enum! {
/// A suffix of an import tree in a `use` item: `Type as Renamed` or `*`.
pub enum UseTree {
/// A path prefix of imports in a `use` item: `std::...`.
Path(UsePath),
/// An identifier imported by a `use` item: `HashMap`.
#[serde(rename = "ident")]
Name(UseName),
/// An renamed identifier imported by a `use` item: `HashMap as Map`.
Rename(UseRename),
/// A glob import in a `use` item: `*`.
#[serde(rename = "*")]
Glob,
/// A braced group of imports in a `use` item: `{A, B, C}`.
Group(UseGroup),
}
}
ast_struct! {
/// A path prefix of imports in a `use` item: `std::...`.
pub struct UsePath {
pub(crate) ident: Ident,
pub(crate) tree: Box<UseTree>,
}
}
ast_struct! {
/// An identifier imported by a `use` item: `HashMap`.
#[serde(transparent)]
pub struct UseName {
pub(crate) ident: Ident,
}
}
ast_struct! {
/// An renamed identifier imported by a `use` item: `HashMap as Map`.
pub struct UseRename {
pub(crate) ident: Ident,
pub(crate) rename: Ident,
}
}
ast_struct! {
/// A braced group of imports in a `use` item: `{A, B, C}`.
#[serde(transparent)]
pub struct UseGroup {
pub(crate) items: Punctuated<UseTree>,
}
}
ast_enum! {
/// An item within an `extern` block.
pub enum ForeignItem {
/// A foreign function in an `extern` block.
Fn(ForeignItemFn),
/// A foreign static item in an `extern` block: `static ext: u8`.
Static(ForeignItemStatic),
/// A foreign type in an `extern` block: `type void`.
Type(ForeignItemType),
/// A macro invocation within an extern block.
Macro(ForeignItemMacro),
/// Tokens in an `extern` block not interpreted by Syn.
Verbatim(TokenStream),
#[doc(hidden)]
__Nonexhaustive,
}
}
ast_struct! {
/// A foreign function in an `extern` block.
pub struct ForeignItemFn {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
#[serde(flatten)]
pub(crate) sig: Signature,
}
}
ast_struct! {
/// A foreign static item in an `extern` block: `static ext: u8`.
pub struct ForeignItemStatic {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
#[serde(rename = "mut")]
#[serde(default, skip_serializing_if = "not")]
pub(crate) mutability: bool,
pub(crate) ident: Ident,
pub(crate) ty: Box<Type>,
}
}
ast_struct! {
/// A foreign type in an `extern` block: `type void`.
pub struct ForeignItemType {
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub(crate) attrs: Vec<Attribute>,
#[serde(default, skip_serializing_if = "Visibility::is_inherited")]
pub(crate) vis: Visibility,
pub(crate) ident: Ident,
}
}
ast_struct! {
    /// A macro invocation within an extern block.
    pub struct ForeignItemMacro {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(flatten)]
        pub(crate) mac: Macro,
        /// Whether the invocation ends with `;`; stored as a bool instead of
        /// a token and omitted when false.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) semi_token: bool,
    }
}
ast_enum! {
    /// An item declaration within the definition of a trait.
    pub enum TraitItem {
        /// An associated constant within the definition of a trait.
        Const(TraitItemConst),
        /// A trait method within the definition of a trait.
        Method(TraitItemMethod),
        /// An associated type within the definition of a trait.
        Type(TraitItemType),
        /// A macro invocation within the definition of a trait.
        Macro(TraitItemMacro),
        /// Tokens within the definition of a trait not interpreted by Syn.
        Verbatim(TokenStream),
        // Mirrors syn's hidden non-exhaustive marker; never construct this
        // variant directly.
        #[doc(hidden)]
        __Nonexhaustive,
    }
}
ast_struct! {
    /// An associated constant within the definition of a trait.
    pub struct TraitItemConst {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) ident: Ident,
        pub(crate) ty: Type,
        /// The provided value, if any: `const ID: u32 = 0;` vs `const ID: u32;`.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) default: Option<Expr>,
    }
}
ast_struct! {
    /// A trait method within the definition of a trait.
    pub struct TraitItemMethod {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(flatten)]
        pub(crate) sig: Signature,
        /// Provided default body, if any; `None` for a bare declaration.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) default: Option<Block>,
        // No `semi_token` field: the trailing `;` is fully determined by
        // `default.is_none()` and is reconstructed in the `convert` module
        // below, so serializing it would be redundant.
        // #[serde(default, skip_serializing_if = "not")]
        // pub(crate) semi_token: bool,
    }
}
ast_struct! {
    /// An associated type within the definition of a trait.
    pub struct TraitItemType {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) ident: Ident,
        #[serde(default, skip_serializing_if = "Generics::is_none")]
        pub(crate) generics: Generics,
        /// Whether a `:` bound list is present; stored as a bool and omitted
        /// when false.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) colon_token: bool,
        #[serde(default, skip_serializing_if = "Punctuated::is_empty")]
        pub(crate) bounds: Punctuated<TypeParamBound>,
        /// Provided default type, if any: `type Item = u8;`.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) default: Option<Type>,
    }
}
ast_struct! {
    /// A macro invocation within the definition of a trait.
    pub struct TraitItemMacro {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(flatten)]
        pub(crate) mac: Macro,
        /// Whether the invocation ends with `;`; omitted when false.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) semi_token: bool,
    }
}
ast_enum! {
    /// An item within an impl block.
    pub enum ImplItem {
        /// An associated constant within an impl block.
        Const(ImplItemConst),
        /// A method within an impl block.
        Method(ImplItemMethod),
        /// An associated type within an impl block.
        Type(ImplItemType),
        /// A macro invocation within an impl block.
        Macro(ImplItemMacro),
        /// Tokens within an impl block not interpreted by Syn.
        Verbatim(TokenStream),
        // Mirrors syn's hidden non-exhaustive marker; never construct this
        // variant directly.
        #[doc(hidden)]
        __Nonexhaustive,
    }
}
ast_struct! {
    /// An associated constant within an impl block.
    pub struct ImplItemConst {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(default, skip_serializing_if = "Visibility::is_inherited")]
        pub(crate) vis: Visibility,
        /// `true` when declared `default const …` (specialization); serialized
        /// under the key `"default"` and omitted when false.
        #[serde(rename = "default")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) defaultness: bool,
        pub(crate) ident: Ident,
        pub(crate) ty: Type,
        pub(crate) expr: Expr,
    }
}
ast_struct! {
    /// A method within an impl block.
    pub struct ImplItemMethod {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(default, skip_serializing_if = "Visibility::is_inherited")]
        pub(crate) vis: Visibility,
        /// `true` when declared `default fn …` (specialization); serialized
        /// under the key `"default"` and omitted when false.
        #[serde(rename = "default")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) defaultness: bool,
        #[serde(flatten)]
        pub(crate) sig: Signature,
        /// The method body; serialized under the key `"stmts"`.
        #[serde(rename = "stmts")]
        pub(crate) block: Block,
    }
}
ast_struct! {
    /// An associated type within an impl block.
    pub struct ImplItemType {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(default, skip_serializing_if = "Visibility::is_inherited")]
        pub(crate) vis: Visibility,
        /// `true` when declared `default type …` (specialization); serialized
        /// under the key `"default"` and omitted when false.
        #[serde(rename = "default")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) defaultness: bool,
        pub(crate) ident: Ident,
        #[serde(default, skip_serializing_if = "Generics::is_none")]
        pub(crate) generics: Generics,
        pub(crate) ty: Type,
    }
}
ast_struct! {
    /// A macro invocation within an impl block.
    pub struct ImplItemMacro {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(flatten)]
        pub(crate) mac: Macro,
        /// Whether the invocation ends with `;`; omitted when false.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) semi_token: bool,
    }
}
ast_struct! {
    /// A function signature in a trait or implementation: `unsafe fn
    /// initialize(&self)`.
    pub struct Signature {
        // The keyword qualifiers are stored as booleans, renamed to their
        // source keyword in JSON, and omitted entirely when false.
        #[serde(rename = "const")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) constness: bool,
        #[serde(rename = "async")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) asyncness: bool,
        #[serde(rename = "unsafe")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) unsafety: bool,
        /// Calling-convention ABI, e.g. the `extern "C"` part, if present.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) abi: Option<Abi>,
        pub(crate) ident: Ident,
        #[serde(default, skip_serializing_if = "Generics::is_none")]
        pub(crate) generics: Generics,
        /// The parenthesized argument list; may include a `self` receiver
        /// (see [`FnArg`]).
        pub(crate) inputs: Punctuated<FnArg>,
        /// Trailing `...` of a C-variadic signature, if any.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) variadic: Option<Variadic>,
        #[serde(default)]
        pub(crate) output: ReturnType,
    }
}
ast_enum! {
    /// An argument in a function signature: the `n: usize` in `fn f(n: usize)`.
    pub enum FnArg {
        /// The `self` argument of an associated method, whether taken by value
        /// or by reference.
        Receiver(Receiver),
        /// A function argument accepted by pattern and type.
        Typed(PatType),
    }
}
ast_struct! {
    /// The `self` argument of an associated method, whether taken by value
    /// or by reference.
    pub struct Receiver {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// `true` for `&self` / `&'a self` forms; serialized under the key
        /// `"ref"` and omitted when false. The conversion impls below pack
        /// this together with `lifetime` into syn's `reference` pair.
        #[serde(rename = "ref")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) reference: bool,
        /// Lifetime of the reference, when written explicitly: `&'a self`.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) lifetime: Option<Lifetime>,
        /// `true` for `&mut self` / `mut self`; serialized under `"mut"`.
        #[serde(rename = "mut")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) mutability: bool,
    }
}
// Hand-written conversions between this crate's serializable types and the
// corresponding `syn` types, for the cases the code generator cannot derive
// mechanically (dropped tokens that must be reconstructed, flattened pairs).
mod convert {
    use super::*;
    // ItemStruct
    syn_trait_impl!(syn::ItemStruct);
    impl From<&syn::ItemStruct> for ItemStruct {
        fn from(other: &syn::ItemStruct) -> Self {
            let fields: Fields = other.fields.ref_into();
            // Unit/tuple structs must carry `;`, named-field structs must
            // not; panic early if the parsed tokens disagree.
            assert_struct_semi(&fields, other.semi_token.is_some());
            Self {
                attrs: other.attrs.map_into(),
                vis: other.vis.ref_into(),
                ident: other.ident.ref_into(),
                generics: other.generics.ref_into(),
                fields,
            }
        }
    }
    impl From<&ItemStruct> for syn::ItemStruct {
        fn from(other: &ItemStruct) -> Self {
            Self {
                attrs: other.attrs.map_into(),
                vis: other.vis.ref_into(),
                struct_token: default(),
                ident: other.ident.ref_into(),
                generics: other.generics.ref_into(),
                fields: other.fields.ref_into(),
                // Reconstruct the dropped `;`: present exactly when the
                // struct has no named fields (unit or tuple struct).
                semi_token: default_or_none(!other.fields.is_named()),
            }
        }
    }
    // TraitItemMethod
    syn_trait_impl!(syn::TraitItemMethod);
    impl From<&syn::TraitItemMethod> for TraitItemMethod {
        fn from(other: &syn::TraitItemMethod) -> Self {
            // `semi_token` is not stored (see the struct definition); verify
            // here that it really is redundant with `default`.
            if other.default.is_some() {
                // `fn foo() -> bool {};`
                assert!(other.semi_token.is_none(), "unexpected token: `;`");
            } else {
                // `fn foo() -> bool`
                assert!(other.semi_token.is_some(), "expected `;`");
            }
            Self {
                attrs: other.attrs.map_into(),
                sig: other.sig.ref_into(),
                default: other.default.map_into(),
            }
        }
    }
    impl From<&TraitItemMethod> for syn::TraitItemMethod {
        fn from(other: &TraitItemMethod) -> Self {
            Self {
                attrs: other.attrs.map_into(),
                sig: other.sig.ref_into(),
                default: other.default.map_into(),
                // Reconstruct `;` for body-less declarations.
                semi_token: default_or_none(other.default.is_none()),
            }
        }
    }
    // UseTree
    syn_trait_impl!(syn::UseTree);
    impl From<&syn::UseTree> for UseTree {
        fn from(other: &syn::UseTree) -> Self {
            use super::UseTree::*;
            use syn::UseTree;
            match other {
                UseTree::Path(x) => Path(x.into()),
                UseTree::Name(x) => Name(x.into()),
                UseTree::Rename(x) => Rename(x.into()),
                // `*` carries no data beyond the token, so our Glob is a
                // unit variant.
                UseTree::Glob(_) => Glob,
                UseTree::Group(x) => Group(x.into()),
            }
        }
    }
    impl From<&UseTree> for syn::UseTree {
        fn from(other: &UseTree) -> Self {
            use syn::UseTree::*;
            match other {
                UseTree::Path(x) => Path(x.into()),
                UseTree::Name(x) => Name(x.into()),
                UseTree::Rename(x) => Rename(x.into()),
                UseTree::Glob => Glob(syn::UseGlob {
                    star_token: default(),
                }),
                UseTree::Group(x) => Group(x.into()),
            }
        }
    }
    // Receiver
    syn_trait_impl!(syn::Receiver);
    impl From<&syn::Receiver> for Receiver {
        fn from(node: &syn::Receiver) -> Self {
            Receiver {
                attrs: node.attrs.map_into(),
                // syn stores `&`/`&'a` as Option<(And, Option<Lifetime>)>;
                // split that pair into our two separate fields.
                reference: node.reference.is_some(),
                lifetime: node.reference.as_ref().and_then(|(_0, _1)| _1.map_into()),
                mutability: node.mutability.is_some(),
            }
        }
    }
    impl From<&Receiver> for syn::Receiver {
        fn from(node: &Receiver) -> Self {
            syn::Receiver {
                attrs: node.attrs.map_into(),
                // Re-pack `reference` + `lifetime` into syn's pair.
                reference: if node.reference {
                    Some((default(), node.lifetime.map_into()))
                } else {
                    None
                },
                mutability: default_or_none(node.mutability),
                self_token: default(),
            }
        }
    }
}

View file

@ -1,210 +0,0 @@
//! A module to provide functions for JSON <-> Rust serialize and deserialize.
//!
//! *This module is available if syn-serde is built with the `"json"` feature.*
use super::*;
use serde_json::Result;
use std::io;
// Serialize [`Syn`] type into JSON data.
/// Serialize the given [`Syn`] type as JSON into the IO stream.
///
/// Equivalent to converting `syn` with [`Syn::to_adapter`] and passing the
/// resulting adapter to [`serde_json::to_writer`].
#[inline]
pub fn to_writer<S, W>(writer: W, syn: &S) -> Result<()>
where
    S: Syn,
    W: io::Write,
{
    serde_json::to_writer(writer, &syn.to_adapter())
}
/// Serialize the given [`Syn`] type as pretty-printed JSON into the IO
/// stream.
///
/// Equivalent to converting `syn` with [`Syn::to_adapter`] and passing the
/// resulting adapter to [`serde_json::to_writer_pretty`].
#[inline]
pub fn to_writer_pretty<S, W>(writer: W, syn: &S) -> Result<()>
where
    S: Syn,
    W: io::Write,
{
    serde_json::to_writer_pretty(writer, &syn.to_adapter())
}
/// Serialize the given [`Syn`] type as a JSON byte vector.
///
/// Equivalent to converting `syn` with [`Syn::to_adapter`] and passing the
/// resulting adapter to [`serde_json::to_vec`]. Serializing an adapter
/// cannot fail, hence the `unwrap`.
#[inline]
pub fn to_vec<S>(syn: &S) -> Vec<u8>
where
    S: Syn,
{
    serde_json::to_vec(&syn.to_adapter()).unwrap()
}
/// Serialize the given [`Syn`] type as a pretty-printed JSON byte vector.
///
/// Equivalent to converting `syn` with [`Syn::to_adapter`] and passing the
/// resulting adapter to [`serde_json::to_vec_pretty`]. Serializing an
/// adapter cannot fail, hence the `unwrap`.
#[inline]
pub fn to_vec_pretty<S>(syn: &S) -> Vec<u8>
where
    S: Syn,
{
    serde_json::to_vec_pretty(&syn.to_adapter()).unwrap()
}
/// Serialize the given [`Syn`] type as a String of JSON.
///
/// Equivalent to converting `syn` with [`Syn::to_adapter`] and passing the
/// resulting adapter to [`serde_json::to_string`]. Serializing an adapter
/// cannot fail, hence the `unwrap`.
#[inline]
pub fn to_string<S>(syn: &S) -> String
where
    S: Syn,
{
    serde_json::to_string(&syn.to_adapter()).unwrap()
}
/// Serialize the given [`Syn`] type as a pretty-printed String of JSON.
///
/// Equivalent to converting `syn` with [`Syn::to_adapter`] and passing the
/// resulting adapter to [`serde_json::to_string_pretty`]. Serializing an
/// adapter cannot fail, hence the `unwrap`.
#[inline]
pub fn to_string_pretty<S>(syn: &S) -> String
where
    S: Syn,
{
    serde_json::to_string_pretty(&syn.to_adapter()).unwrap()
}
// Deserialize JSON data to [`Syn`] type.
/// Deserialize an instance of [`Syn`] type from an IO stream of JSON.
///
/// Equivalent to deserializing the adapter with [`serde_json::from_reader`]
/// and converting it back with [`Syn::from_adapter`].
pub fn from_reader<S, R>(rdr: R) -> Result<S>
where
    S: Syn,
    R: io::Read,
{
    serde_json::from_reader(rdr).map(|adapter: S::Adapter| S::from_adapter(&adapter))
}
/// Deserialize an instance of [`Syn`] type from bytes of JSON text.
///
/// Equivalent to deserializing the adapter with [`serde_json::from_slice`]
/// and converting it back with [`Syn::from_adapter`].
pub fn from_slice<S>(v: &[u8]) -> Result<S>
where
    S: Syn,
{
    serde_json::from_slice(v).map(|adapter: S::Adapter| S::from_adapter(&adapter))
}
/// Deserialize an instance of [`Syn`] type from a string of JSON text.
///
/// Equivalent to deserializing the adapter with [`serde_json::from_str`]
/// and converting it back with [`Syn::from_adapter`].
pub fn from_str<S>(s: &str) -> Result<S>
where
    S: Syn,
{
    serde_json::from_str(s).map(|adapter: S::Adapter| S::from_adapter(&adapter))
}

View file

@ -1,350 +0,0 @@
//! Library to serialize and deserialize [Syn] syntax trees.
//!
//! ## Examples
//!
//! ```toml
//! [dependencies]
//! syn-serde = { version = "0.2", features = ["json"] }
//! syn = { version = "1", features = ["full"] }
//! ```
//!
//! ```rust
//! # #[cfg(feature = "json")]
//! # fn dox() -> Result<(), Box<dyn std::error::Error>> {
//! use syn_serde::json;
//!
//! let syn_file: syn::File = syn::parse_quote! {
//! fn main() {
//! println!("Hello, world!");
//! }
//! };
//! println!("{}", json::to_string_pretty(&syn_file));
//! # Ok(())
//! # }
//! ```
//!
//! This prints the following JSON:
//!
//! ```json
//! {
//! "items": [
//! {
//! "fn": {
//! "ident": "main",
//! "inputs": [],
//! "output": null,
//! "stmts": [
//! {
//! "semi": {
//! "macro": {
//! "path": {
//! "segments": [
//! {
//! "ident": "println"
//! }
//! ]
//! },
//! "delimiter": "paren",
//! "tokens": [
//! {
//! "lit": "\"Hello, world!\""
//! }
//! ]
//! }
//! }
//! }
//! ]
//! }
//! }
//! ]
//! }
//! ```
//!
//! ### Rust source file -> JSON representation of the syntax tree
//!
//! The [`rust2json`] example parse a Rust source file into a `syn_serde::File`
//! and print out a JSON representation of the syntax tree.
//!
//! ### JSON file -> Rust syntax tree
//!
//! The [`json2rust`] example parse a JSON file into a `syn_serde::File` and
//! print out a Rust syntax tree.
//!
//! ## Optional features
//!
//! - **`json`** — Provides functions for JSON <-> Rust serializing and
//! deserializing.
//!
//! [Syn]: https://github.com/dtolnay/syn
//! [`rust2json`]: https://github.com/taiki-e/syn-serde/tree/master/examples/rust2json
//! [`json2rust`]: https://github.com/taiki-e/syn-serde/tree/master/examples/json2rust
#![doc(html_root_url = "https://docs.rs/syn-serde/0.2.0")]
#![doc(test(
no_crate_inject,
attr(
deny(warnings, rust_2018_idioms, single_use_lifetimes),
allow(dead_code)
)
))]
#![forbid(unsafe_code)]
#![warn(rust_2018_idioms, unreachable_pub)]
// It cannot be included in the published code because these lints have false positives in the minimum required version.
#![cfg_attr(test, warn(single_use_lifetimes))]
#![warn(clippy::all)]
// mem::take requires Rust 1.40
#![allow(clippy::mem_replace_with_default)]
#![allow(clippy::large_enum_variant, clippy::needless_doctest_main)]
#[macro_use]
mod macros;
mod gen;
mod attr;
#[doc(hidden)]
pub use crate::attr::*;
mod data;
#[doc(hidden)]
pub use crate::data::*;
mod expr;
#[doc(hidden)]
pub use crate::expr::*;
mod generics;
#[doc(hidden)]
pub use crate::generics::*;
mod item;
#[doc(hidden)]
pub use crate::item::*;
mod file;
#[doc(hidden)]
pub use crate::file::File;
mod lifetime;
#[doc(hidden)]
pub use crate::lifetime::Lifetime;
mod lit;
#[doc(hidden)]
pub use crate::lit::*;
mod mac;
#[doc(hidden)]
pub use crate::mac::{Macro, MacroDelimiter};
mod op;
#[doc(hidden)]
pub use crate::op::{BinOp, UnOp};
mod ty;
#[doc(hidden)]
pub use crate::ty::*;
mod pat;
#[doc(hidden)]
pub use crate::pat::*;
mod path;
#[doc(hidden)]
pub use crate::path::*;
mod stmt;
#[doc(hidden)]
pub use crate::stmt::{Block, Local, Stmt};
mod token_stream;
#[doc(hidden)]
pub use crate::token_stream::{
Delimiter, Group, Ident, Literal, Punct, Spacing, TokenStream, TokenTree,
};
#[cfg(feature = "json")]
pub mod json;
// =============================================================================
// Syn trait
// Seals the `Syn` trait so it cannot be implemented outside this crate;
// implementations are only produced via the `syn_trait_impl!` macro, which
// also implements `Sealed`.
mod private {
    pub trait Sealed {}
}
/// A trait for the data structures of [Syn] and [proc-macro2].
///
/// [Syn]: https://github.com/dtolnay/syn
/// [proc-macro2]: https://github.com/alexcrichton/proc-macro2
#[allow(single_use_lifetimes)] // https://github.com/rust-lang/rust/issues/55058
pub trait Syn: Sized + private::Sealed {
    /// The serializable/deserializable mirror type defined in this crate.
    type Adapter: Serialize + for<'de> Deserialize<'de>;
    /// Converts a `Syn` type into an adapter.
    ///
    /// ## Examples
    ///
    /// ```rust
    /// # #[cfg(feature = "json")]
    /// # fn dox() {
    /// use syn_serde::Syn;
    ///
    /// let syn_file: syn::File = syn::parse_quote! {
    ///     fn main() {
    ///         println!("Hello, world!");
    ///     }
    /// };
    ///
    /// let serializable_file = syn_file.to_adapter();
    /// println!("{}", serde_json::to_string_pretty(&serializable_file).unwrap());
    /// # }
    /// ```
    fn to_adapter(&self) -> Self::Adapter;
    /// Converts an adapter into a `Syn` type.
    ///
    /// ## Examples
    ///
    /// ```rust
    /// # #[cfg(feature = "json")]
    /// # fn dox() -> Result<(), Box<dyn std::error::Error>> {
    /// use syn_serde::Syn;
    ///
    /// // `struct Unit;`
    /// let json = r#"{
    ///     "struct": {
    ///         "ident": "Unit",
    ///         "fields": "unit"
    ///     }
    /// }"#;
    ///
    /// let serializable_file: <syn::File as Syn>::Adapter = serde_json::from_str(json)?;
    /// let _syn_file = syn::File::from_adapter(&serializable_file);
    /// # Ok(())
    /// # }
    /// ```
    fn from_adapter(adapter: &Self::Adapter) -> Self;
}
// =============================================================================
use proc_macro2::Span;
use serde::{Deserialize, Serialize};
type Punctuated<T> = Vec<T>;
/// Shorthand for `T::default()`, used when reconstructing the token fields
/// that this crate's types drop (e.g. `struct_token`, `self_token`).
fn default<T>() -> T
where
    T: Default,
{
    Default::default()
}
/// Maps a boolean flag back to an optional token: `Some(T::default())` when
/// the flag is set, `None` otherwise.
fn default_or_none<T>(x: bool) -> Option<T>
where
    T: Default,
{
    match x {
        true => Some(T::default()),
        false => None,
    }
}
/// Logical negation as a named function, used as a serde
/// `skip_serializing_if` predicate for boolean fields (skip when `false`).
fn not<T>(x: T) -> T::Output
where
    T: std::ops::Not,
{
    std::ops::Not::not(x)
}
// Converts `&T -> U` where only `&T: Into<U>` is implemented; a method-call
// form that sidesteps the inference problems described in
// https://github.com/rust-lang/rust/issues/51443
trait RefInto<U>: Sized {
    fn ref_into<'a>(&'a self) -> U
    where
        &'a Self: Into<U>,
    {
        self.into()
    }
}
// Blanket impl: every type gets `ref_into`; the bound is checked at the
// call site via the method's `where` clause.
impl<T, U> RefInto<U> for T {}
// Container-level counterpart of `RefInto`: maps every element of a
// container by reference into a new container `M` of elements `U`.
trait MapInto<U, M> {
    /// Element type of the source container.
    type T;
    /// Applies `f` to a borrow of each element, collecting into `M`.
    fn ref_map<'a, F>(&'a self, f: F) -> M
    where
        Self::T: 'a,
        F: FnMut(&'a Self::T) -> U;
    /// Maps each element with `Into::into`.
    fn map_into<'a>(&'a self) -> M
    where
        Self::T: 'a,
        &'a Self::T: Into<U>,
    {
        self.ref_map(Into::into)
    }
}
// `Vec<T>` -> `Vec<U>` element-wise.
impl<T, U> MapInto<U, Vec<U>> for Vec<T> {
    type T = T;
    fn ref_map<'a, F>(&'a self, mut f: F) -> Vec<U>
    where
        F: FnMut(&'a Self::T) -> U,
    {
        // Map each element by reference into a freshly allocated vector of
        // the same length.
        let mut mapped = Vec::with_capacity(self.len());
        for item in self {
            mapped.push(f(item));
        }
        mapped
    }
}
// `Vec<T>` -> `syn::punctuated::Punctuated<U, P>`; separator tokens are
// synthesized from `P::default()` by `Punctuated`'s `FromIterator` impl.
impl<T, U, P> MapInto<U, syn::punctuated::Punctuated<U, P>> for Vec<T>
where
    P: Default,
{
    type T = T;
    fn ref_map<'a, F>(&'a self, f: F) -> syn::punctuated::Punctuated<U, P>
    where
        F: FnMut(&'a Self::T) -> U,
    {
        self.iter().map(f).collect()
    }
}
// `syn::punctuated::Punctuated<T, P>` -> `Vec<U>`; the separator tokens are
// dropped (they are reconstructed on the way back, see the impl above).
impl<T, U, P> MapInto<U, Vec<U>> for syn::punctuated::Punctuated<T, P>
where
    P: Default,
{
    type T = T;
    fn ref_map<'a, F>(&'a self, f: F) -> Vec<U>
    where
        F: FnMut(&'a Self::T) -> U,
    {
        self.iter().map(f).collect()
    }
}
// `Option<T>` -> `Option<U>` by mapping a borrow of the contained value.
impl<T, U> MapInto<U, Option<U>> for Option<T> {
    type T = T;
    fn ref_map<'a, F>(&'a self, f: F) -> Option<U>
    where
        F: FnMut(&'a Self::T) -> U,
    {
        self.as_ref().map(f)
    }
}
// `Box<T>` -> `Box<U>` by mapping the boxed value.
impl<T, U> MapInto<U, Box<U>> for Box<T> {
    type T = T;
    fn ref_map<'a, F>(&'a self, mut f: F) -> Box<U>
    where
        F: FnMut(&'a Self::T) -> U,
    {
        Box::new(f(&**self))
    }
}

View file

@ -1,19 +0,0 @@
use super::*;
ast_struct! {
    /// A Rust lifetime: `'a`.
    ///
    /// Lifetime names must conform to the following rules:
    ///
    /// - Must start with an apostrophe.
    /// - Must not consist of just an apostrophe: `'`.
    /// - Character after the apostrophe must be `_` or a Unicode code point with
    ///   the XID_Start property.
    /// - All following characters must be Unicode code points with the XID_Continue
    ///   property.
    #[derive(Clone)]
    #[serde(transparent)]
    pub struct Lifetime {
        // Serialized as a bare string due to `#[serde(transparent)]`.
        // NOTE(review): presumably stores the name without the leading
        // apostrophe — confirm against the conversion impl (not in this chunk).
        pub(crate) ident: Ident,
    }
}

View file

@ -1,525 +0,0 @@
use super::*;
ast_enum! {
    /// A Rust literal such as a string or integer or boolean.
    pub enum Lit {
        /// A UTF-8 string literal: `"foo"`.
        Str(LitStr),
        /// A byte string literal: `b"foo"`.
        ByteStr(LitByteStr),
        /// A byte literal: `b'f'`.
        Byte(LitByte),
        /// A character literal: `'a'`.
        Char(LitChar),
        /// An integer literal: `1` or `1u16`.
        Int(LitInt),
        /// A floating point literal: `1f64` or `1.0e10f64`.
        ///
        /// Must be finite. May not be infinite or NaN.
        Float(LitFloat),
        /// A boolean literal: `true` or `false`.
        Bool(LitBool),
        /// A raw token literal not interpreted by Syn.
        Verbatim(Literal),
    }
}
ast_struct! {
    /// A UTF-8 string literal: `"foo"`.
    #[serde(transparent)]
    pub struct LitStr {
        // The literal as a raw token; the `convert` module below re-parses
        // and re-formats its text when crossing to/from `syn`.
        token: Literal,
    }
}
ast_struct! {
    /// A byte string literal: `b"foo"`.
    #[serde(transparent)]
    pub struct LitByteStr {
        // Raw token form; parsed back to bytes in the `convert` module.
        token: Literal,
    }
}
ast_struct! {
    /// A byte literal: `b'f'`.
    #[serde(transparent)]
    pub struct LitByte {
        // Raw token form; parsed back to a `u8` in the `convert` module.
        token: Literal,
    }
}
ast_struct! {
    /// A character literal: `'a'`.
    #[serde(transparent)]
    pub struct LitChar {
        // Raw token form; parsed back to a `char` in the `convert` module.
        token: Literal,
    }
}
ast_struct! {
    /// An integer literal: `1` or `1u16`.
    #[serde(transparent)]
    pub struct LitInt {
        // Textual token form (digits plus any suffix), round-tripped via
        // `value::to_literal` in the `convert` module.
        token: Literal,
    }
}
ast_struct! {
    /// A floating point literal: `1f64` or `1.0e10f64`.
    ///
    /// Must be finite. May not be infinite or NaN.
    #[serde(transparent)]
    pub struct LitFloat {
        // Textual token form (digits plus any suffix), round-tripped via
        // `value::to_literal` in the `convert` module.
        token: Literal,
    }
}
ast_struct! {
    /// A boolean literal: `true` or `false`.
    #[serde(transparent)]
    pub struct LitBool {
        // Unlike the other literals, the value is stored directly rather
        // than as a token.
        pub(crate) value: bool,
    }
}
ast_enum! {
    /// The style of a string literal, either plain quoted or a raw string like
    /// `r##"data"##`.
    // NOTE(review): not referenced elsewhere in this chunk; appears to mirror
    // syn's `StrStyle` — confirm usage in the rest of the crate.
    pub enum StrStyle {
        /// An ordinary string like `"data"`.
        Cooked,
        /// A raw string like `r##"data"##`.
        ///
        /// The unsigned integer is the number of `#` symbols used.
        Raw(usize),
    }
}
// Parsers that recover the runtime value of a literal from its source text
// (quotes, escapes, raw-string pounds), used by the `convert` module below.
// All parsers panic on malformed input, which is acceptable because the text
// originates from tokens `syn`/`proc-macro2` already validated.
mod value {
    use super::*;
    use proc_macro2::{TokenStream, TokenTree};
    use std::{
        char,
        ops::{Index, RangeFrom},
    };
    /// Get the byte at offset idx, or a default of `b'\0'` if we're looking
    /// past the end of the input buffer.
    pub(crate) fn byte<S: AsRef<[u8]> + ?Sized>(s: &S, idx: usize) -> u8 {
        let s = s.as_ref();
        if idx < s.len() {
            s[idx]
        } else {
            0
        }
    }
    // First char of `s`, or NUL when `s` is empty.
    fn next_chr(s: &str) -> char {
        s.chars().next().unwrap_or('\0')
    }
    // Returns (content, suffix).
    pub(crate) fn parse_lit_str(s: &str) -> (Box<str>, Box<str>) {
        // Dispatch on the first byte: `"` = ordinary (cooked) string,
        // `r` = raw string.
        match byte(s, 0) {
            b'"' => parse_lit_str_cooked(s),
            b'r' => parse_lit_str_raw(s),
            _ => unreachable!(),
        }
    }
    // Decodes an ordinary quoted string, resolving escape sequences.
    fn parse_lit_str_cooked(mut s: &str) -> (Box<str>, Box<str>) {
        assert_eq!(byte(s, 0), b'"');
        s = &s[1..];
        let mut content = String::new();
        'outer: loop {
            let ch = match byte(s, 0) {
                b'"' => break,
                b'\\' => {
                    let b = byte(s, 1);
                    s = &s[2..];
                    match b {
                        b'x' => {
                            let (byte, rest) = backslash_x(s);
                            s = rest;
                            assert!(byte <= 0x80, "Invalid \\x byte in string literal");
                            char::from_u32(u32::from(byte)).unwrap()
                        }
                        b'u' => {
                            let (chr, rest) = backslash_u(s);
                            s = rest;
                            chr
                        }
                        b'n' => '\n',
                        b'r' => '\r',
                        b't' => '\t',
                        b'\\' => '\\',
                        b'0' => '\0',
                        b'\'' => '\'',
                        b'"' => '"',
                        // Line continuation: a backslash before a newline
                        // swallows the newline and all following whitespace.
                        b'\r' | b'\n' => loop {
                            let ch = next_chr(s);
                            if ch.is_whitespace() {
                                s = &s[ch.len_utf8()..];
                            } else {
                                continue 'outer;
                            }
                        },
                        b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
                    }
                }
                b'\r' => {
                    // A literal CR must be part of a CRLF pair; normalize to LF.
                    assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
                    s = &s[2..];
                    '\n'
                }
                _ => {
                    let ch = next_chr(s);
                    s = &s[ch.len_utf8()..];
                    ch
                }
            };
            content.push(ch);
        }
        assert!(s.starts_with('"'));
        let content = content.into_boxed_str();
        // Everything after the closing quote is the type suffix.
        let suffix = s[1..].to_owned().into_boxed_str();
        (content, suffix)
    }
    // Decodes a raw string `r#"…"#`: no escapes; content is taken verbatim
    // between the matching pound-delimited quotes.
    fn parse_lit_str_raw(mut s: &str) -> (Box<str>, Box<str>) {
        assert_eq!(byte(s, 0), b'r');
        s = &s[1..];
        // Count the leading `#` run; the closing delimiter must match it.
        let mut pounds = 0;
        while byte(s, pounds) == b'#' {
            pounds += 1;
        }
        assert_eq!(byte(s, pounds), b'"');
        assert_eq!(byte(s, s.len() - pounds - 1), b'"');
        for end in s[s.len() - pounds..].bytes() {
            assert_eq!(end, b'#');
        }
        let content = s[pounds + 1..s.len() - pounds - 1]
            .to_owned()
            .into_boxed_str();
        let suffix = Box::<str>::default(); // todo
        (content, suffix)
    }
    pub(crate) fn parse_lit_byte_str(s: &str) -> Vec<u8> {
        assert_eq!(byte(s, 0), b'b');
        // Dispatch on the second byte: `b"…"` cooked vs `br"…"` raw.
        match byte(s, 1) {
            b'"' => parse_lit_byte_str_cooked(s),
            b'r' => parse_lit_byte_str_raw(s),
            _ => unreachable!(),
        }
    }
    // Decodes a cooked byte string `b"…"`, resolving byte-level escapes.
    fn parse_lit_byte_str_cooked(mut s: &str) -> Vec<u8> {
        assert_eq!(byte(s, 0), b'b');
        assert_eq!(byte(s, 1), b'"');
        s = &s[2..];
        // We're going to want to have slices which don't respect codepoint boundaries.
        let mut s = s.as_bytes();
        let mut out = Vec::new();
        'outer: loop {
            let byte = match byte(s, 0) {
                b'"' => break,
                b'\\' => {
                    let b = byte(s, 1);
                    s = &s[2..];
                    match b {
                        b'x' => {
                            let (b, rest) = backslash_x(s);
                            s = rest;
                            b
                        }
                        b'n' => b'\n',
                        b'r' => b'\r',
                        b't' => b'\t',
                        b'\\' => b'\\',
                        b'0' => b'\0',
                        b'\'' => b'\'',
                        b'"' => b'"',
                        // Line continuation, as in `parse_lit_str_cooked`.
                        b'\r' | b'\n' => loop {
                            let byte = byte(s, 0);
                            let ch = char::from_u32(u32::from(byte)).unwrap();
                            if ch.is_whitespace() {
                                s = &s[1..];
                            } else {
                                continue 'outer;
                            }
                        },
                        b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
                    }
                }
                b'\r' => {
                    assert_eq!(byte(s, 1), b'\n', "Bare CR not allowed in string");
                    s = &s[2..];
                    b'\n'
                }
                b => {
                    s = &s[1..];
                    b
                }
            };
            out.push(byte);
        }
        assert_eq!(s, b"\"");
        out
    }
    // A raw byte string `br#"…"#` is a raw string (minus the `b`) whose
    // content is reinterpreted as bytes.
    fn parse_lit_byte_str_raw(s: &str) -> Vec<u8> {
        assert_eq!(byte(s, 0), b'b');
        String::from(parse_lit_str_raw(&s[1..]).0).into_bytes()
    }
    // Decodes a byte literal `b'…'` to its `u8` value.
    pub(crate) fn parse_lit_byte(s: &str) -> u8 {
        assert_eq!(byte(s, 0), b'b');
        assert_eq!(byte(s, 1), b'\'');
        // We're going to want to have slices which don't respect codepoint boundaries.
        let mut s = s[2..].as_bytes();
        let b = match byte(s, 0) {
            b'\\' => {
                let b = byte(s, 1);
                s = &s[2..];
                match b {
                    b'x' => {
                        let (b, rest) = backslash_x(s);
                        s = rest;
                        b
                    }
                    b'n' => b'\n',
                    b'r' => b'\r',
                    b't' => b'\t',
                    b'\\' => b'\\',
                    b'0' => b'\0',
                    b'\'' => b'\'',
                    b'"' => b'"',
                    b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
                }
            }
            b => {
                s = &s[1..];
                b
            }
        };
        assert_eq!(byte(s, 0), b'\'');
        b
    }
    // Decodes a character literal `'…'` to its `char` value.
    pub(crate) fn parse_lit_char(mut s: &str) -> char {
        assert_eq!(byte(s, 0), b'\'');
        s = &s[1..];
        let ch = if byte(s, 0) == b'\\' {
            let b = byte(s, 1);
            s = &s[2..];
            match b {
                b'x' => {
                    let (byte, rest) = backslash_x(s);
                    s = rest;
                    assert!(byte <= 0x80, "Invalid \\x byte in string literal");
                    char::from_u32(u32::from(byte)).unwrap()
                }
                b'u' => {
                    let (chr, rest) = backslash_u(s);
                    s = rest;
                    chr
                }
                b'n' => '\n',
                b'r' => '\r',
                b't' => '\t',
                b'\\' => '\\',
                b'0' => '\0',
                b'\'' => '\'',
                b'"' => '"',
                b => panic!("unexpected byte {:?} after \\ character in byte literal", b),
            }
        } else {
            let ch = next_chr(s);
            s = &s[ch.len_utf8()..];
            ch
        };
        assert_eq!(s, "\'", "Expected end of char literal");
        ch
    }
    // Parses the two hex digits of a `\xNN` escape; returns the byte and the
    // remaining input.
    fn backslash_x<S>(s: &S) -> (u8, &S)
    where
        S: Index<RangeFrom<usize>, Output = S> + AsRef<[u8]> + ?Sized,
    {
        let mut ch = 0;
        let b0 = byte(s, 0);
        let b1 = byte(s, 1);
        ch += 0x10
            * match b0 {
                b'0'..=b'9' => b0 - b'0',
                b'a'..=b'f' => 10 + (b0 - b'a'),
                b'A'..=b'F' => 10 + (b0 - b'A'),
                _ => panic!("unexpected non-hex character after \\x"),
            };
        ch += match b1 {
            b'0'..=b'9' => b1 - b'0',
            b'a'..=b'f' => 10 + (b1 - b'a'),
            b'A'..=b'F' => 10 + (b1 - b'A'),
            _ => panic!("unexpected non-hex character after \\x"),
        };
        (ch, &s[2..])
    }
    // Parses a `\u{NNNNNN}` escape (1-6 hex digits); returns the char and the
    // remaining input.
    fn backslash_u(mut s: &str) -> (char, &str) {
        if byte(s, 0) != b'{' {
            panic!("expected {{ after \\u");
        }
        s = &s[1..];
        let mut ch = 0;
        for _ in 0..6 {
            let b = byte(s, 0);
            match b {
                b'0'..=b'9' => {
                    ch *= 0x10;
                    ch += u32::from(b - b'0');
                    s = &s[1..];
                }
                b'a'..=b'f' => {
                    ch *= 0x10;
                    ch += u32::from(10 + b - b'a');
                    s = &s[1..];
                }
                b'A'..=b'F' => {
                    ch *= 0x10;
                    ch += u32::from(10 + b - b'A');
                    s = &s[1..];
                }
                b'}' => break,
                _ => panic!("unexpected non-hex character after \\u"),
            }
        }
        assert!(byte(s, 0) == b'}');
        s = &s[1..];
        if let Some(ch) = char::from_u32(ch) {
            (ch, s)
        } else {
            panic!("character code {:x} is not a valid unicode character", ch);
        }
    }
    // Re-lexes literal source text into a single `Literal` token.
    pub(crate) fn to_literal(s: &str) -> Literal {
        let stream = s.parse::<TokenStream>().unwrap();
        match stream.into_iter().next().unwrap() {
            TokenTree::Literal(l) => l.ref_into(),
            _ => unreachable!(),
        }
    }
}
// Conversions between this crate's literal types and `syn`'s. Going to
// `syn`, the stored token text is parsed back into a value with the helpers
// in `value`; coming from `syn`, the value is re-rendered as a token.
mod convert {
    use super::*;
    // LitStr
    impl From<&syn::LitStr> for LitStr {
        fn from(other: &syn::LitStr) -> Self {
            Self {
                token: Literal::string(&other.value()),
            }
        }
    }
    impl From<&LitStr> for syn::LitStr {
        fn from(other: &LitStr) -> Self {
            let (value, _) = value::parse_lit_str(&other.token.text);
            // Spans are not serialized, so every reconstructed literal gets
            // `Span::call_site()`.
            Self::new(&value, Span::call_site())
        }
    }
    // LitByteStr
    impl From<&syn::LitByteStr> for LitByteStr {
        fn from(other: &syn::LitByteStr) -> Self {
            Self {
                token: Literal::byte_string(&other.value()),
            }
        }
    }
    impl From<&LitByteStr> for syn::LitByteStr {
        fn from(other: &LitByteStr) -> Self {
            let value = value::parse_lit_byte_str(&other.token.text);
            Self::new(&value, Span::call_site())
        }
    }
    // LitByte
    impl From<&syn::LitByte> for LitByte {
        fn from(other: &syn::LitByte) -> Self {
            Self {
                token: Literal::u8_suffixed(other.value()),
            }
        }
    }
    impl From<&LitByte> for syn::LitByte {
        fn from(other: &LitByte) -> Self {
            let value = value::parse_lit_byte(&other.token.text);
            Self::new(value, Span::call_site())
        }
    }
    // LitChar
    impl From<&syn::LitChar> for LitChar {
        fn from(other: &syn::LitChar) -> Self {
            Self {
                token: Literal::character(other.value()),
            }
        }
    }
    impl From<&LitChar> for syn::LitChar {
        fn from(other: &LitChar) -> Self {
            let value = value::parse_lit_char(&other.token.text);
            Self::new(value, Span::call_site())
        }
    }
    // LitInt
    impl From<&syn::LitInt> for LitInt {
        fn from(other: &syn::LitInt) -> Self {
            Self {
                // Round-trip through source text to preserve base and suffix.
                token: value::to_literal(&other.to_string()),
            }
        }
    }
    impl From<&LitInt> for syn::LitInt {
        fn from(other: &LitInt) -> Self {
            Self::new(&other.token.text, Span::call_site())
        }
    }
    // LitFloat
    impl From<&syn::LitFloat> for LitFloat {
        fn from(other: &syn::LitFloat) -> Self {
            Self {
                // Round-trip through source text to preserve notation/suffix.
                token: value::to_literal(&other.to_string()),
            }
        }
    }
    impl From<&LitFloat> for syn::LitFloat {
        fn from(other: &LitFloat) -> Self {
            Self::new(&other.token.text, Span::call_site())
        }
    }
}

View file

@ -1,19 +0,0 @@
use super::*;
ast_struct! {
    /// A macro invocation: `println!("{}", mac)`.
    pub struct Macro {
        /// Path of the macro being invoked, e.g. `println`.
        pub(crate) path: Path,
        /// The delimiter surrounding the arguments: `(…)`, `{…}` or `[…]`.
        pub(crate) delimiter: MacroDelimiter,
        /// The raw argument tokens, uninterpreted.
        pub(crate) tokens: TokenStream,
    }
}
ast_enum! {
    /// A grouping token that surrounds a macro body: `m!(...)` or `m!{...}` or `m![...]`.
    pub enum MacroDelimiter {
        /// `m!(...)`
        Paren,
        /// `m!{...}`
        Brace,
        /// `m![...]`
        Bracket,
    }
}

View file

@ -1,57 +0,0 @@
// Wraps a struct declaration, attaching the `Debug` + serde derives. The
// public entry arm routes through `strip_attrs_pub!`, which isolates leading
// attributes and the `pub` keyword into the bracketed group consumed by the
// first arm, so the derive attribute can be emitted before them.
macro_rules! ast_struct {
    (
        [$($attrs_pub:tt)*]
        struct $name:ident $($rest:tt)*
    ) => {
        #[derive(Debug, crate::Serialize, crate::Deserialize)]
        $($attrs_pub)* struct $name $($rest)*
    };
    ($($t:tt)*) => {
        strip_attrs_pub!(ast_struct!($($t)*));
    };
}
// Enum counterpart of `ast_struct!`; additionally applies
// `rename_all = "snake_case"` so variant names serialize in snake case.
macro_rules! ast_enum {
    (
        [$($attrs_pub:tt)*]
        enum $name:ident $($rest:tt)*
    ) => (
        #[derive(Debug, crate::Serialize, crate::Deserialize)]
        #[serde(rename_all = "snake_case")]
        $($attrs_pub)* enum $name $($rest)*
    );
    ($($t:tt)*) => {
        strip_attrs_pub!(ast_enum!($($t)*));
    };
}
// Splits `#[attrs...] pub <rest>` into a `[attrs pub]` prefix group and
// re-invokes the original macro with that group, after checking that the
// visibility token really is `pub`.
macro_rules! strip_attrs_pub {
    ($mac:ident!($(#[$m:meta])* $pub:ident $($t:tt)*)) => {
        check_keyword_matches!(pub $pub);
        $mac!([$(#[$m])* $pub] $($t)*);
    };
}
// Compile-time guard: expands to nothing when the captured identifier is
// the expected keyword, and fails to match (compile error) otherwise.
macro_rules! check_keyword_matches {
    (struct struct) => {};
    (enum enum) => {};
    (pub pub) => {};
}
// Wires an upstream type (`syn::X` or `proc_macro2::X`) to its local
// serializable adapter `X`: seals the upstream type and implements the
// crate's `Syn` trait in terms of the `From` conversions defined nearby.
macro_rules! syn_trait_impl {
    ($path:ident :: $ty:ident) => {
        impl crate::private::Sealed for $path::$ty {}
        impl crate::Syn for $path::$ty {
            type Adapter = $ty;
            fn to_adapter(&self) -> Self::Adapter {
                Self::Adapter::from(self)
            }
            fn from_adapter(adapter: &Self::Adapter) -> Self {
                Self::from(adapter)
            }
        }
    };
}

View file

@ -1,104 +0,0 @@
ast_enum! {
    /// A binary operator: `+`, `+=`, `&`.
    //
    // Each variant serializes as its source-level operator token via the
    // `#[serde(rename = ...)]` attributes below.
    pub enum BinOp {
        /// The `+` operator (addition)
        #[serde(rename = "+")]
        Add,
        /// The `-` operator (subtraction)
        #[serde(rename = "-")]
        Sub,
        /// The `*` operator (multiplication)
        #[serde(rename = "*")]
        Mul,
        /// The `/` operator (division)
        #[serde(rename = "/")]
        Div,
        /// The `%` operator (modulus)
        #[serde(rename = "%")]
        Rem,
        /// The `&&` operator (logical and)
        #[serde(rename = "&&")]
        And,
        /// The `||` operator (logical or)
        #[serde(rename = "||")]
        Or,
        /// The `^` operator (bitwise xor)
        #[serde(rename = "^")]
        BitXor,
        /// The `&` operator (bitwise and)
        #[serde(rename = "&")]
        BitAnd,
        /// The `|` operator (bitwise or)
        #[serde(rename = "|")]
        BitOr,
        /// The `<<` operator (shift left)
        #[serde(rename = "<<")]
        Shl,
        /// The `>>` operator (shift right)
        #[serde(rename = ">>")]
        Shr,
        /// The `==` operator (equality)
        #[serde(rename = "==")]
        Eq,
        /// The `<` operator (less than)
        #[serde(rename = "<")]
        Lt,
        /// The `<=` operator (less than or equal to)
        #[serde(rename = "<=")]
        Le,
        /// The `!=` operator (not equal to)
        #[serde(rename = "!=")]
        Ne,
        /// The `>=` operator (greater than or equal to)
        #[serde(rename = ">=")]
        Ge,
        /// The `>` operator (greater than)
        #[serde(rename = ">")]
        Gt,
        /// The `+=` operator
        #[serde(rename = "+=")]
        AddEq,
        /// The `-=` operator
        #[serde(rename = "-=")]
        SubEq,
        /// The `*=` operator
        #[serde(rename = "*=")]
        MulEq,
        /// The `/=` operator
        #[serde(rename = "/=")]
        DivEq,
        /// The `%=` operator
        #[serde(rename = "%=")]
        RemEq,
        /// The `^=` operator
        #[serde(rename = "^=")]
        BitXorEq,
        /// The `&=` operator
        #[serde(rename = "&=")]
        BitAndEq,
        /// The `|=` operator
        #[serde(rename = "|=")]
        BitOrEq,
        /// The `<<=` operator
        #[serde(rename = "<<=")]
        ShlEq,
        /// The `>>=` operator
        #[serde(rename = ">>=")]
        ShrEq,
    }
}
ast_enum! {
    /// A unary operator: `*`, `!`, `-`.
    //
    // Serialized as the operator token itself, matching `BinOp`.
    pub enum UnOp {
        /// The `*` operator for dereferencing
        #[serde(rename = "*")]
        Deref,
        /// The `!` operator for logical inversion
        #[serde(rename = "!")]
        Not,
        /// The `-` operator for negation
        #[serde(rename = "-")]
        Neg,
    }
}

View file

@ -1,250 +0,0 @@
use super::*;
ast_enum! {
    /// A pattern in a local binding, function signature, match expression, or
    /// various other places.
    //
    // Mirrors `syn::Pat`; variants tag the serialized form in snake_case,
    // except `Wild`, which serializes as `"_"` to match its source syntax.
    pub enum Pat {
        /// A box pattern: `box v`.
        Box(PatBox),
        /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
        Ident(PatIdent),
        /// A literal pattern: `0`.
        ///
        /// This holds an `Expr` rather than a `Lit` because negative numbers
        /// are represented as an `Expr::Unary`.
        Lit(PatLit),
        /// A macro in pattern position.
        Macro(PatMacro),
        /// A pattern that matches any one of a set of cases.
        Or(PatOr),
        /// A path pattern like `Color::Red`, optionally qualified with a
        /// self-type.
        ///
        /// Unqualified path patterns can legally refer to variants, structs,
        /// constants or associated constants. Qualified path patterns like
        /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
        /// associated constants.
        Path(PatPath),
        /// A range pattern: `1..=2`.
        Range(PatRange),
        /// A reference pattern: `&mut var`.
        Reference(PatReference),
        /// The dots in a tuple or slice pattern: `[0, 1, ..]`
        Rest(PatRest),
        /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
        Slice(PatSlice),
        /// A struct or struct variant pattern: `Variant { x, y, .. }`.
        Struct(PatStruct),
        /// A tuple pattern: `(a, b)`.
        Tuple(PatTuple),
        /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
        TupleStruct(PatTupleStruct),
        /// A type ascription pattern: `foo: f64`.
        Type(PatType),
        /// Tokens in pattern position not interpreted by Syn.
        Verbatim(TokenStream),
        /// A pattern that matches any value: `_`.
        #[serde(rename = "_")]
        Wild(PatWild),
        // Reserved so new variants can be added without a breaking change.
        #[doc(hidden)]
        __Nonexhaustive,
    }
}
ast_struct! {
    /// A box pattern: `box v`.
    pub struct PatBox {
        // Omitted from the serialized form when empty.
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) pat: Box<Pat>,
    }
}
ast_struct! {
    /// A pattern that binds a new variable: `ref mut binding @ SUBPATTERN`.
    pub struct PatIdent {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        // Boolean keyword flags serialize under the keyword name and are
        // omitted when false (`not` predicate).
        #[serde(rename = "ref")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) by_ref: bool,
        #[serde(rename = "mut")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) mutability: bool,
        pub(crate) ident: Ident,
        /// The pattern after `@`, if any.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) subpat: Option<Box<Pat>>,
    }
}
ast_struct! {
    /// A literal pattern: `0`.
    ///
    /// This holds an `Expr` rather than a `Lit` because negative numbers
    /// are represented as an `Expr::Unary`.
    pub struct PatLit {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) expr: Box<Expr>,
    }
}
ast_struct! {
    /// A macro in pattern position.
    pub struct PatMacro {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        // Flattened: the macro's fields appear inline in the serialized form.
        #[serde(flatten)]
        pub(crate) mac: Macro,
    }
}
ast_struct! {
    /// A pattern that matches any one of a set of cases.
    pub struct PatOr {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// Whether the pattern begins with a leading `|`.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) leading_vert: bool,
        pub(crate) cases: Punctuated<Pat>,
    }
}
ast_struct! {
    /// A path pattern like `Color::Red`, optionally qualified with a
    /// self-type.
    ///
    /// Unqualified path patterns can legally refer to variants, structs,
    /// constants or associated constants. Qualified path patterns like
    /// `<A>::B::C` and `<A as Trait>::B::C` can only legally refer to
    /// associated constants.
    pub struct PatPath {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// The `<T as Trait>` qualifier, if present.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) qself: Option<QSelf>,
        #[serde(flatten)]
        pub(crate) path: Path,
    }
}
ast_struct! {
    /// A range pattern: `1..=2`.
    pub struct PatRange {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// Lower bound.
        pub(crate) lo: Box<Expr>,
        /// `..` vs `..=`.
        pub(crate) limits: RangeLimits,
        /// Upper bound.
        pub(crate) hi: Box<Expr>,
    }
}
ast_struct! {
    /// A reference pattern: `&mut var`.
    pub struct PatReference {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        #[serde(rename = "mut")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) mutability: bool,
        pub(crate) pat: Box<Pat>,
    }
}
ast_struct! {
    /// The dots in a tuple or slice pattern: `[0, 1, ..]`
    //
    // Carries no data beyond optional attributes.
    pub struct PatRest {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
    }
}
ast_struct! {
    /// A dynamically sized slice pattern: `[a, b, ref i @ .., y, z]`.
    pub struct PatSlice {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) elems: Punctuated<Pat>,
    }
}
ast_struct! {
    /// A struct or struct variant pattern: `Variant { x, y, .. }`.
    pub struct PatStruct {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) path: Path,
        pub(crate) fields: Punctuated<FieldPat>,
        /// Whether the pattern ends with `..`.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) dot2_token: bool,
    }
}
ast_struct! {
    /// A tuple pattern: `(a, b)`.
    pub struct PatTuple {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) elems: Punctuated<Pat>,
    }
}
ast_struct! {
    /// A tuple struct or tuple variant pattern: `Variant(x, y, .., z)`.
    pub struct PatTupleStruct {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) path: Path,
        /// The parenthesized element patterns, reused from `PatTuple`.
        pub(crate) pat: PatTuple,
    }
}
ast_struct! {
    /// A type ascription pattern: `foo: f64`.
    pub struct PatType {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) pat: Box<Pat>,
        pub(crate) ty: Box<Type>,
    }
}
ast_struct! {
    /// A pattern that matches any value: `_`.
    //
    // Carries no data beyond optional attributes.
    pub struct PatWild {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
    }
}
ast_struct! {
    /// A single field in a struct pattern.
    ///
    /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` are treated
    /// the same as `x: x, y: ref y, z: ref mut z` but there is no colon token.
    pub struct FieldPat {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        // Flattened: named or indexed member appears inline.
        #[serde(flatten)]
        pub(crate) member: Member,
        /// Whether an explicit `:` was written (shorthand fields have none).
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) colon_token: bool,
        pub(crate) pat: Box<Pat>,
    }
}

View file

@ -1,136 +0,0 @@
use super::*;
ast_struct! {
    /// A path at which a named item is exported: `std::collections::HashMap`.
    pub struct Path {
        /// Whether the path starts with `::`; omitted from JSON when false.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) leading_colon: bool,
        pub(crate) segments: Punctuated<PathSegment>,
    }
}
ast_struct! {
    /// A segment of a path together with any path arguments on that segment.
    pub struct PathSegment {
        pub(crate) ident: Ident,
        // Bare segments (no generics) are serialized without this field.
        #[serde(default, skip_serializing_if = "PathArguments::is_none")]
        pub(crate) arguments: PathArguments,
    }
}
ast_enum! {
    /// Angle bracketed or parenthesized arguments of a path segment.
    ///
    /// ## Angle bracketed
    ///
    /// The `<'a, T>` in `std::slice::iter<'a, T>`.
    ///
    /// ## Parenthesized
    ///
    /// The `(A, B) -> C` in `Fn(A, B) -> C`.
    pub enum PathArguments {
        /// No generic arguments on this segment.
        None,
        /// The `<'a, T>` in `std::slice::iter<'a, T>`.
        AngleBracketed(AngleBracketedGenericArguments),
        /// The `(A, B) -> C` in `Fn(A, B) -> C`.
        Parenthesized(ParenthesizedGenericArguments),
    }
}
impl Default for PathArguments {
fn default() -> Self {
PathArguments::None
}
}
impl PathArguments {
fn is_none(&self) -> bool {
match self {
PathArguments::None => true,
PathArguments::AngleBracketed(_) | PathArguments::Parenthesized(_) => false,
}
}
}
ast_enum! {
    /// An individual generic argument, like `'a`, `T`, or `Item = T`.
    pub enum GenericArgument {
        /// A lifetime argument.
        Lifetime(Lifetime),
        /// A type argument.
        Type(Type),
        /// A binding (equality constraint) on an associated type: the `Item =
        /// u8` in `Iterator<Item = u8>`.
        Binding(Binding),
        /// An associated type bound: `Iterator<Item: Display>`.
        Constraint(Constraint),
        /// A const expression. Must be inside of a block.
        ///
        /// NOTE: Identity expressions are represented as Type arguments, as
        /// they are indistinguishable syntactically.
        Const(Expr),
    }
}
ast_struct! {
    /// Angle bracketed arguments of a path segment: the `<K, V>` in `HashMap<K,
    /// V>`.
    pub struct AngleBracketedGenericArguments {
        /// Whether the turbofish `::` preceded the `<` (e.g. `Vec::<u8>`).
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) colon2_token: bool,
        pub(crate) args: Punctuated<GenericArgument>,
    }
}
ast_struct! {
    /// A binding (equality constraint) on an associated type: `Item = u8`.
    pub struct Binding {
        /// The associated type name (`Item`).
        pub(crate) ident: Ident,
        /// The bound type (`u8`).
        pub(crate) ty: Type,
    }
}
ast_struct! {
    /// An associated type bound: `Iterator<Item: Display>`.
    pub struct Constraint {
        /// The associated type name (`Item`).
        pub(crate) ident: Ident,
        /// The bounds after the colon (`Display`).
        pub(crate) bounds: Punctuated<TypeParamBound>,
    }
}
ast_struct! {
    /// Arguments of a function path segment: the `(A, B) -> C` in `Fn(A,B) ->
    /// C`.
    pub struct ParenthesizedGenericArguments {
        /// `(A, B)`
        pub(crate) inputs: Punctuated<Type>,
        /// `C`
        // Defaults to no return type when absent from the serialized form.
        #[serde(default)]
        pub(crate) output: ReturnType,
    }
}
ast_struct! {
    /// The explicit Self type in a qualified path: the `T` in `<T as
    /// Display>::fmt`.
    ///
    /// The actual path, including the trait and the associated item, is stored
    /// separately. The `position` field represents the index of the associated
    /// item qualified with this Self type.
    ///
    /// ```text
    /// <Vec<T> as a::b::Trait>::AssociatedItem
    ///  ^~~~~~    ~~~~~~~~~~~~~~^
    ///  ty        position = 3
    ///
    /// <Vec<T>>::AssociatedItem
    ///  ^~~~~~   ^
    ///  ty       position = 0
    /// ```
    pub struct QSelf {
        pub(crate) ty: Box<Type>,
        /// Number of leading path segments covered by the qualifier.
        pub(crate) position: usize,
        /// Whether the `as Trait` part is present.
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) as_token: bool,
    }
}

View file

@ -1,39 +0,0 @@
use super::*;
ast_struct! {
    /// A braced block containing Rust statements.
    //
    // `transparent`: serializes directly as the statement list.
    #[serde(transparent)]
    pub struct Block {
        /// Statements in a block
        pub(crate) stmts: Vec<Stmt>,
    }
}
ast_enum! {
    /// A statement, usually ending in a semicolon.
    pub enum Stmt {
        /// A local (let) binding.
        #[serde(rename = "let")]
        Local(Local),
        /// An item definition.
        Item(Item),
        /// Expr without trailing semicolon.
        Expr(Expr),
        /// Expression with trailing semicolon.
        Semi(Expr),
    }
}
ast_struct! {
    /// A local `let` binding: `let x: u64 = s.parse()?`.
    pub struct Local {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        pub(crate) pat: Pat,
        /// The initializer expression after `=`, if any.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) init: Option<Box<Expr>>,
    }
}

View file

@ -1,345 +0,0 @@
use super::*;
use std::fmt;
ast_struct! {
    /// An abstract stream of tokens, or more concretely a sequence of token trees.
    ///
    /// This type provides interfaces for iterating over token trees and for
    /// collecting token trees into one stream.
    #[derive(Clone, Default)]
    // `transparent`: serializes directly as the token-tree list.
    #[serde(transparent)]
    pub struct TokenStream {
        inner: Vec<TokenTree>,
    }
}
impl TokenStream {
fn _new(inner: Vec<TokenTree>) -> Self {
Self { inner }
}
pub(crate) fn is_empty(&self) -> bool {
self.inner.is_empty()
}
}
ast_enum! {
    /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
    #[derive(Clone)]
    pub enum TokenTree {
        /// A token stream surrounded by bracket delimiters.
        Group(Group),
        /// An identifier.
        Ident(Ident),
        /// A single punctuation character (`+`, `,`, `$`, etc.).
        Punct(Punct),
        /// A literal character (`'a'`), string (`"hello"`), number (`2.3`), etc.
        // Serialized under the key `"lit"` rather than `"literal"`.
        #[serde(rename = "lit")]
        Literal(Literal),
    }
}
ast_struct! {
    /// A delimited token stream.
    ///
    /// A `Group` internally contains a `TokenStream` which is surrounded by
    /// `Delimiter`s.
    #[derive(Clone)]
    pub struct Group {
        /// Which brackets surround the stream.
        delimiter: Delimiter,
        /// The enclosed tokens.
        stream: TokenStream,
    }
}
ast_enum! {
    /// Describes how a sequence of token trees is delimited.
    #[derive(Clone, Copy)]
    pub enum Delimiter {
        /// `( ... )`
        Parenthesis,
        /// `{ ... }`
        Brace,
        /// `[ ... ]`
        Bracket,
        /// `Ø ... Ø`
        ///
        /// An implicit delimiter, that may, for example, appear around tokens
        /// coming from a "macro variable" `$var`. It is important to preserve
        /// operator priorities in cases like `$var * 3` where `$var` is `1 + 2`.
        /// Implicit delimiters may not survive roundtrip of a token stream through
        /// a string.
        None,
    }
}
ast_struct! {
    /// An `Punct` is an single punctuation character like `+`, `-` or `#`.
    ///
    /// Multicharacter operators like `+=` are represented as two instances of
    /// `Punct` with different forms of `Spacing` returned.
    #[derive(Clone, Copy)]
    pub struct Punct {
        /// The punctuation character itself.
        op: char,
        /// Whether it joins with the following token.
        spacing: Spacing,
    }
}
ast_enum! {
    /// Whether an `Punct` is followed immediately by another `Punct` or followed by
    /// another token or whitespace.
    #[derive(Clone, Copy)]
    pub enum Spacing {
        /// E.g. `+` is `Alone` in `+ =`, `+ident` or `+()`.
        Alone,
        /// E.g. `+` is `Joint` in `+=` or `'#`.
        ///
        /// Additionally, single quote `'` can join with identifiers to form
        /// lifetimes `'ident`.
        Joint,
    }
}
ast_struct! {
    /// A word of Rust code, which may be a keyword or legal variable name.
    ///
    /// An identifier consists of at least one Unicode code point, the first of
    /// which has the XID_Start property and the rest of which have the XID_Continue
    /// property.
    ///
    /// - The empty string is not an identifier. Use `Option<Ident>`.
    /// - A lifetime is not an identifier. Use `syn::Lifetime` instead.
    #[derive(Clone, Eq, PartialEq)]
    // `transparent`: serializes directly as the identifier string.
    #[serde(transparent)]
    pub struct Ident {
        inner: String,
    }
}
ast_struct! {
    /// A literal string (`"hello"`), byte string (`b"hello"`), character (`'a'`),
    /// byte character (`b'a'`), an integer or floating point number with or without
    /// a suffix (`1`, `1u8`, `2.3`, `2.3f32`).
    ///
    /// Boolean literals like `true` and `false` do not belong here, they are
    /// `Ident`s.
    #[derive(Clone)]
    // `transparent`: serializes directly as the literal's source text.
    #[serde(transparent)]
    pub struct Literal {
        /// The literal exactly as written in source, including any suffix.
        pub(crate) text: String,
    }
}
impl Literal {
fn _new(text: String) -> Self {
Self { text }
}
pub(crate) fn u8_suffixed(n: u8) -> Self {
Self::_new(format!(concat!("{}", stringify!(u8)), n))
}
pub(crate) fn string(t: &str) -> Self {
let mut s = t.chars().flat_map(char::escape_default).collect::<String>();
s.push('"');
s.insert(0, '"');
Self::_new(s)
}
pub(crate) fn character(t: char) -> Self {
Self::_new(format!("'{}'", t.escape_default().collect::<String>()))
}
#[allow(clippy::match_overlapping_arm)]
pub(crate) fn byte_string(bytes: &[u8]) -> Self {
let mut escaped = "b\"".to_string();
for b in bytes {
match *b {
b'\0' => escaped.push_str(r"\0"),
b'\t' => escaped.push_str(r"\t"),
b'\n' => escaped.push_str(r"\n"),
b'\r' => escaped.push_str(r"\r"),
b'"' => escaped.push_str("\\\""),
b'\\' => escaped.push_str("\\\\"),
b'\x20'..=b'\x7E' => escaped.push(*b as char),
_ => escaped.push_str(&format!("\\x{:02X}", b)),
}
}
escaped.push('"');
Self::_new(escaped)
}
}
// TODO: when release the next minor version, remove this.
impl fmt::Display for Literal {
    // Delegates to `String`'s `Display`, so formatter flags (width,
    // alignment, etc.) apply to the literal text.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.text.fmt(f)
    }
}
/// Conversions between `proc_macro2` token types and their serializable
/// adapters. These are lossy in one respect: spans are discarded going in
/// and recreated as `Span::call_site()` coming back out.
mod convert {
    use super::*;
    // TokenStream
    syn_trait_impl!(proc_macro2::TokenStream);
    impl From<&proc_macro2::TokenStream> for TokenStream {
        fn from(other: &proc_macro2::TokenStream) -> Self {
            // `proc_macro2::TokenStream` is only iterable by value, hence
            // the clone before converting each tree.
            Self::_new(
                other
                    .clone()
                    .into_iter()
                    .map::<TokenTree, _>(|x| x.ref_into())
                    .collect(),
            )
        }
    }
    impl From<&TokenStream> for proc_macro2::TokenStream {
        fn from(other: &TokenStream) -> Self {
            other
                .inner
                .iter()
                .map::<proc_macro2::TokenTree, _>(Into::into)
                .collect()
        }
    }
    // TokenTree
    syn_trait_impl!(proc_macro2::TokenTree);
    impl From<&proc_macro2::TokenTree> for TokenTree {
        fn from(other: &proc_macro2::TokenTree) -> Self {
            use super::TokenTree::*;
            use proc_macro2::TokenTree;
            match other {
                TokenTree::Group(t) => Group(t.into()),
                TokenTree::Ident(t) => Ident(t.into()),
                TokenTree::Punct(t) => Punct(t.into()),
                TokenTree::Literal(t) => Literal(t.into()),
            }
        }
    }
    impl From<&TokenTree> for proc_macro2::TokenTree {
        fn from(other: &TokenTree) -> Self {
            use proc_macro2::TokenTree::*;
            match other {
                TokenTree::Group(t) => Group(t.into()),
                TokenTree::Ident(t) => Ident(t.into()),
                TokenTree::Punct(t) => Punct(t.into()),
                TokenTree::Literal(t) => Literal(t.into()),
            }
        }
    }
    // Group
    syn_trait_impl!(proc_macro2::Group);
    impl From<&proc_macro2::Group> for Group {
        fn from(other: &proc_macro2::Group) -> Self {
            Self {
                delimiter: other.delimiter().ref_into(),
                stream: other.stream().ref_into(),
            }
        }
    }
    impl From<&Group> for proc_macro2::Group {
        fn from(other: &Group) -> Self {
            Self::new(other.delimiter.ref_into(), other.stream.ref_into())
        }
    }
    // Delimiter — a 1:1 variant mapping in both directions.
    syn_trait_impl!(proc_macro2::Delimiter);
    impl From<&proc_macro2::Delimiter> for Delimiter {
        fn from(other: &proc_macro2::Delimiter) -> Self {
            use super::Delimiter::*;
            use proc_macro2::Delimiter;
            match other {
                Delimiter::Parenthesis => Parenthesis,
                Delimiter::Brace => Brace,
                Delimiter::Bracket => Bracket,
                Delimiter::None => None,
            }
        }
    }
    impl From<&Delimiter> for proc_macro2::Delimiter {
        fn from(other: &Delimiter) -> Self {
            use proc_macro2::Delimiter::*;
            match other {
                Delimiter::Parenthesis => Parenthesis,
                Delimiter::Brace => Brace,
                Delimiter::Bracket => Bracket,
                Delimiter::None => None,
            }
        }
    }
    // Ident
    syn_trait_impl!(proc_macro2::Ident);
    impl From<&proc_macro2::Ident> for Ident {
        fn from(other: &proc_macro2::Ident) -> Self {
            Self {
                inner: other.to_string(),
            }
        }
    }
    impl From<&Ident> for proc_macro2::Ident {
        fn from(other: &Ident) -> Self {
            Self::new(&other.inner, Span::call_site())
        }
    }
    // Punct
    syn_trait_impl!(proc_macro2::Punct);
    impl From<&proc_macro2::Punct> for Punct {
        fn from(other: &proc_macro2::Punct) -> Self {
            Self {
                op: other.as_char(),
                spacing: other.spacing().ref_into(),
            }
        }
    }
    impl From<&Punct> for proc_macro2::Punct {
        fn from(other: &Punct) -> Self {
            Self::new(other.op, other.spacing.ref_into())
        }
    }
    // Spacing — a 1:1 variant mapping in both directions.
    syn_trait_impl!(proc_macro2::Spacing);
    impl From<&proc_macro2::Spacing> for Spacing {
        fn from(other: &proc_macro2::Spacing) -> Self {
            use super::Spacing::*;
            use proc_macro2::Spacing;
            match other {
                Spacing::Alone => Alone,
                Spacing::Joint => Joint,
            }
        }
    }
    impl From<&Spacing> for proc_macro2::Spacing {
        fn from(other: &Spacing) -> Self {
            use proc_macro2::Spacing::*;
            match other {
                Spacing::Alone => Alone,
                Spacing::Joint => Joint,
            }
        }
    }
    // Literal
    syn_trait_impl!(proc_macro2::Literal);
    impl From<&proc_macro2::Literal> for Literal {
        fn from(other: &proc_macro2::Literal) -> Self {
            Self {
                text: other.to_string(),
            }
        }
    }
    impl From<&Literal> for proc_macro2::Literal {
        // Re-lexes the stored source text. Panics (unwrap/unreachable) if
        // the text is not exactly one literal token — an invariant upheld
        // by how `Literal` values are constructed in this crate.
        fn from(other: &Literal) -> Self {
            use proc_macro2::*;
            let stream = other.text.parse::<TokenStream>().unwrap();
            match stream.into_iter().next().unwrap() {
                TokenTree::Literal(l) => l,
                _ => unreachable!(),
            }
        }
    }
}

View file

@ -1,291 +0,0 @@
use super::*;
ast_enum! {
    /// The possible types that a Rust value could have.
    //
    // Mirrors `syn::Type`. `Infer` and `Never` carry no payload here and
    // serialize as their source tokens `"_"` and `"!"`.
    pub enum Type {
        /// A fixed size array type: `[T; n]`.
        Array(TypeArray),
        /// A bare function type: `fn(usize) -> bool`.
        BareFn(TypeBareFn),
        /// A type contained within invisible delimiters.
        Group(TypeGroup),
        /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
        /// a lifetime.
        ImplTrait(TypeImplTrait),
        /// Indication that a type should be inferred by the compiler: `_`.
        #[serde(rename = "_")]
        Infer,
        /// A macro in the type position.
        Macro(TypeMacro),
        /// The never type: `!`.
        #[serde(rename = "!")]
        Never,
        /// A parenthesized type equivalent to the inner type.
        Paren(TypeParen),
        /// A path like `std::slice::Iter`, optionally qualified with a
        /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
        Path(TypePath),
        /// A raw pointer type: `*const T` or `*mut T`.
        Ptr(TypePtr),
        /// A reference type: `&'a T` or `&'a mut T`.
        Reference(TypeReference),
        /// A dynamically sized slice type: `[T]`.
        Slice(TypeSlice),
        /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
        /// trait or a lifetime.
        TraitObject(TypeTraitObject),
        /// A tuple type: `(A, B, C, String)`.
        Tuple(TypeTuple),
        /// Tokens in type position not interpreted by Syn.
        Verbatim(TokenStream),
        // Reserved so new variants can be added without a breaking change.
        #[doc(hidden)]
        __Nonexhaustive,
    }
}
ast_struct! {
    /// A fixed size array type: `[T; n]`.
    pub struct TypeArray {
        pub(crate) elem: Box<Type>,
        /// The length expression after `;`.
        pub(crate) len: Expr,
    }
}
ast_struct! {
    /// A bare function type: `fn(usize) -> bool`.
    pub struct TypeBareFn {
        /// `for<'a, ...>` lifetime binder, if present.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) lifetimes: Option<BoundLifetimes>,
        #[serde(rename = "unsafe")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) unsafety: bool,
        /// `extern "C"` etc., if present.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) abi: Option<Abi>,
        pub(crate) inputs: Punctuated<BareFnArg>,
        /// Trailing `...` for variadic foreign functions.
        pub(crate) variadic: Option<Variadic>,
        pub(crate) output: ReturnType,
    }
}
ast_struct! {
    /// A type contained within invisible delimiters.
    pub struct TypeGroup {
        pub(crate) elem: Box<Type>,
    }
}
ast_struct! {
    /// An `impl Bound1 + Bound2 + Bound3` type where `Bound` is a trait or
    /// a lifetime.
    pub struct TypeImplTrait {
        pub(crate) bounds: Punctuated<TypeParamBound>,
    }
}
ast_struct! {
    /// A macro in the type position.
    pub struct TypeMacro {
        // Flattened: the macro's fields appear inline in the serialized form.
        #[serde(flatten)]
        pub(crate) mac: Macro,
    }
}
ast_struct! {
    /// A parenthesized type equivalent to the inner type.
    pub struct TypeParen {
        pub(crate) elem: Box<Type>,
    }
}
ast_struct! {
    /// A path like `std::slice::Iter`, optionally qualified with a
    /// self-type as in `<Vec<T> as SomeTrait>::Associated`.
    pub struct TypePath {
        /// The `<T as Trait>` qualifier, if present.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) qself: Option<QSelf>,
        #[serde(flatten)]
        pub(crate) path: Path,
    }
}
ast_struct! {
    /// A raw pointer type: `*const T` or `*mut T`.
    //
    // Exactly one of `const_token` / `mutability` is true in valid syntax.
    pub struct TypePtr {
        #[serde(rename = "const")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) const_token: bool,
        #[serde(rename = "mut")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) mutability: bool,
        pub(crate) elem: Box<Type>,
    }
}
ast_struct! {
    /// A reference type: `&'a T` or `&'a mut T`.
    pub struct TypeReference {
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) lifetime: Option<Lifetime>,
        #[serde(rename = "mut")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) mutability: bool,
        pub(crate) elem: Box<Type>,
    }
}
ast_struct! {
    /// A dynamically sized slice type: `[T]`.
    pub struct TypeSlice {
        pub(crate) elem: Box<Type>,
    }
}
ast_struct! {
    /// A trait object type `Bound1 + Bound2 + Bound3` where `Bound` is a
    /// trait or a lifetime.
    pub struct TypeTraitObject {
        /// Whether the optional `dyn` keyword was written.
        #[serde(rename = "dyn")]
        #[serde(default, skip_serializing_if = "not")]
        pub(crate) dyn_token: bool,
        pub(crate) bounds: Punctuated<TypeParamBound>,
    }
}
ast_struct! {
    /// A tuple type: `(A, B, C, String)`.
    pub struct TypeTuple {
        pub(crate) elems: Punctuated<Type>,
    }
}
ast_struct! {
    /// The binary interface of a function: `extern "C"`.
    pub struct Abi {
        /// The ABI string, e.g. `"C"`; `None` for a bare `extern`.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) name: Option<LitStr>,
    }
}
ast_struct! {
    /// An argument in a function type: the `usize` in `fn(usize) -> bool`.
    pub struct BareFnArg {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
        /// Optional argument name, as in `fn(len: usize)`.
        #[serde(default, skip_serializing_if = "Option::is_none")]
        pub(crate) name: Option<Ident>,
        pub(crate) ty: Type,
    }
}
ast_struct! {
    /// The variadic argument of a foreign function.
    //
    // Carries no data beyond optional attributes.
    pub struct Variadic {
        #[serde(default, skip_serializing_if = "Vec::is_empty")]
        pub(crate) attrs: Vec<Attribute>,
    }
}
ast_struct! {
    /// Return type of a function signature.
    //
    // `None` represents the default `()` return (no `->` written);
    // `transparent` makes it serialize as just the type (or null).
    #[derive(Default)]
    #[serde(transparent)]
    pub struct ReturnType {
        ty: Option<Box<Type>>,
    }
}
/// Conversions between `syn` type nodes and the serializable adapters
/// above. Token/span details that carry no information (e.g. `!` and `_`
/// tokens) are reconstructed with `default()` on the way back.
mod convert {
    use super::*;
    // Type
    syn_trait_impl!(syn::Type);
    impl From<&syn::Type> for Type {
        fn from(other: &syn::Type) -> Self {
            use super::Type::*;
            use syn::Type;
            match other {
                Type::Slice(x) => Slice(x.into()),
                Type::Array(x) => Array(x.into()),
                Type::Ptr(x) => Ptr(x.into()),
                Type::Reference(x) => Reference(x.into()),
                Type::BareFn(x) => BareFn(x.into()),
                Type::Never(_) => Never,
                Type::Tuple(x) => Tuple(x.into()),
                Type::Path(x) => Path(x.into()),
                Type::TraitObject(x) => TraitObject(x.into()),
                Type::ImplTrait(x) => ImplTrait(x.into()),
                Type::Paren(x) => Paren(x.into()),
                Type::Group(x) => Group(x.into()),
                Type::Infer(_) => Infer,
                Type::Macro(x) => Macro(x.into()),
                Type::Verbatim(x) => Verbatim(x.into()),
                // `syn::Type` is non-exhaustive; any variant unknown to
                // this crate is a bug here, hence the panic.
                _ => unreachable!(),
            }
        }
    }
    impl From<&Type> for syn::Type {
        fn from(other: &Type) -> Self {
            use syn::Type::*;
            match other {
                Type::Slice(x) => Slice(x.into()),
                Type::Array(x) => Array(x.into()),
                Type::Ptr(x) => Ptr(x.into()),
                Type::Reference(x) => Reference(x.into()),
                Type::BareFn(x) => BareFn(x.into()),
                // Payload-less variants rebuild their single token.
                Type::Never => Never(syn::TypeNever {
                    bang_token: default(),
                }),
                Type::Tuple(x) => Tuple(x.into()),
                Type::Path(x) => Path(x.into()),
                Type::TraitObject(x) => TraitObject(x.into()),
                Type::ImplTrait(x) => ImplTrait(x.into()),
                Type::Paren(x) => Paren(x.into()),
                Type::Group(x) => Group(x.into()),
                Type::Infer => Infer(syn::TypeInfer {
                    underscore_token: default(),
                }),
                Type::Macro(x) => Macro(x.into()),
                Type::Verbatim(x) => Verbatim(x.into()),
                // Covers the hidden `__Nonexhaustive` variant.
                _ => unreachable!(),
            }
        }
    }
    // ReturnType
    syn_trait_impl!(syn::ReturnType);
    impl From<&syn::ReturnType> for ReturnType {
        fn from(other: &syn::ReturnType) -> Self {
            use syn::ReturnType;
            match other {
                ReturnType::Default => Self { ty: None },
                ReturnType::Type(_, x) => Self {
                    ty: Some(x.map_into()),
                },
            }
        }
    }
    impl From<&ReturnType> for syn::ReturnType {
        fn from(other: &ReturnType) -> Self {
            use syn::ReturnType;
            match &other.ty {
                None => ReturnType::Default,
                // The `->` arrow token is synthesized via `default()`.
                Some(x) => ReturnType::Type(default(), x.map_into()),
            }
        }
    }
}

File diff suppressed because it is too large Load diff

View file

@ -1,740 +0,0 @@
use syn::*;
#[test]
fn test_unit() {
    let raw = "struct Unit;";
    let json = r#"
    {
        "struct": {
            "ident": "Unit",
            "fields": "unit"
        }
    }
    "#;
    // Parse the Rust source directly...
    let parsed: Item = syn::parse_str(raw).unwrap();
    // ...and rebuild the same item from its JSON form; both must agree.
    let adapter: syn_serde::Item = serde_json::from_str(json).unwrap();
    let roundtripped = Item::from(&adapter);
    assert_eq!(roundtripped, parsed);
}
#[test]
fn test_struct() {
    let raw = "
        #[derive(Debug, Clone)]
        pub struct Item {
            pub ident: Ident,
            pub attrs: Vec<Attribute>
        }
    ";
    let json = r#"
    {
        "struct": {
            "attrs": [
                {
                    "style": "outer",
                    "path": {
                        "segments": [
                            {
                                "ident": "derive"
                            }
                        ]
                    },
                    "tokens": [
                        {
                            "group": {
                                "delimiter": "parenthesis",
                                "stream": [
                                    {
                                        "ident": "Debug"
                                    },
                                    {
                                        "punct": {
                                            "op": ",",
                                            "spacing": "alone"
                                        }
                                    },
                                    {
                                        "ident": "Clone"
                                    }
                                ]
                            }
                        }
                    ]
                }
            ],
            "vis": "pub",
            "ident": "Item",
            "fields": {
                "named": [
                    {
                        "vis": "pub",
                        "ident": "ident",
                        "colon_token": true,
                        "ty": {
                            "path": {
                                "segments": [
                                    {
                                        "ident": "Ident"
                                    }
                                ]
                            }
                        }
                    },
                    {
                        "vis": "pub",
                        "ident": "attrs",
                        "colon_token": true,
                        "ty": {
                            "path": {
                                "segments": [
                                    {
                                        "ident": "Vec",
                                        "arguments": {
                                            "angle_bracketed": {
                                                "args": [
                                                    {
                                                        "type": {
                                                            "path": {
                                                                "segments": [
                                                                    {
                                                                        "ident": "Attribute"
                                                                    }
                                                                ]
                                                            }
                                                        }
                                                    }
                                                ]
                                            }
                                        }
                                    }
                                ]
                            }
                        }
                    }
                ]
            }
        }
    }
    "#;
    // Parse the Rust source directly...
    let parsed: Item = syn::parse_str(raw).unwrap();
    // ...and rebuild the same item from its JSON form; both must agree.
    let adapter: syn_serde::Item = serde_json::from_str(json).unwrap();
    let roundtripped = Item::from(&adapter);
    assert_eq!(roundtripped, parsed);
}
#[test]
fn test_union() {
    let raw = "
        union MaybeUninit<T> {
            uninit: (),
            value: T
        }
    ";
    let json = r#"
    {
        "union": {
            "ident": "MaybeUninit",
            "generics": {
                "params": [
                    {
                        "type": {
                            "ident": "T"
                        }
                    }
                ]
            },
            "fields": [
                {
                    "ident": "uninit",
                    "colon_token": true,
                    "ty": {
                        "tuple": {
                            "elems": []
                        }
                    }
                },
                {
                    "ident": "value",
                    "colon_token": true,
                    "ty": {
                        "path": {
                            "segments": [
                                {
                                    "ident": "T"
                                }
                            ]
                        }
                    }
                }
            ]
        }
    }
    "#;
    // Parse the Rust source directly...
    let parsed: Item = syn::parse_str(raw).unwrap();
    // ...and rebuild the same item from its JSON form; both must agree.
    let adapter: syn_serde::Item = serde_json::from_str(json).unwrap();
    let roundtripped = Item::from(&adapter);
    assert_eq!(roundtripped, parsed);
}
#[test]
fn test_enum() {
    let raw = r#"
        /// See the std::result module documentation for details.
        #[must_use]
        pub enum Result<T, E> {
            Ok(T),
            Err(E),
            Surprise = 0isize,
            // Smuggling data into a proc_macro_derive,
            // in the style of https://github.com/dtolnay/proc-macro-hack
            ProcMacroHack = (0, "data").0
        }
    "#;
    let json = r#"
    {
        "enum": {
            "attrs": [
                {
                    "style": "outer",
                    "path": {
                        "segments": [
                            {
                                "ident": "doc"
                            }
                        ]
                    },
                    "tokens": [
                        {
                            "punct": {
                                "op": "=",
                                "spacing": "alone"
                            }
                        },
                        {
                            "lit": "\" See the std::result module documentation for details.\""
                        }
                    ]
                },
                {
                    "style": "outer",
                    "path": {
                        "segments": [
                            {
                                "ident": "must_use"
                            }
                        ]
                    }
                }
            ],
            "vis": "pub",
            "ident": "Result",
            "generics": {
                "params": [
                    {
                        "type": {
                            "ident": "T"
                        }
                    },
                    {
                        "type": {
                            "ident": "E"
                        }
                    }
                ]
            },
            "variants": [
                {
                    "ident": "Ok",
                    "fields": {
                        "unnamed": [
                            {
                                "ty": {
                                    "path": {
                                        "segments": [
                                            {
                                                "ident": "T"
                                            }
                                        ]
                                    }
                                }
                            }
                        ]
                    }
                },
                {
                    "ident": "Err",
                    "fields": {
                        "unnamed": [
                            {
                                "ty": {
                                    "path": {
                                        "segments": [
                                            {
                                                "ident": "E"
                                            }
                                        ]
                                    }
                                }
                            }
                        ]
                    }
                },
                {
                    "ident": "Surprise",
                    "fields": "unit",
                    "discriminant": {
                        "lit": {
                            "int": "0isize"
                        }
                    }
                },
                {
                    "ident": "ProcMacroHack",
                    "fields": "unit",
                    "discriminant": {
                        "field": {
                            "base": {
                                "tuple": {
                                    "elems": [
                                        {
                                            "lit": {
                                                "int": "0"
                                            }
                                        },
                                        {
                                            "lit": {
                                                "str": "\"data\""
                                            }
                                        }
                                    ]
                                }
                            },
                            "index": 0
                        }
                    }
                }
            ]
        }
    }
    "#;
    // Parse the Rust source directly...
    let parsed: Item = syn::parse_str(raw).unwrap();
    // ...and rebuild the same item from its JSON form; both must agree.
    let adapter: syn_serde::Item = serde_json::from_str(json).unwrap();
    let roundtripped = Item::from(&adapter);
    assert_eq!(roundtripped, parsed);
}
#[test]
fn test_attr_with_path() {
    let raw = r#"
        #[::attr_args::identity
            fn main() { assert_eq!(foo(), "Hello, world!"); }]
        struct Dummy;
    "#;
    let json = r#"
    {
        "struct": {
            "attrs": [
                {
                    "style": "outer",
                    "path": {
                        "leading_colon": true,
                        "segments": [
                            {
                                "ident": "attr_args"
                            },
                            {
                                "ident": "identity"
                            }
                        ]
                    },
                    "tokens": [
                        {
                            "ident": "fn"
                        },
                        {
                            "ident": "main"
                        },
                        {
                            "group": {
                                "delimiter": "parenthesis",
                                "stream": []
                            }
                        },
                        {
                            "group": {
                                "delimiter": "brace",
                                "stream": [
                                    {
                                        "ident": "assert_eq"
                                    },
                                    {
                                        "punct": {
                                            "op": "!",
                                            "spacing": "alone"
                                        }
                                    },
                                    {
                                        "group": {
                                            "delimiter": "parenthesis",
                                            "stream": [
                                                {
                                                    "ident": "foo"
                                                },
                                                {
                                                    "group": {
                                                        "delimiter": "parenthesis",
                                                        "stream": []
                                                    }
                                                },
                                                {
                                                    "punct": {
                                                        "op": ",",
                                                        "spacing": "alone"
                                                    }
                                                },
                                                {
                                                    "lit": "\"Hello, world!\""
                                                }
                                            ]
                                        }
                                    },
                                    {
                                        "punct": {
                                            "op": ";",
                                            "spacing": "alone"
                                        }
                                    }
                                ]
                            }
                        }
                    ]
                }
            ],
            "ident": "Dummy",
            "fields": "unit"
        }
    }
    "#;
    // Parse the Rust source directly...
    let parsed: Item = syn::parse_str(raw).unwrap();
    // ...and rebuild the same item from its JSON form; both must agree.
    let adapter: syn_serde::Item = serde_json::from_str(json).unwrap();
    let roundtripped = Item::from(&adapter);
    assert_eq!(roundtripped, parsed);
}
#[test]
fn test_attr_with_non_mod_style_path() {
    // An attribute whose argument tokens (`<T>`) are not mod-style path
    // segments; they must survive as raw tokens.
    let rust_src = r#"
    #[inert <T>]
    struct S;
    "#;
    // The expected serde representation of the same item.
    let json_src = r#"
    {
      "struct": {
        "attrs": [
          {
            "style": "outer",
            "path": {
              "segments": [
                {
                  "ident": "inert"
                }
              ]
            },
            "tokens": [
              {
                "punct": {
                  "op": "<",
                  "spacing": "alone"
                }
              },
              {
                "ident": "T"
              },
              {
                "punct": {
                  "op": ">",
                  "spacing": "alone"
                }
              }
            ]
          }
        ],
        "ident": "S",
        "fields": "unit"
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}
#[test]
fn test_attr_with_mod_style_path_with_self() {
    // A mod-style attribute path that ends in the keyword `self`.
    let rust_src = r#"
    #[foo::self]
    struct S;
    "#;
    // The expected serde representation of the same item.
    let json_src = r#"
    {
      "struct": {
        "attrs": [
          {
            "style": "outer",
            "path": {
              "segments": [
                {
                  "ident": "foo"
                },
                {
                  "ident": "self"
                }
              ]
            }
          }
        ],
        "ident": "S",
        "fields": "unit"
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}
#[test]
fn test_pub_restricted() {
    // Restricted visibility with an explicit `in` path, on both the struct
    // and its tuple field.
    // Taken from tests/rust/src/test/ui/resolve/auxiliary/privacy-struct-ctor.rs
    let rust_src = r#"
    pub(in m) struct Z(pub(in m::n) u8);
    "#;
    // The expected serde representation of the same item.
    let json_src = r#"
    {
      "struct": {
        "vis": {
          "restricted": {
            "in_token": true,
            "path": {
              "segments": [
                {
                  "ident": "m"
                }
              ]
            }
          }
        },
        "ident": "Z",
        "fields": {
          "unnamed": [
            {
              "vis": {
                "restricted": {
                  "in_token": true,
                  "path": {
                    "segments": [
                      {
                        "ident": "m"
                      },
                      {
                        "ident": "n"
                      }
                    ]
                  }
                }
              },
              "ty": {
                "path": {
                  "segments": [
                    {
                      "ident": "u8"
                    }
                  ]
                }
              }
            }
          ]
        }
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}
#[test]
fn test_vis_crate() {
    // Bare `crate` visibility (RFC 2126 syntax) serializes to the string "crate".
    let rust_src = r#"
    crate struct S;
    "#;
    // The expected serde representation of the same item.
    let json_src = r#"
    {
      "struct": {
        "vis": "crate",
        "ident": "S",
        "fields": "unit"
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}
#[test]
fn test_pub_restricted_crate() {
    // `pub(crate)` — restricted visibility with a bare `crate` path and no
    // `in` token.
    let rust_src = r#"
    pub(crate) struct S;
    "#;
    // The expected serde representation of the same item.
    let json_src = r#"
    {
      "struct": {
        "vis": {
          "restricted": {
            "path": {
              "segments": [
                {
                  "ident": "crate"
                }
              ]
            }
          }
        },
        "ident": "S",
        "fields": "unit"
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}
#[test]
fn test_pub_restricted_super() {
    // `pub(super)` — restricted visibility with a bare `super` path and no
    // `in` token.
    let rust_src = r#"
    pub(super) struct S;
    "#;
    // The expected serde representation of the same item.
    let json_src = r#"
    {
      "struct": {
        "vis": {
          "restricted": {
            "path": {
              "segments": [
                {
                  "ident": "super"
                }
              ]
            }
          }
        },
        "ident": "S",
        "fields": "unit"
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}
#[test]
fn test_pub_restricted_in_super() {
    // `pub(in super)` — same path as `pub(super)` but with the `in` token
    // recorded explicitly.
    let rust_src = r#"
    pub(in super) struct S;
    "#;
    // The expected serde representation of the same item.
    let json_src = r#"
    {
      "struct": {
        "vis": {
          "restricted": {
            "in_token": true,
            "path": {
              "segments": [
                {
                  "ident": "super"
                }
              ]
            }
          }
        },
        "ident": "S",
        "fields": "unit"
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}
#[test]
fn test_ambiguous_crate() {
    // The field type is `(crate::X)` not `crate (::X)`.
    let rust_src = "struct S(crate::X);";
    // The expected serde representation of the same item: `crate` parses as
    // the first path segment, not as a visibility.
    let json_src = r#"
    {
      "struct": {
        "ident": "S",
        "fields": {
          "unnamed": [
            {
              "ty": {
                "path": {
                  "segments": [
                    {
                      "ident": "crate"
                    },
                    {
                      "ident": "X"
                    }
                  ]
                }
              }
            }
          ]
        }
      }
    }
    "#;
    // Deserialize the JSON form and convert it back into a `syn` tree...
    let deserialized: syn_serde::Item = serde_json::from_str(json_src).unwrap();
    let from_json = Item::from(&deserialized);
    // ...which must match what `syn` produces when parsing the source directly.
    let parsed = syn::parse_str(rust_src).unwrap();
    assert_eq!(from_json, parsed);
}