Remove proc_macro_quote feature
parent c036b7c7c5
commit 78f0e9b1e6
@@ -17,6 +17,8 @@ proc-macro = true
 [dependencies]
 lalrpop-util = "0.16.1"
 ansi_term = "0.11.0"
+proc-macro2 = { version = "0.4.24", features = ["nightly"] }
+quote = "0.6.10"
 
 [build-dependencies]
 lalrpop = "0.16.1"
@@ -1,4 +1,4 @@
-use proc_macro::{Ident, Span, TokenStream};
+use proc_macro2::{Ident, Span, TokenStream};
 
 use map::StringyMap;
 
@@ -1,7 +1,9 @@
-use proc_macro::{quote, Ident, Literal, Span, TokenStream, TokenTree};
+use proc_macro2::{Ident, Literal, Span, TokenStream, TokenTree};
+use quote::quote;
 
 use config::{global_attrs, ATTR_EVENTS};
 use error::ParseError;
+use ident;
 use lexer::{Lexer, Token};
 use map::StringyMap;
 use parser;
@@ -41,7 +43,7 @@ impl Declare {
 
     fn attrs(&self) -> impl Iterator<Item = (TokenTree, TokenStream, TokenTree)> + '_ {
         self.attrs.iter().map(|(key, value)| {
-            let attr_name: TokenTree = Ident::new_raw(&key.to_string(), key.span()).into();
+            let attr_name: TokenTree = ident::new_raw(&key.to_string(), key.span()).into();
             let attr_type = value.clone();
             let attr_str = Literal::string(&key.to_string()).into();
             (attr_name, attr_type, attr_str)
@@ -73,13 +75,13 @@ impl Declare {
     fn attr_struct(&self) -> TokenStream {
         let mut body = TokenStream::new();
         for (attr_name, attr_type, _) in self.attrs() {
-            body.extend(quote!( pub $attr_name: Option<$attr_type>, ));
+            body.extend(quote!( pub #attr_name: Option<#attr_type>, ));
         }
 
         let attr_type_name = self.attr_type_name();
         quote!(
-            pub struct $attr_type_name {
-                $body
+            pub struct #attr_type_name {
+                #body
             }
         )
     }
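Background note on the interpolation change above (my own summary, not text from the commit): the stable `quote` crate interpolates variables with `#name` rather than the `$name` syntax of the unstable `proc_macro::quote!`, and its `quote!` macro produces a `proc_macro2::TokenStream`. A minimal sketch of the new pattern, using a hypothetical `field` helper with made-up names:

    use proc_macro2::{Ident, TokenStream};
    use quote::quote;

    // Builds `pub <name>: Option<<ty>>,` the same way attr_struct does above;
    // `name` and `ty` are placeholders for illustration only.
    fn field(name: &Ident, ty: &TokenStream) -> TokenStream {
        quote!( pub #name: Option<#ty>, )
    }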
@@ -91,21 +93,21 @@ impl Declare {
         let mut body = TokenStream::new();
 
         for (child_name, child_type, _) in self.req_children() {
-            body.extend(quote!( pub $child_name: Box<$child_type<T>>, ));
+            body.extend(quote!( pub #child_name: Box<#child_type<T>>, ));
         }
 
         if let Some(child_constraint) = &self.opt_children {
             let child_constraint = child_constraint.clone();
-            body.extend(quote!(pub children: Vec<Box<$child_constraint<T>>>,));
+            body.extend(quote!(pub children: Vec<Box<#child_constraint<T>>>,));
         }
 
         quote!(
-            pub struct $elem_name<T> where T: ::OutputType {
+            pub struct #elem_name<T> where T: ::OutputType {
                 phantom_output: std::marker::PhantomData<T>,
-                pub attrs: $attr_type_name,
+                pub attrs: #attr_type_name,
                 pub data_attributes: Vec<(&'static str, String)>,
                 pub events: ::events::Events<T>,
-                $body
+                #body
             }
         )
     }
@@ -116,34 +118,34 @@ impl Declare {
 
         let mut args = TokenStream::new();
         for (child_name, child_type, _) in self.req_children() {
-            args.extend(quote!( $child_name: Box<$child_type<T>>, ));
+            args.extend(quote!( #child_name: Box<#child_type<T>>, ));
         }
 
         let mut attrs = TokenStream::new();
         for (attr_name, _, _) in self.attrs() {
-            attrs.extend(quote!( $attr_name: None, ));
+            attrs.extend(quote!( #attr_name: None, ));
         }
 
         let mut body = TokenStream::new();
         body.extend(quote!(
-            attrs: $attr_type_name { $attrs },
+            attrs: #attr_type_name { #attrs },
         ));
         body.extend(quote!(data_attributes: Vec::new(),));
 
         for (child_name, _, _) in self.req_children() {
-            body.extend(quote!( $child_name, ));
+            body.extend(quote!( #child_name, ));
         }
         if self.opt_children.is_some() {
            body.extend(quote!(children: Vec::new()));
         }
 
         quote!(
-            impl<T> $elem_name<T> where T: ::OutputType {
-                pub fn new($args) -> Self {
-                    $elem_name {
+            impl<T> #elem_name<T> where T: ::OutputType {
+                pub fn new(#args) -> Self {
+                    #elem_name {
                         phantom_output: std::marker::PhantomData,
                         events: ::events::Events::default(),
-                        $body
+                        #body
                     }
                 }
             }
@@ -155,7 +157,7 @@ impl Declare {
         let mut req_children = TokenStream::new();
         for (child_name, _, _) in self.req_children() {
             req_children.extend(quote!(
-                children.push(self.$child_name.vnode());
+                children.push(self.#child_name.vnode());
             ));
         }
         let mut opt_children = TokenStream::new();
@@ -168,23 +170,23 @@ impl Declare {
         let mut push_attrs = TokenStream::new();
         for (attr_name, _, attr_str) in self.attrs() {
             push_attrs.extend(quote!(
-                if let Some(ref value) = self.attrs.$attr_name {
-                    attributes.push(($attr_str, value.to_string()));
+                if let Some(ref value) = self.attrs.#attr_name {
+                    attributes.push((#attr_str, value.to_string()));
                 }
             ));
         }
 
         quote!(
             let mut attributes = Vec::new();
-            $push_attrs
+            #push_attrs
             attributes.extend(self.data_attributes.clone());
 
             let mut children = Vec::new();
-            $req_children
-            $opt_children
+            #req_children
+            #opt_children
 
             ::dom::VNode::Element(::dom::VElement {
-                name: $elem_name,
+                name: #elem_name,
                 attributes,
                 events: &mut self.events,
                 children
@@ -196,9 +198,9 @@ impl Declare {
         let elem_name = self.elem_name();
         let vnode = self.impl_vnode();
         quote!(
-            impl<T> ::dom::Node<T> for $elem_name<T> where T: ::OutputType {
+            impl<T> ::dom::Node<T> for #elem_name<T> where T: ::OutputType {
                 fn vnode(&'_ mut self) -> ::dom::VNode<'_, T> {
-                    $vnode
+                    #vnode
                 }
             }
         )
@@ -208,38 +210,38 @@ impl Declare {
         let name: TokenTree = Literal::string(&self.name.to_string()).into();
         let elem_name = self.elem_name();
 
-        let attrs: TokenStream = self.attrs().map(|(_, _, name)| quote!( $name, )).collect();
+        let attrs: TokenStream = self.attrs().map(|(_, _, name)| quote!( #name, )).collect();
         let reqs: TokenStream = self
             .req_children()
-            .map(|(_, _, name)| quote!( $name, ))
+            .map(|(_, _, name)| quote!( #name, ))
             .collect();
 
         let mut push_attrs = TokenStream::new();
         for (attr_name, _, attr_str) in self.attrs() {
             push_attrs.extend(quote!(
-                if let Some(ref value) = self.attrs.$attr_name {
-                    out.push(($attr_str, value.to_string()));
+                if let Some(ref value) = self.attrs.#attr_name {
+                    out.push((#attr_str, value.to_string()));
                 }
             ));
         }
 
         quote!(
-            impl<T> ::dom::Element<T> for $elem_name<T> where T: ::OutputType {
+            impl<T> ::dom::Element<T> for #elem_name<T> where T: ::OutputType {
                 fn name() -> &'static str {
-                    $name
+                    #name
                 }
 
                 fn attribute_names() -> &'static [&'static str] {
-                    &[ $attrs ]
+                    &[ #attrs ]
                 }
 
                 fn required_children() -> &'static [&'static str] {
-                    &[ $reqs ]
+                    &[ #reqs ]
                 }
 
                 fn attributes(&self) -> Vec<(&'static str, String)> {
                     let mut out = Vec::new();
-                    $push_attrs
+                    #push_attrs
                     for (key, value) in &self.data_attributes {
                         out.push((key, value.to_string()));
                     }
@@ -255,7 +257,7 @@ impl Declare {
         for t in &self.traits {
             let name = t.clone();
             body.extend(quote!(
-                impl<T> $name<T> for $elem_name<T> where T: ::OutputType {}
+                impl<T> #name<T> for #elem_name<T> where T: ::OutputType {}
             ));
         }
         body
@@ -276,7 +278,7 @@ impl Declare {
         let mut print_req_children = TokenStream::new();
         for (child_name, _, _) in self.req_children() {
             print_req_children.extend(quote!(
-                self.$child_name.fmt(f)?;
+                self.#child_name.fmt(f)?;
             ));
         }
 
@@ -286,8 +288,8 @@ impl Declare {
                     write!(f, "/>")
                 } else {
                     write!(f, ">")?;
-                    $print_opt_children
-                    write!(f, "</{}>", $name)
+                    #print_opt_children
+                    write!(f, "</{}>", #name)
                 })
             } else {
                 quote!(write!(f, "/>"))
@@ -295,17 +297,17 @@ impl Declare {
         } else {
             quote!(
                 write!(f, ">")?;
-                $print_req_children
-                $print_opt_children
-                write!(f, "</{}>", $name)
+                #print_req_children
+                #print_opt_children
+                write!(f, "</{}>", #name)
             )
         };
 
         let mut print_attrs = TokenStream::new();
         for (attr_name, _, attr_str) in self.attrs() {
             print_attrs.extend(quote!(
-                if let Some(ref value) = self.attrs.$attr_name {
-                    write!(f, " {}=\"{}\"", $attr_str,
+                if let Some(ref value) = self.attrs.#attr_name {
+                    write!(f, " {}=\"{}\"", #attr_str,
                         ::htmlescape::encode_attribute(&value.to_string()))?;
                 }
             ));
@@ -316,24 +318,24 @@ impl Declare {
             let event_name = TokenTree::Ident(Ident::new(event, Span::call_site()));
             let event_str = TokenTree::Literal(Literal::string(event));
             print_events.extend(quote!(
-                if let Some(ref value) = self.events.$event_name {
-                    write!(f, " on{}=\"{}\"", $event_str,
+                if let Some(ref value) = self.events.#event_name {
+                    write!(f, " on{}=\"{}\"", #event_str,
                         ::htmlescape::encode_attribute(value.render().unwrap().as_str()))?;
                 }
             ));
         }
 
         quote!(
-            impl<T> std::fmt::Display for $elem_name<T> where T: ::OutputType {
+            impl<T> std::fmt::Display for #elem_name<T> where T: ::OutputType {
                 fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
-                    write!(f, "<{}", $name)?;
-                    $print_attrs
+                    write!(f, "<{}", #name)?;
+                    #print_attrs
                     for (key, value) in &self.data_attributes {
                         write!(f, " data-{}=\"{}\"", key,
                             ::htmlescape::encode_attribute(&value))?;
                     }
-                    $print_events
-                    $print_children
+                    #print_events
+                    #print_children
                 }
             }
         )
@@ -1,7 +1,8 @@
 use ansi_term::Style;
 use lalrpop_util::ParseError::*;
 use lexer::Token;
-use proc_macro::{Diagnostic, Ident, Level};
+use proc_macro::{Diagnostic, Level};
+use proc_macro2::Ident;
 
 pub type ParseError = lalrpop_util::ParseError<usize, Token, HtmlParseError>;
 
@@ -44,7 +45,7 @@ pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
     match error {
         InvalidToken { location } => {
             let loc = &input[*location];
-            Diagnostic::spanned(loc.span(), Level::Error, "invalid token")
+            Diagnostic::spanned(loc.span().unstable(), Level::Error, "invalid token")
         }
         UnrecognizedToken {
             token: None,
@@ -52,7 +53,7 @@ pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
         } => {
             let msg = format!("missing {}", pprint_tokens(&expected));
             Diagnostic::spanned(
-                input[0].span().join(input[input.len() - 1].span()).unwrap(),
+                input[0].span().unstable().join(input[input.len() - 1].span().unstable()).unwrap(),
                 Level::Error,
                 "unexpected end of macro",
             )
@@ -63,7 +64,7 @@ pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
             expected,
         } => {
             let msg = format!("expected {}", pprint_tokens(&expected));
-            let mut diag = Diagnostic::spanned(token.span(), Level::Error, msg);
+            let mut diag = Diagnostic::spanned(token.span().unstable(), Level::Error, msg);
             if is_in_node_position(expected) && token.is_ident() {
                 // special case: you probably meant to quote that text
                 diag = diag.help(format!(
@@ -75,14 +76,14 @@ pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
         }
         ExtraToken {
             token: (_, token, _),
-        } => Diagnostic::spanned(token.span(), Level::Error, "superfluous token"),
+        } => Diagnostic::spanned(token.span().unstable(), Level::Error, "superfluous token"),
         User {
             error: HtmlParseError::TagMismatch { open, close },
         } => Diagnostic::spanned(
-            close.span(),
+            close.span().unstable(),
            Level::Error,
             format!("expected closing tag '</{}>', found '</{}>'", open, close),
         )
-        .span_help(open.span(), "opening tag is here:"),
+        .span_help(open.span().unstable(), "opening tag is here:"),
     }
 }
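Background note on the `.unstable()` calls above (an assumption about the API, not text from the commit): with proc-macro2's `nightly` feature enabled, `Span::unstable()` returns the underlying `proc_macro::Span`, which the nightly `Diagnostic` API still requires. A small sketch:

    use proc_macro::{Diagnostic, Level};

    // Unwrap the proc-macro2 span only at the point where a compiler
    // diagnostic is emitted.
    fn report(span: proc_macro2::Span, msg: &str) -> Diagnostic {
        Diagnostic::spanned(span.unstable(), Level::Error, msg)
    }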
@@ -3,8 +3,9 @@ use error::HtmlParseError;
 use html::{Node, Element};
 use declare::Declare;
 use map::StringyMap;
-use proc_macro::{Delimiter, Ident, Literal, Group, TokenTree};
+use proc_macro2::{Delimiter, Ident, Literal, Group, TokenTree};
 use lalrpop_util::ParseError;
+use span;
 
 grammar;
 
@@ -62,8 +63,8 @@ HtmlIdent: Ident = {
         let (span, name) = init.into_iter().fold((None, String::new()), |(span, name), token| {
             (
                 match span {
-                    None => Some(token.span()),
-                    Some(span) => span.join(token.span()),
+                    None => Some(token.span().unstable()),
+                    Some(span) => span.join(token.span().unstable()),
                 },
                 if name.is_empty() {
                     name + &token.to_string()
@@ -72,7 +73,7 @@ HtmlIdent: Ident = {
                 }
             )
         });
-        Ident::new(&name, span.unwrap())
+        Ident::new(&name, span::from_unstable(span.unwrap()))
     }
 };
 
@@ -1,13 +1,16 @@
-use proc_macro::{
-    quote, Delimiter, Diagnostic, Group, Ident, Level, Literal, TokenStream, TokenTree,
-};
+use proc_macro::{Diagnostic, Level};
+use proc_macro2::{Delimiter, Group, Ident, Literal, TokenStream, TokenTree};
+use quote::quote;
 
 use config::{required_children, ATTR_EVENTS};
 use error::ParseError;
+use ident;
 use lexer::{Lexer, Token};
 use map::StringyMap;
 use parser::grammar;
 
+use std::iter::FromIterator;
+
 #[derive(Clone)]
 pub enum Node {
     Element(Element),
@@ -21,7 +24,7 @@ impl Node {
             Node::Element(el) => el.into_token_stream(),
             Node::Text(text) => {
                 let text = TokenTree::Literal(text);
-                quote!(Box::new(typed_html::dom::TextNode::new($text.to_string())))
+                quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string())))
             }
             Node::Block(_) => panic!("cannot have a block in this position"),
         }
@@ -32,22 +35,22 @@ impl Node {
             Node::Element(el) => {
                 let el = el.into_token_stream();
                 quote!(
-                    element.children.push($el);
+                    element.children.push(#el);
                 )
             }
             tx @ Node::Text(_) => {
                 let tx = tx.into_token_stream();
                 quote!(
-                    element.children.push($tx);
+                    element.children.push(#tx);
                 )
             }
             Node::Block(group) => {
                 let group: TokenTree = group.into();
                 quote!(
-                    for child in $group.into_iter() {
+                    for child in #group.into_iter() {
                         element.children.push(child);
                     }
                 )
             }
         }
     }
@@ -86,7 +89,7 @@ fn extract_event_handlers(
             let event_name = &key_name[prefix.len()..];
             if ATTR_EVENTS.binary_search(&event_name).is_ok() {
                 let value = attrs.remove(&key).unwrap();
-                events.insert(Ident::new_raw(event_name, key.span()), value);
+                events.insert(ident::new_raw(event_name, key.span()), value);
             }
         }
     }
@@ -97,13 +100,13 @@ fn process_value(value: &TokenTree) -> TokenStream {
     match value {
         TokenTree::Group(g) if g.delimiter() == Delimiter::Bracket => {
             let content = g.stream();
-            quote!( [ $content ] )
+            quote!( [ #content ] )
         }
         TokenTree::Group(g) if g.delimiter() == Delimiter::Parenthesis => {
             let content = g.stream();
-            quote!( ( $content ) )
+            quote!( ( #content ) )
         }
-        v => v.clone().into(),
+        v => TokenStream::from_iter(vec![v.clone()]),
     }
 }
 
@@ -120,7 +123,7 @@ impl Element {
         let req_names = required_children(&name_str);
         if req_names.len() > self.children.len() {
             Diagnostic::spanned(
-                name.span(),
+                name.span().unstable(),
                 Level::Error,
                 format!(
                     "<{}> requires {} children but there are only {}",
@@ -137,7 +140,7 @@ impl Element {
         let attrs = self.attributes.iter().map(|(key, value)| {
             (
                 key.to_string(),
-                TokenTree::Ident(Ident::new_raw(&key.to_string(), key.span())),
+                TokenTree::Ident(ident::new_raw(&key.to_string(), key.span())),
                 value,
             )
         });
@@ -158,15 +161,15 @@ impl Element {
                     let span = value.span();
                     let pos = format!(
                         "{}:{}:{}",
-                        span.source_file().path().to_str().unwrap_or("unknown"),
-                        span.start().line,
-                        span.start().column
+                        span.unstable().source_file().path().to_str().unwrap_or("unknown"),
+                        span.unstable().start().line,
+                        span.unstable().start().column
                     );
                     let pos_str: TokenTree = Literal::string(&pos).into();
                     body.extend(quote!(
-                        element.attrs.$key = Some($value.parse().unwrap_or_else(|err| {
+                        element.attrs.#key = Some(#value.parse().unwrap_or_else(|err| {
                             eprintln!("ERROR: {}: <{} {}={:?}> failed to parse attribute value: {}",
-                                      $pos_str, $tag_name, $attr_str, $value, err);
+                                      #pos_str, #tag_name, #attr_str, #value, err);
                             panic!("failed to parse string literal");
                         }));
                     ));
@@ -174,7 +177,7 @@ impl Element {
                 value => {
                     let value = process_value(value);
                     body.extend(quote!(
-                        element.attrs.$key = Some(std::convert::Into::into($value));
+                        element.attrs.#key = Some(std::convert::Into::into(#value));
                     ));
                 }
             }
@@ -184,7 +187,7 @@ impl Element {
             .map(|(k, v)| (TokenTree::from(Literal::string(&k)), v.clone()))
         {
             body.extend(quote!(
-                element.data_attributes.push(($key, $value.into()));
+                element.data_attributes.push((#key, #value.into()));
             ));
         }
         body.extend(opt_children);
@@ -193,19 +196,19 @@ impl Element {
             let key = TokenTree::Ident(key.clone());
             let value = process_value(value);
             body.extend(quote!(
-                element.events.$key = Some(typed_html::events::IntoEventHandler::into_event_handler($value));
+                element.events.#key = Some(typed_html::events::IntoEventHandler::into_event_handler(#value));
             ));
         }
 
         let mut args = TokenStream::new();
         for arg in req_children {
-            args.extend(quote!( $arg, ));
+            args.extend(quote!( #arg, ));
         }
 
         quote!(
             {
-                let mut element = typed_html::elements::$typename::new($args);
-                $body
+                let mut element = typed_html::elements::#typename::new(#args);
+                #body
                 Box::new(element)
             }
         )
@@ -0,0 +1,17 @@
+use proc_macro2::{Ident, Span, TokenStream, TokenTree};
+
+use std::str::FromStr;
+
+pub fn new_raw(string: &str, span: Span) -> Ident {
+    // Validate that it is an ident.
+    let _ = Ident::new(string, span);
+
+    let s = format!("r#{}", string);
+    let tts = TokenStream::from_str(&s).unwrap();
+    let mut ident = match tts.into_iter().next().unwrap() {
+        TokenTree::Ident(ident) => ident,
+        _ => unreachable!(),
+    };
+    ident.set_span(span);
+    ident
+}
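A short note on the new helper above (background, not part of the diff): `Ident::new_raw` is nightly-only on `proc_macro`, so `ident::new_raw` builds a raw identifier by parsing `r#<name>` through `TokenStream::from_str` instead. Hypothetical usage inside the crate, with a made-up attribute name:

    use proc_macro2::Span;

    // `type` is a Rust keyword but a legal HTML attribute name, so it must
    // become the raw identifier `r#type` on the Rust side.
    let attr = ident::new_raw("type", Span::call_site());
    // `attr` now displays as `r#type`.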
@@ -1,5 +1,7 @@
 use error::HtmlParseError;
-use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
+use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
 
+use std::iter::FromIterator;
+
 pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
 
@@ -51,7 +53,7 @@ impl From<Token> for TokenTree {
 
 impl From<Token> for TokenStream {
     fn from(token: Token) -> Self {
-        TokenTree::from(token).into()
+        TokenStream::from_iter(vec![TokenTree::from(token)])
     }
 }
 
@@ -1,22 +1,26 @@
+#![recursion_limit = "128"]
 #![feature(proc_macro_hygiene)]
-#![feature(proc_macro_quote)]
 #![feature(proc_macro_span)]
 #![feature(proc_macro_diagnostic)]
-#![feature(proc_macro_raw_ident)]
 
 extern crate ansi_term;
 extern crate lalrpop_util;
 extern crate proc_macro;
+extern crate proc_macro2;
+extern crate quote;
 
-use proc_macro::{quote, TokenStream};
+use proc_macro::TokenStream;
+use quote::quote;
 
 mod config;
 mod declare;
 mod error;
 mod html;
+mod ident;
 mod lexer;
 mod map;
 mod parser;
+mod span;
 
 /// Construct a DOM tree.
 ///
@@ -25,34 +29,34 @@ mod parser;
 /// [typed_html]: ../typed_html/index.html
 #[proc_macro]
 pub fn html(input: TokenStream) -> TokenStream {
-    let stream = lexer::unroll_stream(input, false);
+    let stream = lexer::unroll_stream(input.into(), false);
     let result = html::expand_html(&stream);
-    match result {
+    TokenStream::from(match result {
         Err(err) => {
             error::parse_error(&stream, &err).emit();
             quote!(panic!())
         }
         Ok(node) => node.into_token_stream(),
-    }
+    })
 }
 
 /// This macro is used by `typed_html` internally to generate types and
 /// implementations for HTML elements.
 #[proc_macro]
 pub fn declare_elements(input: TokenStream) -> TokenStream {
-    let stream = lexer::keywordise(lexer::unroll_stream(input, true));
+    let stream = lexer::keywordise(lexer::unroll_stream(input.into(), true));
     let result = declare::expand_declare(&stream);
-    match result {
+    TokenStream::from(match result {
         Err(err) => {
             error::parse_error(&stream, &err).emit();
             quote!(panic!())
         }
         Ok(decls) => {
-            let mut out = TokenStream::new();
+            let mut out = proc_macro2::TokenStream::new();
             for decl in decls {
                 out.extend(decl.into_token_stream());
             }
             out
         }
-    }
+    })
 }
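The `html` and `declare_elements` changes above follow the usual proc-macro2 boundary pattern: convert the compiler's token stream into `proc_macro2::TokenStream` on the way in and back again on the way out. A minimal sketch with illustrative names (not the crate's real entry point):

    use quote::quote;

    #[proc_macro]
    pub fn demo(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
        // Work in proc-macro2 land internally...
        let input2: proc_macro2::TokenStream = input.into();
        let output = quote!( #input2 );
        // ...and convert back to the compiler's TokenStream at the boundary.
        proc_macro::TokenStream::from(output)
    }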
@@ -0,0 +1,10 @@
+use proc_macro;
+use proc_macro2;
+
+pub fn from_unstable(span: proc_macro::Span) -> proc_macro2::Span {
+    let ident = proc_macro::Ident::new("_", span);
+    let tt = proc_macro::TokenTree::Ident(ident);
+    let tts = proc_macro::TokenStream::from(tt);
+    let tts2 = proc_macro2::TokenStream::from(tts);
+    tts2.into_iter().next().unwrap().span()
+}
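The new `span::from_unstable` helper above exists because, as far as I can tell, proc-macro2 0.4 exposes no direct constructor for wrapping a compiler span; round-tripping a single token through both token stream types recovers the wrapped span. Hypothetical usage, mirroring the grammar change earlier in the diff:

    // Lift a compiler span into proc-macro2 before building an Ident with it.
    let span2: proc_macro2::Span = span::from_unstable(proc_macro::Span::call_site());
    let ident = proc_macro2::Ident::new("div", span2);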