use proc_macro::{Diagnostic, Level};
use proc_macro2::{Delimiter, Group, Ident, Literal, TokenStream, TokenTree};
use quote::quote;

use config::{required_children, ATTR_EVENTS};
use error::ParseError;
use ident;
use lexer::{Lexer, Token};
use map::StringyMap;
use parser::grammar;

use std::iter::FromIterator;

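/// A node in the macro's DOM tree: a child element, a text literal, or a
/// braced Rust block expected to yield an iterator of child nodes.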
#[derive(Clone)]
pub enum Node {
    Element(Element),
    Text(Literal),
    Block(Group),
}

impl Node {
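    /// Expands this node into an expression that constructs a boxed DOM
    /// node. Blocks are only valid in child position, so reaching one here
    /// is a caller bug and panics.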
    pub fn into_token_stream(self) -> TokenStream {
        match self {
            Node::Element(el) => el.into_token_stream(),
            Node::Text(text) => {
                let text = TokenTree::Literal(text);
                quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string())))
            }
            Node::Block(_) => panic!("cannot have a block in this position"),
        }
    }

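    /// Expands this node into statements that push it onto the
    /// `element.children` vector of the element under construction; a block
    /// expands into a `for` loop pushing every node the block yields.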
    fn into_child_stream(self) -> TokenStream {
        match self {
            Node::Element(el) => {
                let el = el.into_token_stream();
                quote!(
                    element.children.push(#el);
                )
            }
            tx @ Node::Text(_) => {
                let tx = tx.into_token_stream();
                quote!(
                    element.children.push(#tx);
                )
            }
            Node::Block(group) => {
                let group: TokenTree = group.into();
                quote!(
                    for child in #group.into_iter() {
                        element.children.push(child);
                    }
                )
            }
        }
    }
}

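/// A parsed element: its tag name, its attributes as written in the macro,
/// and its child nodes in source order.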
#[derive(Clone)]
pub struct Element {
    pub name: Ident,
    pub attributes: StringyMap<Ident, TokenTree>,
    pub children: Vec<Node>,
}

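/// Splits `data_*` attributes out of the attribute map, rewriting the
/// leading `data_` into the HTML `data-` form: an attribute written as
/// `data_foo` becomes the key `"data-foo"`. Only the prefix underscore is
/// rewritten; underscores later in the name are preserved.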
fn extract_data_attrs(attrs: &mut StringyMap<Ident, TokenTree>) -> StringyMap<String, TokenTree> {
    let mut data = StringyMap::new();
    let keys: Vec<Ident> = attrs.keys().cloned().collect();
    for key in keys {
        let key_name = key.to_string();
        let prefix = "data_";
        if key_name.starts_with(prefix) {
            let value = attrs.remove(&key).unwrap();
            data.insert(format!("data-{}", &key_name[prefix.len()..]), value);
        }
    }
    data
}

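/// Splits `on*` attributes (e.g. `onclick`) out of the attribute map,
/// keeping only names listed in the (sorted) `ATTR_EVENTS` table; the
/// result is keyed by the event name as a raw identifier.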
fn extract_event_handlers(
    attrs: &mut StringyMap<Ident, TokenTree>,
) -> StringyMap<Ident, TokenTree> {
    let mut events = StringyMap::new();
    let keys: Vec<Ident> = attrs.keys().cloned().collect();
    for key in keys {
        let key_name = key.to_string();
        let prefix = "on";
        if key_name.starts_with(prefix) {
            let event_name = &key_name[prefix.len()..];
            if ATTR_EVENTS.binary_search(&event_name).is_ok() {
                let value = attrs.remove(&key).unwrap();
                events.insert(ident::new_raw(event_name, key.span()), value);
            }
        }
    }
    events
}

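/// Re-wraps an attribute value as a usable expression: bracket and
/// parenthesis groups get their delimiters re-emitted (so `[a, b]` and
/// `(a, b)` survive as array and tuple expressions), while any other single
/// token passes through unchanged.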
fn process_value(value: &TokenTree) -> TokenStream {
    match value {
        TokenTree::Group(g) if g.delimiter() == Delimiter::Bracket => {
            let content = g.stream();
            quote!( [ #content ] )
        }
        TokenTree::Group(g) if g.delimiter() == Delimiter::Parenthesis => {
            let content = g.stream();
            quote!( ( #content ) )
        }
        v => TokenStream::from_iter(vec![v.clone()]),
    }
}

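/// Tests whether a literal is a string literal. `proc_macro2::Literal`
/// exposes no kind information, so the only available test is printing the
/// literal and sniffing for a leading quote. (Raw string literals like
/// `r"..."` are not detected and fall through to the `Into` conversion
/// path below.)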
fn is_string_literal(literal: &Literal) -> bool {
    // This is the worst API
    literal.to_string().starts_with('"')
}

impl Element {
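    /// Expands this element into a block expression that builds it. For
    /// example (illustrative sketch, not a doctest; assumes `<p>` has no
    /// required children), `html!(<p>"Hello"</p>)` expands to roughly:
    ///
    /// ```ignore
    /// {
    ///     let mut element = typed_html::elements::p::new();
    ///     element.children.push(Box::new(
    ///         typed_html::dom::TextNode::new("Hello".to_string()),
    ///     ));
    ///     Box::new(element)
    /// }
    /// ```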
    fn into_token_stream(mut self) -> TokenStream {
        let name = self.name;
        let name_str = name.to_string();
        let typename: TokenTree = Ident::new(&name_str, name.span()).into();
        let req_names = required_children(&name_str);
        if req_names.len() > self.children.len() {
            Diagnostic::spanned(
                name.span().unstable(),
                Level::Error,
                format!(
                    "<{}> requires {} children but there are only {}",
                    name_str,
                    req_names.len(),
                    self.children.len()
                ),
            )
            .emit();
            panic!();
        }
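        // Event handlers and data-* attributes are dynamically keyed, so
        // pull them out before emitting the remaining statically typed
        // attributes (which become raw-identifier field accesses).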
        let events = extract_event_handlers(&mut self.attributes);
        let data_attrs = extract_data_attrs(&mut self.attributes);
        let attrs = self.attributes.iter().map(|(key, value)| {
            (
                key.to_string(),
                TokenTree::Ident(ident::new_raw(&key.to_string(), key.span())),
                value,
            )
        });
        let opt_children = self
            .children
            .split_off(req_names.len())
            .into_iter()
            .map(Node::into_child_stream);
        let req_children = self.children.into_iter().map(Node::into_token_stream);

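        // Emit one setter per typed attribute. String literal values are
        // parsed at runtime via FromStr, with the macro call site's source
        // location baked into the parse error message.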
        let mut body = TokenStream::new();
        for (attr_str, key, value) in attrs {
            match value {
                TokenTree::Literal(l) if is_string_literal(l) => {
                    let value = value.clone();
                    let tag_name: TokenTree = Literal::string(&name_str).into();
                    let attr_str: TokenTree = Literal::string(&attr_str).into();
                    let span = value.span();
                    let pos = format!(
                        "{}:{}:{}",
                        span.unstable()
                            .source_file()
                            .path()
                            .to_str()
                            .unwrap_or("unknown"),
                        span.unstable().start().line,
                        span.unstable().start().column
                    );
                    let pos_str: TokenTree = Literal::string(&pos).into();
                    body.extend(quote!(
                        element.attrs.#key = Some(#value.parse().unwrap_or_else(|err| {
                            eprintln!("ERROR: {}: <{} {}={:?}> failed to parse attribute value: {}",
                                      #pos_str, #tag_name, #attr_str, #value, err);
                            panic!("failed to parse string literal");
                        }));
                    ));
                }
                value => {
                    let value = process_value(value);
                    body.extend(quote!(
                        element.attrs.#key = Some(std::convert::Into::into(#value));
                    ));
                }
            }
        }
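        // data-* attributes are stringly typed; each becomes a
        // (name, value) pair pushed onto the element's list.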
        for (key, value) in data_attrs
            .iter()
            .map(|(k, v)| (TokenTree::from(Literal::string(&k)), v.clone()))
        {
            body.extend(quote!(
                element.data_attributes.push((#key, #value.into()));
            ));
        }
        body.extend(opt_children);

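        // Event handler values go through IntoEventHandler, so both
        // closures and prebuilt handler values are accepted.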
        for (key, value) in events.iter() {
            let key = TokenTree::Ident(key.clone());
            let value = process_value(value);
            body.extend(quote!(
                element.events.#key = Some(typed_html::events::IntoEventHandler::into_event_handler(#value));
            ));
        }

        let mut args = TokenStream::new();
        for arg in req_children {
            args.extend(quote!( #arg, ));
        }

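        // Assemble the final block: construct the element with its required
        // children as constructor arguments, run the attribute/child/event
        // statements against it, and return it boxed.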
        quote!(
            {
                let mut element = typed_html::elements::#typename::new(#args);
                #body
                Box::new(element)
            }
        )
    }
}

// FIXME report a decent error when the macro contains multiple top level elements
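/// Parses a lexed macro invocation into a `Node` tree.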
pub fn expand_html(input: &[Token]) -> Result<Node, ParseError> {
    grammar::NodeParser::new().parse(Lexer::new(input))
}