skhtml/macros/src/html.rs

use proc_macro2::{Delimiter, Group, Ident, Literal, Span, TokenStream, TokenTree};
use quote::{quote, quote_spanned};

use crate::config::required_children;
use crate::error::ParseError;
use crate::ident;
use crate::lexer::{to_stream, Lexer, Token};
use crate::map::StringyMap;
use crate::parser::grammar;

use std::iter::FromIterator;

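/// One node in the markup given to the `html!` macro: a nested element,
/// a text literal, or a braced Rust block that supplies children
/// programmatically.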
#[derive(Clone)]
pub enum Node {
    Element(Element),
    Text(Literal),
    Block(Group),
}

impl Node {
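    /// Lower this node to a `TokenStream` that evaluates to a boxed DOM
    /// node. `ty` is the optional output type written after the markup
    /// (e.g. `html!(<div>...</div> : String)`); errors are returned as a
    /// `TokenStream` containing a `compile_error!` invocation.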
    pub fn into_token_stream(self, ty: &Option<Vec<Token>>) -> Result<TokenStream, TokenStream> {
        match self {
            Node::Element(el) => el.into_token_stream(ty),
            Node::Text(text) => {
                let text = TokenTree::Literal(text);
                Ok(quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string()))))
            }
            Node::Block(group) => {
                let span = group.span();
                let error =
                    "you cannot use a block as a top level element or a required child element";
                Err(quote_spanned! { span=>
                    compile_error! { #error }
                })
            }
        }
    }
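
    /// Lower this node to code that appends it to the surrounding
    /// `element.children`; a braced block is iterated with `into_iter()`,
    /// so one block can contribute any number of children.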
    fn into_child_stream(self, ty: &Option<Vec<Token>>) -> Result<TokenStream, TokenStream> {
        match self {
            Node::Element(el) => {
                let el = el.into_token_stream(ty)?;
                Ok(quote!(
                    element.children.push(#el);
                ))
            }
            tx @ Node::Text(_) => {
                let tx = tx.into_token_stream(ty)?;
                Ok(quote!(
                    element.children.push(#tx);
                ))
            }
            Node::Block(group) => {
                let group: TokenTree = group.into();
                Ok(quote!(
                    for child in #group.into_iter() {
                        element.children.push(child);
                    }
                ))
            }
        }
    }
}
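
/// An element parsed from the macro input: its tag name, its attributes
/// as raw token trees, and its child nodes in source order.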
#[derive(Clone)]
pub struct Element {
    pub name: Ident,
    pub attributes: StringyMap<Ident, TokenTree>,
    pub children: Vec<Node>,
}
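
/// Moves every `data_*` attribute out of `attrs`, keyed by the name with
/// the `data_` prefix stripped, so the expansion can push it onto
/// `element.data_attributes` instead of a typed attribute field. For
/// example (illustrative), `data_foo` ends up under the key `"foo"`.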
fn extract_data_attrs(attrs: &mut StringyMap<Ident, TokenTree>) -> StringyMap<String, TokenTree> {
    let mut data = StringyMap::new();
    let keys: Vec<Ident> = attrs.keys().cloned().collect();
    for key in keys {
        let key_name = key.to_string();
        let prefix = "data_";
        if key_name.starts_with(prefix) {
            let value = attrs.remove(&key).unwrap();
            data.insert(key_name[prefix.len()..].to_string(), value);
        }
    }
    data
}
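
/// Moves every `on*` attribute (e.g. `onclick`) out of `attrs`, keyed by
/// the event name with the `on` prefix stripped, so it can be installed
/// through `element.events` rather than emitted as a plain attribute.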
fn extract_event_handlers(
    attrs: &mut StringyMap<Ident, TokenTree>,
) -> StringyMap<Ident, TokenTree> {
    let mut events = StringyMap::new();
    let keys: Vec<Ident> = attrs.keys().cloned().collect();
    for key in keys {
        let key_name = key.to_string();
        let prefix = "on";
        if key_name.starts_with(prefix) {
            let event_name = &key_name[prefix.len()..];
            let value = attrs.remove(&key).unwrap();
            events.insert(ident::new_raw(event_name, key.span()), value);
        }
    }
    events
}
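
/// Normalises an attribute value for splicing back into generated code:
/// bracketed and parenthesised groups are re-emitted with their delimiters
/// so array and tuple expressions survive intact; any other single token
/// is passed through unchanged.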
fn process_value(value: &TokenTree) -> TokenStream {
    match value {
        TokenTree::Group(g) if g.delimiter() == Delimiter::Bracket => {
            let content = g.stream();
            quote!( [ #content ] )
        }
        TokenTree::Group(g) if g.delimiter() == Delimiter::Parenthesis => {
            let content = g.stream();
            quote!( ( #content ) )
        }
        v => TokenStream::from_iter(vec![v.clone()]),
    }
}
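
/// Detects string literals by their printed form; `Literal` offers no
/// direct way to query the literal kind, hence the check below.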
fn is_string_literal(literal: &Literal) -> bool {
    // This is the worst API
    literal.to_string().starts_with('"')
}
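
// Illustrative sketch (not verbatim output) of the kind of block that
// `Element::into_token_stream` below produces for an invocation along the
// lines of `html!(<div id="main">"hello"</div> : String)`; exact paths and
// raw identifiers may differ:
//
//     {
//         let mut element: typed_html::elements::div<String> =
//             typed_html::elements::div::new();
//         element.attrs.id = Some("main".parse().unwrap_or_else(|err| {
//             eprintln!("<div id=\"main\"> failed to parse attribute value: {}", err);
//             panic!("failed to parse string literal");
//         }));
//         element.children.push(Box::new(typed_html::dom::TextNode::new(
//             "hello".to_string(),
//         )));
//         Box::new(element)
//     }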
impl Element {
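    /// Builds the expansion for one element: a block expression that
    /// constructs `typed_html::elements::<name>` with the required children
    /// as constructor arguments, assigns attributes, data attributes and
    /// event handlers, pushes the optional children, and finally boxes the
    /// element.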
    fn into_token_stream(mut self, ty: &Option<Vec<Token>>) -> Result<TokenStream, TokenStream> {
        let name = self.name;
        let name_str = name.to_string();
        let typename: TokenTree = Ident::new(&name_str, name.span()).into();
        let req_names = required_children(&name_str);
        if req_names.len() > self.children.len() {
            let span = name.span();
            let error = format!(
                "<{}> requires {} children but there are only {}",
                name_str,
                req_names.len(),
                self.children.len()
            );
            return Err(quote_spanned! {span=>
                compile_error! { #error }
            });
        }
        let events = extract_event_handlers(&mut self.attributes);
        let data_attrs = extract_data_attrs(&mut self.attributes);
        let attrs = self.attributes.iter().map(|(key, value)| {
            (
                key.to_string(),
                TokenTree::Ident(ident::new_raw(&key.to_string(), key.span())),
                value,
            )
        });
        let opt_children = self
            .children
            .split_off(req_names.len())
            .into_iter()
            .map(|node| node.into_child_stream(ty))
            .collect::<Result<Vec<TokenStream>, TokenStream>>()?;
        let req_children = self
            .children
            .into_iter()
            .map(|node| node.into_token_stream(ty))
            .collect::<Result<Vec<TokenStream>, TokenStream>>()?;

        let mut body = TokenStream::new();
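
        // String-literal attribute values are parsed at runtime via `parse()`
        // (with an error message assembled at compile time), while any other
        // expression is converted with `Into`; the two match arms below
        // generate the corresponding `element.attrs` assignments.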
        for (attr_str, key, value) in attrs {
            match value {
                TokenTree::Literal(lit) if is_string_literal(lit) => {
                    let mut eprintln_msg = "ERROR: ".to_owned();
                    #[cfg(can_show_location_of_runtime_parse_error)]
                    {
                        let span = lit.span();
                        eprintln_msg += &format!(
                            "{}:{}:{}: ",
                            span.unstable()
                                .source_file()
                                .path()
                                .to_str()
                                .unwrap_or("unknown"),
                            span.unstable().start().line,
                            span.unstable().start().column
                        );
                    }
                    eprintln_msg += &format!(
                        "<{} {}={}> failed to parse attribute value: {{}}",
                        name_str, attr_str, lit,
                    );
                    #[cfg(not(can_show_location_of_runtime_parse_error))]
                    {
                        eprintln_msg += "\nERROR: rebuild with nightly to print source location";
                    }
                    body.extend(quote!(
                        element.attrs.#key = Some(#lit.parse().unwrap_or_else(|err| {
                            eprintln!(#eprintln_msg, err);
                            panic!("failed to parse string literal");
                        }));
                    ));
                }
                value => {
                    let value = process_value(value);
                    body.extend(quote!(
                        element.attrs.#key = Some(std::convert::Into::into(#value));
                    ));
                }
            }
        }
        for (key, value) in data_attrs
            .iter()
            .map(|(k, v)| (TokenTree::from(Literal::string(&k)), v.clone()))
        {
            body.extend(quote!(
                element.data_attributes.push((#key, #value.into()));
            ));
        }
        body.extend(opt_children);

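        // Event handlers require the output type annotation
        // (`html!(<div>...</div> : String)`); without it, emit a
        // compile_error! plus a hint at the call site.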
        for (key, value) in events.iter() {
            if ty.is_none() {
                let mut err = quote_spanned! { key.span() =>
                    compile_error! { "when using event handlers, you must declare the output type inside the html! macro" }
                };
                let hint = quote_spanned! { Span::call_site() =>
                    compile_error! { "for example: change html!(<div>...</div>) to html!(<div>...</div> : String)" }
                };
                err.extend(hint);
                return Err(err);
            }
            let key = TokenTree::Ident(key.clone());
            let value = process_value(value);
            body.extend(quote!(
                element.events.#key = Some(typed_html::events::IntoEventHandler::into_event_handler(#value));
            ));
        }

        let mut args = TokenStream::new();
        for arg in req_children {
            args.extend(quote!( #arg, ));
        }

        let mut type_annotation = TokenStream::new();
        if let Some(ty) = ty {
            let type_var = to_stream(ty.clone());
            type_annotation.extend(quote!(: typed_html::elements::#typename<#type_var>));
        }

        Ok(quote!(
            {
                let mut element #type_annotation = typed_html::elements::#typename::new(#args);
                #body
                Box::new(element)
            }
        ))
    }
}

// FIXME report a decent error when the macro contains multiple top level elements
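/// Entry point for expanding the `html!` macro input: parses the lexed
/// tokens into a single root `Node` plus the optional output type
/// annotation that may follow the markup.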
pub fn expand_html(input: &[Token]) -> Result<(Node, Option<Vec<Token>>), ParseError> {
    grammar::NodeWithTypeParser::new().parse(Lexer::new(input))
}