skhtml/macros/src/html.rs

use proc_macro::{
    quote, Delimiter, Diagnostic, Group, Ident, Level, Literal, TokenStream, TokenTree,
};

use config::required_children;
use lexer::{Lexer, ParseError, Token};
use map::StringyMap;
use parser::grammar;
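
/// A node of the parsed template: an element, a text literal, or a Rust block
/// that yields child nodes when iterated.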
#[derive(Clone)]
pub enum Node {
    Element(Element),
    Text(Literal),
    Block(Group),
}

impl Node {
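    /// Build the expression that constructs this node as a boxed element or
    /// text node; blocks cannot appear in this position and cause a panic.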
    pub fn into_token_stream(self) -> TokenStream {
        match self {
            Node::Element(el) => el.into_token_stream(),
            Node::Text(text) => {
                let text = TokenTree::Literal(text);
                quote!(Box::new(typed_html::elements::TextNode::new($text.to_string())))
            }
            Node::Block(_) => panic!("cannot have a block in this position"),
        }
    }
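
    /// Build the statements that push this node onto the parent's `children`
    /// vector; a block expands to a `for` loop over the values it yields.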
    fn into_child_stream(self) -> TokenStream {
        match self {
            Node::Element(el) => {
                let el = el.into_token_stream();
                quote!(
                    element.children.push($el);
                )
            }
            tx @ Node::Text(_) => {
                let tx = tx.into_token_stream();
                quote!(
                    element.children.push($tx);
                )
            }
            Node::Block(group) => {
                let group: TokenTree = group.into();
                quote!(
                    for child in $group.into_iter() {
                        element.children.push(child);
                    }
                )
            }
        }
    }
}
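
/// An element node: its tag name, attribute map, and child nodes.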
#[derive(Clone)]
pub struct Element {
    pub name: Ident,
    pub attributes: StringyMap<Ident, TokenTree>,
    pub children: Vec<Node>,
}
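
/// Remove every attribute whose name starts with `data_` from the map and
/// return those attributes keyed by the name with the prefix stripped.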
fn extract_data_attrs(attrs: &mut StringyMap<Ident, TokenTree>) -> StringyMap<String, TokenTree> {
    let mut data = StringyMap::new();
    let keys: Vec<Ident> = attrs.keys().cloned().collect();
    for key in keys {
        let key_name = key.to_string();
        let prefix = "data_";
        if key_name.starts_with(prefix) {
            let value = attrs.remove(&key).unwrap();
            data.insert(key_name[prefix.len()..].to_string(), value);
        }
    }
    data
}
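
/// Re-wrap bracketed and parenthesised attribute values in their delimiters so
/// their contents can be spliced back into the output; any other token passes
/// through unchanged.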
fn process_value(value: &TokenTree) -> TokenStream {
    match value {
        TokenTree::Group(g) if g.delimiter() == Delimiter::Bracket => {
            let content = g.stream();
            quote!( [ $content ] )
        }
        TokenTree::Group(g) if g.delimiter() == Delimiter::Parenthesis => {
            let content = g.stream();
            quote!( ( $content ) )
        }
        v => v.clone().into(),
    }
}
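
/// `Literal` exposes no way to ask what kind of literal it is, so detect a
/// string literal by checking whether its textual form starts with a quote.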
fn is_string_literal(literal: &Literal) -> bool {
    // This is the worst API
    literal.to_string().starts_with('"')
}

impl Element {
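    /// Build the block expression that constructs this element: create it with
    /// its required children as constructor arguments, set typed and `data_*`
    /// attributes, push any remaining children, and box the result.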
    fn into_token_stream(mut self) -> TokenStream {
        let name = self.name;
        let name_str = name.to_string();
        let typename: TokenTree = Ident::new(&format!("Element_{}", &name_str), name.span()).into();
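
        // Emit a compile-time diagnostic and abort if the tag has fewer
        // children than it requires.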
        let req_names = required_children(&name_str);
        if req_names.len() > self.children.len() {
            Diagnostic::spanned(
                name.span(),
                Level::Error,
                format!(
                    "<{}> requires {} children but there are only {}",
                    name_str,
                    req_names.len(),
                    self.children.len()
                ),
            )
            .emit();
            panic!();
        }
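
        // Split out `data_*` attributes, then pair each remaining attribute
        // with its name string and a raw identifier used for the
        // `element.attrs.$key` field access below.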
        let data_attrs = extract_data_attrs(&mut self.attributes);
        let attrs = self.attributes.iter().map(|(key, value)| {
            (
                key.to_string(),
                TokenTree::Ident(Ident::new_raw(&key.to_string(), key.span())),
                value,
            )
        });
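
        // The first `req_names.len()` children become constructor arguments;
        // the rest are pushed onto `children` after construction.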
        let opt_children = self
            .children
            .split_off(req_names.len())
            .into_iter()
            .map(Node::into_child_stream);
        let req_children = self.children.into_iter().map(Node::into_token_stream);

        let mut body = TokenStream::new();
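
        // String literal values are parsed via `FromStr` in the generated
        // code, panicking with a message that points back at this source
        // position; other values are converted with `TryInto`.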
        for (attr_str, key, value) in attrs {
            match value {
                TokenTree::Literal(l) if is_string_literal(l) => {
                    let value = value.clone();
                    let tag_name: TokenTree = Literal::string(&name_str).into();
                    let attr_str: TokenTree = Literal::string(&attr_str).into();
                    let span = value.span();
                    let pos = format!(
                        "{}:{}:{}",
                        span.source_file().path().to_str().unwrap_or("unknown"),
                        span.start().line,
                        span.start().column
                    );
                    let pos_str: TokenTree = Literal::string(&pos).into();
                    body.extend(quote!(
                        element.attrs.$key = Some($value.parse().unwrap_or_else(|err| {
                            eprintln!("ERROR: {}: <{} {}={:?}> failed to parse attribute value: {}",
                                      $pos_str, $tag_name, $attr_str, $value, err);
                            panic!("failed to parse string literal");
                        }));
                    ));
                }
                value => {
                    let value = process_value(value);
                    body.extend(quote!(
                        element.attrs.$key = Some(std::convert::TryInto::try_into($value).unwrap());
                    ));
                }
            }
        }
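
        // `data_*` attributes go into the element's `data_attributes` map
        // rather than into typed attribute fields.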
        for (key, value) in data_attrs
            .iter()
            .map(|(k, v)| (TokenTree::from(Literal::string(&k)), v.clone()))
        {
            body.extend(quote!(
                element.data_attributes.insert($key.into(), $value.into());
            ));
        }
        body.extend(opt_children);

        let mut args = TokenStream::new();
        for arg in req_children {
            args.extend(quote!( $arg, ));
        }
        quote!(
            {
                let mut element = typed_html::elements::$typename::new($args);
                $body
                Box::new(element)
            }
        )
    }
}
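
/// Parse a lexed token sequence into a `Node` tree using the grammar module's
/// `NodeParser`.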
pub fn expand_html(input: &[Token]) -> Result<Node, ParseError> {
    grammar::NodeParser::new().parse(Lexer::new(input))
}