diff --git a/macros/src/error.rs b/macros/src/error.rs
index 93b1cf0..4cf5321 100644
--- a/macros/src/error.rs
+++ b/macros/src/error.rs
@@ -1,8 +1,8 @@
 use ansi_term::Style;
 use lalrpop_util::ParseError::*;
 use lexer::Token;
-use proc_macro::{Diagnostic, Level};
-use proc_macro2::Ident;
+use proc_macro2::{Ident, TokenStream};
+use quote::{quote, quote_spanned};
 
 pub type ParseError = lalrpop_util::ParseError<usize, Token, HtmlParseError>;
 
@@ -41,49 +41,76 @@ fn is_in_node_position(tokens: &[String]) -> bool {
     input == output
 }
 
-pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
+pub fn parse_error(input: &[Token], error: &ParseError) -> TokenStream {
     match error {
         InvalidToken { location } => {
-            let loc = &input[*location];
-            Diagnostic::spanned(loc.span().unstable(), Level::Error, "invalid token")
+            let span = input[*location].span();
+            quote_spanned! {span=>
+                compile_error! { "invalid token" }
+            }
         }
         UnrecognizedToken {
            token: None,
            expected,
        } => {
-            let msg = format!("missing {}", pprint_tokens(&expected));
-            Diagnostic::spanned(
-                input[0].span().unstable().join(input[input.len() - 1].span().unstable()).unwrap(),
-                Level::Error,
-                "unexpected end of macro",
-            )
-            .help(msg)
+            let msg = format!(
+                "unexpected end of macro; missing {}",
+                pprint_tokens(&expected)
+            );
+            quote! {
+                compile_error! { #msg }
+            }
        }
        UnrecognizedToken {
            token: Some((_, token, _)),
            expected,
        } => {
-            let msg = format!("expected {}", pprint_tokens(&expected));
-            let mut diag = Diagnostic::spanned(token.span().unstable(), Level::Error, msg);
-            if is_in_node_position(expected) && token.is_ident() {
+            let span = token.span();
+            let error_msg = format!("expected {}", pprint_tokens(&expected));
+            let error = quote_spanned! {span=>
+                compile_error! { #error_msg }
+            };
+            let help = if is_in_node_position(expected) && token.is_ident() {
                 // special case: you probably meant to quote that text
-                diag = diag.help(format!(
+                let help_msg = format!(
                     "text nodes need to be quoted, eg. {}",
                     Style::new().bold().paint("<p>\"Hello Joe!\"</p>")
-                ))
-            }
-            diag
+                );
+                Some(quote_spanned! {span=>
+                    compile_error! { #help_msg }
+                })
+            } else {
+                None
+            };
+            quote! {{
+                #error
+                #help
+            }}
        }
        ExtraToken {
            token: (_, token, _),
-        } => Diagnostic::spanned(token.span().unstable(), Level::Error, "superfluous token"),
+        } => {
+            let span = token.span();
+            quote_spanned! {span=>
+                compile_error! { "superfluous token" }
+            }
+        }
        User {
            error: HtmlParseError::TagMismatch { open, close },
-        } => Diagnostic::spanned(
-            close.span().unstable(),
-            Level::Error,
-            format!("expected closing tag '</{}>', found '</{}>'", open, close),
-        )
-        .span_help(open.span().unstable(), "opening tag is here:"),
+        } => {
+            let close_span = close.span();
+            let close_msg = format!("expected closing tag '</{}>', found '</{}>'", open, close);
+            let close_error = quote_spanned! {close_span=>
+                compile_error! { #close_msg }
+            };
+            let open_span = open.span();
+            let open_error = quote_spanned! {open_span=>
+                compile_error! { "unclosed tag" }
+            };
+            quote! {{
+                #close_error
+                #open_error
+            }}
+        }
     }
 }
diff --git a/macros/src/html.rs b/macros/src/html.rs
index d431826..68a279e 100644
--- a/macros/src/html.rs
+++ b/macros/src/html.rs
@@ -1,6 +1,5 @@
-use proc_macro::{Diagnostic, Level};
 use proc_macro2::{Delimiter, Group, Ident, Literal, TokenStream, TokenTree};
-use quote::quote;
+use quote::{quote, quote_spanned};
 
 use config::{required_children, ATTR_EVENTS};
 use error::ParseError;
@@ -19,38 +18,38 @@ pub enum Node {
 }
 
 impl Node {
-    pub fn into_token_stream(self) -> TokenStream {
+    pub fn into_token_stream(self) -> Result<TokenStream, TokenStream> {
         match self {
             Node::Element(el) => el.into_token_stream(),
             Node::Text(text) => {
                 let text = TokenTree::Literal(text);
-                quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string())))
+                Ok(quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string()))))
             }
             Node::Block(_) => panic!("cannot have a block in this position"),
         }
     }
 
-    fn into_child_stream(self) -> TokenStream {
+    fn into_child_stream(self) -> Result<TokenStream, TokenStream> {
         match self {
             Node::Element(el) => {
-                let el = el.into_token_stream();
-                quote!(
+                let el = el.into_token_stream()?;
+                Ok(quote!(
                     element.children.push(#el);
-                )
+                ))
             }
             tx @ Node::Text(_) => {
-                let tx = tx.into_token_stream();
-                quote!(
+                let tx = tx.into_token_stream()?;
+                Ok(quote!(
                     element.children.push(#tx);
-                )
+                ))
             }
             Node::Block(group) => {
                 let group: TokenTree = group.into();
-                quote!(
+                Ok(quote!(
                     for child in #group.into_iter() {
                         element.children.push(child);
                     }
-                )
+                ))
             }
         }
     }
@@ -116,24 +115,22 @@ fn is_string_literal(literal: &Literal) -> bool {
 }
 
 impl Element {
-    fn into_token_stream(mut self) -> TokenStream {
+    fn into_token_stream(mut self) -> Result<TokenStream, TokenStream> {
         let name = self.name;
         let name_str = name.to_string();
         let typename: TokenTree = Ident::new(&name_str, name.span()).into();
         let req_names = required_children(&name_str);
         if req_names.len() > self.children.len() {
-            Diagnostic::spanned(
-                name.span().unstable(),
-                Level::Error,
-                format!(
-                    "<{}> requires {} children but there are only {}",
-                    name_str,
-                    req_names.len(),
-                    self.children.len()
-                ),
-            )
-            .emit();
-            panic!();
+            let span = name.span();
+            let error = format!(
+                "<{}> requires {} children but there are only {}",
+                name_str,
+                req_names.len(),
+                self.children.len()
+            );
+            return Err(quote_spanned! {span=>
+                compile_error! { #error }
+            });
         }
         let events = extract_event_handlers(&mut self.attributes);
         let data_attrs = extract_data_attrs(&mut self.attributes);
@@ -148,8 +145,13 @@ impl Element {
             .children
             .split_off(req_names.len())
             .into_iter()
-            .map(Node::into_child_stream);
-        let req_children = self.children.into_iter().map(Node::into_token_stream);
+            .map(Node::into_child_stream)
+            .collect::<Result<Vec<TokenStream>, TokenStream>>()?;
+        let req_children = self
+            .children
+            .into_iter()
+            .map(Node::into_token_stream)
+            .collect::<Result<Vec<TokenStream>, TokenStream>>()?;
 
         let mut body = TokenStream::new();
         for (attr_str, key, value) in attrs {
@@ -205,13 +207,13 @@ impl Element {
             args.extend(quote!( #arg, ));
         }
 
-        quote!(
+        Ok(quote!(
             {
                 let mut element = typed_html::elements::#typename::new(#args);
                 #body
                 Box::new(element)
             }
-        )
+        ))
     }
 }
 
diff --git a/macros/src/lib.rs b/macros/src/lib.rs
index 4758b95..b9ebf35 100644
--- a/macros/src/lib.rs
+++ b/macros/src/lib.rs
@@ -1,7 +1,6 @@
 #![recursion_limit = "128"]
 #![feature(proc_macro_hygiene)]
 #![feature(proc_macro_span)]
-#![feature(proc_macro_diagnostic)]
 
 extern crate ansi_term;
 extern crate lalrpop_util;
@@ -10,7 +9,6 @@ extern crate proc_macro2;
 extern crate quote;
 
 use proc_macro::TokenStream;
-use quote::quote;
 
 mod config;
 mod declare;
@@ -32,11 +30,11 @@ pub fn html(input: TokenStream) -> TokenStream {
     let stream = lexer::unroll_stream(input.into(), false);
     let result = html::expand_html(&stream);
     TokenStream::from(match result {
-        Err(err) => {
-            error::parse_error(&stream, &err).emit();
-            quote!(panic!())
-        }
-        Ok(node) => node.into_token_stream(),
+        Err(err) => error::parse_error(&stream, &err),
+        Ok(node) => match node.into_token_stream() {
+            Err(err) => err,
+            Ok(success) => success,
+        },
     })
 }
 
@@ -47,10 +45,7 @@ pub fn declare_elements(input: TokenStream) -> TokenStream {
     let stream = lexer::keywordise(lexer::unroll_stream(input.into(), true));
     let result = declare::expand_declare(&stream);
     TokenStream::from(match result {
-        Err(err) => {
-            error::parse_error(&stream, &err).emit();
-            quote!(panic!())
-        }
+        Err(err) => error::parse_error(&stream, &err),
         Ok(decls) => {
             let mut out = proc_macro2::TokenStream::new();
             for decl in decls {
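Note on the pattern (not part of the patch): every error site above now builds a
compile_error! invocation with quote_spanned!, so rustc reports the message at the span of
the offending token rather than at the macro call site, and the unstable
proc_macro_diagnostic API is no longer needed. A minimal sketch of that technique, assuming
only the proc-macro2 and quote crates; the helper name spanned_error is illustrative and
does not appear in the patch:

    use proc_macro2::{Span, TokenStream};
    use quote::quote_spanned;

    // Expand to a `compile_error! { "..." }` invocation carrying `span`, so the
    // compiler points at the offending input when the generated tokens compile.
    fn spanned_error(span: Span, msg: &str) -> TokenStream {
        quote_spanned! {span=>
            compile_error! { #msg }
        }
    }

Returning such a TokenStream as the macro output, as parse_error and into_token_stream now
do, turns the error into an ordinary compile failure; wrapping several of them in an extra
pair of braces, as in quote! {{ #error #help }}, emits them together as one block expression.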