Remove proc_macro_diagnostic feature

This commit is contained in:
David Tolnay 2018-11-17 23:40:17 -08:00
parent 78f0e9b1e6
commit dcda57c8af
No known key found for this signature in database
GPG Key ID: F9BA143B95FF6D82
3 changed files with 91 additions and 67 deletions

View File

@@ -1,8 +1,8 @@
use ansi_term::Style;
use lalrpop_util::ParseError::*;
use lexer::Token;
use proc_macro::{Diagnostic, Level};
use proc_macro2::Ident;
use proc_macro2::{Ident, TokenStream};
use quote::{quote, quote_spanned};
pub type ParseError = lalrpop_util::ParseError<usize, Token, HtmlParseError>;
@@ -41,49 +41,76 @@ fn is_in_node_position(tokens: &[String]) -> bool {
input == output
}
pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
pub fn parse_error(input: &[Token], error: &ParseError) -> TokenStream {
match error {
InvalidToken { location } => {
let loc = &input[*location];
Diagnostic::spanned(loc.span().unstable(), Level::Error, "invalid token")
let span = input[*location].span();
quote_spanned! {span=>
compile_error! { "invalid token" }
}
}
UnrecognizedToken {
token: None,
expected,
} => {
let msg = format!("missing {}", pprint_tokens(&expected));
Diagnostic::spanned(
input[0].span().unstable().join(input[input.len() - 1].span().unstable()).unwrap(),
Level::Error,
"unexpected end of macro",
)
.help(msg)
let msg = format!(
"unexpected end of macro; missing {}",
pprint_tokens(&expected)
);
quote! {
compile_error! { #msg }
}
}
UnrecognizedToken {
token: Some((_, token, _)),
expected,
} => {
let msg = format!("expected {}", pprint_tokens(&expected));
let mut diag = Diagnostic::spanned(token.span().unstable(), Level::Error, msg);
if is_in_node_position(expected) && token.is_ident() {
let span = token.span();
let error_msg = format!("expected {}", pprint_tokens(&expected));
let error = quote_spanned! {span=>
compile_error! { #error_msg }
};
let help = if is_in_node_position(expected) && token.is_ident() {
// special case: you probably meant to quote that text
diag = diag.help(format!(
let help_msg = format!(
"text nodes need to be quoted, eg. {}",
Style::new().bold().paint("<p>\"Hello Joe!\"</p>")
))
}
diag
);
Some(quote_spanned! {span=>
compile_error! { #help_msg }
})
} else {
None
};
quote! {{
#error
#help
}}
}
ExtraToken {
token: (_, token, _),
} => Diagnostic::spanned(token.span().unstable(), Level::Error, "superfluous token"),
} => {
let span = token.span();
quote_spanned! {span=>
compile_error! { "superfluous token" }
}
}
User {
error: HtmlParseError::TagMismatch { open, close },
} => Diagnostic::spanned(
close.span().unstable(),
Level::Error,
format!("expected closing tag '</{}>', found '</{}>'", open, close),
)
.span_help(open.span().unstable(), "opening tag is here:"),
} => {
let close_span = close.span();
let close_msg = format!("expected closing tag '</{}>', found '</{}>'", open, close);
let close_error = quote_spanned! {close_span=>
compile_error! { #close_msg }
};
let open_span = open.span();
let open_error = quote_spanned! {open_span=>
compile_error! { "unclosed tag" }
};
quote! {{
#close_error
#open_error
}}
}
}
}

View File

@@ -1,6 +1,5 @@
use proc_macro::{Diagnostic, Level};
use proc_macro2::{Delimiter, Group, Ident, Literal, TokenStream, TokenTree};
use quote::quote;
use quote::{quote, quote_spanned};
use config::{required_children, ATTR_EVENTS};
use error::ParseError;
@@ -19,38 +18,38 @@ pub enum Node {
}
impl Node {
pub fn into_token_stream(self) -> TokenStream {
pub fn into_token_stream(self) -> Result<TokenStream, TokenStream> {
match self {
Node::Element(el) => el.into_token_stream(),
Node::Text(text) => {
let text = TokenTree::Literal(text);
quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string())))
Ok(quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string()))))
}
Node::Block(_) => panic!("cannot have a block in this position"),
}
}
fn into_child_stream(self) -> TokenStream {
fn into_child_stream(self) -> Result<TokenStream, TokenStream> {
match self {
Node::Element(el) => {
let el = el.into_token_stream();
quote!(
let el = el.into_token_stream()?;
Ok(quote!(
element.children.push(#el);
)
))
}
tx @ Node::Text(_) => {
let tx = tx.into_token_stream();
quote!(
let tx = tx.into_token_stream()?;
Ok(quote!(
element.children.push(#tx);
)
))
}
Node::Block(group) => {
let group: TokenTree = group.into();
quote!(
Ok(quote!(
for child in #group.into_iter() {
element.children.push(child);
}
)
))
}
}
}
@@ -116,24 +115,22 @@ fn is_string_literal(literal: &Literal) -> bool {
}
impl Element {
fn into_token_stream(mut self) -> TokenStream {
fn into_token_stream(mut self) -> Result<TokenStream, TokenStream> {
let name = self.name;
let name_str = name.to_string();
let typename: TokenTree = Ident::new(&name_str, name.span()).into();
let req_names = required_children(&name_str);
if req_names.len() > self.children.len() {
Diagnostic::spanned(
name.span().unstable(),
Level::Error,
format!(
"<{}> requires {} children but there are only {}",
name_str,
req_names.len(),
self.children.len()
),
)
.emit();
panic!();
let span = name.span();
let error = format!(
"<{}> requires {} children but there are only {}",
name_str,
req_names.len(),
self.children.len()
);
return Err(quote_spanned! {span=>
compile_error! { #error }
});
}
let events = extract_event_handlers(&mut self.attributes);
let data_attrs = extract_data_attrs(&mut self.attributes);
@@ -148,8 +145,13 @@ impl Element {
.children
.split_off(req_names.len())
.into_iter()
.map(Node::into_child_stream);
let req_children = self.children.into_iter().map(Node::into_token_stream);
.map(Node::into_child_stream)
.collect::<Result<Vec<TokenStream>, TokenStream>>()?;
let req_children = self
.children
.into_iter()
.map(Node::into_token_stream)
.collect::<Result<Vec<TokenStream>, TokenStream>>()?;
let mut body = TokenStream::new();
for (attr_str, key, value) in attrs {
@@ -205,13 +207,13 @@ impl Element {
args.extend(quote!( #arg, ));
}
quote!(
Ok(quote!(
{
let mut element = typed_html::elements::#typename::new(#args);
#body
Box::new(element)
}
)
))
}
}

View File

@@ -1,7 +1,6 @@
#![recursion_limit = "128"]
#![feature(proc_macro_hygiene)]
#![feature(proc_macro_span)]
#![feature(proc_macro_diagnostic)]
extern crate ansi_term;
extern crate lalrpop_util;
@@ -10,7 +9,6 @@ extern crate proc_macro2;
extern crate quote;
use proc_macro::TokenStream;
use quote::quote;
mod config;
mod declare;
@@ -32,11 +30,11 @@ pub fn html(input: TokenStream) -> TokenStream {
let stream = lexer::unroll_stream(input.into(), false);
let result = html::expand_html(&stream);
TokenStream::from(match result {
Err(err) => {
error::parse_error(&stream, &err).emit();
quote!(panic!())
}
Ok(node) => node.into_token_stream(),
Err(err) => error::parse_error(&stream, &err),
Ok(node) => match node.into_token_stream() {
Err(err) => err,
Ok(success) => success,
},
})
}
@@ -47,10 +45,7 @@ pub fn declare_elements(input: TokenStream) -> TokenStream {
let stream = lexer::keywordise(lexer::unroll_stream(input.into(), true));
let result = declare::expand_declare(&stream);
TokenStream::from(match result {
Err(err) => {
error::parse_error(&stream, &err).emit();
quote!(panic!())
}
Err(err) => error::parse_error(&stream, &err),
Ok(decls) => {
let mut out = proc_macro2::TokenStream::new();
for decl in decls {