commit a1a4f8fadd

.travis.yml | 14
@@ -1,8 +1,14 @@
 language: rust
-rust:
-  - nightly
+
 cache:
   directories:
     - /home/travis/.cargo
-script:
-  - cargo test
+
+matrix:
+  include:
+    - rust: nightly
+      script: cargo test
+    - rust: beta
+      script: cargo check --manifest-path examples/wasm/Cargo.toml
+    - rust: stable
+      script: cargo check --manifest-path examples/wasm/Cargo.toml
@@ -5,13 +5,6 @@
 This crate provides the `html!` macro for building HTML documents inside your
 Rust code using roughly [JSX] compatible syntax.
 
-## Nightly Warning!
-
-This crate currently needs nightly rustc, and in order to use it you'll need to
-add `#![feature(proc_macro_hygiene)]` to the top of your crate. The compiler
-will tell you to do this if you forget. When this feature stabilises, the crate
-should work on stable rustc without issues.
-
 ## Quick Preview
 
 ```rust
@@ -1,3 +1,4 @@
+#![recursion_limit = "256"]
 #![feature(proc_macro_hygiene, decl_macro)]
 
 extern crate rocket;
@@ -1,4 +1,4 @@
-#![feature(proc_macro_hygiene)]
+#![recursion_limit = "256"]
 
 extern crate stdweb;
 extern crate typed_html;
@@ -17,6 +17,10 @@ proc-macro = true
 [dependencies]
 lalrpop-util = "0.16.1"
 ansi_term = "0.11.0"
+proc-macro2 = { version = "0.4.24", features = ["nightly"] }
+proc-macro-hack = "0.5.2"
+quote = "0.6.10"
 
 [build-dependencies]
 lalrpop = "0.16.1"
+version_check = "0.1.5"
@@ -1,5 +1,11 @@
 extern crate lalrpop;
+extern crate version_check;
 
 fn main() {
     lalrpop::process_root().unwrap();
+
+    if version_check::is_nightly().unwrap_or(false) {
+        println!("cargo:rustc-cfg=can_join_spans");
+        println!("cargo:rustc-cfg=can_show_location_of_runtime_parse_error");
+    }
 }
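Note: the two `cargo:rustc-cfg` flags emitted by this build script are how the macro crate opts into nightly-only span APIs without breaking stable builds. A minimal sketch of how such a flag gates code (the `maybe_join` helper is illustrative only, not part of this commit):

```rust
// Illustrative only: code gated on the cfg flag that build.rs emits.
// On nightly (can_join_spans set) the unstable Span::join API is usable;
// on stable the fallback simply keeps the first span.
#[cfg(can_join_spans)]
fn maybe_join(a: proc_macro::Span, b: proc_macro::Span) -> proc_macro::Span {
    a.join(b).unwrap_or(a)
}

#[cfg(not(can_join_spans))]
fn maybe_join(a: proc_macro::Span, _b: proc_macro::Span) -> proc_macro::Span {
    a
}
```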
@@ -1,4 +1,4 @@
-use proc_macro::{Ident, Span, TokenStream};
+use proc_macro2::{Ident, Span, TokenStream};
 
 use map::StringyMap;
 
@@ -1,7 +1,9 @@
-use proc_macro::{quote, Ident, Literal, Span, TokenStream, TokenTree};
+use proc_macro2::{Ident, Literal, Span, TokenStream, TokenTree};
+use quote::quote;
 
 use config::{global_attrs, ATTR_EVENTS};
 use error::ParseError;
+use ident;
 use lexer::{Lexer, Token};
 use map::StringyMap;
 use parser;
@@ -41,7 +43,7 @@ impl Declare {
 
     fn attrs(&self) -> impl Iterator<Item = (TokenTree, TokenStream, TokenTree)> + '_ {
         self.attrs.iter().map(|(key, value)| {
-            let attr_name: TokenTree = Ident::new_raw(&key.to_string(), key.span()).into();
+            let attr_name: TokenTree = ident::new_raw(&key.to_string(), key.span()).into();
             let attr_type = value.clone();
             let attr_str = Literal::string(&key.to_string()).into();
             (attr_name, attr_type, attr_str)
@@ -73,13 +75,13 @@ impl Declare {
     fn attr_struct(&self) -> TokenStream {
         let mut body = TokenStream::new();
         for (attr_name, attr_type, _) in self.attrs() {
-            body.extend(quote!( pub $attr_name: Option<$attr_type>, ));
+            body.extend(quote!( pub #attr_name: Option<#attr_type>, ));
         }
 
         let attr_type_name = self.attr_type_name();
         quote!(
-            pub struct $attr_type_name {
-                $body
+            pub struct #attr_type_name {
+                #body
             }
         )
     }
@@ -91,21 +93,21 @@ impl Declare {
         let mut body = TokenStream::new();
 
         for (child_name, child_type, _) in self.req_children() {
-            body.extend(quote!( pub $child_name: Box<$child_type<T>>, ));
+            body.extend(quote!( pub #child_name: Box<#child_type<T>>, ));
         }
 
         if let Some(child_constraint) = &self.opt_children {
             let child_constraint = child_constraint.clone();
-            body.extend(quote!(pub children: Vec<Box<$child_constraint<T>>>,));
+            body.extend(quote!(pub children: Vec<Box<#child_constraint<T>>>,));
         }
 
         quote!(
-            pub struct $elem_name<T> where T: ::OutputType {
+            pub struct #elem_name<T> where T: ::OutputType {
                 phantom_output: std::marker::PhantomData<T>,
-                pub attrs: $attr_type_name,
+                pub attrs: #attr_type_name,
                 pub data_attributes: Vec<(&'static str, String)>,
                 pub events: ::events::Events<T>,
-                $body
+                #body
             }
         )
     }
@@ -116,34 +118,34 @@ impl Declare {
 
         let mut args = TokenStream::new();
         for (child_name, child_type, _) in self.req_children() {
-            args.extend(quote!( $child_name: Box<$child_type<T>>, ));
+            args.extend(quote!( #child_name: Box<#child_type<T>>, ));
         }
 
         let mut attrs = TokenStream::new();
         for (attr_name, _, _) in self.attrs() {
-            attrs.extend(quote!( $attr_name: None, ));
+            attrs.extend(quote!( #attr_name: None, ));
         }
 
         let mut body = TokenStream::new();
         body.extend(quote!(
-            attrs: $attr_type_name { $attrs },
+            attrs: #attr_type_name { #attrs },
         ));
         body.extend(quote!(data_attributes: Vec::new(),));
 
         for (child_name, _, _) in self.req_children() {
-            body.extend(quote!( $child_name, ));
+            body.extend(quote!( #child_name, ));
         }
         if self.opt_children.is_some() {
             body.extend(quote!(children: Vec::new()));
         }
 
         quote!(
-            impl<T> $elem_name<T> where T: ::OutputType {
-                pub fn new($args) -> Self {
-                    $elem_name {
+            impl<T> #elem_name<T> where T: ::OutputType {
+                pub fn new(#args) -> Self {
+                    #elem_name {
                         phantom_output: std::marker::PhantomData,
                         events: ::events::Events::default(),
-                        $body
+                        #body
                     }
                 }
             }
@@ -155,7 +157,7 @@ impl Declare {
         let mut req_children = TokenStream::new();
         for (child_name, _, _) in self.req_children() {
             req_children.extend(quote!(
-                children.push(self.$child_name.vnode());
+                children.push(self.#child_name.vnode());
             ));
         }
         let mut opt_children = TokenStream::new();
@@ -168,23 +170,23 @@ impl Declare {
         let mut push_attrs = TokenStream::new();
         for (attr_name, _, attr_str) in self.attrs() {
             push_attrs.extend(quote!(
-                if let Some(ref value) = self.attrs.$attr_name {
-                    attributes.push(($attr_str, value.to_string()));
+                if let Some(ref value) = self.attrs.#attr_name {
+                    attributes.push((#attr_str, value.to_string()));
                 }
             ));
         }
 
         quote!(
             let mut attributes = Vec::new();
-            $push_attrs
+            #push_attrs
             attributes.extend(self.data_attributes.clone());
 
             let mut children = Vec::new();
-            $req_children
-            $opt_children
+            #req_children
+            #opt_children
 
             ::dom::VNode::Element(::dom::VElement {
-                name: $elem_name,
+                name: #elem_name,
                 attributes,
                 events: &mut self.events,
                 children
@@ -196,9 +198,9 @@ impl Declare {
         let elem_name = self.elem_name();
         let vnode = self.impl_vnode();
         quote!(
-            impl<T> ::dom::Node<T> for $elem_name<T> where T: ::OutputType {
+            impl<T> ::dom::Node<T> for #elem_name<T> where T: ::OutputType {
                 fn vnode(&'_ mut self) -> ::dom::VNode<'_, T> {
-                    $vnode
+                    #vnode
                 }
             }
         )
@@ -208,38 +210,38 @@ impl Declare {
         let name: TokenTree = Literal::string(&self.name.to_string()).into();
         let elem_name = self.elem_name();
 
-        let attrs: TokenStream = self.attrs().map(|(_, _, name)| quote!( $name, )).collect();
+        let attrs: TokenStream = self.attrs().map(|(_, _, name)| quote!( #name, )).collect();
         let reqs: TokenStream = self
             .req_children()
-            .map(|(_, _, name)| quote!( $name, ))
+            .map(|(_, _, name)| quote!( #name, ))
             .collect();
 
         let mut push_attrs = TokenStream::new();
         for (attr_name, _, attr_str) in self.attrs() {
             push_attrs.extend(quote!(
-                if let Some(ref value) = self.attrs.$attr_name {
-                    out.push(($attr_str, value.to_string()));
+                if let Some(ref value) = self.attrs.#attr_name {
+                    out.push((#attr_str, value.to_string()));
                 }
             ));
         }
 
         quote!(
-            impl<T> ::dom::Element<T> for $elem_name<T> where T: ::OutputType {
+            impl<T> ::dom::Element<T> for #elem_name<T> where T: ::OutputType {
                 fn name() -> &'static str {
-                    $name
+                    #name
                 }
 
                 fn attribute_names() -> &'static [&'static str] {
-                    &[ $attrs ]
+                    &[ #attrs ]
                 }
 
                 fn required_children() -> &'static [&'static str] {
-                    &[ $reqs ]
+                    &[ #reqs ]
                 }
 
                 fn attributes(&self) -> Vec<(&'static str, String)> {
                     let mut out = Vec::new();
-                    $push_attrs
+                    #push_attrs
                     for (key, value) in &self.data_attributes {
                         out.push((key, value.to_string()));
                     }
@@ -255,7 +257,7 @@ impl Declare {
         for t in &self.traits {
             let name = t.clone();
             body.extend(quote!(
-                impl<T> $name<T> for $elem_name<T> where T: ::OutputType {}
+                impl<T> #name<T> for #elem_name<T> where T: ::OutputType {}
             ));
         }
         body
@@ -276,7 +278,7 @@ impl Declare {
         let mut print_req_children = TokenStream::new();
         for (child_name, _, _) in self.req_children() {
             print_req_children.extend(quote!(
-                self.$child_name.fmt(f)?;
+                self.#child_name.fmt(f)?;
             ));
         }
 
@@ -286,8 +288,8 @@ impl Declare {
                     write!(f, "/>")
                 } else {
                     write!(f, ">")?;
-                    $print_opt_children
-                    write!(f, "</{}>", $name)
+                    #print_opt_children
+                    write!(f, "</{}>", #name)
                 })
             } else {
                 quote!(write!(f, "/>"))
@@ -295,17 +297,17 @@ impl Declare {
         } else {
             quote!(
                 write!(f, ">")?;
-                $print_req_children
-                $print_opt_children
-                write!(f, "</{}>", $name)
+                #print_req_children
+                #print_opt_children
+                write!(f, "</{}>", #name)
             )
         };
 
         let mut print_attrs = TokenStream::new();
         for (attr_name, _, attr_str) in self.attrs() {
             print_attrs.extend(quote!(
-                if let Some(ref value) = self.attrs.$attr_name {
-                    write!(f, " {}=\"{}\"", $attr_str,
+                if let Some(ref value) = self.attrs.#attr_name {
+                    write!(f, " {}=\"{}\"", #attr_str,
                         ::htmlescape::encode_attribute(&value.to_string()))?;
                 }
             ));
@@ -316,24 +318,24 @@ impl Declare {
             let event_name = TokenTree::Ident(Ident::new(event, Span::call_site()));
             let event_str = TokenTree::Literal(Literal::string(event));
             print_events.extend(quote!(
-                if let Some(ref value) = self.events.$event_name {
-                    write!(f, " on{}=\"{}\"", $event_str,
+                if let Some(ref value) = self.events.#event_name {
+                    write!(f, " on{}=\"{}\"", #event_str,
                         ::htmlescape::encode_attribute(value.render().unwrap().as_str()))?;
                 }
             ));
         }
 
         quote!(
-            impl<T> std::fmt::Display for $elem_name<T> where T: ::OutputType {
+            impl<T> std::fmt::Display for #elem_name<T> where T: ::OutputType {
                 fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
-                    write!(f, "<{}", $name)?;
-                    $print_attrs
+                    write!(f, "<{}", #name)?;
+                    #print_attrs
                     for (key, value) in &self.data_attributes {
                         write!(f, " data-{}=\"{}\"", key,
                             ::htmlescape::encode_attribute(&value))?;
                     }
-                    $print_events
-                    $print_children
+                    #print_events
+                    #print_children
                 }
             }
         )
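The declare.rs changes above are mostly mechanical: the unstable `proc_macro::quote!` macro interpolated variables as `$var`, while the stable `quote` crate uses `#var`. A small illustrative sketch of the new style (the `make_getter` helper is made up for illustration, not part of this commit):

```rust
use proc_macro2::{Ident, TokenStream};
use quote::quote;

// Illustrative only: with the quote crate, interpolation is written `#var`
// instead of the `$var` syntax of the removed proc_macro::quote! macro.
fn make_getter(field: Ident, ty: TokenStream) -> TokenStream {
    quote!(
        pub fn get(&self) -> &#ty {
            &self.#field
        }
    )
}
```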
@@ -1,7 +1,8 @@
 use ansi_term::Style;
 use lalrpop_util::ParseError::*;
 use lexer::Token;
-use proc_macro::{Diagnostic, Ident, Level};
+use proc_macro2::{Ident, TokenStream};
+use quote::{quote, quote_spanned};
 
 pub type ParseError = lalrpop_util::ParseError<usize, Token, HtmlParseError>;
 
@@ -40,49 +41,76 @@ fn is_in_node_position(tokens: &[String]) -> bool {
     input == output
 }
 
-pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
+pub fn parse_error(input: &[Token], error: &ParseError) -> TokenStream {
     match error {
         InvalidToken { location } => {
-            let loc = &input[*location];
-            Diagnostic::spanned(loc.span(), Level::Error, "invalid token")
+            let span = input[*location].span();
+            quote_spanned! {span=>
+                compile_error! { "invalid token" }
+            }
         }
         UnrecognizedToken {
             token: None,
             expected,
         } => {
-            let msg = format!("missing {}", pprint_tokens(&expected));
-            Diagnostic::spanned(
-                input[0].span().join(input[input.len() - 1].span()).unwrap(),
-                Level::Error,
-                "unexpected end of macro",
-            )
-            .help(msg)
+            let msg = format!(
+                "unexpected end of macro; missing {}",
+                pprint_tokens(&expected)
+            );
+            quote! {
+                compile_error! { #msg }
+            }
         }
         UnrecognizedToken {
             token: Some((_, token, _)),
             expected,
         } => {
-            let msg = format!("expected {}", pprint_tokens(&expected));
-            let mut diag = Diagnostic::spanned(token.span(), Level::Error, msg);
-            if is_in_node_position(expected) && token.is_ident() {
+            let span = token.span();
+            let error_msg = format!("expected {}", pprint_tokens(&expected));
+            let error = quote_spanned! {span=>
+                compile_error! { #error_msg }
+            };
+            let help = if is_in_node_position(expected) && token.is_ident() {
                 // special case: you probably meant to quote that text
-                diag = diag.help(format!(
+                let help_msg = format!(
                     "text nodes need to be quoted, eg. {}",
                     Style::new().bold().paint("<p>\"Hello Joe!\"</p>")
-                ))
-            }
-            diag
+                );
+                Some(quote_spanned! {span=>
+                    compile_error! { #help_msg }
+                })
+            } else {
+                None
+            };
+            quote! {{
+                #error
+                #help
+            }}
         }
         ExtraToken {
             token: (_, token, _),
-        } => Diagnostic::spanned(token.span(), Level::Error, "superfluous token"),
+        } => {
+            let span = token.span();
+            quote_spanned! {span=>
+                compile_error! { "superfluous token" }
+            }
+        }
         User {
             error: HtmlParseError::TagMismatch { open, close },
-        } => Diagnostic::spanned(
-            close.span(),
-            Level::Error,
-            format!("expected closing tag '</{}>', found '</{}>'", open, close),
-        )
-        .span_help(open.span(), "opening tag is here:"),
+        } => {
+            let close_span = close.span();
+            let close_msg = format!("expected closing tag '</{}>', found '</{}>'", open, close);
+            let close_error = quote_spanned! {close_span=>
+                compile_error! { #close_msg }
+            };
+            let open_span = open.span();
+            let open_error = quote_spanned! {open_span=>
+                compile_error! { "unclosed tag" }
+            };
+            quote! {{
+                #close_error
+                #open_error
+            }}
+        }
     }
 }
@@ -3,8 +3,9 @@ use error::HtmlParseError;
 use html::{Node, Element};
 use declare::Declare;
 use map::StringyMap;
-use proc_macro::{Delimiter, Ident, Literal, Group, TokenTree};
+use proc_macro2::{Delimiter, Ident, Literal, Group, TokenTree};
 use lalrpop_util::ParseError;
+use span;
 
 grammar;
 
@@ -62,8 +63,17 @@ HtmlIdent: Ident = {
         let (span, name) = init.into_iter().fold((None, String::new()), |(span, name), token| {
             (
                 match span {
-                    None => Some(token.span()),
-                    Some(span) => span.join(token.span()),
+                    None => Some(token.span().unstable()),
+                    Some(span) => {
+                        #[cfg(can_join_spans)]
+                        {
+                            span.join(token.span().unstable())
+                        }
+                        #[cfg(not(can_join_spans))]
+                        {
+                            Some(span)
+                        }
+                    }
                 },
                 if name.is_empty() {
                     name + &token.to_string()
@@ -72,7 +82,7 @@ HtmlIdent: Ident = {
                 }
             )
         });
-        Ident::new(&name, span.unwrap())
+        Ident::new(&name, span::from_unstable(span.unwrap()))
     }
 };
 
@@ -1,13 +1,15 @@
-use proc_macro::{
-    quote, Delimiter, Diagnostic, Group, Ident, Level, Literal, TokenStream, TokenTree,
-};
+use proc_macro2::{Delimiter, Group, Ident, Literal, TokenStream, TokenTree};
+use quote::{quote, quote_spanned};
 
 use config::{required_children, ATTR_EVENTS};
 use error::ParseError;
+use ident;
 use lexer::{Lexer, Token};
 use map::StringyMap;
 use parser::grammar;
 
+use std::iter::FromIterator;
+
 #[derive(Clone)]
 pub enum Node {
     Element(Element),
@@ -16,38 +18,38 @@ pub enum Node {
 }
 
 impl Node {
-    pub fn into_token_stream(self) -> TokenStream {
+    pub fn into_token_stream(self) -> Result<TokenStream, TokenStream> {
         match self {
             Node::Element(el) => el.into_token_stream(),
             Node::Text(text) => {
                 let text = TokenTree::Literal(text);
-                quote!(Box::new(typed_html::dom::TextNode::new($text.to_string())))
+                Ok(quote!(Box::new(typed_html::dom::TextNode::new(#text.to_string()))))
             }
             Node::Block(_) => panic!("cannot have a block in this position"),
         }
     }
 
-    fn into_child_stream(self) -> TokenStream {
+    fn into_child_stream(self) -> Result<TokenStream, TokenStream> {
         match self {
             Node::Element(el) => {
-                let el = el.into_token_stream();
-                quote!(
-                    element.children.push($el);
-                )
+                let el = el.into_token_stream()?;
+                Ok(quote!(
+                    element.children.push(#el);
+                ))
             }
             tx @ Node::Text(_) => {
-                let tx = tx.into_token_stream();
-                quote!(
-                    element.children.push($tx);
-                )
+                let tx = tx.into_token_stream()?;
+                Ok(quote!(
+                    element.children.push(#tx);
+                ))
             }
             Node::Block(group) => {
                 let group: TokenTree = group.into();
-                quote!(
-                    for child in $group.into_iter() {
-                        element.children.push(child);
-                    }
-                )
+                Ok(quote!(
+                    for child in #group.into_iter() {
+                        element.children.push(child);
+                    }
+                ))
             }
         }
     }
@@ -86,7 +88,7 @@ fn extract_event_handlers(
             let event_name = &key_name[prefix.len()..];
             if ATTR_EVENTS.binary_search(&event_name).is_ok() {
                 let value = attrs.remove(&key).unwrap();
-                events.insert(Ident::new_raw(event_name, key.span()), value);
+                events.insert(ident::new_raw(event_name, key.span()), value);
             }
         }
     }
@@ -97,13 +99,13 @@ fn process_value(value: &TokenTree) -> TokenStream {
     match value {
         TokenTree::Group(g) if g.delimiter() == Delimiter::Bracket => {
             let content = g.stream();
-            quote!( [ $content ] )
+            quote!( [ #content ] )
         }
         TokenTree::Group(g) if g.delimiter() == Delimiter::Parenthesis => {
             let content = g.stream();
-            quote!( ( $content ) )
+            quote!( ( #content ) )
         }
-        v => v.clone().into(),
+        v => TokenStream::from_iter(vec![v.clone()]),
     }
 }
 
@@ -113,31 +115,29 @@ fn is_string_literal(literal: &Literal) -> bool {
 }
 
 impl Element {
-    fn into_token_stream(mut self) -> TokenStream {
+    fn into_token_stream(mut self) -> Result<TokenStream, TokenStream> {
         let name = self.name;
         let name_str = name.to_string();
         let typename: TokenTree = Ident::new(&name_str, name.span()).into();
         let req_names = required_children(&name_str);
         if req_names.len() > self.children.len() {
-            Diagnostic::spanned(
-                name.span(),
-                Level::Error,
-                format!(
-                    "<{}> requires {} children but there are only {}",
-                    name_str,
-                    req_names.len(),
-                    self.children.len()
-                ),
-            )
-            .emit();
-            panic!();
+            let span = name.span();
+            let error = format!(
+                "<{}> requires {} children but there are only {}",
+                name_str,
+                req_names.len(),
+                self.children.len()
+            );
+            return Err(quote_spanned! {span=>
+                compile_error! { #error }
+            });
         }
         let events = extract_event_handlers(&mut self.attributes);
         let data_attrs = extract_data_attrs(&mut self.attributes);
         let attrs = self.attributes.iter().map(|(key, value)| {
             (
                 key.to_string(),
-                TokenTree::Ident(Ident::new_raw(&key.to_string(), key.span())),
+                TokenTree::Ident(ident::new_raw(&key.to_string(), key.span())),
                 value,
             )
         });
@@ -145,28 +145,45 @@ impl Element {
             .children
             .split_off(req_names.len())
             .into_iter()
-            .map(Node::into_child_stream);
-        let req_children = self.children.into_iter().map(Node::into_token_stream);
+            .map(Node::into_child_stream)
+            .collect::<Result<Vec<TokenStream>, TokenStream>>()?;
+        let req_children = self
+            .children
+            .into_iter()
+            .map(Node::into_token_stream)
+            .collect::<Result<Vec<TokenStream>, TokenStream>>()?;
 
         let mut body = TokenStream::new();
         for (attr_str, key, value) in attrs {
             match value {
-                TokenTree::Literal(l) if is_string_literal(l) => {
-                    let value = value.clone();
-                    let tag_name: TokenTree = Literal::string(&name_str).into();
-                    let attr_str: TokenTree = Literal::string(&attr_str).into();
-                    let span = value.span();
-                    let pos = format!(
-                        "{}:{}:{}",
-                        span.source_file().path().to_str().unwrap_or("unknown"),
-                        span.start().line,
-                        span.start().column
-                    );
-                    let pos_str: TokenTree = Literal::string(&pos).into();
+                TokenTree::Literal(lit) if is_string_literal(lit) => {
+                    let mut eprintln_msg = "ERROR: ".to_owned();
+                    #[cfg(can_show_location_of_runtime_parse_error)]
+                    {
+                        let span = lit.span();
+                        eprintln_msg += &format!(
+                            "{}:{}:{}: ",
+                            span.unstable()
+                                .source_file()
+                                .path()
+                                .to_str()
+                                .unwrap_or("unknown"),
+                            span.unstable().start().line,
+                            span.unstable().start().column
+                        );
+                    }
+                    eprintln_msg += &format!(
+                        "<{} {}={}> failed to parse attribute value: {{}}",
+                        name_str, attr_str, lit,
+                    );
+                    #[cfg(not(can_show_location_of_runtime_parse_error))]
+                    {
+                        eprintln_msg += "\nERROR: rebuild with nightly to print source location";
+                    }
 
                     body.extend(quote!(
-                        element.attrs.$key = Some($value.parse().unwrap_or_else(|err| {
-                            eprintln!("ERROR: {}: <{} {}={:?}> failed to parse attribute value: {}",
-                                $pos_str, $tag_name, $attr_str, $value, err);
+                        element.attrs.#key = Some(#lit.parse().unwrap_or_else(|err| {
+                            eprintln!(#eprintln_msg, err);
                             panic!("failed to parse string literal");
                         }));
                     ));
@@ -174,7 +191,7 @@ impl Element {
                 value => {
                     let value = process_value(value);
                     body.extend(quote!(
-                        element.attrs.$key = Some(std::convert::Into::into($value));
+                        element.attrs.#key = Some(std::convert::Into::into(#value));
                     ));
                 }
             }
@@ -184,7 +201,7 @@ impl Element {
             .map(|(k, v)| (TokenTree::from(Literal::string(&k)), v.clone()))
         {
             body.extend(quote!(
-                element.data_attributes.push(($key, $value.into()));
+                element.data_attributes.push((#key, #value.into()));
             ));
         }
         body.extend(opt_children);
@@ -193,22 +210,22 @@ impl Element {
             let key = TokenTree::Ident(key.clone());
             let value = process_value(value);
             body.extend(quote!(
-                element.events.$key = Some(typed_html::events::IntoEventHandler::into_event_handler($value));
+                element.events.#key = Some(typed_html::events::IntoEventHandler::into_event_handler(#value));
             ));
         }
 
         let mut args = TokenStream::new();
         for arg in req_children {
-            args.extend(quote!( $arg, ));
+            args.extend(quote!( #arg, ));
         }
 
-        quote!(
+        Ok(quote!(
             {
-                let mut element = typed_html::elements::$typename::new($args);
-                $body
+                let mut element = typed_html::elements::#typename::new(#args);
+                #body
                 Box::new(element)
             }
-        )
+        ))
     }
 }
 
@@ -0,0 +1,17 @@
+use proc_macro2::{Ident, Span, TokenStream, TokenTree};
+
+use std::str::FromStr;
+
+pub fn new_raw(string: &str, span: Span) -> Ident {
+    // Validate that it is an ident.
+    let _ = Ident::new(string, span);
+
+    let s = format!("r#{}", string);
+    let tts = TokenStream::from_str(&s).unwrap();
+    let mut ident = match tts.into_iter().next().unwrap() {
+        TokenTree::Ident(ident) => ident,
+        _ => unreachable!(),
+    };
+    ident.set_span(span);
+    ident
+}
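This new module replaces the unstable `Ident::new_raw` constructor by round-tripping an `r#`-prefixed string through the proc-macro2 parser. An illustrative call (the surrounding function is made up; `ident` refers to the module added above):

```rust
use proc_macro2::{Ident, Span};

// Illustrative only: turn a reserved word into a raw identifier (`r#for`)
// so it can be used as a generated field or argument name.
fn reserved_field_name() -> Ident {
    ident::new_raw("for", Span::call_site())
}
```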
@@ -1,5 +1,7 @@
 use error::HtmlParseError;
-use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
+use proc_macro2::{Delimiter, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
+
+use std::iter::FromIterator;
 
 pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
 
@@ -51,7 +53,7 @@ impl From<Token> for TokenTree {
 
 impl From<Token> for TokenStream {
     fn from(token: Token) -> Self {
-        TokenTree::from(token).into()
+        TokenStream::from_iter(vec![TokenTree::from(token)])
     }
 }
 
@@ -1,58 +1,58 @@
-#![feature(proc_macro_hygiene)]
-#![feature(proc_macro_quote)]
-#![feature(proc_macro_span)]
-#![feature(proc_macro_diagnostic)]
-#![feature(proc_macro_raw_ident)]
 #![recursion_limit = "128"]
+#![cfg_attr(can_show_location_of_runtime_parse_error, feature(proc_macro_span))]
 
 extern crate ansi_term;
 extern crate lalrpop_util;
 extern crate proc_macro;
+extern crate proc_macro2;
+extern crate proc_macro_hack;
+extern crate quote;
 
-use proc_macro::{quote, TokenStream};
+use proc_macro::TokenStream;
+use proc_macro_hack::proc_macro_hack;
 
 mod config;
 mod declare;
 mod error;
 mod html;
+mod ident;
 mod lexer;
 mod map;
 mod parser;
+mod span;
 
 /// Construct a DOM tree.
 ///
 /// See the crate documentation for [`typed_html`][typed_html].
 ///
 /// [typed_html]: ../typed_html/index.html
-#[proc_macro]
+#[proc_macro_hack]
 pub fn html(input: TokenStream) -> TokenStream {
-    let stream = lexer::unroll_stream(input, false);
+    let stream = lexer::unroll_stream(input.into(), false);
     let result = html::expand_html(&stream);
-    match result {
-        Err(err) => {
-            error::parse_error(&stream, &err).emit();
-            quote!(panic!())
-        }
-        Ok(node) => node.into_token_stream(),
-    }
+    TokenStream::from(match result {
+        Err(err) => error::parse_error(&stream, &err),
+        Ok(node) => match node.into_token_stream() {
+            Err(err) => err,
+            Ok(success) => success,
+        },
+    })
 }
 
 /// This macro is used by `typed_html` internally to generate types and
 /// implementations for HTML elements.
 #[proc_macro]
 pub fn declare_elements(input: TokenStream) -> TokenStream {
-    let stream = lexer::keywordise(lexer::unroll_stream(input, true));
+    let stream = lexer::keywordise(lexer::unroll_stream(input.into(), true));
     let result = declare::expand_declare(&stream);
-    match result {
-        Err(err) => {
-            error::parse_error(&stream, &err).emit();
-            quote!(panic!())
-        }
+    TokenStream::from(match result {
+        Err(err) => error::parse_error(&stream, &err),
         Ok(decls) => {
-            let mut out = TokenStream::new();
+            let mut out = proc_macro2::TokenStream::new();
             for decl in decls {
                 out.extend(decl.into_token_stream());
            }
             out
         }
-    }
+    })
 }
@@ -0,0 +1,10 @@
+use proc_macro;
+use proc_macro2;
+
+pub fn from_unstable(span: proc_macro::Span) -> proc_macro2::Span {
+    let ident = proc_macro::Ident::new("_", span);
+    let tt = proc_macro::TokenTree::Ident(ident);
+    let tts = proc_macro::TokenStream::from(tt);
+    let tts2 = proc_macro2::TokenStream::from(tts);
+    tts2.into_iter().next().unwrap().span()
+}
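This new helper smuggles an unstable `proc_macro::Span` into a `proc_macro2::Span` by round-tripping a dummy token, since proc-macro2 offers no direct conversion on stable. An illustrative call site in the spirit of the parser change above (the `respan` function is made up):

```rust
use proc_macro2::Ident;

// Illustrative only: build a proc-macro2 Ident that carries the compiler's
// original span, mirroring the parser's call to span::from_unstable.
fn respan(name: &str, unstable: proc_macro::Span) -> Ident {
    Ident::new(name, span::from_unstable(unstable))
}
```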
@@ -23,3 +23,5 @@ language-tags = "0.2.2"
 http = "0.1.13"
 htmlescape = "0.3.1"
 stdweb = "0.4.10"
+proc-macro-hack = "0.5.2"
+proc-macro-nested = "0.1.0"
@@ -13,7 +13,6 @@ use htmlescape::encode_minimal;
 /// # Examples
 ///
 /// ```
-/// # #![feature(proc_macro_hygiene)]
 /// # use typed_html::html;
 /// # use typed_html::dom::DOMTree;
 /// # fn main() {
@@ -42,13 +41,13 @@ pub type DOMTree<T> = Box<Node<T>>;
 /// ```
 ///
 /// [Node]: trait.Node.html
-pub enum VNode<'a, T: OutputType> {
+pub enum VNode<'a, T: OutputType + 'a> {
     Text(&'a str),
     Element(VElement<'a, T>),
 }
 
 /// An untyped representation of an HTML element.
-pub struct VElement<'a, T: OutputType> {
+pub struct VElement<'a, T: OutputType + 'a> {
     pub name: &'static str,
     pub attributes: Vec<(&'static str, String)>,
     pub events: &'a mut Events<T>,
@@ -30,7 +30,6 @@ macro_rules! declare_events {
 /// # Examples
 ///
 /// ```
-/// # #![feature(proc_macro_hygiene)]
 /// # use typed_html::{html, for_events};
 /// # use typed_html::dom::{DOMTree, VNode};
 /// # fn main() {
@@ -1,17 +1,10 @@
 //! This crate provides the `html!` macro for building HTML documents inside your
 //! Rust code using roughly [JSX] compatible syntax.
 //!
-//! # Nightly Warning!
-//!
-//! This crate currently needs nightly rustc, and in order to use it you'll need to
-//! add `#![feature(proc_macro_hygiene)]` to the top of your crate. The compiler
-//! will tell you to do this if you forget. When this feature stabilises, the crate
-//! should work on stable rustc without issues.
-//!
 //! # Quick Preview
 //!
 //! ```
-//! # #![feature(proc_macro_hygiene)]
+//! # #![recursion_limit = "128"]
 //! # use typed_html::{html, for_events};
 //! # use typed_html::dom::{DOMTree, VNode};
 //! # use typed_html::types::Metadata;
@@ -95,7 +88,6 @@
 //! ## Example
 //!
 //! ```
-//! # #![feature(proc_macro_hygiene)]
 //! # use typed_html::html;
 //! # use typed_html::dom::DOMTree;
 //! # use typed_html::types::{Class, SpacedSet};
@@ -126,7 +118,6 @@
 //! ## Example
 //!
 //! ```
-//! # #![feature(proc_macro_hygiene)]
 //! # use typed_html::{html, text};
 //! # use typed_html::dom::DOMTree;
 //! # fn main() {
@@ -154,7 +145,6 @@
 //! ensure you're not using any event handlers that can't be printed.
 //!
 //! ```
-//! # #![feature(proc_macro_hygiene)]
 //! # use typed_html::html;
 //! # use typed_html::dom::DOMTree;
 //! # fn main() {
@@ -208,11 +198,15 @@ pub extern crate htmlescape;
 extern crate http;
 extern crate language_tags;
 extern crate mime;
+extern crate proc_macro_hack;
+extern crate proc_macro_nested;
 extern crate stdweb;
 extern crate strum;
 extern crate typed_html_macros;
 
-#[doc(inline)]
+use proc_macro_hack::proc_macro_hack;
+
+#[proc_macro_hack(support_nested)]
 pub use typed_html_macros::html;
 
 pub mod dom;
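With the `#[proc_macro_hack(support_nested)]` re-export above, consumers can invoke `html!` in expression position on stable Rust without any feature attributes. A rough usage sketch along the lines of the crate's Quick Preview (not taken verbatim from this commit):

```rust
#![recursion_limit = "128"]

extern crate typed_html;

use typed_html::dom::DOMTree;
use typed_html::html;

fn main() {
    // No #![feature(proc_macro_hygiene)] attribute is required any more:
    // the macro now expands through proc-macro-hack, which works on stable.
    let doc: DOMTree<String> = html!(
        <p>"Hello Joe!"</p>
    );
    println!("{}", doc.to_string());
}
```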
@@ -85,7 +85,7 @@ impl<A: Debug> Debug for SpacedList<A> {
     }
 }
 
-impl<A: FromStr> From<(&str, &str)> for SpacedList<A>
+impl<'a, 'b, A: FromStr> From<(&'a str, &'b str)> for SpacedList<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -97,7 +97,7 @@
     }
 }
 
-impl<A: FromStr> From<(&str, &str, &str)> for SpacedList<A>
+impl<'a, 'b, 'c, A: FromStr> From<(&'a str, &'b str, &'c str)> for SpacedList<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -110,7 +110,7 @@
     }
 }
 
-impl<A: FromStr> From<(&str, &str, &str, &str)> for SpacedList<A>
+impl<'a, 'b, 'c, 'd, A: FromStr> From<(&'a str, &'b str, &'c str, &'d str)> for SpacedList<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -124,7 +124,8 @@
     }
 }
 
-impl<A: FromStr> From<(&str, &str, &str, &str, &str)> for SpacedList<A>
+impl<'a, 'b, 'c, 'd, 'e, A: FromStr> From<(&'a str, &'b str, &'c str, &'d str, &'e str)>
+    for SpacedList<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -139,7 +140,8 @@
     }
 }
 
-impl<A: FromStr> From<(&str, &str, &str, &str, &str, &str)> for SpacedList<A>
+impl<'a, 'b, 'c, 'd, 'e, 'f, A: FromStr>
+    From<(&'a str, &'b str, &'c str, &'d str, &'e str, &'f str)> for SpacedList<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -155,7 +157,16 @@
     }
 }
 
-impl<A: FromStr> From<(&str, &str, &str, &str, &str, &str, &str)> for SpacedList<A>
+impl<'a, 'b, 'c, 'd, 'e, 'f, 'g, A: FromStr>
+    From<(
+        &'a str,
+        &'b str,
+        &'c str,
+        &'d str,
+        &'e str,
+        &'f str,
+        &'g str,
+    )> for SpacedList<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -172,7 +183,17 @@
     }
 }
 
-impl<A: FromStr> From<(&str, &str, &str, &str, &str, &str, &str, &str)> for SpacedList<A>
+impl<'a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, A: FromStr>
+    From<(
+        &'a str,
+        &'b str,
+        &'c str,
+        &'d str,
+        &'e str,
+        &'f str,
+        &'g str,
+        &'h str,
+    )> for SpacedList<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -192,7 +213,7 @@
 
 macro_rules! spacedlist_from_array {
     ($num:tt) => {
-        impl<A: FromStr> From<[&str; $num]> for SpacedList<A>
+        impl<'a, A: FromStr> From<[&'a str; $num]> for SpacedList<A>
         where
             <A as FromStr>::Err: Debug,
         {
@@ -113,7 +113,7 @@ impl<A: Ord + Debug> Debug for SpacedSet<A> {
     }
 }
 
-impl<A: Ord + FromStr> From<(&str, &str)> for SpacedSet<A>
+impl<'a, 'b, A: Ord + FromStr> From<(&'a str, &'b str)> for SpacedSet<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -125,7 +125,7 @@
     }
 }
 
-impl<A: Ord + FromStr> From<(&str, &str, &str)> for SpacedSet<A>
+impl<'a, 'b, 'c, A: Ord + FromStr> From<(&'a str, &'b str, &'c str)> for SpacedSet<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -138,7 +138,7 @@
     }
 }
 
-impl<A: Ord + FromStr> From<(&str, &str, &str, &str)> for SpacedSet<A>
+impl<'a, 'b, 'c, 'd, A: Ord + FromStr> From<(&'a str, &'b str, &'c str, &'d str)> for SpacedSet<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -152,7 +152,8 @@
     }
 }
 
-impl<A: Ord + FromStr> From<(&str, &str, &str, &str, &str)> for SpacedSet<A>
+impl<'a, 'b, 'c, 'd, 'e, A: Ord + FromStr> From<(&'a str, &'b str, &'c str, &'d str, &'e str)>
+    for SpacedSet<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -167,7 +168,8 @@
     }
 }
 
-impl<A: Ord + FromStr> From<(&str, &str, &str, &str, &str, &str)> for SpacedSet<A>
+impl<'a, 'b, 'c, 'd, 'e, 'f, A: Ord + FromStr>
+    From<(&'a str, &'b str, &'c str, &'d str, &'e str, &'f str)> for SpacedSet<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -183,7 +185,16 @@
     }
 }
 
-impl<A: Ord + FromStr> From<(&str, &str, &str, &str, &str, &str, &str)> for SpacedSet<A>
+impl<'a, 'b, 'c, 'd, 'e, 'f, 'g, A: Ord + FromStr>
+    From<(
+        &'a str,
+        &'b str,
+        &'c str,
+        &'d str,
+        &'e str,
+        &'f str,
+        &'g str,
+    )> for SpacedSet<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -200,7 +211,17 @@
     }
 }
 
-impl<A: Ord + FromStr> From<(&str, &str, &str, &str, &str, &str, &str, &str)> for SpacedSet<A>
+impl<'a, 'b, 'c, 'd, 'e, 'f, 'g, 'h, A: Ord + FromStr>
+    From<(
+        &'a str,
+        &'b str,
+        &'c str,
+        &'d str,
+        &'e str,
+        &'f str,
+        &'g str,
+        &'h str,
+    )> for SpacedSet<A>
 where
     <A as FromStr>::Err: Debug,
 {
@@ -220,7 +241,7 @@
 
 macro_rules! spacedlist_from_array {
     ($num:tt) => {
-        impl<A: Ord + FromStr> From<[&str; $num]> for SpacedSet<A>
+        impl<'a, A: Ord + FromStr> From<[&'a str; $num]> for SpacedSet<A>
        where
            <A as FromStr>::Err: Debug,
        {