Completely transitioned to LALRPOP and new declare macro.

Bodil Stokke 2018-11-14 16:22:07 +00:00
parent cb7e148310
commit 7d1e95f262
9 changed files with 385 additions and 523 deletions
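In short: the pom combinator parsers are dropped in favour of a LALRPOP grammar, diagnostics move into a new error module, and the long list of per-element declare_element! calls collapses into a single declare_elements! block. A minimal before/after sketch of the declaration syntax, lifted from the element definitions further down in this diff:

// Before: one pom-parsed macro invocation per element
declare_element!(div {} [] [FlowContent] FlowContent);
declare_element!(p {} [] [FlowContent] PhrasingContent);

// After: one LALRPOP-parsed block declaring all elements
declare_elements! {
    div in [FlowContent] with FlowContent;
    p in [FlowContent] with PhrasingContent;
}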


@@ -8,8 +8,8 @@ build = "build.rs"
proc-macro = true
[dependencies]
pom = "2.0.1"
lalrpop-util = "0.16.1"
ansi_term = "0.11.0"
[build-dependencies]
lalrpop = "0.16.1"


@@ -1,11 +1,10 @@
use pom::combinator::*;
use pom::Parser;
use proc_macro::{quote, Group, Ident, Literal, TokenStream, TokenTree};
use proc_macro::{quote, Ident, Literal, TokenStream, TokenTree};
use config::global_attrs;
use lexer::{Lexer, ParseError, Token};
use error::ParseError;
use lexer::{Lexer, Token};
use map::StringyMap;
use parser::{self, *};
use parser;
// State
@@ -327,60 +326,6 @@ impl Declare {
}
}
// Parser
fn declare_attrs<'a>() -> Combinator<impl Parser<'a, TokenTree, Output = Vec<(Ident, TokenStream)>>>
{
group().map(|group: Group| -> Vec<(Ident, TokenStream)> {
let attr = ident() - punct(':') + type_spec() - punct(',').opt();
let parser = attr.repeat(0..);
let input: Vec<TokenTree> = group.stream().into_iter().collect();
// FIXME the borrow checker won't let me use plain &input, it seems like a bug.
// It works in Rust 2018, so please get rid of this unsafe block when it stabilises.
parser
.parse(unsafe { &*(input.as_slice() as *const _) })
.unwrap()
})
}
fn declare_children<'a>() -> Combinator<impl Parser<'a, TokenTree, Output = Vec<Ident>>> {
group().map(|group: Group| {
let input: Vec<TokenTree> = group.stream().into_iter().collect();
let children = (ident() - punct(',').opt()).repeat(0..);
let result = children.parse(&input);
result.unwrap()
})
}
fn declare_traits<'a>() -> Combinator<impl Parser<'a, TokenTree, Output = Vec<TokenStream>>> {
group().map(|group: Group| {
let input: Vec<TokenTree> = group.stream().into_iter().collect();
let traits = (type_spec() - punct(',').opt()).repeat(0..);
let result = traits.parse(&input);
result.unwrap()
})
}
fn declare<'a>() -> Combinator<impl Parser<'a, TokenTree, Output = Declare>> {
(ident() + declare_attrs() + declare_children() + declare_traits().opt() + type_spec().opt())
.map(|((((name, attrs), children), traits), child_type)| {
let mut declare = Declare::new(name);
for (key, value) in attrs {
declare.attrs.insert(key, value);
}
for child in children {
declare.req_children.push(child);
}
declare.opt_children = child_type;
declare.traits = traits.unwrap_or_default();
declare
})
}
pub fn expand_declare(input: &[TokenTree]) -> pom::Result<TokenStream> {
declare().parse(input).map(|decl| decl.into_token_stream())
}
pub fn expand_declare_lalrpop(input: &[Token]) -> Result<Vec<Declare>, ParseError> {
pub fn expand_declare(input: &[Token]) -> Result<Vec<Declare>, ParseError> {
parser::grammar::DeclarationsParser::new().parse(Lexer::new(input))
}

macros/src/error.rs Normal file

@@ -0,0 +1,88 @@
use ansi_term::Style;
use lalrpop_util::ParseError::*;
use lexer::Token;
use proc_macro::{Diagnostic, Ident, Level};
pub type ParseError = lalrpop_util::ParseError<usize, Token, HtmlParseError>;
#[derive(Debug)]
pub enum HtmlParseError {
TagMismatch { open: Ident, close: Ident },
}
fn pprint_token(token: &str) -> &str {
match token {
"BraceGroupToken" => "code block",
"LiteralToken" => "literal",
"IdentToken" => "identifier",
a => a,
}
}
fn pprint_tokens(tokens: &[String]) -> String {
let tokens: Vec<&str> = tokens.iter().map(|s| pprint_token(&s)).collect();
if tokens.len() > 1 {
let start = tokens[..tokens.len() - 1].join(", ");
let end = &tokens[tokens.len() - 1];
format!("{} or {}", start, end)
} else {
tokens[0].to_string()
}
}
fn is_in_node_position(tokens: &[String]) -> bool {
use std::collections::HashSet;
let input: HashSet<&str> = tokens.iter().map(String::as_str).collect();
let output: HashSet<&str> = ["\"<\"", "BraceGroupToken", "LiteralToken"]
.iter()
.cloned()
.collect();
input == output
}
pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
match error {
InvalidToken { location } => {
let loc = &input[*location];
Diagnostic::spanned(loc.span(), Level::Error, "invalid token")
}
UnrecognizedToken {
token: None,
expected,
} => {
let msg = format!("missing {}", pprint_tokens(&expected));
Diagnostic::spanned(
input[0].span().join(input[input.len() - 1].span()).unwrap(),
Level::Error,
"unexpected end of macro",
)
.help(msg)
}
UnrecognizedToken {
token: Some((_, token, _)),
expected,
} => {
let msg = format!("expected {}", pprint_tokens(&expected));
let mut diag = Diagnostic::spanned(token.span(), Level::Error, msg);
if is_in_node_position(expected) && token.is_ident() {
// special case: you probably meant to quote that text
diag = diag.help(format!(
"text nodes need to be quoted, eg. {}",
Style::new().bold().paint("<p>\"Hello Joe!\"</p>")
))
}
diag
}
ExtraToken {
token: (_, token, _),
} => Diagnostic::spanned(token.span(), Level::Error, "superfluous token"),
User {
error: HtmlParseError::TagMismatch { open, close },
} => Diagnostic::spanned(
close.span(),
Level::Error,
format!("expected closing tag '</{}>', found '</{}>'", open, close),
)
.span_help(open.span(), "opening tag is here:"),
}
}


@@ -1,4 +1,5 @@
use lexer::{Token, to_stream, HtmlParseError, Keyword};
use lexer::{self, Token, to_stream};
use error::HtmlParseError;
use html::{Node, Element};
use declare::Declare;
use map::StringyMap;
@@ -130,9 +131,9 @@ ParentTag: Element = <opening:OpeningTag> <children:Node*> <closing:ClosingTag>
children,
})
} else {
Err(ParseError::User { error: HtmlParseError {
token: closing.into(),
message: format!("expected closing tag '</{}>', found '</{}>'", name.to_string(), closing_name),
Err(ParseError::User { error: HtmlParseError::TagMismatch {
open: name.into(),
close: closing.into(),
}})
}
};
@@ -260,7 +261,7 @@ extern {
type Location = usize;
type Error = HtmlParseError;
enum Token {
enum lexer::Token {
"<" => Token::Punct('<', _),
">" => Token::Punct('>', _),
"/" => Token::Punct('/', _),
@@ -276,8 +277,8 @@ extern {
"}" => Token::GroupClose(Delimiter::Brace, _),
"[" => Token::GroupOpen(Delimiter::Bracket, _),
"]" => Token::GroupClose(Delimiter::Bracket, _),
"in" => Token::Keyword(Keyword::In, _),
"with" => Token::Keyword(Keyword::With, _),
"in" => Token::Keyword(lexer::Keyword::In, _),
"with" => Token::Keyword(lexer::Keyword::With, _),
IdentToken => Token::Ident(_),
LiteralToken => Token::Literal(_),
ParenGroupToken => Token::Group(Delimiter::Parenthesis, _),


@@ -3,7 +3,8 @@ use proc_macro::{
};
use config::required_children;
use lexer::{Lexer, ParseError, Token};
use error::ParseError;
use lexer::{Lexer, Token};
use map::StringyMap;
use parser::grammar;


@@ -1,10 +1,7 @@
use lalrpop_util::ParseError::*;
use proc_macro::{
Delimiter, Diagnostic, Group, Ident, Level, Literal, Punct, Span, TokenStream, TokenTree,
};
use error::HtmlParseError;
use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Span, TokenStream, TokenTree};
pub type Spanned<Tok, Loc, Error> = Result<(Loc, Tok, Loc), Error>;
pub type ParseError = lalrpop_util::ParseError<usize, Token, HtmlParseError>;
#[derive(Clone, Debug)]
pub enum Token {
@@ -107,75 +104,6 @@ pub fn keywordise(tokens: Vec<Token>) -> Vec<Token> {
.collect()
}
#[derive(Debug)]
pub struct HtmlParseError {
pub token: Token,
pub message: String,
}
fn pprint_token(token: &str) -> &str {
match token {
"BraceGroupToken" => "code block",
"LiteralToken" => "literal",
"IdentToken" => "identifier",
a => a,
}
}
fn pprint_tokens(tokens: &[String]) -> String {
let tokens: Vec<&str> = tokens.iter().map(|s| pprint_token(&s)).collect();
if tokens.len() > 1 {
let start = tokens[..tokens.len() - 1].join(", ");
let end = &tokens[tokens.len() - 1];
format!("{} or {}", start, end)
} else {
tokens[0].to_string()
}
}
fn is_in_node_position(tokens: &[String]) -> bool {
use std::collections::HashSet;
let input: HashSet<&str> = tokens.iter().map(String::as_str).collect();
let output: HashSet<&str> = ["\"<\"", "BraceGroupToken", "LiteralToken"]
.iter()
.cloned()
.collect();
input == output
}
pub fn parse_error(input: &[Token], error: &ParseError) -> Diagnostic {
match error {
InvalidToken { location } => {
let loc = &input[*location];
Diagnostic::spanned(loc.span(), Level::Error, "invalid token")
}
UnrecognizedToken {
token: None,
expected,
} => panic!(
"unexpected end of macro: expecting {}",
pprint_tokens(&expected)
),
UnrecognizedToken {
token: Some((_, token, _)),
expected,
} => {
let mut msg = format!("expected {}", pprint_tokens(&expected));
if is_in_node_position(expected) && token.is_ident() {
// special case: you probably meant to quote that text
msg += "; looks like you forgot to put \"quotes\" around your text nodes";
}
Diagnostic::spanned(token.span(), Level::Error, msg)
}
ExtraToken {
token: (_, token, _),
} => Diagnostic::spanned(token.span(), Level::Error, "superfluous token"),
User { error } => {
Diagnostic::spanned(error.token.span(), Level::Error, error.message.to_owned())
}
}
}
pub fn to_stream<I: IntoIterator<Item = Token>>(tokens: I) -> TokenStream {
let mut stream = TokenStream::new();
stream.extend(tokens.into_iter().map(TokenTree::from));


@@ -4,14 +4,15 @@
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_raw_ident)]
extern crate ansi_term;
extern crate lalrpop_util;
extern crate pom;
extern crate proc_macro;
use proc_macro::{TokenStream, TokenTree};
use proc_macro::{quote, TokenStream};
mod config;
mod declare;
mod error;
mod html;
mod lexer;
mod map;
@@ -22,22 +23,22 @@ pub fn html(input: TokenStream) -> TokenStream {
let stream = lexer::unroll_stream(input, false);
let result = html::expand_html(&stream);
match result {
Err(error) => {
lexer::parse_error(&stream, &error).emit();
panic!("macro expansion produced errors; see above.")
Err(err) => {
error::parse_error(&stream, &err).emit();
quote!(panic!())
}
Ok(node) => node.into_token_stream(),
}
}
#[proc_macro]
pub fn declalrpop_element(input: TokenStream) -> TokenStream {
pub fn declare_elements(input: TokenStream) -> TokenStream {
let stream = lexer::keywordise(lexer::unroll_stream(input, true));
let result = declare::expand_declare_lalrpop(&stream);
let result = declare::expand_declare(&stream);
match result {
Err(error) => {
lexer::parse_error(&stream, &error).emit();
panic!("macro expansion produced errors; see above.")
Err(err) => {
error::parse_error(&stream, &err).emit();
quote!(panic!())
}
Ok(decls) => {
let mut out = TokenStream::new();
@@ -48,16 +49,3 @@ pub fn declalrpop_element(input: TokenStream) -> TokenStream {
}
}
}
#[proc_macro]
pub fn declare_element(input: TokenStream) -> TokenStream {
let input: Vec<TokenTree> = input.into_iter().collect();
let result = declare::expand_declare(&input);
match result {
Err(error) => {
parser::parse_error(&input, &error).emit();
panic!("macro expansion produced errors; see above.")
}
Ok(ts) => ts,
}
}


@@ -1,89 +1,3 @@
use lalrpop_util::lalrpop_mod;
use pom::combinator::*;
use pom::{Error, Parser};
use proc_macro::{Diagnostic, Group, Ident, Level, Punct, TokenStream, TokenTree};
lalrpop_mod!(pub grammar);
pub fn punct<'a>(punct: char) -> Combinator<impl Parser<'a, TokenTree, Output = Punct>> {
comb(move |input: &[TokenTree], start| match input.get(start) {
Some(TokenTree::Punct(p)) if p.as_char() == punct => Ok((p.clone(), start + 1)),
_ => Err(Error::Mismatch {
message: format!("expected {:?}", punct),
position: start,
}),
})
}
pub fn ident<'a>() -> Combinator<impl Parser<'a, TokenTree, Output = Ident>> {
comb(|input: &[TokenTree], start| match input.get(start) {
Some(TokenTree::Ident(i)) => Ok((i.clone(), start + 1)),
_ => Err(Error::Mismatch {
message: "expected identifier".to_string(),
position: start,
}),
})
}
pub fn group<'a>() -> Combinator<impl Parser<'a, TokenTree, Output = Group>> {
comb(|input: &[TokenTree], start| match input.get(start) {
Some(TokenTree::Group(g)) => Ok((g.clone(), start + 1)),
_ => Err(Error::Mismatch {
message: "expected group".to_string(),
position: start,
}),
})
}
fn to_stream<'a, I: IntoIterator<Item = &'a TokenTree>>(tokens: I) -> TokenStream {
let mut stream = TokenStream::new();
stream.extend(tokens.into_iter().cloned());
stream
}
pub fn type_spec<'a>() -> Combinator<impl Parser<'a, TokenTree, Output = TokenStream>> {
let valid = ident().map(TokenTree::Ident)
| punct(':').map(TokenTree::Punct)
| punct('<').map(TokenTree::Punct)
| punct('>').map(TokenTree::Punct)
| punct('&').map(TokenTree::Punct)
| punct('\'').map(TokenTree::Punct);
valid.repeat(1..).collect().map(to_stream)
}
/// Turn a parser error into a proc_macro diagnostic.
pub fn parse_error(input: &[TokenTree], error: &pom::Error) -> Diagnostic {
match error {
pom::Error::Incomplete => Diagnostic::new(Level::Error, "unexpected end of macro!"),
pom::Error::Mismatch { message, position } => {
Diagnostic::spanned(input[*position].span(), Level::Error, message.as_str())
}
pom::Error::Conversion { message, position } => {
Diagnostic::spanned(input[*position].span(), Level::Error, message.as_str())
}
pom::Error::Expect {
message,
position,
inner,
} => {
let mut diag =
Diagnostic::spanned(input[*position].span(), Level::Error, message.as_str());
let child = parse_error(input, &inner);
diag.span_error(child.spans(), child.message())
}
pom::Error::Custom {
message,
position,
inner,
} => {
let mut diag =
Diagnostic::spanned(input[*position].span(), Level::Error, message.as_str());
if let Some(inner) = inner {
let child = parse_error(input, &inner);
diag.span_error(child.spans(), child.message())
} else {
diag
}
}
}
}


@@ -2,7 +2,7 @@
#![allow(dead_code)]
use std::fmt::Display;
use typed_html_macros::{declalrpop_element, declare_element};
use typed_html_macros::declare_elements;
use super::types::*;
@@ -99,7 +99,7 @@ impl Node for TextNode {
impl FlowContent for TextNode {}
impl PhrasingContent for TextNode {}
declalrpop_element!{
declare_elements!{
html {
xmlns: Uri,
} with [head, body];
@@ -196,273 +196,270 @@ declalrpop_element!{
details {
open: bool,
} in [FlowContent, SectioningContent, InteractiveContent] with [summary] FlowContent;
dfn in [FlowContent, PhrasingContent] with PhrasingContent;
div in [FlowContent] with FlowContent;
dl in [FlowContent] with DescriptionListContent;
em in [FlowContent, PhrasingContent] with PhrasingContent;
embed {
height: usize,
src: Uri,
type: Mime,
width: usize,
} in [FlowContent, PhrasingContent, EmbeddedContent, InteractiveContent];
// FIXME the legend attribute should be optional
fieldset in [FlowContent, SectioningContent, FormContent] with [legend] FlowContent;
// FIXME the figcaption attribute should be optional
figure in [FlowContent, SectioningContent] with [figcaption] FlowContent;
footer in [FlowContent] with FlowContent;
form {
accept-charset: SpacedList<CharacterEncoding>,
action: Uri,
autocomplete: OnOff,
enctype: FormEncodingType,
method: FormMethod,
name: Id,
novalidate: bool,
target: Target,
} in [FlowContent] with FlowContent;
h1 in [FlowContent, HeadingContent, HGroupContent] with PhrasingContent;
h2 in [FlowContent, HeadingContent, HGroupContent] with PhrasingContent;
h3 in [FlowContent, HeadingContent, HGroupContent] with PhrasingContent;
h4 in [FlowContent, HeadingContent, HGroupContent] with PhrasingContent;
h5 in [FlowContent, HeadingContent, HGroupContent] with PhrasingContent;
h6 in [FlowContent, HeadingContent, HGroupContent] with PhrasingContent;
header in [FlowContent] with FlowContent;
hgroup in [FlowContent, HeadingContent] with HGroupContent;
hr in [FlowContent];
i in [FlowContent, PhrasingContent] with PhrasingContent;
iframe {
allow: FeaturePolicy,
allowfullscreen: bool,
allowpaymentrequest: bool,
height: usize,
name: Id,
referrerpolicy: ReferrerPolicy,
sandbox: SpacedSet<Sandbox>,
src: Uri,
srcdoc: Uri,
width: usize,
} in [FlowContent, PhrasingContent, EmbeddedContent, InteractiveContent] with FlowContent;
img {
alt: String,
crossorigin: CrossOrigin,
decoding: ImageDecoding,
height: usize,
ismap: bool,
sizes: SpacedList<String>, // FIXME it's not really just a string
src: Uri,
srcset: String, // FIXME this is much more complicated
usemap: String, // FIXME should be a fragment starting with '#'
width: usize,
} in [FlowContent, PhrasingContent, EmbeddedContent];
input {
autocomplete: String,
autofocus: bool,
disabled: bool,
form: Id,
list: Id,
name: Id,
required: bool,
tabindex: usize,
type: InputType,
value: String,
} in [FlowContent, FormContent, PhrasingContent];
ins {
cite: Uri,
datetime: Datetime,
} in [FlowContent, PhrasingContent] with FlowContent;
kbd in [FlowContent, PhrasingContent] with PhrasingContent;
label {
for: Id,
form: Id,
} in [FlowContent, PhrasingContent, InteractiveContent, FormContent] with PhrasingContent;
main in [FlowContent] with FlowContent;
map {
name: Id,
} in [FlowContent, PhrasingContent] with MapContent;
mark in [FlowContent, PhrasingContent] with PhrasingContent;
// TODO the <math> element
meter {
value: isize,
min: isize,
max: isize,
low: isize,
high: isize,
optimum: isize,
form: Id,
} in [FlowContent, PhrasingContent] with PhrasingContent;
nav in [FlowContent, SectioningContent] with PhrasingContent;
noscript in [MetadataContent, FlowContent, PhrasingContent] with Node;
object {
data: Uri,
form: Id,
height: usize,
name: Id,
type: Mime,
typemustmatch: bool,
usemap: String, // TODO should be a fragment starting with '#'
width: usize,
} in [FlowContent, PhrasingContent, EmbeddedContent, InteractiveContent, FormContent] with Element_param;
ol {
reversed: bool,
start: isize,
type: OrderedListType,
} in [FlowContent] with Element_li;
output {
for: SpacedSet<Id>,
form: Id,
name: Id,
} in [FlowContent, PhrasingContent, FormContent] with PhrasingContent;
p in [FlowContent] with PhrasingContent;
pre in [FlowContent] with PhrasingContent;
progress {
max: f64,
value: f64,
} in [FlowContent, PhrasingContent] with PhrasingContent;
q {
cite: Uri,
} in [FlowContent, PhrasingContent] with PhrasingContent;
ruby in [FlowContent, PhrasingContent] with PhrasingContent;
s in [FlowContent, PhrasingContent] with PhrasingContent;
samp in [FlowContent, PhrasingContent] with PhrasingContent;
script {
async: bool,
crossorigin: CrossOrigin,
defer: bool,
integrity: Integrity,
nomodule: bool,
nonce: Nonce,
src: Uri,
text: String,
type: String, // TODO could be an enum
} in [MetadataContent, FlowContent, PhrasingContent, TableColumnContent] with TextNode;
section in [FlowContent, SectioningContent] with FlowContent;
select {
autocomplete: String,
autofocus: bool,
disabled: bool,
form: Id,
multiple: bool,
name: Id,
required: bool,
size: usize,
} in [FlowContent, PhrasingContent, InteractiveContent, FormContent] with SelectContent;
small in [FlowContent, PhrasingContent] with PhrasingContent;
span in [FlowContent, PhrasingContent] with PhrasingContent;
strong in [FlowContent, PhrasingContent] with PhrasingContent;
sub in [FlowContent, PhrasingContent] with PhrasingContent;
sup in [FlowContent, PhrasingContent] with PhrasingContent;
table in [FlowContent] with TableContent;
template in [MetadataContent, FlowContent, PhrasingContent, TableColumnContent] with Node;
textarea {
autocomplete: OnOff,
autofocus: bool,
cols: usize,
disabled: bool,
form: Id,
maxlength: usize,
minlength: usize,
name: Id,
placeholder: String,
readonly: bool,
required: bool,
rows: usize,
spellcheck: BoolOrDefault,
wrap: Wrap,
} in [FlowContent, PhrasingContent, InteractiveContent, FormContent] with TextNode;
time {
datetime: Datetime,
} in [FlowContent, PhrasingContent] with PhrasingContent;
ul in [FlowContent] with Element_li;
var in [FlowContent, PhrasingContent] with PhrasingContent;
video in [FlowContent, PhrasingContent, EmbeddedContent] with MediaContent;
wbr in [FlowContent, PhrasingContent];
// Non-group elements
area {
alt: String,
coords: String, // TODO could perhaps be validated
download: bool,
href: Uri,
hreflang: LanguageTag,
ping: SpacedList<Uri>,
rel: SpacedSet<LinkType>,
shape: AreaShape,
target: Target,
} in [MapContent];
caption in [TableContent] with FlowContent;
col {
span: usize,
};
colgroup {
span: usize,
} in [TableContent] with Element_col;
dd in [DescriptionListContent] with FlowContent;
dt in [DescriptionListContent] with FlowContent;
figcaption with FlowContent;
legend with PhrasingContent;
li {
value: isize,
} with FlowContent;
option {
disabled: bool,
label: String,
selected: bool,
value: String,
} in [SelectContent] with TextNode;
optgroup {
disabled: bool,
label: String,
} in [SelectContent] with Element_option;
param {
name: String,
value: String,
};
source {
src: Uri,
type: Mime,
} in [MediaContent];
summary with PhrasingContent;
tbody in [TableContent] with Element_tr;
td {
colspan: usize,
headers: SpacedSet<Id>,
rowspan: usize,
} in [TableColumnContent] with FlowContent;
tfoot in [TableContent] with Element_tr;
th {
abbr: String,
colspan: usize,
headers: SpacedSet<Id>,
rowspan: usize,
scope: TableHeaderScope,
} in [TableColumnContent] with FlowContent;
thead in [TableContent] with Element_tr;
tr in [TableContent] with TableColumnContent;
track {
default: bool,
kind: VideoKind,
label: String,
src: Uri,
srclang: LanguageTag,
} in [MediaContent];
// Don't @ me
blink in [FlowContent, PhrasingContent] with PhrasingContent;
marquee {
behavior: String, // FIXME enum
bgcolor: String, // FIXME colour
direction: String, // FIXME direction enum
height: String, // FIXME size
hspace: String, // FIXME size
loop: isize,
scrollamount: usize,
scrolldelay: usize,
truespeed: bool,
vspace: String, // FIXME size
width: String, // FIXME size
} in [FlowContent, PhrasingContent] with PhrasingContent;
}
// Flow content
declare_element!(dfn {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(div {} [] [FlowContent] FlowContent);
declare_element!(dl {} [] [FlowContent] DescriptionListContent);
declare_element!(em {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(embed {
height: usize,
src: Uri,
type: Mime,
width: usize,
} [] [FlowContent, PhrasingContent, EmbeddedContent, InteractiveContent]);
// FIXME the legend attribute should be optional
declare_element!(fieldset {} [legend] [FlowContent, SectioningContent, FormContent] FlowContent);
// FIXME the figcaption attribute should be optional
declare_element!(figure {} [figcaption] [FlowContent, SectioningContent] FlowContent);
declare_element!(footer {} [] [FlowContent] FlowContent);
declare_element!(form {
accept-charset: SpacedList<CharacterEncoding>,
action: Uri,
autocomplete: OnOff,
enctype: FormEncodingType,
method: FormMethod,
name: Id,
novalidate: bool,
target: Target,
} [] [FlowContent] FlowContent);
declare_element!(h1 {} [] [FlowContent, HeadingContent, HGroupContent] PhrasingContent);
declare_element!(h2 {} [] [FlowContent, HeadingContent, HGroupContent] PhrasingContent);
declare_element!(h3 {} [] [FlowContent, HeadingContent, HGroupContent] PhrasingContent);
declare_element!(h4 {} [] [FlowContent, HeadingContent, HGroupContent] PhrasingContent);
declare_element!(h5 {} [] [FlowContent, HeadingContent, HGroupContent] PhrasingContent);
declare_element!(h6 {} [] [FlowContent, HeadingContent, HGroupContent] PhrasingContent);
declare_element!(header {} [] [FlowContent] FlowContent);
declare_element!(hgroup {} [] [FlowContent, HeadingContent] HGroupContent);
declare_element!(hr {} [] [FlowContent]);
declare_element!(i {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(iframe {
allow: FeaturePolicy,
allowfullscreen: bool,
allowpaymentrequest: bool,
height: usize,
name: Id,
referrerpolicy: ReferrerPolicy,
sandbox: SpacedSet<Sandbox>,
src: Uri,
srcdoc: Uri,
width: usize,
} [] [FlowContent, PhrasingContent, EmbeddedContent, InteractiveContent] FlowContent);
declare_element!(img {
alt: String,
crossorigin: CrossOrigin,
decoding: ImageDecoding,
height: usize,
ismap: bool,
sizes: SpacedList<String>, // FIXME it's not really just a string
src: Uri,
srcset: String, // FIXME this is much more complicated
usemap: String, // FIXME should be a fragment starting with '#'
width: usize,
} [] [FlowContent, PhrasingContent, EmbeddedContent]);
declare_element!(input {
autocomplete: String,
autofocus: bool,
disabled: bool,
form: Id,
list: Id,
name: Id,
required: bool,
tabindex: usize,
type: InputType,
value: String,
} [] [FlowContent, FormContent, PhrasingContent]);
declare_element!(ins {
cite: Uri,
datetime: Datetime,
} [] [FlowContent, PhrasingContent] FlowContent);
declare_element!(kbd {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(label {
for: Id,
form: Id,
} [] [FlowContent, PhrasingContent, InteractiveContent, FormContent] PhrasingContent);
declare_element!(main {} [] [FlowContent] FlowContent);
declare_element!(map {
name: Id,
} [] [FlowContent, PhrasingContent] MapContent);
declare_element!(mark {} [] [FlowContent, PhrasingContent] PhrasingContent);
// TODO the <math> element
declare_element!(meter {
value: isize,
min: isize,
max: isize,
low: isize,
high: isize,
optimum: isize,
form: Id,
} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(nav {} [] [FlowContent, SectioningContent] PhrasingContent);
declare_element!(noscript {} [] [MetadataContent, FlowContent, PhrasingContent] Node);
declare_element!(object {
data: Uri,
form: Id,
height: usize,
name: Id,
type: Mime,
typemustmatch: bool,
usemap: String, // TODO should be a fragment starting with '#'
width: usize,
} [] [FlowContent, PhrasingContent, EmbeddedContent, InteractiveContent, FormContent] Element_param);
declare_element!(ol {
reversed: bool,
start: isize,
type: OrderedListType,
} [] [FlowContent] Element_li);
declare_element!(output {
for: SpacedSet<Id>,
form: Id,
name: Id,
} [] [FlowContent, PhrasingContent, FormContent] PhrasingContent);
declare_element!(p {} [] [FlowContent] PhrasingContent);
declare_element!(pre {} [] [FlowContent] PhrasingContent);
declare_element!(progress {
max: f64,
value: f64,
} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(q {
cite: Uri,
} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(ruby {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(s {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(samp {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(script {
async: bool,
crossorigin: CrossOrigin,
defer: bool,
integrity: Integrity,
nomodule: bool,
nonce: Nonce,
src: Uri,
text: String,
type: String, // TODO could be an enum
} [] [MetadataContent, FlowContent, PhrasingContent, TableColumnContent] TextNode);
declare_element!(section {} [] [FlowContent, SectioningContent] FlowContent);
declare_element!(select {
autocomplete: String,
autofocus: bool,
disabled: bool,
form: Id,
multiple: bool,
name: Id,
required: bool,
size: usize,
} [] [FlowContent, PhrasingContent, InteractiveContent, FormContent] SelectContent);
declare_element!(small {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(span {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(strong {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(sub {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(sup {} [] [FlowContent, PhrasingContent] PhrasingContent);
// TODO the <svg> element
declare_element!(table {} [] [FlowContent] TableContent);
declare_element!(template {} [] [MetadataContent, FlowContent, PhrasingContent, TableColumnContent] Node);
declare_element!(textarea {
autocomplete: OnOff,
autofocus: bool,
cols: usize,
disabled: bool,
form: Id,
maxlength: usize,
minlength: usize,
name: Id,
placeholder: String,
readonly: bool,
required: bool,
rows: usize,
spellcheck: BoolOrDefault,
wrap: Wrap,
} [] [FlowContent, PhrasingContent, InteractiveContent, FormContent] TextNode);
declare_element!(time {
datetime: Datetime,
} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(ul {} [] [FlowContent] Element_li);
declare_element!(var {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(video {} [] [FlowContent, PhrasingContent, EmbeddedContent] MediaContent);
declare_element!(wbr {} [] [FlowContent, PhrasingContent]);
// Non-group elements
declare_element!(area {
alt: String,
coords: String, // TODO could perhaps be validated
download: bool,
href: Uri,
hreflang: LanguageTag,
ping: SpacedList<Uri>,
rel: SpacedSet<LinkType>,
shape: AreaShape,
target: Target,
} [] [MapContent]);
declare_element!(caption {} [] [TableContent] FlowContent);
declare_element!(col {
span: usize,
} [] []);
declare_element!(colgroup {
span: usize,
} [] [TableContent] Element_col);
declare_element!(dd {} [] [DescriptionListContent] FlowContent);
declare_element!(dt {} [] [DescriptionListContent] FlowContent);
declare_element!(figcaption {} [] [] FlowContent);
declare_element!(legend {} [] [] PhrasingContent);
declare_element!(li {
value: isize,
} [] [] FlowContent);
declare_element!(option {
disabled: bool,
label: String,
selected: bool,
value: String,
} [] [SelectContent] TextNode);
declare_element!(optgroup {
disabled: bool,
label: String,
} [] [SelectContent] Element_option);
declare_element!(param {
name: String,
value: String,
} [] []);
declare_element!(source {
src: Uri,
type: Mime,
} [] [MediaContent]);
declare_element!(summary {} [] [] PhrasingContent);
declare_element!(tbody {} [] [TableContent] Element_tr);
declare_element!(td {
colspan: usize,
headers: SpacedSet<Id>,
rowspan: usize,
} [] [TableColumnContent] FlowContent);
declare_element!(tfoot {} [] [TableContent] Element_tr);
declare_element!(th {
abbr: String,
colspan: usize,
headers: SpacedSet<Id>,
rowspan: usize,
scope: TableHeaderScope,
} [] [TableColumnContent] FlowContent);
declare_element!(thead {} [] [TableContent] Element_tr);
declare_element!(tr {} [] [TableContent] TableColumnContent);
declare_element!(track {
default: bool,
kind: VideoKind,
label: String,
src: Uri,
srclang: LanguageTag,
} [] [MediaContent]);
// Don't @ me
declare_element!(blink {} [] [FlowContent, PhrasingContent] PhrasingContent);
declare_element!(marquee {
behavior: String, // FIXME enum
bgcolor: String, // FIXME colour
direction: String, // FIXME direction enum
height: String, // FIXME size
hspace: String, // FIXME size
loop: isize,
scrollamount: usize,
scrolldelay: usize,
truespeed: bool,
vspace: String, // FIXME size
width: String, // FIXME size
} [] [FlowContent, PhrasingContent] PhrasingContent);