Add OWNERS; upgrade to 0.5.15 am: d5ad55beb3 am: fd5220cf2d
Change-Id: I37222c99ad8c654a4c271e00cae91e26acaebee6
diff --git a/.cargo_vcs_info.json b/.cargo_vcs_info.json
index 8216174..020ca37 100644
--- a/.cargo_vcs_info.json
+++ b/.cargo_vcs_info.json
@@ -1,5 +1,5 @@
{
"git": {
- "sha1": "d29a20e44207f915f42e6d1668d2864b0152e5a8"
+ "sha1": "29f01e8f33414f20a953dd1363277ac5d1cf7974"
}
}
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..3e6107d
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,13 @@
+sudo: false
+
+language: rust
+
+rust:
+ - nightly
+ - beta
+ - stable
+ - 1.31.0
+
+script:
+ - cargo run --manifest-path example/Cargo.toml
+ - cargo test --all
diff --git a/Cargo.toml b/Cargo.toml
index 0096538..66f7685 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -13,28 +13,34 @@
[package]
edition = "2018"
name = "proc-macro-hack"
-version = "0.5.12"
+version = "0.5.15"
authors = ["David Tolnay <[email protected]>"]
description = "Procedural macros in expression position"
readme = "README.md"
categories = ["development-tools::procedural-macro-helpers"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/dtolnay/proc-macro-hack"
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
[lib]
proc-macro = true
-[dependencies.proc-macro2]
-version = "1.0"
-
-[dependencies.quote]
-version = "1.0"
-
-[dependencies.syn]
-version = "1.0.5"
[dev-dependencies.demo-hack]
version = "0.0.5"
[dev-dependencies.demo-hack-impl]
version = "0.0.5"
+
+[dev-dependencies.quote]
+version = "1.0"
+
+[dev-dependencies.rustversion]
+version = "1.0"
+
+[dev-dependencies.syn]
+version = "1.0.5"
+
+[dev-dependencies.trybuild]
+version = "1.0"
[badges.travis-ci]
repository = "dtolnay/proc-macro-hack"
diff --git a/Cargo.toml.orig b/Cargo.toml.orig
index ddc7286..3076694 100644
--- a/Cargo.toml.orig
+++ b/Cargo.toml.orig
@@ -1,6 +1,6 @@
[package]
name = "proc-macro-hack"
-version = "0.5.12"
+version = "0.5.15"
authors = ["David Tolnay <[email protected]>"]
edition = "2018"
license = "MIT OR Apache-2.0"
@@ -12,17 +12,19 @@
[lib]
proc-macro = true
-[dependencies]
-proc-macro2 = "1.0"
-quote = "1.0"
-syn = "1.0.5"
-
[dev-dependencies]
demo-hack = { version = "0.0.5", path = "demo-hack" }
demo-hack-impl = { version = "0.0.5", path = "demo-hack-impl" }
+quote = "1.0"
+rustversion = "1.0"
+syn = "1.0.5"
+trybuild = "1.0"
[badges]
travis-ci = { repository = "dtolnay/proc-macro-hack" }
[workspace]
members = ["demo-hack", "demo-hack-impl", "example", "nested"]
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
diff --git a/METADATA b/METADATA
index f5a6faa..1b6ac6b 100644
--- a/METADATA
+++ b/METADATA
@@ -1,7 +1,5 @@
name: "proc-macro-hack"
-description:
- "Procedural macros in expression position"
-
+description: "Procedural macros in expression position"
third_party {
url {
type: HOMEPAGE
@@ -11,7 +9,11 @@
type: GIT
value: "https://github.com/dtolnay/proc-macro-hack"
}
- version: "0.5.12"
- last_upgrade_date { year: 2020 month: 3 day: 19 }
+ version: "0.5.15"
license_type: NOTICE
+ last_upgrade_date {
+ year: 2020
+ month: 4
+ day: 17
+ }
}
diff --git a/OWNERS b/OWNERS
new file mode 100644
index 0000000..46fc303
--- /dev/null
+++ b/OWNERS
@@ -0,0 +1 @@
+include platform/prebuilts/rust:/OWNERS
diff --git a/src/error.rs b/src/error.rs
new file mode 100644
index 0000000..7334a53
--- /dev/null
+++ b/src/error.rs
@@ -0,0 +1,39 @@
+use proc_macro::{Delimiter, Group, Ident, Literal, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::iter::FromIterator;
+
+pub struct Error {
+ span: Span,
+ msg: String,
+}
+
+impl Error {
+ pub fn new(span: Span, msg: impl Into<String>) -> Self {
+ Error {
+ span,
+ msg: msg.into(),
+ }
+ }
+}
+
+pub fn compile_error(err: Error) -> TokenStream {
+ // compile_error!($msg)
+ TokenStream::from_iter(vec![
+ TokenTree::Ident(Ident::new("compile_error", err.span)),
+ TokenTree::Punct({
+ let mut punct = Punct::new('!', Spacing::Alone);
+ punct.set_span(err.span);
+ punct
+ }),
+ TokenTree::Group({
+ let mut group = Group::new(Delimiter::Brace, {
+ TokenStream::from_iter(vec![TokenTree::Literal({
+ let mut string = Literal::string(&err.msg);
+ string.set_span(err.span);
+ string
+ })])
+ });
+ group.set_span(err.span);
+ group
+ }),
+ ])
+}
diff --git a/src/lib.rs b/src/lib.rs
index e9b80e6..292ac74 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -123,19 +123,24 @@
//! [`proc-macro-nested`]: https://docs.rs/proc-macro-nested
#![recursion_limit = "512"]
-#![cfg_attr(feature = "cargo-clippy", allow(renamed_and_removed_lints))]
-#![cfg_attr(feature = "cargo-clippy", allow(needless_pass_by_value))]
+#![allow(clippy::needless_doctest_main, clippy::toplevel_ref_arg)]
extern crate proc_macro;
-use proc_macro2::{Span, TokenStream, TokenTree};
-use quote::{format_ident, quote, ToTokens};
-use std::fmt::Write;
-use syn::ext::IdentExt;
-use syn::parse::{Parse, ParseStream, Result};
-use syn::{braced, bracketed, parenthesized, parse_macro_input, token, Ident, LitInt, Token};
+#[macro_use]
+mod quote;
-type Visibility = Option<Token![pub]>;
+mod error;
+mod parse;
+
+use crate::error::{compile_error, Error};
+use crate::parse::*;
+use proc_macro::{token_stream, Ident, Punct, Spacing, Span, TokenStream, TokenTree};
+use std::fmt::Write;
+use std::iter::Peekable;
+
+type Iter<'a> = &'a mut Peekable<token_stream::IntoIter>;
+type Visibility = Option<Span>;
enum Input {
Export(Export),
@@ -162,122 +167,61 @@
export_as: Ident,
}
-impl Parse for Input {
- fn parse(input: ParseStream) -> Result<Self> {
- let ahead = input.fork();
- parse_attributes(&ahead)?;
- ahead.parse::<Visibility>()?;
-
- if ahead.peek(Token![use]) {
- input.parse().map(Input::Export)
- } else if ahead.peek(Token![fn]) {
- input.parse().map(Input::Define)
- } else {
- Err(input.error("unexpected input to #[proc_macro_hack]"))
- }
- }
-}
-
-impl Parse for Export {
- fn parse(input: ParseStream) -> Result<Self> {
- let attrs = input.call(parse_attributes)?;
- let vis: Visibility = input.parse()?;
- input.parse::<Token![use]>()?;
- input.parse::<Option<Token![::]>>()?;
- let from: Ident = input.parse()?;
- input.parse::<Token![::]>()?;
-
- let mut macros = Vec::new();
- if input.peek(token::Brace) {
- let content;
- braced!(content in input);
- loop {
- macros.push(content.parse()?);
- if content.is_empty() {
- break;
- }
- content.parse::<Token![,]>()?;
- if content.is_empty() {
- break;
- }
- }
- } else {
- macros.push(input.parse()?);
- }
-
- input.parse::<Token![;]>()?;
- Ok(Export {
- attrs,
- vis,
- from,
- macros,
- })
- }
-}
-
-impl Parse for Define {
- fn parse(input: ParseStream) -> Result<Self> {
- let attrs = input.call(parse_attributes)?;
- let vis: Visibility = input.parse()?;
- if vis.is_none() {
- return Err(input.error("functions tagged with `#[proc_macro_hack]` must be `pub`"));
- }
-
- input.parse::<Token![fn]>()?;
- let name: Ident = input.parse()?;
- let body: TokenStream = input.parse()?;
- Ok(Define { attrs, name, body })
- }
-}
-
-impl Parse for Macro {
- fn parse(input: ParseStream) -> Result<Self> {
- let name: Ident = input.parse()?;
- let renamed: Option<Token![as]> = input.parse()?;
- let export_as = if renamed.is_some() {
- input.parse()?
- } else {
- name.clone()
- };
- Ok(Macro { name, export_as })
- }
-}
-
-fn parse_attributes(input: ParseStream) -> Result<TokenStream> {
- let mut attrs = TokenStream::new();
- while input.peek(Token![#]) {
- let pound: Token![#] = input.parse()?;
- pound.to_tokens(&mut attrs);
- let content;
- let bracket_token = bracketed!(content in input);
- let content: TokenStream = content.parse()?;
- bracket_token.surround(&mut attrs, |tokens| content.to_tokens(tokens));
- }
- Ok(attrs)
-}
-
#[proc_macro_attribute]
-pub fn proc_macro_hack(
- args: proc_macro::TokenStream,
- input: proc_macro::TokenStream,
-) -> proc_macro::TokenStream {
- proc_macro::TokenStream::from(match parse_macro_input!(input) {
+pub fn proc_macro_hack(args: TokenStream, input: TokenStream) -> TokenStream {
+ let ref mut args = args.into_iter().peekable();
+ let ref mut input = input.into_iter().peekable();
+ expand_proc_macro_hack(args, input).unwrap_or_else(compile_error)
+}
+
+fn expand_proc_macro_hack(args: Iter, input: Iter) -> Result<TokenStream, Error> {
+ match parse_input(input)? {
Input::Export(export) => {
- let args = parse_macro_input!(args as ExportArgs);
- expand_export(export, args)
+ let args = parse_export_args(args)?;
+ Ok(expand_export(export, args))
}
Input::Define(define) => {
- parse_macro_input!(args as DefineArgs);
- expand_define(define)
+ parse_define_args(args)?;
+ Ok(expand_define(define))
}
- })
+ }
}
-mod kw {
- syn::custom_keyword!(derive);
- syn::custom_keyword!(fake_call_site);
- syn::custom_keyword!(internal_macro_calls);
- syn::custom_keyword!(support_nested);
+#[doc(hidden)]
+#[proc_macro_derive(ProcMacroHack)]
+pub fn enum_hack(input: TokenStream) -> TokenStream {
+ let ref mut input = input.into_iter().peekable();
+ parse_enum_hack(input).unwrap_or_else(compile_error)
+}
+
+struct FakeCallSite {
+ derive: Ident,
+ rest: TokenStream,
+}
+
+#[doc(hidden)]
+#[proc_macro_attribute]
+pub fn fake_call_site(args: TokenStream, input: TokenStream) -> TokenStream {
+ let ref mut args = args.into_iter().peekable();
+ let ref mut input = input.into_iter().peekable();
+ expand_fake_call_site(args, input).unwrap_or_else(compile_error)
+}
+
+fn expand_fake_call_site(args: Iter, input: Iter) -> Result<TokenStream, Error> {
+ let span = match args.next() {
+ Some(token) => token.span(),
+ None => return Ok(input.collect()),
+ };
+
+ let input = parse_fake_call_site(input)?;
+ let mut derive = input.derive;
+ derive.set_span(span);
+ let rest = input.rest;
+
+ Ok(quote! {
+ #[derive(#derive)]
+ #rest
+ })
}
struct ExportArgs {
@@ -286,151 +230,25 @@
fake_call_site: bool,
}
-impl Parse for ExportArgs {
- fn parse(input: ParseStream) -> Result<Self> {
- let mut args = ExportArgs {
- support_nested: false,
- internal_macro_calls: 0,
- fake_call_site: false,
- };
-
- while !input.is_empty() {
- let ahead = input.lookahead1();
- if ahead.peek(kw::support_nested) {
- input.parse::<kw::support_nested>()?;
- args.support_nested = true;
- } else if ahead.peek(kw::internal_macro_calls) {
- input.parse::<kw::internal_macro_calls>()?;
- input.parse::<Token![=]>()?;
- let calls = input.parse::<LitInt>()?.base10_parse()?;
- args.internal_macro_calls = calls;
- } else if ahead.peek(kw::fake_call_site) {
- input.parse::<kw::fake_call_site>()?;
- args.fake_call_site = true;
- } else {
- return Err(ahead.error());
- }
- if input.is_empty() {
- break;
- }
- input.parse::<Token![,]>()?;
- }
-
- Ok(args)
- }
-}
-
-struct DefineArgs;
-
-impl Parse for DefineArgs {
- fn parse(_input: ParseStream) -> Result<Self> {
- Ok(DefineArgs)
- }
-}
-
-struct EnumHack {
- token_stream: TokenStream,
-}
-
-impl Parse for EnumHack {
- fn parse(input: ParseStream) -> Result<Self> {
- input.parse::<Token![enum]>()?;
- input.parse::<Ident>()?;
-
- let braces;
- braced!(braces in input);
- braces.parse::<Ident>()?;
- braces.parse::<Token![=]>()?;
-
- let parens;
- parenthesized!(parens in braces);
- parens.parse::<Ident>()?;
- parens.parse::<Token![!]>()?;
-
- let inner;
- braced!(inner in parens);
- let token_stream: TokenStream = inner.parse()?;
-
- parens.parse::<Token![,]>()?;
- parens.parse::<TokenTree>()?;
- braces.parse::<Token![.]>()?;
- braces.parse::<TokenTree>()?;
- braces.parse::<Token![,]>()?;
-
- Ok(EnumHack { token_stream })
- }
-}
-
-#[doc(hidden)]
-#[proc_macro_derive(ProcMacroHack)]
-pub fn enum_hack(input: proc_macro::TokenStream) -> proc_macro::TokenStream {
- let inner = parse_macro_input!(input as EnumHack);
- proc_macro::TokenStream::from(inner.token_stream)
-}
-
-struct FakeCallSite {
- derive: Ident,
- rest: TokenStream,
-}
-
-impl Parse for FakeCallSite {
- fn parse(input: ParseStream) -> Result<Self> {
- input.parse::<Token![#]>()?;
- let attr;
- bracketed!(attr in input);
- attr.parse::<kw::derive>()?;
- let path;
- parenthesized!(path in attr);
- Ok(FakeCallSite {
- derive: path.parse()?,
- rest: input.parse()?,
- })
- }
-}
-
-#[doc(hidden)]
-#[proc_macro_attribute]
-pub fn fake_call_site(
- args: proc_macro::TokenStream,
- input: proc_macro::TokenStream,
-) -> proc_macro::TokenStream {
- let args = TokenStream::from(args);
- let span = match args.into_iter().next() {
- Some(token) => token.span(),
- None => return input,
- };
-
- let input = parse_macro_input!(input as FakeCallSite);
- let mut derive = input.derive;
- derive.set_span(span);
- let rest = input.rest;
-
- let expanded = quote! {
- #[derive(#derive)]
- #rest
- };
-
- proc_macro::TokenStream::from(expanded)
-}
-
fn expand_export(export: Export, args: ExportArgs) -> TokenStream {
let dummy = dummy_name_for_export(&export);
let attrs = export.attrs;
- let vis = export.vis;
+ let ref vis = export.vis.map(|span| Ident::new("pub", span));
let macro_export = match vis {
Some(_) => quote!(#[macro_export]),
None => quote!(),
};
- let crate_prefix = vis.map(|_| quote!($crate::));
+ let crate_prefix = vis.as_ref().map(|_| quote!($crate::));
let enum_variant = if args.support_nested {
if args.internal_macro_calls == 0 {
- quote!(Nested)
+ Ident::new("Nested", Span::call_site())
} else {
- format_ident!("Nested{}", args.internal_macro_calls).to_token_stream()
+ let name = format!("Nested{}", args.internal_macro_calls);
+ Ident::new(&name, Span::call_site())
}
} else {
- quote!(Value)
+ Ident::new("Value", Span::call_site())
};
let from = export.from;
@@ -452,9 +270,11 @@
};
let proc_macro_call = if args.support_nested {
- let extra_bangs = (0..args.internal_macro_calls).map(|_| quote!(!));
+ let extra_bangs = (0..args.internal_macro_calls)
+ .map(|_| TokenTree::Punct(Punct::new('!', Spacing::Alone)))
+ .collect::<TokenStream>();
quote! {
- #crate_prefix #dispatch! { ($($proc_macro)*) #(#extra_bangs)* }
+ #crate_prefix #dispatch! { ($($proc_macro)*) #extra_bangs }
}
} else {
quote! {
@@ -500,6 +320,7 @@
macro_rules! #export_as {
($($proc_macro:tt)*) => {{
#do_derive
+ #[allow(dead_code)]
enum ProcMacroHack {
#enum_variant = (stringify! { $($proc_macro)* }, 0).1,
}
@@ -533,6 +354,8 @@
let mut iter = input.into_iter();
iter.next().unwrap(); // `enum`
iter.next().unwrap(); // `ProcMacroHack`
+ iter.next().unwrap(); // `#`
+ iter.next().unwrap(); // `[allow(dead_code)]`
let mut braces = match iter.next().unwrap() {
#dummy::TokenTree::Group(group) => group.stream().into_iter(),
@@ -624,28 +447,46 @@
}
fn actual_proc_macro_name(conceptual: &Ident) -> Ident {
- format_ident!("proc_macro_hack_{}", conceptual)
+ Ident::new(
+ &format!("proc_macro_hack_{}", conceptual),
+ conceptual.span(),
+ )
}
fn dispatch_macro_name(conceptual: &Ident) -> Ident {
- format_ident!("proc_macro_call_{}", conceptual)
+ Ident::new(
+ &format!("proc_macro_call_{}", conceptual),
+ conceptual.span(),
+ )
}
fn call_site_macro_name(conceptual: &Ident) -> Ident {
- format_ident!("proc_macro_fake_call_site_{}", conceptual)
+ Ident::new(
+ &format!("proc_macro_fake_call_site_{}", conceptual),
+ conceptual.span(),
+ )
}
fn dummy_name_for_export(export: &Export) -> String {
let mut dummy = String::new();
- let from = export.from.unraw().to_string();
+ let from = unraw(&export.from).to_string();
write!(dummy, "_{}{}", from.len(), from).unwrap();
for m in &export.macros {
- let name = m.name.unraw().to_string();
+ let name = unraw(&m.name).to_string();
write!(dummy, "_{}{}", name.len(), name).unwrap();
}
dummy
}
+fn unraw(ident: &Ident) -> Ident {
+ let string = ident.to_string();
+ if string.starts_with("r#") {
+ Ident::new(&string[2..], ident.span())
+ } else {
+ ident.clone()
+ }
+}
+
fn wrap_in_enum_hack(dummy: String, inner: TokenStream) -> TokenStream {
let dummy = Ident::new(&dummy, Span::call_site());
quote! {
diff --git a/src/parse.rs b/src/parse.rs
new file mode 100644
index 0000000..c6b1665
--- /dev/null
+++ b/src/parse.rs
@@ -0,0 +1,248 @@
+use crate::{Define, Error, Export, ExportArgs, FakeCallSite, Input, Iter, Macro, Visibility};
+use proc_macro::Delimiter::{Brace, Bracket, Parenthesis};
+use proc_macro::{token_stream, Delimiter, Ident, Span, TokenStream, TokenTree};
+use std::iter::Peekable;
+
+pub(crate) fn parse_input(tokens: Iter) -> Result<Input, Error> {
+ let attrs = parse_attributes(tokens)?;
+ let vis = parse_visibility(tokens)?;
+ let kw = parse_ident(tokens)?;
+ if kw.to_string() == "use" {
+ parse_export(attrs, vis, tokens).map(Input::Export)
+ } else if kw.to_string() == "fn" {
+ parse_define(attrs, vis, kw.span(), tokens).map(Input::Define)
+ } else {
+ Err(Error::new(
+ kw.span(),
+ "unexpected input to #[proc_macro_hack]",
+ ))
+ }
+}
+
+fn parse_export(attrs: TokenStream, vis: Visibility, tokens: Iter) -> Result<Export, Error> {
+ let _ = parse_punct(tokens, ':');
+ let _ = parse_punct(tokens, ':');
+ let from = parse_ident(tokens)?;
+ parse_punct(tokens, ':')?;
+ parse_punct(tokens, ':')?;
+
+ let mut macros = Vec::new();
+ match tokens.peek() {
+ Some(TokenTree::Group(group)) if group.delimiter() == Brace => {
+ let ref mut content = group.stream().into_iter().peekable();
+ loop {
+ macros.push(parse_macro(content)?);
+ if content.peek().is_none() {
+ break;
+ }
+ parse_punct(content, ',')?;
+ if content.peek().is_none() {
+ break;
+ }
+ }
+ tokens.next().unwrap();
+ }
+ _ => macros.push(parse_macro(tokens)?),
+ }
+
+ parse_punct(tokens, ';')?;
+ Ok(Export {
+ attrs,
+ vis,
+ from,
+ macros,
+ })
+}
+
+fn parse_punct(tokens: Iter, ch: char) -> Result<(), Error> {
+ match tokens.peek() {
+ Some(TokenTree::Punct(punct)) if punct.as_char() == ch => {
+ tokens.next().unwrap();
+ Ok(())
+ }
+ tt => Err(Error::new(
+ tt.map_or_else(Span::call_site, TokenTree::span),
+ format!("expected `{}`", ch),
+ )),
+ }
+}
+
+fn parse_define(
+ attrs: TokenStream,
+ vis: Visibility,
+ fn_token: Span,
+ tokens: Iter,
+) -> Result<Define, Error> {
+ if vis.is_none() {
+ return Err(Error::new(
+ fn_token,
+ "functions tagged with `#[proc_macro_hack]` must be `pub`",
+ ));
+ }
+ let name = parse_ident(tokens)?;
+ let body = tokens.collect();
+ Ok(Define { attrs, name, body })
+}
+
+fn parse_macro(tokens: Iter) -> Result<Macro, Error> {
+ let name = parse_ident(tokens)?;
+ let export_as = match tokens.peek() {
+ Some(TokenTree::Ident(ident)) if ident.to_string() == "as" => {
+ tokens.next().unwrap();
+ parse_ident(tokens)?
+ }
+ _ => name.clone(),
+ };
+ Ok(Macro { name, export_as })
+}
+
+fn parse_ident(tokens: Iter) -> Result<Ident, Error> {
+ match tokens.next() {
+ Some(TokenTree::Ident(ident)) => Ok(ident),
+ tt => Err(Error::new(
+ tt.as_ref().map_or_else(Span::call_site, TokenTree::span),
+ "expected identifier",
+ )),
+ }
+}
+
+fn parse_keyword(tokens: Iter, kw: &'static str) -> Result<(), Error> {
+ match &tokens.next() {
+ Some(TokenTree::Ident(ident)) if ident.to_string() == kw => Ok(()),
+ tt => Err(Error::new(
+ tt.as_ref().map_or_else(Span::call_site, TokenTree::span),
+ format!("expected `{}`", kw),
+ )),
+ }
+}
+
+fn parse_int(tokens: Iter) -> Result<u16, Span> {
+ match tokens.next() {
+ Some(TokenTree::Literal(lit)) => lit.to_string().parse().map_err(|_| lit.span()),
+ Some(tt) => Err(tt.span()),
+ None => Err(Span::call_site()),
+ }
+}
+
+fn parse_group(
+ tokens: Iter,
+ delimiter: Delimiter,
+) -> Result<Peekable<token_stream::IntoIter>, Error> {
+ match &tokens.next() {
+ Some(TokenTree::Group(group)) if group.delimiter() == delimiter => {
+ Ok(group.stream().into_iter().peekable())
+ }
+ tt => Err(Error::new(
+ tt.as_ref().map_or_else(Span::call_site, TokenTree::span),
+ "expected delimiter",
+ )),
+ }
+}
+
+fn parse_visibility(tokens: Iter) -> Result<Visibility, Error> {
+ if let Some(TokenTree::Ident(ident)) = tokens.peek() {
+ if ident.to_string() == "pub" {
+ return Ok(Some(tokens.next().unwrap().span()));
+ }
+ }
+ Ok(None)
+}
+
+fn parse_attributes(tokens: Iter) -> Result<TokenStream, Error> {
+ let mut attrs = TokenStream::new();
+ while let Some(TokenTree::Punct(punct)) = tokens.peek() {
+ if punct.as_char() != '#' {
+ break;
+ }
+ let span = punct.span();
+ attrs.extend(tokens.next());
+ match tokens.peek() {
+ Some(TokenTree::Group(group)) if group.delimiter() == Bracket => {
+ attrs.extend(tokens.next());
+ }
+ _ => return Err(Error::new(span, "unexpected input")),
+ }
+ }
+ Ok(attrs)
+}
+
+pub(crate) fn parse_export_args(tokens: Iter) -> Result<ExportArgs, Error> {
+ let mut args = ExportArgs {
+ support_nested: false,
+ internal_macro_calls: 0,
+ fake_call_site: false,
+ };
+
+ while let Some(tt) = tokens.next() {
+ match &tt {
+ TokenTree::Ident(ident) if ident.to_string() == "support_nested" => {
+ args.support_nested = true;
+ }
+ TokenTree::Ident(ident) if ident.to_string() == "internal_macro_calls" => {
+ parse_punct(tokens, '=')?;
+ let calls = parse_int(tokens).map_err(|span| {
+ Error::new(span, "expected integer value for internal_macro_calls")
+ })?;
+ args.internal_macro_calls = calls;
+ }
+ TokenTree::Ident(ident) if ident.to_string() == "fake_call_site" => {
+ args.fake_call_site = true;
+ }
+ _ => {
+ return Err(Error::new(
+ tt.span(),
+ "expected one of: `support_nested`, `internal_macro_calls`, `fake_call_site`",
+ ))
+ }
+ }
+ if tokens.peek().is_none() {
+ break;
+ }
+ parse_punct(tokens, ',')?;
+ }
+
+ Ok(args)
+}
+
+pub(crate) fn parse_define_args(tokens: Iter) -> Result<(), Error> {
+ if tokens.peek().is_none() {
+ Ok(())
+ } else {
+ Err(Error::new(Span::call_site(), "unexpected input"))
+ }
+}
+
+pub(crate) fn parse_enum_hack(tokens: Iter) -> Result<TokenStream, Error> {
+ parse_keyword(tokens, "enum")?;
+ parse_ident(tokens)?;
+
+ let ref mut braces = parse_group(tokens, Brace)?;
+ parse_ident(braces)?;
+ parse_punct(braces, '=')?;
+
+ let ref mut parens = parse_group(braces, Parenthesis)?;
+ parse_ident(parens)?;
+ parse_punct(parens, '!')?;
+
+ let ref mut inner = parse_group(parens, Brace)?;
+ let token_stream = inner.collect();
+
+ parse_punct(parens, ',')?;
+ let _ = parens.next();
+ parse_punct(braces, '.')?;
+ let _ = braces.next();
+ parse_punct(braces, ',')?;
+
+ Ok(token_stream)
+}
+
+pub(crate) fn parse_fake_call_site(tokens: Iter) -> Result<FakeCallSite, Error> {
+ parse_punct(tokens, '#')?;
+ let ref mut attr = parse_group(tokens, Bracket)?;
+ parse_keyword(attr, "derive")?;
+ let ref mut path = parse_group(attr, Parenthesis)?;
+ Ok(FakeCallSite {
+ derive: parse_ident(path)?,
+ rest: tokens.collect(),
+ })
+}
diff --git a/src/quote.rs b/src/quote.rs
new file mode 100644
index 0000000..a3d648d
--- /dev/null
+++ b/src/quote.rs
@@ -0,0 +1,114 @@
+use proc_macro::{Ident, TokenStream, TokenTree};
+use std::iter;
+
+macro_rules! quote {
+ () => {
+ ::proc_macro::TokenStream::new()
+ };
+ ($($tt:tt)*) => {{
+ let mut tokens = ::proc_macro::TokenStream::new();
+ quote_each_token!(tokens $($tt)*);
+ tokens
+ }};
+}
+
+macro_rules! quote_each_token {
+ ($tokens:ident # $var:ident $($rest:tt)*) => {
+ $crate::quote::Tokens::extend(&mut $tokens, &$var);
+ quote_each_token!($tokens $($rest)*);
+ };
+ ($tokens:ident $ident:ident $($rest:tt)*) => {
+ <::proc_macro::TokenStream as ::std::iter::Extend<_>>::extend(
+ &mut $tokens,
+ ::std::iter::once(
+ ::proc_macro::TokenTree::Ident(
+ ::proc_macro::Ident::new(
+ stringify!($ident),
+ ::proc_macro::Span::call_site(),
+ ),
+ ),
+ ),
+ );
+ quote_each_token!($tokens $($rest)*);
+ };
+ ($tokens:ident ( $($inner:tt)* ) $($rest:tt)*) => {
+ <::proc_macro::TokenStream as ::std::iter::Extend<_>>::extend(
+ &mut $tokens,
+ ::std::iter::once(
+ ::proc_macro::TokenTree::Group(
+ ::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Parenthesis,
+ quote!($($inner)*),
+ ),
+ ),
+ ),
+ );
+ quote_each_token!($tokens $($rest)*);
+ };
+ ($tokens:ident [ $($inner:tt)* ] $($rest:tt)*) => {
+ <::proc_macro::TokenStream as ::std::iter::Extend<_>>::extend(
+ &mut $tokens,
+ ::std::iter::once(
+ ::proc_macro::TokenTree::Group(
+ ::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Bracket,
+ quote!($($inner)*),
+ ),
+ ),
+ ),
+ );
+ quote_each_token!($tokens $($rest)*);
+ };
+ ($tokens:ident { $($inner:tt)* } $($rest:tt)*) => {
+ <::proc_macro::TokenStream as ::std::iter::Extend<_>>::extend(
+ &mut $tokens,
+ ::std::iter::once(
+ ::proc_macro::TokenTree::Group(
+ ::proc_macro::Group::new(
+ ::proc_macro::Delimiter::Brace,
+ quote!($($inner)*),
+ ),
+ ),
+ ),
+ );
+ quote_each_token!($tokens $($rest)*);
+ };
+ ($tokens:ident $punct:tt $($rest:tt)*) => {
+ <::proc_macro::TokenStream as ::std::iter::Extend<_>>::extend(
+ &mut $tokens,
+ stringify!($punct).parse::<::proc_macro::TokenStream>(),
+ );
+ quote_each_token!($tokens $($rest)*);
+ };
+ ($tokens:ident) => {};
+}
+
+pub trait Tokens {
+ fn extend(tokens: &mut TokenStream, var: &Self);
+}
+
+impl Tokens for Ident {
+ fn extend(tokens: &mut TokenStream, var: &Self) {
+ tokens.extend(iter::once(TokenTree::Ident(var.clone())));
+ }
+}
+
+impl Tokens for TokenStream {
+ fn extend(tokens: &mut TokenStream, var: &Self) {
+ tokens.extend(var.clone());
+ }
+}
+
+impl<T: Tokens> Tokens for Option<T> {
+ fn extend(tokens: &mut TokenStream, var: &Self) {
+ if let Some(var) = var {
+ T::extend(tokens, var);
+ }
+ }
+}
+
+impl<T: Tokens> Tokens for &T {
+ fn extend(tokens: &mut TokenStream, var: &Self) {
+ T::extend(tokens, var);
+ }
+}
diff --git a/tests/compiletest.rs b/tests/compiletest.rs
new file mode 100644
index 0000000..f9aea23
--- /dev/null
+++ b/tests/compiletest.rs
@@ -0,0 +1,6 @@
+#[rustversion::attr(not(nightly), ignore)]
+#[test]
+fn ui() {
+ let t = trybuild::TestCases::new();
+ t.compile_fail("tests/ui/*.rs");
+}
diff --git a/tests/ui/private.rs b/tests/ui/private.rs
new file mode 100644
index 0000000..1ab8444
--- /dev/null
+++ b/tests/ui/private.rs
@@ -0,0 +1,8 @@
+use proc_macro_hack::proc_macro_hack;
+
+#[proc_macro_hack]
+fn my_macro(input: TokenStream) -> TokenStream {
+ unimplemented!()
+}
+
+fn main() {}
diff --git a/tests/ui/private.stderr b/tests/ui/private.stderr
new file mode 100644
index 0000000..be79443
--- /dev/null
+++ b/tests/ui/private.stderr
@@ -0,0 +1,5 @@
+error: functions tagged with `#[proc_macro_hack]` must be `pub`
+ --> $DIR/private.rs:4:1
+ |
+4 | fn my_macro(input: TokenStream) -> TokenStream {
+ | ^^
diff --git a/tests/ui/unexpected.rs b/tests/ui/unexpected.rs
new file mode 100644
index 0000000..122ded5
--- /dev/null
+++ b/tests/ui/unexpected.rs
@@ -0,0 +1,6 @@
+use proc_macro_hack::proc_macro_hack;
+
+#[proc_macro_hack]
+pub struct What;
+
+fn main() {}
diff --git a/tests/ui/unexpected.stderr b/tests/ui/unexpected.stderr
new file mode 100644
index 0000000..e3da97a
--- /dev/null
+++ b/tests/ui/unexpected.stderr
@@ -0,0 +1,5 @@
+error: unexpected input to #[proc_macro_hack]
+ --> $DIR/unexpected.rs:4:5
+ |
+4 | pub struct What;
+ | ^^^^^^
diff --git a/tests/ui/unknown-arg.rs b/tests/ui/unknown-arg.rs
new file mode 100644
index 0000000..aa9b82c
--- /dev/null
+++ b/tests/ui/unknown-arg.rs
@@ -0,0 +1,6 @@
+use proc_macro_hack::proc_macro_hack;
+
+#[proc_macro_hack(fake_call_site, support_nexted)]
+pub use demo::some_macro;
+
+fn main() {}
diff --git a/tests/ui/unknown-arg.stderr b/tests/ui/unknown-arg.stderr
new file mode 100644
index 0000000..d3cd8c9
--- /dev/null
+++ b/tests/ui/unknown-arg.stderr
@@ -0,0 +1,5 @@
+error: expected one of: `support_nested`, `internal_macro_calls`, `fake_call_site`
+ --> $DIR/unknown-arg.rs:3:35
+ |
+3 | #[proc_macro_hack(fake_call_site, support_nexted)]
+ | ^^^^^^^^^^^^^^