Importing rustc-1.38.0
diff --git a/src/libsyntax_ext/Cargo.toml b/src/libsyntax_ext/Cargo.toml
index 773f094..73310df3 100644
--- a/src/libsyntax_ext/Cargo.toml
+++ b/src/libsyntax_ext/Cargo.toml
@@ -7,14 +7,14 @@
[lib]
name = "syntax_ext"
path = "lib.rs"
-crate-type = ["dylib"]
+doctest = false
[dependencies]
-fmt_macros = { path = "../libfmt_macros" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
-syntax = { path = "../libsyntax" }
-syntax_pos = { path = "../libsyntax_pos" }
+fmt_macros = { path = "../libfmt_macros" }
+log = "0.4"
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_target = { path = "../librustc_target" }
smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
-log = "0.4"
+syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs
index b015815..c1c2732 100644
--- a/src/libsyntax_ext/asm.rs
+++ b/src/libsyntax_ext/asm.rs
@@ -8,7 +8,6 @@
use syntax::ast;
use syntax::ext::base::{self, *};
-use syntax::feature_gate;
use syntax::parse;
use syntax::parse::token::{self, Token};
use syntax::ptr::P;
@@ -46,14 +45,6 @@
sp: Span,
tts: &[tokenstream::TokenTree])
-> Box<dyn base::MacResult + 'cx> {
- if !cx.ecfg.enable_asm() {
- feature_gate::emit_feature_err(&cx.parse_sess,
- sym::asm,
- sp,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_ASM);
- }
-
let mut inline_asm = match parse_inline_asm(cx, sp, tts) {
Ok(Some(inline_asm)) => inline_asm,
Ok(None) => return DummyResult::expr(sp),
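
The `feature_gate::emit_feature_err` call is dropped from this expander (and likewise from `concat_idents`, `global_asm`, `log_syntax`, and `format_args_nl` below); `asm!` itself remains unstable, so user code still needs the gate, only the check inside the expander is gone. A minimal nightly-only usage sketch, using the older LLVM-style `asm!` form that `parse_inline_asm` above parses:

    #![feature(asm)]

    fn nop() {
        unsafe {
            // Old-style inline assembly: a single template string, no operands.
            asm!("nop");
        }
    }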
diff --git a/src/libsyntax_ext/assert.rs b/src/libsyntax_ext/assert.rs
index 10d323f..d7571f4 100644
--- a/src/libsyntax_ext/assert.rs
+++ b/src/libsyntax_ext/assert.rs
@@ -3,7 +3,6 @@
use syntax::ast::{self, *};
use syntax::source_map::Spanned;
use syntax::ext::base::*;
-use syntax::ext::build::AstBuilder;
use syntax::parse::token::{self, TokenKind};
use syntax::parse::parser::Parser;
use syntax::print::pprust;
@@ -25,7 +24,7 @@
}
};
- let sp = sp.apply_mark(cx.current_expansion.mark);
+ let sp = sp.apply_mark(cx.current_expansion.id);
let panic_call = Mac_ {
path: Path::from_ident(Ident::new(sym::panic, sp)),
tts: custom_message.unwrap_or_else(|| {
@@ -38,6 +37,7 @@
))
}).into(),
delim: MacDelimiter::Parenthesis,
+ prior_type_ascription: None,
};
let if_expr = cx.expr_if(
sp,
@@ -131,7 +131,7 @@
Ok(Assert { cond_expr, custom_message })
}
-fn parse_custom_message<'a>(parser: &mut Parser<'a>) -> Option<TokenStream> {
+fn parse_custom_message(parser: &mut Parser<'_>) -> Option<TokenStream> {
let ts = parser.parse_tokens();
if !ts.is_empty() {
Some(ts)
diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs
index e952515..84830e6 100644
--- a/src/libsyntax_ext/cfg.rs
+++ b/src/libsyntax_ext/cfg.rs
@@ -6,7 +6,6 @@
use syntax::ast;
use syntax::ext::base::{self, *};
-use syntax::ext::build::AstBuilder;
use syntax::attr;
use syntax::tokenstream;
use syntax::parse::token;
@@ -17,7 +16,7 @@
sp: Span,
tts: &[tokenstream::TokenTree],
) -> Box<dyn base::MacResult + 'static> {
- let sp = sp.apply_mark(cx.current_expansion.mark);
+ let sp = sp.apply_mark(cx.current_expansion.id);
match parse_cfg(cx, sp, tts) {
Ok(cfg) => {
diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs
index 68d5178..f1d079e 100644
--- a/src/libsyntax_ext/concat.rs
+++ b/src/libsyntax_ext/concat.rs
@@ -1,6 +1,5 @@
use syntax::ast;
use syntax::ext::base;
-use syntax::ext::build::AstBuilder;
use syntax::symbol::Symbol;
use syntax::tokenstream;
@@ -60,6 +59,6 @@
} else if has_errors {
return base::DummyResult::expr(sp);
}
- let sp = sp.apply_mark(cx.current_expansion.mark);
+ let sp = sp.apply_mark(cx.current_expansion.id);
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))
}
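
The `mark` → `id` rename recurs throughout this patch: the hygiene handle carried in `cx.current_expansion` is now an expansion id, and each built-in macro re-marks its spans with it. The common shape of these expanders, assembled only from calls visible in the surrounding hunks:

    pub fn expand_example<'cx>(cx: &'cx mut ExtCtxt<'_>,
                               sp: Span,
                               _tts: &[tokenstream::TokenTree])
                               -> Box<dyn base::MacResult + 'cx> {
        // Attach the current expansion's id to the call-site span so the
        // generated expression gets the expected hygiene context.
        let sp = sp.apply_mark(cx.current_expansion.id);
        base::MacEager::expr(cx.expr_str(sp, Symbol::intern("expanded")))
    }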
diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs
index 8f061ab..8184fc4 100644
--- a/src/libsyntax_ext/concat_idents.rs
+++ b/src/libsyntax_ext/concat_idents.rs
@@ -2,25 +2,16 @@
use syntax::ast;
use syntax::ext::base::{self, *};
-use syntax::feature_gate;
use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax_pos::Span;
-use syntax_pos::symbol::{Symbol, sym};
+use syntax_pos::symbol::Symbol;
use syntax::tokenstream::TokenTree;
pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: &[TokenTree])
-> Box<dyn base::MacResult + 'cx> {
- if !cx.ecfg.enable_concat_idents() {
- feature_gate::emit_feature_err(&cx.parse_sess,
- sym::concat_idents,
- sp,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_CONCAT_IDENTS);
- }
-
if tts.is_empty() {
cx.span_err(sp, "concat_idents! takes 1 or more arguments.");
return DummyResult::any(sp);
@@ -48,7 +39,7 @@
}
}
- let ident = ast::Ident::new(Symbol::intern(&res_str), sp.apply_mark(cx.current_expansion.mark));
+ let ident = ast::Ident::new(Symbol::intern(&res_str), sp.apply_mark(cx.current_expansion.id));
struct ConcatIdentsResult { ident: ast::Ident }
diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs
index b3b6328..3b1edf9 100644
--- a/src/libsyntax_ext/deriving/clone.rs
+++ b/src/libsyntax_ext/deriving/clone.rs
@@ -3,9 +3,7 @@
use crate::deriving::generic::ty::*;
use syntax::ast::{self, Expr, GenericArg, Generics, ItemKind, MetaItem, VariantData};
-use syntax::attr;
-use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
+use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
use syntax::ptr::P;
use syntax::symbol::{kw, sym, Symbol};
use syntax_pos::Span;
@@ -37,7 +35,8 @@
match annitem.node {
ItemKind::Struct(_, Generics { ref params, .. }) |
ItemKind::Enum(_, Generics { ref params, .. }) => {
- if attr::contains_name(&annitem.attrs, sym::rustc_copy_clone_marker) &&
+ let container_id = cx.current_expansion.id.parent();
+ if cx.resolver.has_derives(container_id, SpecialDerives::COPY) &&
!params.iter().any(|param| match param.kind {
ast::GenericParamKind::Type { .. } => true,
_ => false,
@@ -73,11 +72,11 @@
}
}
- _ => cx.span_bug(span, "#[derive(Clone)] on trait item or impl item"),
+ _ => cx.span_bug(span, "`#[derive(Clone)]` on trait item or impl item"),
}
let inline = cx.meta_word(span, sym::inline);
- let attrs = vec![cx.attribute(span, inline)];
+ let attrs = vec![cx.attribute(inline)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
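
Two API shifts are visible here and in the other deriving modules: the `#[rustc_copy_clone_marker]` attribute probe becomes a query to the resolver for `SpecialDerives::COPY` on the enclosing expansion, and `cx.attribute` drops its span parameter. Condensed from this hunk:

    // A shallow (bitwise) Clone is only emitted when the container also
    // derives Copy and has no type parameters.
    let container_id = cx.current_expansion.id.parent();
    let is_shallow = cx.resolver.has_derives(container_id, SpecialDerives::COPY)
        && !params.iter().any(|param| match param.kind {
            ast::GenericParamKind::Type { .. } => true,
            _ => false,
        });

    // The attribute's span now comes from the meta item itself.
    let inline = cx.meta_word(span, sym::inline);
    let attrs = vec![cx.attribute(inline)];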
diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs
index 1d981e0..1909729 100644
--- a/src/libsyntax_ext/deriving/cmp/eq.rs
+++ b/src/libsyntax_ext/deriving/cmp/eq.rs
@@ -3,8 +3,7 @@
use crate::deriving::generic::ty::*;
use syntax::ast::{self, Expr, MetaItem, GenericArg};
-use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
+use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
use syntax::ptr::P;
use syntax::symbol::{sym, Symbol};
use syntax_pos::Span;
@@ -14,10 +13,12 @@
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) {
+ cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::EQ);
+
let inline = cx.meta_word(span, sym::inline);
let hidden = cx.meta_list_item_word(span, sym::hidden);
let doc = cx.meta_list(span, sym::doc, vec![hidden]);
- let attrs = vec![cx.attribute(span, inline), cx.attribute(span, doc)];
+ let attrs = vec![cx.attribute(inline), cx.attribute(doc)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs
index 844865d..885cfee 100644
--- a/src/libsyntax_ext/deriving/cmp/ord.rs
+++ b/src/libsyntax_ext/deriving/cmp/ord.rs
@@ -4,7 +4,6 @@
use syntax::ast::{self, Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::sym;
use syntax_pos::Span;
@@ -15,7 +14,7 @@
item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) {
let inline = cx.meta_word(span, sym::inline);
- let attrs = vec![cx.attribute(span, inline)];
+ let attrs = vec![cx.attribute(inline)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs
index 732bb23..7d7c4ae 100644
--- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs
+++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs
@@ -3,8 +3,7 @@
use crate::deriving::generic::ty::*;
use syntax::ast::{BinOpKind, Expr, MetaItem};
-use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
+use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
use syntax::ptr::P;
use syntax::symbol::sym;
use syntax_pos::Span;
@@ -14,6 +13,8 @@
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) {
+ cx.resolver.add_derives(cx.current_expansion.id.parent(), SpecialDerives::PARTIAL_EQ);
+
// structures are equal if all fields are equal, and non equal, if
// any fields are not equal or if the enum variants are different
fn cs_op(cx: &mut ExtCtxt<'_>,
@@ -63,7 +64,7 @@
macro_rules! md {
($name:expr, $f:ident) => { {
let inline = cx.meta_word(span, sym::inline);
- let attrs = vec![cx.attribute(span, inline)];
+ let attrs = vec![cx.attribute(inline)];
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs
index a30a7d7..0ec30f5 100644
--- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs
+++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs
@@ -6,7 +6,6 @@
use syntax::ast::{self, BinOpKind, Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::{sym, Symbol};
use syntax_pos::Span;
@@ -19,7 +18,7 @@
macro_rules! md {
($name:expr, $op:expr, $equal:expr) => { {
let inline = cx.meta_word(span, sym::inline);
- let attrs = vec![cx.attribute(span, inline)];
+ let attrs = vec![cx.attribute(inline)];
MethodDef {
name: $name,
generics: LifetimeBounds::empty(),
@@ -43,7 +42,7 @@
PathKind::Std));
let inline = cx.meta_word(span, sym::inline);
- let attrs = vec![cx.attribute(span, inline)];
+ let attrs = vec![cx.attribute(inline)];
let partial_cmp_def = MethodDef {
name: "partial_cmp",
diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs
deleted file mode 100644
index 98465d7..0000000
--- a/src/libsyntax_ext/deriving/custom.rs
+++ /dev/null
@@ -1,119 +0,0 @@
-use crate::proc_macro_impl::EXEC_STRATEGY;
-use crate::proc_macro_server;
-
-use errors::FatalError;
-use rustc_data_structures::sync::Lrc;
-use syntax::ast::{self, ItemKind, Attribute, Mac};
-use syntax::attr::{mark_used, mark_known};
-use syntax::source_map::Span;
-use syntax::ext::base::*;
-use syntax::parse;
-use syntax::parse::token;
-use syntax::tokenstream;
-use syntax::visit::Visitor;
-use syntax_pos::DUMMY_SP;
-
-struct MarkAttrs<'a>(&'a [ast::Name]);
-
-impl<'a> Visitor<'a> for MarkAttrs<'a> {
- fn visit_attribute(&mut self, attr: &Attribute) {
- if let Some(ident) = attr.ident() {
- if self.0.contains(&ident.name) {
- mark_used(attr);
- mark_known(attr);
- }
- }
- }
-
- fn visit_mac(&mut self, _mac: &Mac) {}
-}
-
-pub struct ProcMacroDerive {
- pub client: proc_macro::bridge::client::Client<
- fn(proc_macro::TokenStream) -> proc_macro::TokenStream,
- >,
- pub attrs: Vec<ast::Name>,
-}
-
-impl MultiItemModifier for ProcMacroDerive {
- fn expand(&self,
- ecx: &mut ExtCtxt<'_>,
- span: Span,
- _meta_item: &ast::MetaItem,
- item: Annotatable)
- -> Vec<Annotatable> {
- let item = match item {
- Annotatable::Item(item) => item,
- Annotatable::ImplItem(_) |
- Annotatable::TraitItem(_) |
- Annotatable::ForeignItem(_) |
- Annotatable::Stmt(_) |
- Annotatable::Expr(_) => {
- ecx.span_err(span, "proc-macro derives may only be \
- applied to a struct, enum, or union");
- return Vec::new()
- }
- };
- match item.node {
- ItemKind::Struct(..) |
- ItemKind::Enum(..) |
- ItemKind::Union(..) => {},
- _ => {
- ecx.span_err(span, "proc-macro derives may only be \
- applied to a struct, enum, or union");
- return Vec::new()
- }
- }
-
- // Mark attributes as known, and used.
- MarkAttrs(&self.attrs).visit_item(&item);
-
- let token = token::Interpolated(Lrc::new(token::NtItem(item)));
- let input = tokenstream::TokenTree::token(token, DUMMY_SP).into();
-
- let server = proc_macro_server::Rustc::new(ecx);
- let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
- Ok(stream) => stream,
- Err(e) => {
- let msg = "proc-macro derive panicked";
- let mut err = ecx.struct_span_fatal(span, msg);
- if let Some(s) = e.as_str() {
- err.help(&format!("message: {}", s));
- }
-
- err.emit();
- FatalError.raise();
- }
- };
-
- let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
- let msg = "proc-macro derive produced unparseable tokens";
-
- let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
- let mut items = vec![];
-
- loop {
- match parser.parse_item() {
- Ok(None) => break,
- Ok(Some(item)) => {
- items.push(Annotatable::Item(item))
- }
- Err(mut err) => {
- // FIXME: handle this better
- err.cancel();
- ecx.struct_span_fatal(span, msg).emit();
- FatalError.raise();
- }
- }
- }
-
-
- // fail if there have been errors emitted
- if ecx.parse_sess.span_diagnostic.err_count() > error_count_before {
- ecx.struct_span_fatal(span, msg).emit();
- FatalError.raise();
- }
-
- items
- }
-}
diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs
index 44ddbb9..0f70963 100644
--- a/src/libsyntax_ext/deriving/debug.rs
+++ b/src/libsyntax_ext/deriving/debug.rs
@@ -7,7 +7,6 @@
use syntax::ast::{self, Ident};
use syntax::ast::{Expr, MetaItem};
use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::sym;
use syntax_pos::{DUMMY_SP, Span};
diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs
index d773f3f..293c5a1 100644
--- a/src/libsyntax_ext/deriving/decodable.rs
+++ b/src/libsyntax_ext/deriving/decodable.rs
@@ -3,12 +3,10 @@
use crate::deriving::{self, pathvec_std};
use crate::deriving::generic::*;
use crate::deriving::generic::ty::*;
-use crate::deriving::warn_if_deprecated;
use syntax::ast;
use syntax::ast::{Expr, MetaItem, Mutability};
use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span;
@@ -18,24 +16,7 @@
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) {
- expand_deriving_decodable_imp(cx, span, mitem, item, push, "rustc_serialize")
-}
-
-pub fn expand_deriving_decodable(cx: &mut ExtCtxt<'_>,
- span: Span,
- mitem: &MetaItem,
- item: &Annotatable,
- push: &mut dyn FnMut(Annotatable)) {
- warn_if_deprecated(cx, span, "Decodable");
- expand_deriving_decodable_imp(cx, span, mitem, item, push, "serialize")
-}
-
-fn expand_deriving_decodable_imp(cx: &mut ExtCtxt<'_>,
- span: Span,
- mitem: &MetaItem,
- item: &Annotatable,
- push: &mut dyn FnMut(Annotatable),
- krate: &'static str) {
+ let krate = "rustc_serialize";
let typaram = &*deriving::hygienic_type_parameter(item, "__D");
let trait_def = TraitDef {
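
With the deprecated `Decodable`/`Encodable` spellings removed, the remaining `RustcDecodable` derive always targets the `rustc_serialize` crate. Roughly what it produces for a simple struct; the `Decoder` method names are recalled from the external rustc-serialize API and shown only as an approximation:

    use rustc_serialize::{Decodable, Decoder};

    struct Point { x: i32, y: i32 }

    impl Decodable for Point {
        fn decode<D: Decoder>(d: &mut D) -> Result<Point, D::Error> {
            d.read_struct("Point", 2, |d| {
                Ok(Point {
                    x: d.read_struct_field("x", 0, |d| Decodable::decode(d))?,
                    y: d.read_struct_field("y", 1, |d| Decodable::decode(d))?,
                })
            })
        }
    }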
diff --git a/src/libsyntax_ext/deriving/default.rs b/src/libsyntax_ext/deriving/default.rs
index fd8e87e..2fdea10 100644
--- a/src/libsyntax_ext/deriving/default.rs
+++ b/src/libsyntax_ext/deriving/default.rs
@@ -4,7 +4,6 @@
use syntax::ast::{Expr, MetaItem};
use syntax::ext::base::{Annotatable, DummyResult, ExtCtxt};
-use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::{kw, sym};
use syntax::span_err;
@@ -16,7 +15,7 @@
item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) {
let inline = cx.meta_word(span, sym::inline);
- let attrs = vec![cx.attribute(span, inline)];
+ let attrs = vec![cx.attribute(inline)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs
index faaedba..6d0d3b9 100644
--- a/src/libsyntax_ext/deriving/encodable.rs
+++ b/src/libsyntax_ext/deriving/encodable.rs
@@ -85,11 +85,9 @@
use crate::deriving::{self, pathvec_std};
use crate::deriving::generic::*;
use crate::deriving::generic::ty::*;
-use crate::deriving::warn_if_deprecated;
use syntax::ast::{Expr, ExprKind, MetaItem, Mutability};
use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::Symbol;
use syntax_pos::Span;
@@ -99,24 +97,7 @@
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable)) {
- expand_deriving_encodable_imp(cx, span, mitem, item, push, "rustc_serialize")
-}
-
-pub fn expand_deriving_encodable(cx: &mut ExtCtxt<'_>,
- span: Span,
- mitem: &MetaItem,
- item: &Annotatable,
- push: &mut dyn FnMut(Annotatable)) {
- warn_if_deprecated(cx, span, "Encodable");
- expand_deriving_encodable_imp(cx, span, mitem, item, push, "serialize")
-}
-
-fn expand_deriving_encodable_imp(cx: &mut ExtCtxt<'_>,
- span: Span,
- mitem: &MetaItem,
- item: &Annotatable,
- push: &mut dyn FnMut(Annotatable),
- krate: &'static str) {
+ let krate = "rustc_serialize";
let typaram = &*deriving::hygienic_type_parameter(item, "__S");
let trait_def = TraitDef {
diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs
index 444cf12..7e6d912 100644
--- a/src/libsyntax_ext/deriving/generic/mod.rs
+++ b/src/libsyntax_ext/deriving/generic/mod.rs
@@ -186,8 +186,7 @@
use syntax::ast::{self, BinOpKind, EnumDef, Expr, Generics, Ident, PatKind};
use syntax::ast::{VariantData, GenericParamKind, GenericArg};
use syntax::attr;
-use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
+use syntax::ext::base::{Annotatable, ExtCtxt, SpecialDerives};
use syntax::source_map::{self, respan};
use syntax::util::map_in_place::MapInPlace;
use syntax::ptr::P;
@@ -331,8 +330,8 @@
pub type EnumNonMatchCollapsedFunc<'a> =
Box<dyn FnMut(&mut ExtCtxt<'_>, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;
-pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>)
- -> RefCell<CombineSubstructureFunc<'a>> {
+pub fn combine_substructure(f: CombineSubstructureFunc<'_>)
+ -> RefCell<CombineSubstructureFunc<'_>> {
RefCell::new(f)
}
@@ -426,8 +425,9 @@
return;
}
};
+ let container_id = cx.current_expansion.id.parent();
let is_always_copy =
- attr::contains_name(&item.attrs, sym::rustc_copy_clone_marker) &&
+ cx.resolver.has_derives(container_id, SpecialDerives::COPY) &&
has_no_type_params;
let use_temporaries = is_packed && is_always_copy;
@@ -530,7 +530,8 @@
defaultness: ast::Defaultness::Final,
attrs: Vec::new(),
generics: Generics::default(),
- node: ast::ImplItemKind::Type(type_def.to_ty(cx, self.span, type_ident, generics)),
+ node: ast::ImplItemKind::TyAlias(
+ type_def.to_ty(cx, self.span, type_ident, generics)),
tokens: None,
}
});
@@ -666,14 +667,13 @@
let path = cx.path_all(self.span, false, vec![type_ident], self_params, vec![]);
let self_type = cx.ty_path(path);
- let attr = cx.attribute(self.span,
- cx.meta_word(self.span, sym::automatically_derived));
+ let attr = cx.attribute(cx.meta_word(self.span, sym::automatically_derived));
// Just mark it now since we know that it'll end up used downstream
attr::mark_used(&attr);
let opt_trait_ref = Some(trait_ref);
let unused_qual = {
let word = cx.meta_list_item_word(self.span, Symbol::intern("unused_qualifications"));
- cx.attribute(self.span, cx.meta_list(self.span, sym::allow, vec![word]))
+ cx.attribute(cx.meta_list(self.span, sym::allow, vec![word]))
};
let mut a = vec![attr, unused_qual];
@@ -1770,50 +1770,6 @@
}
}
-/// Call the method that is being derived on all the fields, and then
-/// process the collected results. i.e.
-///
-/// ```ignore (only-for-syntax-highlight)
-/// f(cx, span, vec![self_1.method(__arg_1_1, __arg_2_1),
-/// self_2.method(__arg_1_2, __arg_2_2)])
-/// ```
-#[inline]
-pub fn cs_same_method<F>(f: F,
- mut enum_nonmatch_f: EnumNonMatchCollapsedFunc<'_>,
- cx: &mut ExtCtxt<'_>,
- trait_span: Span,
- substructure: &Substructure<'_>)
- -> P<Expr>
- where F: FnOnce(&mut ExtCtxt<'_>, Span, Vec<P<Expr>>) -> P<Expr>
-{
- match *substructure.fields {
- EnumMatching(.., ref all_fields) |
- Struct(_, ref all_fields) => {
- // call self_n.method(other_1_n, other_2_n, ...)
- let called = all_fields.iter()
- .map(|field| {
- cx.expr_method_call(field.span,
- field.self_.clone(),
- substructure.method_ident,
- field.other
- .iter()
- .map(|e| cx.expr_addr_of(field.span, e.clone()))
- .collect())
- })
- .collect();
-
- f(cx, trait_span, called)
- }
- EnumNonMatchingCollapsed(ref all_self_args, _, tuple) => {
- enum_nonmatch_f(cx,
- trait_span,
- (&all_self_args[..], tuple),
- substructure.nonself_args)
- }
- StaticEnum(..) | StaticStruct(..) => cx.span_bug(trait_span, "static function in `derive`"),
- }
-}
-
/// Returns `true` if the type has no value fields
/// (for an enum, no variant has any fields)
pub fn is_type_without_fields(item: &Annotatable) -> bool {
diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs
index 90d8264..399829e 100644
--- a/src/libsyntax_ext/deriving/generic/ty.rs
+++ b/src/libsyntax_ext/deriving/generic/ty.rs
@@ -6,7 +6,6 @@
use syntax::ast::{self, Expr, GenericParamKind, Generics, Ident, SelfKind, GenericArg};
use syntax::ext::base::ExtCtxt;
-use syntax::ext::build::AstBuilder;
use syntax::source_map::{respan, DUMMY_SP};
use syntax::ptr::P;
use syntax_pos::Span;
@@ -18,6 +17,7 @@
/// &'lifetime mut
Borrowed(Option<&'a str>, ast::Mutability),
/// *mut
+ #[allow(dead_code)]
Raw(ast::Mutability),
}
@@ -39,10 +39,10 @@
}
impl<'a> Path<'a> {
- pub fn new<'r>(path: Vec<&'r str>) -> Path<'r> {
+ pub fn new(path: Vec<&str>) -> Path<'_> {
Path::new_(path, None, Vec::new(), PathKind::Std)
}
- pub fn new_local<'r>(path: &'r str) -> Path<'r> {
+ pub fn new_local(path: &str) -> Path<'_> {
Path::new_(vec![path], None, Vec::new(), PathKind::Local)
}
pub fn new_<'r>(path: Vec<&'r str>,
@@ -85,7 +85,7 @@
PathKind::Global => cx.path_all(span, true, idents, params, Vec::new()),
PathKind::Local => cx.path_all(span, false, idents, params, Vec::new()),
PathKind::Std => {
- let def_site = DUMMY_SP.apply_mark(cx.current_expansion.mark);
+ let def_site = DUMMY_SP.apply_mark(cx.current_expansion.id);
idents.insert(0, Ident::new(kw::DollarCrate, def_site));
cx.path_all(span, false, idents, params, Vec::new())
}
@@ -107,17 +107,10 @@
Tuple(Vec<Ty<'a>>),
}
-/// A const expression. Supports literals and blocks.
-#[derive(Clone, Eq, PartialEq)]
-pub enum Const {
- Literal,
- Block,
-}
-
pub fn borrowed_ptrty<'r>() -> PtrTy<'r> {
Borrowed(None, ast::Mutability::Immutable)
}
-pub fn borrowed<'r>(ty: Box<Ty<'r>>) -> Ty<'r> {
+pub fn borrowed(ty: Box<Ty<'_>>) -> Ty<'_> {
Ptr(ty, borrowed_ptrty())
}
diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs
index 7ad04ae..9787722 100644
--- a/src/libsyntax_ext/deriving/hash.rs
+++ b/src/libsyntax_ext/deriving/hash.rs
@@ -4,7 +4,6 @@
use syntax::ast::{Expr, MetaItem, Mutability};
use syntax::ext::base::{Annotatable, ExtCtxt};
-use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
use syntax::symbol::sym;
use syntax_pos::Span;
diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs
index aa9913d..8cd2853 100644
--- a/src/libsyntax_ext/deriving/mod.rs
+++ b/src/libsyntax_ext/deriving/mod.rs
@@ -1,11 +1,7 @@
//! The compiler code necessary to implement the `#[derive]` extensions.
-use rustc_data_structures::sync::Lrc;
use syntax::ast::{self, MetaItem};
-use syntax::edition::Edition;
-use syntax::ext::base::{Annotatable, ExtCtxt, Resolver, MultiItemModifier};
-use syntax::ext::base::{SyntaxExtension, SyntaxExtensionKind};
-use syntax::ext::build::AstBuilder;
+use syntax::ext::base::{Annotatable, ExtCtxt, MultiItemModifier};
use syntax::ptr::P;
use syntax::symbol::{Symbol, sym};
use syntax_pos::Span;
@@ -29,7 +25,6 @@
pub mod hash;
pub mod debug;
pub mod default;
-pub mod custom;
#[path="cmp/partial_eq.rs"]
pub mod partial_eq;
@@ -42,8 +37,8 @@
pub mod generic;
-struct BuiltinDerive(
- fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
+crate struct BuiltinDerive(
+ crate fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable))
);
impl MultiItemModifier for BuiltinDerive {
@@ -59,79 +54,6 @@
}
}
-macro_rules! derive_traits {
- ($( $name:expr => $func:path, )+) => {
- pub fn is_builtin_trait(name: ast::Name) -> bool {
- match &*name.as_str() {
- $( $name )|+ => true,
- _ => false,
- }
- }
-
- pub fn register_builtin_derives(resolver: &mut dyn Resolver, edition: Edition) {
- let allow_internal_unstable = Some([
- sym::core_intrinsics,
- sym::rustc_attrs,
- Symbol::intern("derive_clone_copy"),
- Symbol::intern("derive_eq"),
- Symbol::intern("libstd_sys_internals"), // RustcDeserialize and RustcSerialize
- ][..].into());
-
- $(
- resolver.add_builtin(
- ast::Ident::with_empty_ctxt(Symbol::intern($name)),
- Lrc::new(SyntaxExtension {
- allow_internal_unstable: allow_internal_unstable.clone(),
- ..SyntaxExtension::default(
- SyntaxExtensionKind::LegacyDerive(Box::new(BuiltinDerive($func))),
- edition,
- )
- }),
- );
- )+
- }
- }
-}
-
-derive_traits! {
- "Clone" => clone::expand_deriving_clone,
-
- "Hash" => hash::expand_deriving_hash,
-
- "RustcEncodable" => encodable::expand_deriving_rustc_encodable,
-
- "RustcDecodable" => decodable::expand_deriving_rustc_decodable,
-
- "PartialEq" => partial_eq::expand_deriving_partial_eq,
- "Eq" => eq::expand_deriving_eq,
- "PartialOrd" => partial_ord::expand_deriving_partial_ord,
- "Ord" => ord::expand_deriving_ord,
-
- "Debug" => debug::expand_deriving_debug,
-
- "Default" => default::expand_deriving_default,
-
- "Copy" => bounds::expand_deriving_copy,
-
- // deprecated
- "Encodable" => encodable::expand_deriving_encodable,
- "Decodable" => decodable::expand_deriving_decodable,
-}
-
-#[inline] // because `name` is a compile-time constant
-fn warn_if_deprecated(ecx: &mut ExtCtxt<'_>, sp: Span, name: &str) {
- if let Some(replacement) = match name {
- "Encodable" => Some("RustcEncodable"),
- "Decodable" => Some("RustcDecodable"),
- _ => None,
- } {
- ecx.span_warn(sp,
- &format!("derive({}) is deprecated in favor of derive({})",
- name,
- replacement));
- }
-}
-
/// Construct a name for the inner type parameter that can't collide with any type parameters of
/// the item. This is achieved by starting with a base and then concatenating the names of all
/// other type parameters.
diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs
index b7f2ecf..39fc90d 100644
--- a/src/libsyntax_ext/env.rs
+++ b/src/libsyntax_ext/env.rs
@@ -5,7 +5,6 @@
use syntax::ast::{self, Ident, GenericArg};
use syntax::ext::base::{self, *};
-use syntax::ext::build::AstBuilder;
use syntax::symbol::{kw, sym, Symbol};
use syntax_pos::Span;
use syntax::tokenstream;
@@ -21,7 +20,7 @@
Some(v) => v,
};
- let sp = sp.apply_mark(cx.current_expansion.mark);
+ let sp = sp.apply_mark(cx.current_expansion.id);
let e = match env::var(&*var.as_str()) {
Err(..) => {
let lt = cx.lifetime(sp, Ident::with_empty_ctxt(kw::StaticLifetime));
diff --git a/src/libsyntax_ext/error_codes.rs b/src/libsyntax_ext/error_codes.rs
index 9bbd9fd..5982a4d 100644
--- a/src/libsyntax_ext/error_codes.rs
+++ b/src/libsyntax_ext/error_codes.rs
@@ -1,6 +1,4 @@
-#![allow(non_snake_case)]
-
-use syntax::{register_diagnostic, register_long_diagnostics};
+use syntax::register_long_diagnostics;
// Error messages for EXXXX errors.
// Each message should start and end with a new line, and be wrapped to 80 characters.
diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs
index a5f9655..2ae13b6 100644
--- a/src/libsyntax_ext/format.rs
+++ b/src/libsyntax_ext/format.rs
@@ -8,8 +8,6 @@
use syntax::ast;
use syntax::ext::base::{self, *};
-use syntax::ext::build::AstBuilder;
-use syntax::feature_gate;
use syntax::parse::token;
use syntax::ptr::P;
use syntax::symbol::{Symbol, sym};
@@ -110,6 +108,8 @@
invalid_refs: Vec<(usize, usize)>,
/// Spans of all the formatting arguments, in order.
arg_spans: Vec<Span>,
+ /// All the formatting arguments that have formatting flags set, in order for diagnostics.
+ arg_with_formatting: Vec<parse::FormatSpec<'a>>,
/// Whether this formatting string is a literal or it comes from a macro.
is_literal: bool,
}
@@ -142,21 +142,21 @@
while p.token != token::Eof {
if !p.eat(&token::Comma) {
- return Err(ecx.struct_span_err(p.token.span, "expected token: `,`"));
+ let mut err = ecx.struct_span_err(p.token.span, "expected token: `,`");
+ err.span_label(p.token.span, "expected `,`");
+ p.maybe_annotate_with_ascription(&mut err, false);
+ return Err(err);
}
if p.token == token::Eof {
break;
} // accept trailing commas
- if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
+ if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
named = true;
let name = if let token::Ident(name, _) = p.token.kind {
p.bump();
name
} else {
- return Err(ecx.struct_span_err(
- p.token.span,
- "expected ident, positional arguments cannot follow named arguments",
- ));
+ unreachable!();
};
p.expect(&token::Eq)?;
@@ -177,6 +177,17 @@
args.push(e);
} else {
let e = p.parse_expr()?;
+ if named {
+ let mut err = ecx.struct_span_err(
+ e.span,
+ "positional arguments cannot follow named arguments",
+ );
+ err.span_label(e.span, "positional arguments must be before named arguments");
+ for (_, pos) in &names {
+ err.span_label(args[*pos].span, "named argument");
+ }
+ err.emit();
+ }
args.push(e);
}
}
@@ -263,31 +274,44 @@
} else {
MultiSpan::from_span(self.fmtsp)
};
- let refs_len = self.invalid_refs.len();
- let mut refs = self
+ let refs = self
.invalid_refs
.iter()
.map(|(r, pos)| (r.to_string(), self.arg_spans.get(*pos)));
- if self.names.is_empty() && !numbered_position_args {
+ let mut zero_based_note = false;
+
+ let count = self.pieces.len() + self.arg_with_formatting
+ .iter()
+ .filter(|fmt| fmt.precision_span.is_some())
+ .count();
+ if self.names.is_empty() && !numbered_position_args && count != self.args.len() {
e = self.ecx.mut_span_err(
sp,
&format!(
"{} positional argument{} in format string, but {}",
- self.pieces.len(),
- if self.pieces.len() > 1 { "s" } else { "" },
- self.describe_num_args()
+ count,
+ if count > 1 { "s" } else { "" },
+ self.describe_num_args(),
),
);
} else {
- let (arg_list, mut sp) = if refs_len == 1 {
- let (reg, pos) = refs.next().unwrap();
+ let (mut refs, spans): (Vec<_>, Vec<_>) = refs.unzip();
+ // Avoid `invalid reference to positional arguments 7 and 7 (there is 1 argument)`
+ // for `println!("{7:7$}", 1);`
+ refs.sort();
+ refs.dedup();
+ let (arg_list, mut sp) = if refs.len() == 1 {
+ let spans: Vec<_> = spans.into_iter().filter_map(|sp| sp.map(|sp| *sp)).collect();
(
- format!("argument {}", reg),
- MultiSpan::from_span(*pos.unwrap_or(&self.fmtsp)),
+ format!("argument {}", refs[0]),
+ if spans.is_empty() {
+ MultiSpan::from_span(self.fmtsp)
+ } else {
+ MultiSpan::from_spans(spans)
+ },
)
} else {
- let (mut refs, spans): (Vec<_>, Vec<_>) = refs.unzip();
let pos = MultiSpan::from_spans(spans.into_iter().map(|s| *s.unwrap()).collect());
let reg = refs.pop().unwrap();
(
@@ -307,9 +331,70 @@
&format!("invalid reference to positional {} ({})",
arg_list,
self.describe_num_args()));
- e.note("positional arguments are zero-based");
+ zero_based_note = true;
};
+ for fmt in &self.arg_with_formatting {
+ if let Some(span) = fmt.precision_span {
+ let span = self.fmtsp.from_inner(span);
+ match fmt.precision {
+ parse::CountIsParam(pos) if pos > self.args.len() => {
+ e.span_label(span, &format!(
+ "this precision flag expects an `usize` argument at position {}, \
+ but {}",
+ pos,
+ self.describe_num_args(),
+ ));
+ zero_based_note = true;
+ }
+ parse::CountIsParam(pos) => {
+ let count = self.pieces.len() + self.arg_with_formatting
+ .iter()
+ .filter(|fmt| fmt.precision_span.is_some())
+ .count();
+ e.span_label(span, &format!(
+ "this precision flag adds an extra required argument at position {}, \
+ which is why there {} expected",
+ pos,
+ if count == 1 {
+ "is 1 argument".to_string()
+ } else {
+ format!("are {} arguments", count)
+ },
+ ));
+ e.span_label(
+ self.args[pos].span,
+ "this parameter corresponds to the precision flag",
+ );
+ zero_based_note = true;
+ }
+ _ => {}
+ }
+ }
+ if let Some(span) = fmt.width_span {
+ let span = self.fmtsp.from_inner(span);
+ match fmt.width {
+ parse::CountIsParam(pos) if pos > self.args.len() => {
+ e.span_label(span, &format!(
+ "this width flag expects an `usize` argument at position {}, \
+ but {}",
+ pos,
+ self.describe_num_args(),
+ ));
+ zero_based_note = true;
+ }
+ _ => {}
+ }
+ }
+ }
+ if zero_based_note {
+ e.note("positional arguments are zero-based");
+ }
+ if !self.arg_with_formatting.is_empty() {
+ e.note("for information about formatting flags, visit \
+ https://doc.rust-lang.org/std/fmt/index.html");
+ }
+
e.emit();
}
@@ -425,10 +510,11 @@
/// Builds a static `rt::Argument` from a `parse::Piece` or append
/// to the `literal` string.
- fn build_piece(&mut self,
- piece: &parse::Piece<'_>,
- arg_index_consumed: &mut Vec<usize>)
- -> Option<P<ast::Expr>> {
+ fn build_piece(
+ &mut self,
+ piece: &parse::Piece<'a>,
+ arg_index_consumed: &mut Vec<usize>,
+ ) -> Option<P<ast::Expr>> {
let sp = self.macsp;
match *piece {
parse::String(s) => {
@@ -486,7 +572,9 @@
align: parse::AlignUnknown,
flags: 0,
precision: parse::CountImplied,
+ precision_span: None,
width: parse::CountImplied,
+ width_span: None,
ty: arg.format.ty,
},
};
@@ -496,6 +584,9 @@
let pos_simple =
arg.position.index() == simple_arg.position.index();
+ if arg.format.precision_span.is_some() || arg.format.width_span.is_some() {
+ self.arg_with_formatting.push(arg.format);
+ }
if !pos_simple || arg.format != simple_arg.format || fill != ' ' {
self.all_pieces_simple = false;
}
@@ -520,7 +611,7 @@
let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "FormatSpec"));
let fmt = self.ecx.expr_struct(
sp,
- path,
+ path,
vec![
self.ecx.field_imm(sp, self.ecx.ident_of("fill"), fill),
self.ecx.field_imm(sp, self.ecx.ident_of("align"), align),
@@ -554,7 +645,7 @@
let mut heads = Vec::with_capacity(self.args.len());
let names_pos: Vec<_> = (0..self.args.len())
- .map(|i| self.ecx.ident_of(&format!("arg{}", i)).gensym())
+ .map(|i| ast::Ident::from_str_and_span(&format!("arg{}", i), self.macsp))
.collect();
// First, build up the static array which will become our precompiled
@@ -576,7 +667,7 @@
for (i, e) in self.args.into_iter().enumerate() {
let name = names_pos[i];
let span =
- DUMMY_SP.with_ctxt(e.span.ctxt().apply_mark(self.ecx.current_expansion.mark));
+ DUMMY_SP.with_ctxt(e.span.ctxt().apply_mark(self.ecx.current_expansion.id));
pats.push(self.ecx.pat_ident(span, name));
for ref arg_ty in self.arg_unique_types[i].iter() {
locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
@@ -647,13 +738,14 @@
self.ecx.expr_call_global(self.macsp, path, fn_args)
}
- fn format_arg(ecx: &ExtCtxt<'_>,
- macsp: Span,
- mut sp: Span,
- ty: &ArgumentType,
- arg: ast::Ident)
- -> P<ast::Expr> {
- sp = sp.apply_mark(ecx.current_expansion.mark);
+ fn format_arg(
+ ecx: &ExtCtxt<'_>,
+ macsp: Span,
+ mut sp: Span,
+ ty: &ArgumentType,
+ arg: ast::Ident,
+ ) -> P<ast::Expr> {
+ sp = sp.apply_mark(ecx.current_expansion.id);
let arg = ecx.expr_ident(sp, arg);
let trait_ = match *ty {
Placeholder(ref tyname) => {
@@ -668,7 +760,21 @@
"x" => "LowerHex",
"X" => "UpperHex",
_ => {
- ecx.span_err(sp, &format!("unknown format trait `{}`", *tyname));
+ let mut err = ecx.struct_span_err(
+ sp,
+ &format!("unknown format trait `{}`", *tyname),
+ );
+ err.note("the only appropriate formatting traits are:\n\
+ - ``, which uses the `Display` trait\n\
+ - `?`, which uses the `Debug` trait\n\
+ - `e`, which uses the `LowerExp` trait\n\
+ - `E`, which uses the `UpperExp` trait\n\
+ - `o`, which uses the `Octal` trait\n\
+ - `p`, which uses the `Pointer` trait\n\
+ - `b`, which uses the `Binary` trait\n\
+ - `x`, which uses the `LowerHex` trait\n\
+ - `X`, which uses the `UpperHex` trait");
+ err.emit();
return DummyResult::raw_expr(sp, true);
}
}
@@ -686,14 +792,16 @@
}
}
-pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt<'_>,
- mut sp: Span,
- tts: &[tokenstream::TokenTree])
- -> Box<dyn base::MacResult + 'cx> {
- sp = sp.apply_mark(ecx.current_expansion.mark);
+fn expand_format_args_impl<'cx>(
+ ecx: &'cx mut ExtCtxt<'_>,
+ mut sp: Span,
+ tts: &[tokenstream::TokenTree],
+ nl: bool,
+) -> Box<dyn base::MacResult + 'cx> {
+ sp = sp.apply_mark(ecx.current_expansion.id);
match parse_args(ecx, sp, tts) {
Ok((efmt, args, names)) => {
- MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names, false))
+ MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names, nl))
}
Err(mut err) => {
err.emit();
@@ -702,52 +810,39 @@
}
}
+pub fn expand_format_args<'cx>(
+ ecx: &'cx mut ExtCtxt<'_>,
+ sp: Span,
+ tts: &[tokenstream::TokenTree],
+) -> Box<dyn base::MacResult + 'cx> {
+ expand_format_args_impl(ecx, sp, tts, false)
+}
+
pub fn expand_format_args_nl<'cx>(
ecx: &'cx mut ExtCtxt<'_>,
- mut sp: Span,
+ sp: Span,
tts: &[tokenstream::TokenTree],
) -> Box<dyn base::MacResult + 'cx> {
- //if !ecx.ecfg.enable_allow_internal_unstable() {
-
- // For some reason, the only one that actually works for `println` is the first check
- if !sp.allows_unstable(sym::format_args_nl) // the span is marked `#[allow_insternal_unsable]`
- && !ecx.ecfg.enable_allow_internal_unstable() // NOTE: when is this enabled?
- && !ecx.ecfg.enable_format_args_nl() // enabled using `#[feature(format_args_nl]`
- {
- feature_gate::emit_feature_err(&ecx.parse_sess,
- sym::format_args_nl,
- sp,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_FORMAT_ARGS_NL);
- }
- sp = sp.apply_mark(ecx.current_expansion.mark);
- match parse_args(ecx, sp, tts) {
- Ok((efmt, args, names)) => {
- MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names, true))
- }
- Err(mut err) => {
- err.emit();
- DummyResult::expr(sp)
- }
- }
+ expand_format_args_impl(ecx, sp, tts, true)
}
/// Take the various parts of `format_args!(efmt, args..., name=names...)`
/// and construct the appropriate formatting expression.
-pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt<'_>,
- sp: Span,
- efmt: P<ast::Expr>,
- args: Vec<P<ast::Expr>>,
- names: FxHashMap<Symbol, usize>,
- append_newline: bool)
- -> P<ast::Expr> {
+pub fn expand_preparsed_format_args(
+ ecx: &mut ExtCtxt<'_>,
+ sp: Span,
+ efmt: P<ast::Expr>,
+ args: Vec<P<ast::Expr>>,
+ names: FxHashMap<Symbol, usize>,
+ append_newline: bool,
+) -> P<ast::Expr> {
// NOTE: this verbose way of initializing `Vec<Vec<ArgumentType>>` is because
// `ArgumentType` does not derive `Clone`.
let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
let mut macsp = ecx.call_site();
- macsp = macsp.apply_mark(ecx.current_expansion.mark);
+ macsp = macsp.with_ctxt(ecx.backtrace());
let msg = "format argument must be a string literal";
let fmt_sp = efmt.span;
@@ -919,6 +1014,8 @@
.map(|span| fmt.span.from_inner(*span))
.collect();
+ let named_pos: FxHashSet<usize> = names.values().cloned().collect();
+
let mut cx = Context {
ecx,
args,
@@ -940,6 +1037,7 @@
fmtsp: fmt.span,
invalid_refs: Vec::new(),
arg_spans,
+ arg_with_formatting: Vec::new(),
is_literal,
};
@@ -984,14 +1082,12 @@
}
// Make sure that all arguments were used and all arguments have types.
- let num_pos_args = cx.args.len() - cx.names.len();
-
let errs = cx.arg_types
.iter()
.enumerate()
.filter(|(i, ty)| ty.is_empty() && !cx.count_positions.contains_key(&i))
.map(|(i, _)| {
- let msg = if i >= num_pos_args {
+ let msg = if named_pos.contains(&i) {
// named argument
"named argument never used"
} else {
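
Most of the format.rs changes are diagnostics: positional-after-named arguments now get a dedicated error, invalid references are deduplicated, and precision/width flags that consume extra arguments are tracked through `arg_with_formatting`. Two user-level snippets that hit the new paths (the second is the example quoted in the dedup comment above):

    // error: positional arguments cannot follow named arguments
    format!("{foo} {}", foo = 1, 2);

    // Previously reported as an invalid reference to "arguments 7 and 7";
    // the references are now sorted and deduplicated before reporting.
    println!("{7:7$}", 1);

The `format_args`/`format_args_nl` pair also collapses into thin wrappers around `expand_format_args_impl`, with the `format_args_nl` feature-gate check moving out of the expander.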
diff --git a/src/libsyntax_ext/global_allocator.rs b/src/libsyntax_ext/global_allocator.rs
new file mode 100644
index 0000000..f788b51
--- /dev/null
+++ b/src/libsyntax_ext/global_allocator.rs
@@ -0,0 +1,190 @@
+use syntax::ast::{ItemKind, Mutability, Stmt, Ty, TyKind, Unsafety};
+use syntax::ast::{self, Arg, Attribute, Expr, FnHeader, Generics, Ident};
+use syntax::attr::check_builtin_macro_attribute;
+use syntax::ext::allocator::{AllocatorKind, AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS};
+use syntax::ext::base::{Annotatable, ExtCtxt};
+use syntax::ext::hygiene::SyntaxContext;
+use syntax::ptr::P;
+use syntax::symbol::{kw, sym, Symbol};
+use syntax_pos::Span;
+
+pub fn expand(
+ ecx: &mut ExtCtxt<'_>,
+ _span: Span,
+ meta_item: &ast::MetaItem,
+ item: Annotatable,
+) -> Vec<Annotatable> {
+ check_builtin_macro_attribute(ecx, meta_item, sym::global_allocator);
+
+ let not_static = |item: Annotatable| {
+ ecx.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics");
+ vec![item]
+ };
+ let item = match item {
+ Annotatable::Item(item) => match item.node {
+ ItemKind::Static(..) => item,
+ _ => return not_static(Annotatable::Item(item)),
+ }
+ _ => return not_static(item),
+ };
+
+ // Generate a bunch of new items using the AllocFnFactory
+ let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id));
+ let f = AllocFnFactory {
+ span,
+ kind: AllocatorKind::Global,
+ global: item.ident,
+ cx: ecx,
+ };
+
+ // Generate item statements for the allocator methods.
+ let stmts = ALLOCATOR_METHODS.iter().map(|method| f.allocator_fn(method)).collect();
+
+ // Generate anonymous constant serving as container for the allocator methods.
+ let const_ty = ecx.ty(span, TyKind::Tup(Vec::new()));
+ let const_body = ecx.expr_block(ecx.block(span, stmts));
+ let const_item =
+ ecx.item_const(span, Ident::with_empty_ctxt(kw::Underscore), const_ty, const_body);
+
+ // Return the original item and the new methods.
+ vec![Annotatable::Item(item), Annotatable::Item(const_item)]
+}
+
+struct AllocFnFactory<'a, 'b> {
+ span: Span,
+ kind: AllocatorKind,
+ global: Ident,
+ cx: &'b ExtCtxt<'a>,
+}
+
+impl AllocFnFactory<'_, '_> {
+ fn allocator_fn(&self, method: &AllocatorMethod) -> Stmt {
+ let mut abi_args = Vec::new();
+ let mut i = 0;
+ let ref mut mk = || {
+ let name = Ident::from_str(&format!("arg{}", i));
+ i += 1;
+ name
+ };
+ let args = method
+ .inputs
+ .iter()
+ .map(|ty| self.arg_ty(ty, &mut abi_args, mk))
+ .collect();
+ let result = self.call_allocator(method.name, args);
+ let (output_ty, output_expr) = self.ret_ty(&method.output, result);
+ let kind = ItemKind::Fn(
+ self.cx.fn_decl(abi_args, ast::FunctionRetTy::Ty(output_ty)),
+ FnHeader {
+ unsafety: Unsafety::Unsafe,
+ ..FnHeader::default()
+ },
+ Generics::default(),
+ self.cx.block_expr(output_expr),
+ );
+ let item = self.cx.item(
+ self.span,
+ Ident::from_str(&self.kind.fn_name(method.name)),
+ self.attrs(),
+ kind,
+ );
+ self.cx.stmt_item(self.span, item)
+ }
+
+ fn call_allocator(&self, method: &str, mut args: Vec<P<Expr>>) -> P<Expr> {
+ let method = self.cx.std_path(&[
+ Symbol::intern("alloc"),
+ Symbol::intern("GlobalAlloc"),
+ Symbol::intern(method),
+ ]);
+ let method = self.cx.expr_path(self.cx.path(self.span, method));
+ let allocator = self.cx.path_ident(self.span, self.global);
+ let allocator = self.cx.expr_path(allocator);
+ let allocator = self.cx.expr_addr_of(self.span, allocator);
+ args.insert(0, allocator);
+
+ self.cx.expr_call(self.span, method, args)
+ }
+
+ fn attrs(&self) -> Vec<Attribute> {
+ let special = sym::rustc_std_internal_symbol;
+ let special = self.cx.meta_word(self.span, special);
+ vec![self.cx.attribute(special)]
+ }
+
+ fn arg_ty(
+ &self,
+ ty: &AllocatorTy,
+ args: &mut Vec<Arg>,
+ ident: &mut dyn FnMut() -> Ident,
+ ) -> P<Expr> {
+ match *ty {
+ AllocatorTy::Layout => {
+ let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize));
+ let ty_usize = self.cx.ty_path(usize);
+ let size = ident();
+ let align = ident();
+ args.push(self.cx.arg(self.span, size, ty_usize.clone()));
+ args.push(self.cx.arg(self.span, align, ty_usize));
+
+ let layout_new = self.cx.std_path(&[
+ Symbol::intern("alloc"),
+ Symbol::intern("Layout"),
+ Symbol::intern("from_size_align_unchecked"),
+ ]);
+ let layout_new = self.cx.expr_path(self.cx.path(self.span, layout_new));
+ let size = self.cx.expr_ident(self.span, size);
+ let align = self.cx.expr_ident(self.span, align);
+ let layout = self.cx.expr_call(self.span, layout_new, vec![size, align]);
+ layout
+ }
+
+ AllocatorTy::Ptr => {
+ let ident = ident();
+ args.push(self.cx.arg(self.span, ident, self.ptr_u8()));
+ let arg = self.cx.expr_ident(self.span, ident);
+ self.cx.expr_cast(self.span, arg, self.ptr_u8())
+ }
+
+ AllocatorTy::Usize => {
+ let ident = ident();
+ args.push(self.cx.arg(self.span, ident, self.usize()));
+ self.cx.expr_ident(self.span, ident)
+ }
+
+ AllocatorTy::ResultPtr | AllocatorTy::Unit => {
+ panic!("can't convert AllocatorTy to an argument")
+ }
+ }
+ }
+
+ fn ret_ty(&self, ty: &AllocatorTy, expr: P<Expr>) -> (P<Ty>, P<Expr>) {
+ match *ty {
+ AllocatorTy::ResultPtr => {
+ // We're creating:
+ //
+ // #expr as *mut u8
+
+ let expr = self.cx.expr_cast(self.span, expr, self.ptr_u8());
+ (self.ptr_u8(), expr)
+ }
+
+ AllocatorTy::Unit => (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr),
+
+ AllocatorTy::Layout | AllocatorTy::Usize | AllocatorTy::Ptr => {
+ panic!("can't convert `AllocatorTy` to an output")
+ }
+ }
+ }
+
+ fn usize(&self) -> P<Ty> {
+ let usize = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::usize));
+ self.cx.ty_path(usize)
+ }
+
+ fn ptr_u8(&self) -> P<Ty> {
+ let u8 = self.cx.path_ident(self.span, Ident::with_empty_ctxt(sym::u8));
+ let ty_u8 = self.cx.ty_path(u8);
+ self.cx.ty_ptr(self.span, ty_u8, Mutability::Mutable)
+ }
+}
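
The `#[global_allocator]` attribute is now expanded here as a built-in attribute macro (registered under `register_attr!` in lib.rs below). For reference, the user-facing form it accepts; anything other than a `static` hits the "allocators must be statics" error above:

    use std::alloc::{GlobalAlloc, Layout, System};

    struct Counting;

    unsafe impl GlobalAlloc for Counting {
        unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
            System.alloc(layout)
        }
        unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
            System.dealloc(ptr, layout)
        }
    }

    #[global_allocator]
    static GLOBAL: Counting = Counting;

`AllocFnFactory` then emits one forwarding shim per `ALLOCATOR_METHODS` entry, wrapped in the anonymous `const _: () = { ... };` item so the shims do not pollute the surrounding namespace.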
diff --git a/src/libsyntax_ext/global_asm.rs b/src/libsyntax_ext/global_asm.rs
index 5220143..112192f 100644
--- a/src/libsyntax_ext/global_asm.rs
+++ b/src/libsyntax_ext/global_asm.rs
@@ -13,27 +13,15 @@
use syntax::ast;
use syntax::source_map::respan;
use syntax::ext::base::{self, *};
-use syntax::feature_gate;
use syntax::parse::token;
use syntax::ptr::P;
-use syntax::symbol::{Symbol, sym};
use syntax_pos::Span;
use syntax::tokenstream;
use smallvec::smallvec;
-pub const MACRO: Symbol = sym::global_asm;
-
pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult + 'cx> {
- if !cx.ecfg.enable_global_asm() {
- feature_gate::emit_feature_err(&cx.parse_sess,
- MACRO,
- sp,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_GLOBAL_ASM);
- }
-
match parse_global_asm(cx, sp, tts) {
Ok(Some(global_asm)) => {
MacEager::items(smallvec![P(ast::Item {
diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs
index b868f5b..0f3f5c0 100644
--- a/src/libsyntax_ext/lib.rs
+++ b/src/libsyntax_ext/lib.rs
@@ -1,22 +1,20 @@
-//! Syntax extensions in the Rust compiler.
+//! This crate contains implementations of built-in macros and other code generating facilities
+//! injecting code into the crate before it is lowered to HIR.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(rust_2018_idioms)]
-#![deny(internal)]
-#![deny(unused_lifetimes)]
-
-#![feature(in_band_lifetimes)]
-#![feature(proc_macro_diagnostic)]
-#![feature(proc_macro_internals)]
-#![feature(proc_macro_span)]
+#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
+#![feature(mem_take)]
#![feature(nll)]
#![feature(rustc_diagnostic_macros)]
-#![recursion_limit="256"]
+use crate::deriving::*;
-extern crate proc_macro;
+use syntax::ast::Ident;
+use syntax::edition::Edition;
+use syntax::ext::base::{SyntaxExtension, SyntaxExtensionKind, MacroExpanderFn};
+use syntax::symbol::sym;
mod error_codes;
@@ -26,98 +24,81 @@
mod compile_error;
mod concat;
mod concat_idents;
+mod deriving;
mod env;
mod format;
mod format_foreign;
+mod global_allocator;
mod global_asm;
mod log_syntax;
-mod proc_macro_server;
+mod source_util;
mod test;
-mod test_case;
mod trace_macros;
-pub mod deriving;
-pub mod proc_macro_decls;
-pub mod proc_macro_impl;
+pub mod plugin_macro_defs;
+pub mod proc_macro_harness;
+pub mod standard_library_imports;
+pub mod test_harness;
-use rustc_data_structures::sync::Lrc;
-use syntax::ast;
-
-use syntax::ext::base::MacroExpanderFn;
-use syntax::ext::base::{NamedSyntaxExtension, SyntaxExtension, SyntaxExtensionKind};
-use syntax::edition::Edition;
-use syntax::symbol::{sym, Symbol};
-
-pub fn register_builtins(resolver: &mut dyn syntax::ext::base::Resolver,
- user_exts: Vec<NamedSyntaxExtension>,
- edition: Edition) {
- deriving::register_builtin_derives(resolver, edition);
-
- let mut register = |name, ext| {
- resolver.add_builtin(ast::Ident::with_empty_ctxt(name), Lrc::new(ext));
- };
- macro_rules! register {
- ($( $name:ident: $f:expr, )*) => { $(
- register(Symbol::intern(stringify!($name)), SyntaxExtension::default(
- SyntaxExtensionKind::LegacyBang(Box::new($f as MacroExpanderFn)), edition
- ));
- )* }
+pub fn register_builtin_macros(resolver: &mut dyn syntax::ext::base::Resolver, edition: Edition) {
+ let mut register = |name, kind| resolver.register_builtin_macro(
+ Ident::with_empty_ctxt(name), SyntaxExtension {
+ is_builtin: true, ..SyntaxExtension::default(kind, edition)
+ },
+ );
+ macro register_bang($($name:ident: $f:expr,)*) {
+ $(register(sym::$name, SyntaxExtensionKind::LegacyBang(Box::new($f as MacroExpanderFn)));)*
}
- macro_rules! register_attr {
- ($( $name:ident: $f:expr, )*) => { $(
- register(Symbol::intern(stringify!($name)), SyntaxExtension::default(
- SyntaxExtensionKind::LegacyAttr(Box::new($f)), edition
- ));
- )* }
+ macro register_attr($($name:ident: $f:expr,)*) {
+ $(register(sym::$name, SyntaxExtensionKind::LegacyAttr(Box::new($f)));)*
+ }
+ macro register_derive($($name:ident: $f:expr,)*) {
+ $(register(sym::$name, SyntaxExtensionKind::LegacyDerive(Box::new(BuiltinDerive($f))));)*
}
- use syntax::ext::source_util::*;
- register! {
- line: expand_line,
- __rust_unstable_column: expand_column_gated,
- column: expand_column,
- file: expand_file,
- stringify: expand_stringify,
- include: expand_include,
- include_str: expand_include_str,
- include_bytes: expand_include_bytes,
- module_path: expand_mod,
-
+ register_bang! {
+ __rust_unstable_column: source_util::expand_column,
asm: asm::expand_asm,
- global_asm: global_asm::expand_global_asm,
- cfg: cfg::expand_cfg,
- concat: concat::expand_syntax_ext,
- concat_idents: concat_idents::expand_syntax_ext,
- env: env::expand_env,
- option_env: env::expand_option_env,
- log_syntax: log_syntax::expand_syntax_ext,
- trace_macros: trace_macros::expand_trace_macros,
- compile_error: compile_error::expand_compile_error,
assert: assert::expand_assert,
+ cfg: cfg::expand_cfg,
+ column: source_util::expand_column,
+ compile_error: compile_error::expand_compile_error,
+ concat_idents: concat_idents::expand_syntax_ext,
+ concat: concat::expand_syntax_ext,
+ env: env::expand_env,
+ file: source_util::expand_file,
+ format_args_nl: format::expand_format_args_nl,
+ format_args: format::expand_format_args,
+ global_asm: global_asm::expand_global_asm,
+ include_bytes: source_util::expand_include_bytes,
+ include_str: source_util::expand_include_str,
+ include: source_util::expand_include,
+ line: source_util::expand_line,
+ log_syntax: log_syntax::expand_syntax_ext,
+ module_path: source_util::expand_mod,
+ option_env: env::expand_option_env,
+ stringify: source_util::expand_stringify,
+ trace_macros: trace_macros::expand_trace_macros,
}
register_attr! {
- test_case: test_case::expand,
- test: test::expand_test,
bench: test::expand_bench,
+ global_allocator: global_allocator::expand,
+ test: test::expand_test,
+ test_case: test::expand_test_case,
}
- // format_args uses `unstable` things internally.
- let allow_internal_unstable = Some([sym::fmt_internals][..].into());
- register(Symbol::intern("format_args"), SyntaxExtension {
- allow_internal_unstable: allow_internal_unstable.clone(),
- ..SyntaxExtension::default(
- SyntaxExtensionKind::LegacyBang(Box::new(format::expand_format_args)), edition
- )
- });
- register(sym::format_args_nl, SyntaxExtension {
- allow_internal_unstable,
- ..SyntaxExtension::default(
- SyntaxExtensionKind::LegacyBang(Box::new(format::expand_format_args_nl)), edition
- )
- });
-
- for (name, ext) in user_exts {
- register(name, ext);
+ register_derive! {
+ Clone: clone::expand_deriving_clone,
+ Copy: bounds::expand_deriving_copy,
+ Debug: debug::expand_deriving_debug,
+ Default: default::expand_deriving_default,
+ Eq: eq::expand_deriving_eq,
+ Hash: hash::expand_deriving_hash,
+ Ord: ord::expand_deriving_ord,
+ PartialEq: partial_eq::expand_deriving_partial_eq,
+ PartialOrd: partial_ord::expand_deriving_partial_ord,
+ RustcDecodable: decodable::expand_deriving_rustc_decodable,
+ RustcEncodable: encodable::expand_deriving_rustc_encodable,
}
}
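
All registration is now funneled through `register_builtin_macros`, with small `macro` helpers replacing the old `macro_rules!` tables and `register_builtin_derives`, and every extension is flagged `is_builtin: true`. Each `register_bang!` entry is a plain function coerced to `MacroExpanderFn`; the shared signature, as used by the expanders throughout this crate:

    fn expand_noop<'cx>(
        _cx: &'cx mut base::ExtCtxt<'_>,
        sp: Span,
        _tts: &[tokenstream::TokenTree],
    ) -> Box<dyn base::MacResult + 'cx> {
        // A do-nothing expansion that satisfies whatever AST fragment the
        // caller asked for.
        base::DummyResult::any(sp)
    }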
diff --git a/src/libsyntax_ext/log_syntax.rs b/src/libsyntax_ext/log_syntax.rs
index 1be3990..cbdfd08 100644
--- a/src/libsyntax_ext/log_syntax.rs
+++ b/src/libsyntax_ext/log_syntax.rs
@@ -1,22 +1,12 @@
use syntax::ext::base;
-use syntax::feature_gate;
use syntax::print;
use syntax::tokenstream;
-use syntax::symbol::sym;
use syntax_pos;
-pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt<'_>,
+pub fn expand_syntax_ext<'cx>(_cx: &'cx mut base::ExtCtxt<'_>,
sp: syntax_pos::Span,
tts: &[tokenstream::TokenTree])
-> Box<dyn base::MacResult + 'cx> {
- if !cx.ecfg.enable_log_syntax() {
- feature_gate::emit_feature_err(&cx.parse_sess,
- sym::log_syntax,
- sp,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_LOG_SYNTAX);
- }
-
println!("{}", print::pprust::tts_to_string(tts));
// any so that `log_syntax` can be invoked as an expression and item.
diff --git a/src/libsyntax_ext/plugin_macro_defs.rs b/src/libsyntax_ext/plugin_macro_defs.rs
new file mode 100644
index 0000000..a725f5e
--- /dev/null
+++ b/src/libsyntax_ext/plugin_macro_defs.rs
@@ -0,0 +1,58 @@
+//! Each macro must have a definition, so `#[plugin]` attributes
+//! inject a dummy `macro_rules` item for each macro they define.
+
+use syntax::ast::*;
+use syntax::attr;
+use syntax::edition::Edition;
+use syntax::ext::base::{Resolver, NamedSyntaxExtension};
+use syntax::parse::token;
+use syntax::ptr::P;
+use syntax::source_map::respan;
+use syntax::symbol::sym;
+use syntax::tokenstream::*;
+use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::hygiene::{ExpnId, ExpnInfo, ExpnKind, MacroKind};
+
+use std::mem;
+
+fn plugin_macro_def(name: Name, span: Span) -> P<Item> {
+ let rustc_builtin_macro = attr::mk_attr_outer(
+ attr::mk_word_item(Ident::new(sym::rustc_builtin_macro, span)));
+
+ let parens: TreeAndJoint = TokenTree::Delimited(
+ DelimSpan::from_single(span), token::Paren, TokenStream::empty()
+ ).into();
+ let trees = vec![parens.clone(), TokenTree::token(token::FatArrow, span).into(), parens];
+
+ P(Item {
+ ident: Ident::new(name, span),
+ attrs: vec![rustc_builtin_macro],
+ id: DUMMY_NODE_ID,
+ node: ItemKind::MacroDef(MacroDef { tokens: TokenStream::new(trees), legacy: true }),
+ vis: respan(span, VisibilityKind::Inherited),
+ span: span,
+ tokens: None,
+ })
+}
+
+pub fn inject(
+ krate: &mut Crate,
+ resolver: &mut dyn Resolver,
+ named_exts: Vec<NamedSyntaxExtension>,
+ edition: Edition,
+) {
+ if !named_exts.is_empty() {
+ let mut extra_items = Vec::new();
+ let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
+ ExpnKind::Macro(MacroKind::Attr, sym::plugin), DUMMY_SP, edition,
+ [sym::rustc_attrs][..].into(),
+ ));
+ for (name, ext) in named_exts {
+ resolver.register_builtin_macro(Ident::with_empty_ctxt(name), ext);
+ extra_items.push(plugin_macro_def(name, span));
+ }
+ // The `macro_rules` items must be inserted before any other items.
+ mem::swap(&mut extra_items, &mut krate.module.items);
+ krate.module.items.append(&mut extra_items);
+ }
+}
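
Each injected definition corresponds to source of roughly this shape (`my_plugin_macro` stands in for whatever name the plugin registered); the item only exists so name resolution finds a `macro_rules` definition, while the actual expander is attached separately through `register_builtin_macro`:

    #[rustc_builtin_macro]
    macro_rules! my_plugin_macro {
        () => ()
    }

The `mem::swap` followed by `append` simply prepends the generated items, so the definitions precede any use elsewhere in the crate's root module.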
diff --git a/src/libsyntax_ext/proc_macro_decls.rs b/src/libsyntax_ext/proc_macro_harness.rs
similarity index 92%
rename from src/libsyntax_ext/proc_macro_decls.rs
rename to src/libsyntax_ext/proc_macro_harness.rs
index 45e6528..7913a74 100644
--- a/src/libsyntax_ext/proc_macro_decls.rs
+++ b/src/libsyntax_ext/proc_macro_harness.rs
@@ -1,29 +1,20 @@
use std::mem;
-use crate::deriving;
-
use syntax::ast::{self, Ident};
use syntax::attr;
-use syntax::source_map::{ExpnInfo, MacroAttribute, respan};
-use syntax::ext::base::ExtCtxt;
-use syntax::ext::build::AstBuilder;
+use syntax::source_map::{ExpnInfo, ExpnKind, respan};
+use syntax::ext::base::{ExtCtxt, MacroKind};
use syntax::ext::expand::ExpansionConfig;
-use syntax::ext::hygiene::Mark;
+use syntax::ext::hygiene::ExpnId;
+use syntax::ext::proc_macro::is_proc_macro_attr;
use syntax::mut_visit::MutVisitor;
use syntax::parse::ParseSess;
use syntax::ptr::P;
-use syntax::symbol::Symbol;
use syntax::symbol::{kw, sym};
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};
-const PROC_MACRO_KINDS: [Symbol; 3] = [
- sym::proc_macro_derive,
- sym::proc_macro_attribute,
- sym::proc_macro
-];
-
struct ProcMacroDerive {
trait_name: ast::Name,
function_name: Ident,
@@ -46,7 +37,7 @@
is_test_crate: bool,
}
-pub fn modify(sess: &ParseSess,
+pub fn inject(sess: &ParseSess,
resolver: &mut dyn (::syntax::ext::base::Resolver),
mut krate: ast::Crate,
is_proc_macro_crate: bool,
@@ -90,10 +81,6 @@
krate
}
-pub fn is_proc_macro_attr(attr: &ast::Attribute) -> bool {
- PROC_MACRO_KINDS.iter().any(|kind| attr.check_name(*kind))
-}
-
impl<'a> CollectProcMacros<'a> {
fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) {
if self.is_proc_macro_crate && self.in_root && vis.node.is_pub() {
@@ -136,10 +123,6 @@
self.handler.span_err(trait_attr.span,
&format!("`{}` cannot be a name of derive macro", trait_ident));
}
- if deriving::is_builtin_trait(trait_ident.name) {
- self.handler.span_err(trait_attr.span,
- "cannot override a built-in derive macro");
- }
let attributes_attr = list.get(1);
let proc_attrs: Vec<_> = if let Some(attr) = attributes_attr {
@@ -346,16 +329,14 @@
custom_attrs: &[ProcMacroDef],
custom_macros: &[ProcMacroDef],
) -> P<ast::Item> {
- let mark = Mark::fresh(Mark::root());
- mark.set_expn_info(ExpnInfo::with_unstable(
- MacroAttribute(sym::proc_macro), DUMMY_SP, cx.parse_sess.edition,
- &[sym::rustc_attrs, Symbol::intern("proc_macro_internals")],
+ let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
+ ExpnKind::Macro(MacroKind::Attr, sym::proc_macro), DUMMY_SP, cx.parse_sess.edition,
+ [sym::rustc_attrs, sym::proc_macro_internals][..].into(),
));
- let span = DUMMY_SP.apply_mark(mark);
let hidden = cx.meta_list_item_word(span, sym::hidden);
let doc = cx.meta_list(span, sym::doc, vec![hidden]);
- let doc_hidden = cx.attribute(span, doc);
+ let doc_hidden = cx.attribute(doc);
let proc_macro = Ident::with_empty_ctxt(sym::proc_macro);
let krate = cx.item(span,
@@ -412,7 +393,7 @@
cx.expr_vec_slice(span, decls),
).map(|mut i| {
let attr = cx.meta_word(span, sym::rustc_proc_macro_decls);
- i.attrs.push(cx.attribute(span, attr));
+ i.attrs.push(cx.attribute(attr));
i.vis = respan(span, ast::VisibilityKind::Public);
i
});
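On the user side, the items this harness collects are ordinary proc-macro definitions, and `inject` appends a hidden `#[rustc_proc_macro_decls]` registration item for them. A sketch of a crate it would operate on, with `HeapSize` and `heap_size` as made-up names:

    // lib.rs of a crate built with `crate-type = ["proc-macro"]`.
    extern crate proc_macro;
    use proc_macro::TokenStream;

    // Picked up by CollectProcMacros as a ProcMacroDerive entry.
    #[proc_macro_derive(HeapSize, attributes(heap_size))]
    pub fn derive_heap_size(_input: TokenStream) -> TokenStream {
        TokenStream::new()
    }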
diff --git a/src/libsyntax_ext/proc_macro_impl.rs b/src/libsyntax_ext/proc_macro_impl.rs
deleted file mode 100644
index f0fc639..0000000
--- a/src/libsyntax_ext/proc_macro_impl.rs
+++ /dev/null
@@ -1,68 +0,0 @@
-use crate::proc_macro_server;
-
-use errors::FatalError;
-use syntax::source_map::Span;
-use syntax::ext::base::{self, *};
-use syntax::tokenstream::TokenStream;
-
-pub const EXEC_STRATEGY: proc_macro::bridge::server::SameThread =
- proc_macro::bridge::server::SameThread;
-
-pub struct AttrProcMacro {
- pub client: proc_macro::bridge::client::Client<
- fn(proc_macro::TokenStream, proc_macro::TokenStream) -> proc_macro::TokenStream,
- >,
-}
-
-impl base::AttrProcMacro for AttrProcMacro {
- fn expand<'cx>(&self,
- ecx: &'cx mut ExtCtxt<'_>,
- span: Span,
- annotation: TokenStream,
- annotated: TokenStream)
- -> TokenStream {
- let server = proc_macro_server::Rustc::new(ecx);
- match self.client.run(&EXEC_STRATEGY, server, annotation, annotated) {
- Ok(stream) => stream,
- Err(e) => {
- let msg = "custom attribute panicked";
- let mut err = ecx.struct_span_fatal(span, msg);
- if let Some(s) = e.as_str() {
- err.help(&format!("message: {}", s));
- }
-
- err.emit();
- FatalError.raise();
- }
- }
- }
-}
-
-pub struct BangProcMacro {
- pub client: proc_macro::bridge::client::Client<
- fn(proc_macro::TokenStream) -> proc_macro::TokenStream,
- >,
-}
-
-impl base::ProcMacro for BangProcMacro {
- fn expand<'cx>(&self,
- ecx: &'cx mut ExtCtxt<'_>,
- span: Span,
- input: TokenStream)
- -> TokenStream {
- let server = proc_macro_server::Rustc::new(ecx);
- match self.client.run(&EXEC_STRATEGY, server, input) {
- Ok(stream) => stream,
- Err(e) => {
- let msg = "proc macro panicked";
- let mut err = ecx.struct_span_fatal(span, msg);
- if let Some(s) = e.as_str() {
- err.help(&format!("message: {}", s));
- }
-
- err.emit();
- FatalError.raise();
- }
- }
- }
-}
diff --git a/src/libsyntax_ext/proc_macro_server.rs b/src/libsyntax_ext/proc_macro_server.rs
deleted file mode 100644
index c9d99e5..0000000
--- a/src/libsyntax_ext/proc_macro_server.rs
+++ /dev/null
@@ -1,717 +0,0 @@
-use errors::{Diagnostic, DiagnosticBuilder};
-
-use std::panic;
-
-use proc_macro::bridge::{server, TokenTree};
-use proc_macro::{Delimiter, Level, LineColumn, Spacing};
-
-use rustc_data_structures::sync::Lrc;
-use std::ascii;
-use std::ops::Bound;
-use syntax::ast;
-use syntax::ext::base::ExtCtxt;
-use syntax::parse::lexer::comments;
-use syntax::parse::{self, token, ParseSess};
-use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
-use syntax_pos::hygiene::{SyntaxContext, Transparency};
-use syntax_pos::symbol::{kw, sym, Symbol};
-use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
-
-trait FromInternal<T> {
- fn from_internal(x: T) -> Self;
-}
-
-trait ToInternal<T> {
- fn to_internal(self) -> T;
-}
-
-impl FromInternal<token::DelimToken> for Delimiter {
- fn from_internal(delim: token::DelimToken) -> Delimiter {
- match delim {
- token::Paren => Delimiter::Parenthesis,
- token::Brace => Delimiter::Brace,
- token::Bracket => Delimiter::Bracket,
- token::NoDelim => Delimiter::None,
- }
- }
-}
-
-impl ToInternal<token::DelimToken> for Delimiter {
- fn to_internal(self) -> token::DelimToken {
- match self {
- Delimiter::Parenthesis => token::Paren,
- Delimiter::Brace => token::Brace,
- Delimiter::Bracket => token::Bracket,
- Delimiter::None => token::NoDelim,
- }
- }
-}
-
-impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
- for TokenTree<Group, Punct, Ident, Literal>
-{
- fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
- -> Self {
- use syntax::parse::token::*;
-
- let joint = is_joint == Joint;
- let Token { kind, span } = match tree {
- tokenstream::TokenTree::Delimited(span, delim, tts) => {
- let delimiter = Delimiter::from_internal(delim);
- return TokenTree::Group(Group {
- delimiter,
- stream: tts.into(),
- span,
- });
- }
- tokenstream::TokenTree::Token(token) => token,
- };
-
- macro_rules! tt {
- ($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
- TokenTree::$ty(self::$ty {
- $($field $(: $value)*,)+
- span,
- })
- );
- ($ty:ident::$method:ident($($value:expr),*)) => (
- TokenTree::$ty(self::$ty::$method($($value,)* span))
- );
- }
- macro_rules! op {
- ($a:expr) => {
- tt!(Punct::new($a, joint))
- };
- ($a:expr, $b:expr) => {{
- stack.push(tt!(Punct::new($b, joint)));
- tt!(Punct::new($a, true))
- }};
- ($a:expr, $b:expr, $c:expr) => {{
- stack.push(tt!(Punct::new($c, joint)));
- stack.push(tt!(Punct::new($b, true)));
- tt!(Punct::new($a, true))
- }};
- }
-
- match kind {
- Eq => op!('='),
- Lt => op!('<'),
- Le => op!('<', '='),
- EqEq => op!('=', '='),
- Ne => op!('!', '='),
- Ge => op!('>', '='),
- Gt => op!('>'),
- AndAnd => op!('&', '&'),
- OrOr => op!('|', '|'),
- Not => op!('!'),
- Tilde => op!('~'),
- BinOp(Plus) => op!('+'),
- BinOp(Minus) => op!('-'),
- BinOp(Star) => op!('*'),
- BinOp(Slash) => op!('/'),
- BinOp(Percent) => op!('%'),
- BinOp(Caret) => op!('^'),
- BinOp(And) => op!('&'),
- BinOp(Or) => op!('|'),
- BinOp(Shl) => op!('<', '<'),
- BinOp(Shr) => op!('>', '>'),
- BinOpEq(Plus) => op!('+', '='),
- BinOpEq(Minus) => op!('-', '='),
- BinOpEq(Star) => op!('*', '='),
- BinOpEq(Slash) => op!('/', '='),
- BinOpEq(Percent) => op!('%', '='),
- BinOpEq(Caret) => op!('^', '='),
- BinOpEq(And) => op!('&', '='),
- BinOpEq(Or) => op!('|', '='),
- BinOpEq(Shl) => op!('<', '<', '='),
- BinOpEq(Shr) => op!('>', '>', '='),
- At => op!('@'),
- Dot => op!('.'),
- DotDot => op!('.', '.'),
- DotDotDot => op!('.', '.', '.'),
- DotDotEq => op!('.', '.', '='),
- Comma => op!(','),
- Semi => op!(';'),
- Colon => op!(':'),
- ModSep => op!(':', ':'),
- RArrow => op!('-', '>'),
- LArrow => op!('<', '-'),
- FatArrow => op!('=', '>'),
- Pound => op!('#'),
- Dollar => op!('$'),
- Question => op!('?'),
- SingleQuote => op!('\''),
-
- Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
- Ident(name, is_raw) => tt!(Ident::new(name, is_raw)),
- Lifetime(name) => {
- let ident = ast::Ident::new(name, span).without_first_quote();
- stack.push(tt!(Ident::new(ident.name, false)));
- tt!(Punct::new('\'', true))
- }
- Literal(lit) => tt!(Literal { lit }),
- DocComment(c) => {
- let style = comments::doc_comment_style(&c.as_str());
- let stripped = comments::strip_doc_comment_decoration(&c.as_str());
- let mut escaped = String::new();
- for ch in stripped.chars() {
- escaped.extend(ch.escape_debug());
- }
- let stream = vec![
- Ident(sym::doc, false),
- Eq,
- TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
- ]
- .into_iter()
- .map(|kind| tokenstream::TokenTree::token(kind, span))
- .collect();
- stack.push(TokenTree::Group(Group {
- delimiter: Delimiter::Bracket,
- stream,
- span: DelimSpan::from_single(span),
- }));
- if style == ast::AttrStyle::Inner {
- stack.push(tt!(Punct::new('!', false)));
- }
- tt!(Punct::new('#', false))
- }
-
- Interpolated(nt) => {
- let stream = nt.to_tokenstream(sess, span);
- TokenTree::Group(Group {
- delimiter: Delimiter::None,
- stream,
- span: DelimSpan::from_single(span),
- })
- }
-
- OpenDelim(..) | CloseDelim(..) => unreachable!(),
- Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
- }
- }
-}
-
-impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
- fn to_internal(self) -> TokenStream {
- use syntax::parse::token::*;
-
- let (ch, joint, span) = match self {
- TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
- TokenTree::Group(Group {
- delimiter,
- stream,
- span,
- }) => {
- return tokenstream::TokenTree::Delimited(
- span,
- delimiter.to_internal(),
- stream.into(),
- )
- .into();
- }
- TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
- return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
- }
- TokenTree::Literal(self::Literal {
- lit: token::Lit { kind: token::Integer, symbol, suffix },
- span,
- }) if symbol.as_str().starts_with("-") => {
- let minus = BinOp(BinOpToken::Minus);
- let symbol = Symbol::intern(&symbol.as_str()[1..]);
- let integer = TokenKind::lit(token::Integer, symbol, suffix);
- let a = tokenstream::TokenTree::token(minus, span);
- let b = tokenstream::TokenTree::token(integer, span);
- return vec![a, b].into_iter().collect();
- }
- TokenTree::Literal(self::Literal {
- lit: token::Lit { kind: token::Float, symbol, suffix },
- span,
- }) if symbol.as_str().starts_with("-") => {
- let minus = BinOp(BinOpToken::Minus);
- let symbol = Symbol::intern(&symbol.as_str()[1..]);
- let float = TokenKind::lit(token::Float, symbol, suffix);
- let a = tokenstream::TokenTree::token(minus, span);
- let b = tokenstream::TokenTree::token(float, span);
- return vec![a, b].into_iter().collect();
- }
- TokenTree::Literal(self::Literal { lit, span }) => {
- return tokenstream::TokenTree::token(Literal(lit), span).into()
- }
- };
-
- let kind = match ch {
- '=' => Eq,
- '<' => Lt,
- '>' => Gt,
- '!' => Not,
- '~' => Tilde,
- '+' => BinOp(Plus),
- '-' => BinOp(Minus),
- '*' => BinOp(Star),
- '/' => BinOp(Slash),
- '%' => BinOp(Percent),
- '^' => BinOp(Caret),
- '&' => BinOp(And),
- '|' => BinOp(Or),
- '@' => At,
- '.' => Dot,
- ',' => Comma,
- ';' => Semi,
- ':' => Colon,
- '#' => Pound,
- '$' => Dollar,
- '?' => Question,
- '\'' => SingleQuote,
- _ => unreachable!(),
- };
-
- let tree = tokenstream::TokenTree::token(kind, span);
- TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
- }
-}
-
-impl ToInternal<errors::Level> for Level {
- fn to_internal(self) -> errors::Level {
- match self {
- Level::Error => errors::Level::Error,
- Level::Warning => errors::Level::Warning,
- Level::Note => errors::Level::Note,
- Level::Help => errors::Level::Help,
- _ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
- }
- }
-}
-
-#[derive(Clone)]
-pub struct TokenStreamIter {
- cursor: tokenstream::Cursor,
- stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
-}
-
-#[derive(Clone)]
-pub struct Group {
- delimiter: Delimiter,
- stream: TokenStream,
- span: DelimSpan,
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Punct {
- ch: char,
- // NB. not using `Spacing` here because it doesn't implement `Hash`.
- joint: bool,
- span: Span,
-}
-
-impl Punct {
- fn new(ch: char, joint: bool, span: Span) -> Punct {
- const LEGAL_CHARS: &[char] = &['=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^',
- '&', '|', '@', '.', ',', ';', ':', '#', '$', '?', '\''];
- if !LEGAL_CHARS.contains(&ch) {
- panic!("unsupported character `{:?}`", ch)
- }
- Punct { ch, joint, span }
- }
-}
-
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
-pub struct Ident {
- sym: Symbol,
- is_raw: bool,
- span: Span,
-}
-
-impl Ident {
- fn is_valid(string: &str) -> bool {
- let mut chars = string.chars();
- if let Some(start) = chars.next() {
- (start == '_' || start.is_xid_start())
- && chars.all(|cont| cont == '_' || cont.is_xid_continue())
- } else {
- false
- }
- }
- fn new(sym: Symbol, is_raw: bool, span: Span) -> Ident {
- let string = sym.as_str();
- if !Self::is_valid(&string) {
- panic!("`{:?}` is not a valid identifier", string)
- }
- // Get rid of gensyms to conservatively check rawness on the string contents only.
- if is_raw && !sym.as_interned_str().as_symbol().can_be_raw() {
- panic!("`{}` cannot be a raw identifier", string);
- }
- Ident { sym, is_raw, span }
- }
- fn dollar_crate(span: Span) -> Ident {
- // `$crate` is accepted as an ident only if it comes from the compiler.
- Ident { sym: kw::DollarCrate, is_raw: false, span }
- }
-}
-
-// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
-#[derive(Clone, Debug)]
-pub struct Literal {
- lit: token::Lit,
- span: Span,
-}
-
-pub(crate) struct Rustc<'a> {
- sess: &'a ParseSess,
- def_site: Span,
- call_site: Span,
-}
-
-impl<'a> Rustc<'a> {
- pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
- // No way to determine def location for a proc macro right now, so use call location.
- let location = cx.current_expansion.mark.expn_info().unwrap().call_site;
- let to_span = |transparency| {
- location.with_ctxt(
- SyntaxContext::empty()
- .apply_mark_with_transparency(cx.current_expansion.mark, transparency),
- )
- };
- Rustc {
- sess: cx.parse_sess,
- def_site: to_span(Transparency::Opaque),
- call_site: to_span(Transparency::Transparent),
- }
- }
-
- fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
- Literal {
- lit: token::Lit::new(kind, symbol, suffix),
- span: server::Span::call_site(self),
- }
- }
-}
-
-impl server::Types for Rustc<'_> {
- type TokenStream = TokenStream;
- type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
- type TokenStreamIter = TokenStreamIter;
- type Group = Group;
- type Punct = Punct;
- type Ident = Ident;
- type Literal = Literal;
- type SourceFile = Lrc<SourceFile>;
- type MultiSpan = Vec<Span>;
- type Diagnostic = Diagnostic;
- type Span = Span;
-}
-
-impl server::TokenStream for Rustc<'_> {
- fn new(&mut self) -> Self::TokenStream {
- TokenStream::empty()
- }
- fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
- stream.is_empty()
- }
- fn from_str(&mut self, src: &str) -> Self::TokenStream {
- parse::parse_stream_from_source_str(
- FileName::proc_macro_source_code(src),
- src.to_string(),
- self.sess,
- Some(self.call_site),
- )
- }
- fn to_string(&mut self, stream: &Self::TokenStream) -> String {
- stream.to_string()
- }
- fn from_token_tree(
- &mut self,
- tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
- ) -> Self::TokenStream {
- tree.to_internal()
- }
- fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
- TokenStreamIter {
- cursor: stream.trees(),
- stack: vec![],
- }
- }
-}
-
-impl server::TokenStreamBuilder for Rustc<'_> {
- fn new(&mut self) -> Self::TokenStreamBuilder {
- tokenstream::TokenStreamBuilder::new()
- }
- fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
- builder.push(stream);
- }
- fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
- builder.build()
- }
-}
-
-impl server::TokenStreamIter for Rustc<'_> {
- fn next(
- &mut self,
- iter: &mut Self::TokenStreamIter,
- ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
- loop {
- let tree = iter.stack.pop().or_else(|| {
- let next = iter.cursor.next_with_joint()?;
- Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
- })?;
- // HACK: The condition "dummy span + group with empty delimiter" represents an AST
- // fragment approximately converted into a token stream. This may happen, for
- // example, with inputs to proc macro attributes, including derives. Such "groups"
- // need to flattened during iteration over stream's token trees.
- // Eventually this needs to be removed in favor of keeping original token trees
- // and not doing the roundtrip through AST.
- if let TokenTree::Group(ref group) = tree {
- if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
- iter.cursor.append(group.stream.clone());
- continue;
- }
- }
- return Some(tree);
- }
- }
-}
-
-impl server::Group for Rustc<'_> {
- fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
- Group {
- delimiter,
- stream,
- span: DelimSpan::from_single(server::Span::call_site(self)),
- }
- }
- fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
- group.delimiter
- }
- fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
- group.stream.clone()
- }
- fn span(&mut self, group: &Self::Group) -> Self::Span {
- group.span.entire()
- }
- fn span_open(&mut self, group: &Self::Group) -> Self::Span {
- group.span.open
- }
- fn span_close(&mut self, group: &Self::Group) -> Self::Span {
- group.span.close
- }
- fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
- group.span = DelimSpan::from_single(span);
- }
-}
-
-impl server::Punct for Rustc<'_> {
- fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
- Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
- }
- fn as_char(&mut self, punct: Self::Punct) -> char {
- punct.ch
- }
- fn spacing(&mut self, punct: Self::Punct) -> Spacing {
- if punct.joint {
- Spacing::Joint
- } else {
- Spacing::Alone
- }
- }
- fn span(&mut self, punct: Self::Punct) -> Self::Span {
- punct.span
- }
- fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
- Punct { span, ..punct }
- }
-}
-
-impl server::Ident for Rustc<'_> {
- fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
- Ident::new(Symbol::intern(string), is_raw, span)
- }
- fn span(&mut self, ident: Self::Ident) -> Self::Span {
- ident.span
- }
- fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
- Ident { span, ..ident }
- }
-}
-
-impl server::Literal for Rustc<'_> {
- // FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
- fn debug(&mut self, literal: &Self::Literal) -> String {
- format!("{:?}", literal)
- }
- fn integer(&mut self, n: &str) -> Self::Literal {
- self.lit(token::Integer, Symbol::intern(n), None)
- }
- fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
- self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
- }
- fn float(&mut self, n: &str) -> Self::Literal {
- self.lit(token::Float, Symbol::intern(n), None)
- }
- fn f32(&mut self, n: &str) -> Self::Literal {
- self.lit(token::Float, Symbol::intern(n), Some(Symbol::intern("f32")))
- }
- fn f64(&mut self, n: &str) -> Self::Literal {
- self.lit(token::Float, Symbol::intern(n), Some(Symbol::intern("f64")))
- }
- fn string(&mut self, string: &str) -> Self::Literal {
- let mut escaped = String::new();
- for ch in string.chars() {
- escaped.extend(ch.escape_debug());
- }
- self.lit(token::Str, Symbol::intern(&escaped), None)
- }
- fn character(&mut self, ch: char) -> Self::Literal {
- let mut escaped = String::new();
- escaped.extend(ch.escape_unicode());
- self.lit(token::Char, Symbol::intern(&escaped), None)
- }
- fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
- let string = bytes
- .iter()
- .cloned()
- .flat_map(ascii::escape_default)
- .map(Into::<char>::into)
- .collect::<String>();
- self.lit(token::ByteStr, Symbol::intern(&string), None)
- }
- fn span(&mut self, literal: &Self::Literal) -> Self::Span {
- literal.span
- }
- fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
- literal.span = span;
- }
- fn subspan(
- &mut self,
- literal: &Self::Literal,
- start: Bound<usize>,
- end: Bound<usize>,
- ) -> Option<Self::Span> {
- let span = literal.span;
- let length = span.hi().to_usize() - span.lo().to_usize();
-
- let start = match start {
- Bound::Included(lo) => lo,
- Bound::Excluded(lo) => lo + 1,
- Bound::Unbounded => 0,
- };
-
- let end = match end {
- Bound::Included(hi) => hi + 1,
- Bound::Excluded(hi) => hi,
- Bound::Unbounded => length,
- };
-
- // Bounds check the values, preventing addition overflow and OOB spans.
- if start > u32::max_value() as usize
- || end > u32::max_value() as usize
- || (u32::max_value() - start as u32) < span.lo().to_u32()
- || (u32::max_value() - end as u32) < span.lo().to_u32()
- || start >= end
- || end > length
- {
- return None;
- }
-
- let new_lo = span.lo() + BytePos::from_usize(start);
- let new_hi = span.lo() + BytePos::from_usize(end);
- Some(span.with_lo(new_lo).with_hi(new_hi))
- }
-}
-
-impl server::SourceFile for Rustc<'_> {
- fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
- Lrc::ptr_eq(file1, file2)
- }
- fn path(&mut self, file: &Self::SourceFile) -> String {
- match file.name {
- FileName::Real(ref path) => path
- .to_str()
- .expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
- .to_string(),
- _ => file.name.to_string(),
- }
- }
- fn is_real(&mut self, file: &Self::SourceFile) -> bool {
- file.is_real_file()
- }
-}
-
-impl server::MultiSpan for Rustc<'_> {
- fn new(&mut self) -> Self::MultiSpan {
- vec![]
- }
- fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
- spans.push(span)
- }
-}
-
-impl server::Diagnostic for Rustc<'_> {
- fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
- let mut diag = Diagnostic::new(level.to_internal(), msg);
- diag.set_span(MultiSpan::from_spans(spans));
- diag
- }
- fn sub(
- &mut self,
- diag: &mut Self::Diagnostic,
- level: Level,
- msg: &str,
- spans: Self::MultiSpan,
- ) {
- diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
- }
- fn emit(&mut self, diag: Self::Diagnostic) {
- DiagnosticBuilder::new_diagnostic(&self.sess.span_diagnostic, diag).emit()
- }
-}
-
-impl server::Span for Rustc<'_> {
- fn debug(&mut self, span: Self::Span) -> String {
- format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
- }
- fn def_site(&mut self) -> Self::Span {
- self.def_site
- }
- fn call_site(&mut self) -> Self::Span {
- self.call_site
- }
- fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
- self.sess.source_map().lookup_char_pos(span.lo()).file
- }
- fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
- span.ctxt().outer_expn_info().map(|i| i.call_site)
- }
- fn source(&mut self, span: Self::Span) -> Self::Span {
- span.source_callsite()
- }
- fn start(&mut self, span: Self::Span) -> LineColumn {
- let loc = self.sess.source_map().lookup_char_pos(span.lo());
- LineColumn {
- line: loc.line,
- column: loc.col.to_usize(),
- }
- }
- fn end(&mut self, span: Self::Span) -> LineColumn {
- let loc = self.sess.source_map().lookup_char_pos(span.hi());
- LineColumn {
- line: loc.line,
- column: loc.col.to_usize(),
- }
- }
- fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
- let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
- let other_loc = self.sess.source_map().lookup_char_pos(second.lo());
-
- if self_loc.file.name != other_loc.file.name {
- return None;
- }
-
- Some(first.to(second))
- }
- fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
- span.with_ctxt(at.ctxt())
- }
- fn source_text(&mut self, span: Self::Span) -> Option<String> {
- self.sess.source_map().span_to_snippet(span).ok()
- }
-}
diff --git a/src/libsyntax_ext/source_util.rs b/src/libsyntax_ext/source_util.rs
new file mode 100644
index 0000000..2c8d53a
--- /dev/null
+++ b/src/libsyntax_ext/source_util.rs
@@ -0,0 +1,164 @@
+use syntax::{ast, panictry};
+use syntax::ext::base::{self, *};
+use syntax::parse::{self, token, DirectoryOwnership};
+use syntax::print::pprust;
+use syntax::ptr::P;
+use syntax::symbol::Symbol;
+use syntax::tokenstream;
+
+use smallvec::SmallVec;
+use syntax_pos::{self, Pos, Span};
+
+use std::fs;
+use std::io::ErrorKind;
+use rustc_data_structures::sync::Lrc;
+
+// These macros all relate to the file system; they either return
+// the column/row/filename of the expression, or they include
+// a given file into the current one.
+
+/// line!(): expands to the current line number
+pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'static> {
+ base::check_zero_tts(cx, sp, tts, "line!");
+
+ let topmost = cx.expansion_cause().unwrap_or(sp);
+ let loc = cx.source_map().lookup_char_pos(topmost.lo());
+
+ base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
+}
+
+/// column!(): expands to the current column number
+pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'static> {
+ base::check_zero_tts(cx, sp, tts, "column!");
+
+ let topmost = cx.expansion_cause().unwrap_or(sp);
+ let loc = cx.source_map().lookup_char_pos(topmost.lo());
+
+ base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1))
+}
+
+/// file!(): expands to the current filename
+/// The source_file (`loc.file`) contains a bunch more information we could spit
+/// out if we wanted.
+pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'static> {
+ base::check_zero_tts(cx, sp, tts, "file!");
+
+ let topmost = cx.expansion_cause().unwrap_or(sp);
+ let loc = cx.source_map().lookup_char_pos(topmost.lo());
+ base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
+}
+
+pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'static> {
+ let s = pprust::tts_to_string(tts);
+ base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
+}
+
+pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'static> {
+ base::check_zero_tts(cx, sp, tts, "module_path!");
+ let mod_path = &cx.current_expansion.module.mod_path;
+ let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");
+
+ base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string)))
+}
+
+/// include! : parse the given file as an expr
+/// This is generally a bad idea because it's going to behave
+/// unhygienically.
+pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'cx> {
+ let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
+ Some(f) => f,
+ None => return DummyResult::any(sp),
+ };
+ // The file will be added to the code map by the parser
+ let file = cx.resolve_path(file, sp);
+ let directory_ownership = DirectoryOwnership::Owned { relative: None };
+ let p = parse::new_sub_parser_from_file(cx.parse_sess(), &file, directory_ownership, None, sp);
+
+ struct ExpandResult<'a> {
+ p: parse::parser::Parser<'a>,
+ }
+ impl<'a> base::MacResult for ExpandResult<'a> {
+ fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
+ Some(panictry!(self.p.parse_expr()))
+ }
+
+ fn make_items(mut self: Box<ExpandResult<'a>>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
+ let mut ret = SmallVec::new();
+ while self.p.token != token::Eof {
+ match panictry!(self.p.parse_item()) {
+ Some(item) => ret.push(item),
+ None => self.p.sess.span_diagnostic.span_fatal(self.p.token.span,
+ &format!("expected item, found `{}`",
+ self.p.this_token_to_string()))
+ .raise()
+ }
+ }
+ Some(ret)
+ }
+ }
+
+ Box::new(ExpandResult { p })
+}
+
+// include_str! : read the given file, insert it as a literal string expr
+pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'static> {
+ let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
+ Some(f) => f,
+ None => return DummyResult::expr(sp)
+ };
+ let file = cx.resolve_path(file, sp);
+ match fs::read_to_string(&file) {
+ Ok(src) => {
+ let interned_src = Symbol::intern(&src);
+
+ // Add this input file to the code map to make it available as
+ // dependency information
+ cx.source_map().new_source_file(file.into(), src);
+
+ base::MacEager::expr(cx.expr_str(sp, interned_src))
+ },
+ Err(ref e) if e.kind() == ErrorKind::InvalidData => {
+ cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display()));
+ DummyResult::expr(sp)
+ }
+ Err(e) => {
+ cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
+ DummyResult::expr(sp)
+ }
+ }
+}
+
+pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
+ -> Box<dyn base::MacResult+'static> {
+ let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
+ Some(f) => f,
+ None => return DummyResult::expr(sp)
+ };
+ let file = cx.resolve_path(file, sp);
+ match fs::read(&file) {
+ Ok(bytes) => {
+ // Add the contents to the source map if it contains UTF-8.
+ let (contents, bytes) = match String::from_utf8(bytes) {
+ Ok(s) => {
+ let bytes = s.as_bytes().to_owned();
+ (s, bytes)
+ },
+ Err(e) => (String::new(), e.into_bytes()),
+ };
+ cx.source_map().new_source_file(file.into(), contents);
+
+ base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
+ },
+ Err(e) => {
+ cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
+ DummyResult::expr(sp)
+ }
+ }
+}
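The expanders above back long-stable macros, so a short usage sketch covers their observable behavior (the reported position comes from the expansion cause looked up through `expansion_cause`):

    fn report_location() -> String {
        // Handled by expand_file, expand_line, expand_column and
        // expand_stringify respectively.
        format!("{}:{}:{} ({})", file!(), line!(), column!(), stringify!(1 + 1))
    }

    fn main() {
        println!("{}", report_location());
    }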
diff --git a/src/libsyntax_ext/standard_library_imports.rs b/src/libsyntax_ext/standard_library_imports.rs
new file mode 100644
index 0000000..68b13bd
--- /dev/null
+++ b/src/libsyntax_ext/standard_library_imports.rs
@@ -0,0 +1,86 @@
+use syntax::{ast, attr};
+use syntax::edition::Edition;
+use syntax::ext::hygiene::{ExpnId, MacroKind};
+use syntax::ptr::P;
+use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned, respan};
+use syntax::symbol::{Ident, Symbol, kw, sym};
+use syntax_pos::DUMMY_SP;
+
+use std::iter;
+
+pub fn inject(
+ mut krate: ast::Crate, alt_std_name: Option<&str>, edition: Edition
+) -> (ast::Crate, Option<Symbol>) {
+ let rust_2018 = edition >= Edition::Edition2018;
+
+ // the first name in this list is the crate name of the crate with the prelude
+ let names: &[&str] = if attr::contains_name(&krate.attrs, sym::no_core) {
+ return (krate, None);
+ } else if attr::contains_name(&krate.attrs, sym::no_std) {
+ if attr::contains_name(&krate.attrs, sym::compiler_builtins) {
+ &["core"]
+ } else {
+ &["core", "compiler_builtins"]
+ }
+ } else {
+ &["std"]
+ };
+
+ // .rev() to preserve ordering above in combination with insert(0, ...)
+ let alt_std_name = alt_std_name.map(Symbol::intern);
+ for orig_name_str in names.iter().rev() {
+ // HACK(eddyb) gensym the injected crates on the Rust 2018 edition,
+ // so they don't accidentally interfere with the new import paths.
+ let orig_name_sym = Symbol::intern(orig_name_str);
+ let orig_name_ident = Ident::with_empty_ctxt(orig_name_sym);
+ let (rename, orig_name) = if rust_2018 {
+ (orig_name_ident.gensym(), Some(orig_name_sym))
+ } else {
+ (orig_name_ident, None)
+ };
+ krate.module.items.insert(0, P(ast::Item {
+ attrs: vec![attr::mk_attr_outer(
+ attr::mk_word_item(ast::Ident::with_empty_ctxt(sym::macro_use))
+ )],
+ vis: dummy_spanned(ast::VisibilityKind::Inherited),
+ node: ast::ItemKind::ExternCrate(alt_std_name.or(orig_name)),
+ ident: rename,
+ id: ast::DUMMY_NODE_ID,
+ span: DUMMY_SP,
+ tokens: None,
+ }));
+ }
+
+ // The crates have been injected; the assumption is that the first one is the one
+ // with the prelude.
+ let name = names[0];
+
+ let span = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
+ ExpnKind::Macro(MacroKind::Attr, sym::std_inject), DUMMY_SP, edition,
+ [sym::prelude_import][..].into(),
+ ));
+
+ krate.module.items.insert(0, P(ast::Item {
+ attrs: vec![attr::mk_attr_outer(
+ attr::mk_word_item(ast::Ident::new(sym::prelude_import, span)))],
+ vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
+ node: ast::ItemKind::Use(P(ast::UseTree {
+ prefix: ast::Path {
+ segments: iter::once(ast::Ident::with_empty_ctxt(kw::PathRoot))
+ .chain(
+ [name, "prelude", "v1"].iter().cloned()
+ .map(ast::Ident::from_str)
+ ).map(ast::PathSegment::from_ident).collect(),
+ span,
+ },
+ kind: ast::UseTreeKind::Glob,
+ span,
+ })),
+ id: ast::DUMMY_NODE_ID,
+ ident: ast::Ident::invalid(),
+ span,
+ tokens: None,
+ }));
+
+ (krate, Some(Symbol::intern(name)))
+}
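For an ordinary 2015-edition crate that uses `std`, the insertions above amount to prepending roughly the following items (a sketch of the generated shape; the real items carry a fresh expansion span, use internal attributes, and on the 2018 edition the `std` identifier is additionally gensym'd):

    // Inserted second, so it ends up first in the item list.
    #[prelude_import]
    use ::std::prelude::v1::*;

    // Inserted first, by the loop over `names`.
    #[macro_use]
    extern crate std;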
diff --git a/src/libsyntax_ext/test.rs b/src/libsyntax_ext/test.rs
index 24d3055..993ef25 100644
--- a/src/libsyntax_ext/test.rs
+++ b/src/libsyntax_ext/test.rs
@@ -1,32 +1,65 @@
/// The expansion from a test function to the appropriate test struct for libtest
/// Ideally, this code would be in libtest but for efficiency and error messages it lives here.
-use syntax::ext::base::*;
-use syntax::ext::build::AstBuilder;
-use syntax::ext::hygiene::{Mark, SyntaxContext};
-use syntax::attr;
use syntax::ast;
+use syntax::attr::{self, check_builtin_macro_attribute};
+use syntax::ext::base::*;
+use syntax::ext::hygiene::SyntaxContext;
use syntax::print::pprust;
+use syntax::source_map::respan;
use syntax::symbol::{Symbol, sym};
use syntax_pos::Span;
-use syntax::source_map::{ExpnInfo, MacroAttribute};
+
use std::iter;
+// `#[test_case]` is used by custom test authors to mark tests.
+// When building for test, it needs to make the item public and gensym the name.
+// Otherwise, we omit the item. This behavior means that any item annotated
+// with `#[test_case]` is never addressable.
+//
+// We mark the item with an inert attribute, `rustc_test_marker`, which the test
+// generation logic will pick up on.
+pub fn expand_test_case(
+ ecx: &mut ExtCtxt<'_>,
+ attr_sp: Span,
+ meta_item: &ast::MetaItem,
+ anno_item: Annotatable
+) -> Vec<Annotatable> {
+ check_builtin_macro_attribute(ecx, meta_item, sym::test_case);
+
+ if !ecx.ecfg.should_test { return vec![]; }
+
+ let sp = attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id));
+ let mut item = anno_item.expect_item();
+ item = item.map(|mut item| {
+ item.vis = respan(item.vis.span, ast::VisibilityKind::Public);
+ item.ident = item.ident.gensym();
+ item.attrs.push(
+ ecx.attribute(ecx.meta_word(sp, sym::rustc_test_marker))
+ );
+ item
+ });
+
+ return vec![Annotatable::Item(item)]
+}
+
pub fn expand_test(
cx: &mut ExtCtxt<'_>,
attr_sp: Span,
- _meta_item: &ast::MetaItem,
+ meta_item: &ast::MetaItem,
item: Annotatable,
) -> Vec<Annotatable> {
+ check_builtin_macro_attribute(cx, meta_item, sym::test);
expand_test_or_bench(cx, attr_sp, item, false)
}
pub fn expand_bench(
cx: &mut ExtCtxt<'_>,
attr_sp: Span,
- _meta_item: &ast::MetaItem,
+ meta_item: &ast::MetaItem,
item: Annotatable,
) -> Vec<Annotatable> {
+ check_builtin_macro_attribute(cx, meta_item, sym::bench);
expand_test_or_bench(cx, attr_sp, item, true)
}
@@ -43,12 +76,12 @@
if let Annotatable::Item(i) = item { i }
else {
cx.parse_sess.span_diagnostic.span_fatal(item.span(),
- "#[test] attribute is only allowed on non associated functions").raise();
+ "`#[test]` attribute is only allowed on non associated functions").raise();
};
if let ast::ItemKind::Mac(_) = item.node {
cx.parse_sess.span_diagnostic.span_warn(item.span,
- "#[test] attribute should not be used on macros. Use #[cfg(test)] instead.");
+ "`#[test]` attribute should not be used on macros. Use `#[cfg(test)]` instead.");
return vec![Annotatable::Item(item)];
}
@@ -60,15 +93,8 @@
return vec![Annotatable::Item(item)];
}
- let (sp, attr_sp) = {
- let mark = Mark::fresh(Mark::root());
- mark.set_expn_info(ExpnInfo::with_unstable(
- MacroAttribute(sym::test), attr_sp, cx.parse_sess.edition,
- &[sym::rustc_attrs, sym::test],
- ));
- (item.span.with_ctxt(SyntaxContext::empty().apply_mark(mark)),
- attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(mark)))
- };
+ let ctxt = SyntaxContext::empty().apply_mark(cx.current_expansion.id);
+ let (sp, attr_sp) = (item.span.with_ctxt(ctxt), attr_sp.with_ctxt(ctxt));
// Gensym "test" so we can extern crate without conflicting with any local names
let test_id = cx.ident_of("test").gensym();
@@ -122,11 +148,11 @@
let mut test_const = cx.item(sp, ast::Ident::new(item.ident.name, sp).gensym(),
vec![
// #[cfg(test)]
- cx.attribute(attr_sp, cx.meta_list(attr_sp, sym::cfg, vec![
+ cx.attribute(cx.meta_list(attr_sp, sym::cfg, vec![
cx.meta_list_item_word(attr_sp, sym::test)
])),
// #[rustc_test_marker]
- cx.attribute(attr_sp, cx.meta_word(attr_sp, sym::rustc_test_marker)),
+ cx.attribute(cx.meta_word(attr_sp, sym::rustc_test_marker)),
],
// const $ident: test::TestDescAndFn =
ast::ItemKind::Const(cx.ty(sp, ast::TyKind::Path(None, test_path("TestDescAndFn"))),
@@ -175,7 +201,7 @@
ast::ItemKind::ExternCrate(Some(sym::test))
);
- log::debug!("Synthetic test item:\n{}\n", pprust::item_to_string(&test_const));
+ log::debug!("synthetic test item:\n{}\n", pprust::item_to_string(&test_const));
vec![
// Access to libtest under a gensymed name
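A sketch of the user-facing side of `expand_test_case` above, assuming the nightly custom test frameworks setup; `my_runner` is a made-up runner name. With `--test`, the marked item is made public, gensym'd, and tagged `#[rustc_test_marker]`; otherwise it is dropped entirely:

    #![feature(custom_test_frameworks)]
    #![test_runner(my_runner)]

    #[test_case]
    const WILL_PASS: i32 = 0;

    // Receives a reference to every collected #[test_case] item.
    fn my_runner(cases: &[&i32]) {
        for case in cases {
            assert_eq!(**case, 0);
        }
    }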
diff --git a/src/libsyntax_ext/test_case.rs b/src/libsyntax_ext/test_case.rs
deleted file mode 100644
index 6e3bc05..0000000
--- a/src/libsyntax_ext/test_case.rs
+++ /dev/null
@@ -1,60 +0,0 @@
-// http://rust-lang.org/COPYRIGHT.
-//
-
-// #[test_case] is used by custom test authors to mark tests
-// When building for test, it needs to make the item public and gensym the name
-// Otherwise, we'll omit the item. This behavior means that any item annotated
-// with #[test_case] is never addressable.
-//
-// We mark item with an inert attribute "rustc_test_marker" which the test generation
-// logic will pick up on.
-
-use syntax::ext::base::*;
-use syntax::ext::build::AstBuilder;
-use syntax::ext::hygiene::{Mark, SyntaxContext};
-use syntax::ast;
-use syntax::source_map::respan;
-use syntax::symbol::sym;
-use syntax_pos::Span;
-use syntax::source_map::{ExpnInfo, MacroAttribute};
-use syntax::feature_gate;
-
-pub fn expand(
- ecx: &mut ExtCtxt<'_>,
- attr_sp: Span,
- _meta_item: &ast::MetaItem,
- anno_item: Annotatable
-) -> Vec<Annotatable> {
- if !ecx.ecfg.enable_custom_test_frameworks() {
- feature_gate::emit_feature_err(&ecx.parse_sess,
- sym::custom_test_frameworks,
- attr_sp,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_CUSTOM_TEST_FRAMEWORKS);
- }
-
- if !ecx.ecfg.should_test { return vec![]; }
-
- let sp = {
- let mark = Mark::fresh(Mark::root());
- mark.set_expn_info(ExpnInfo::with_unstable(
- MacroAttribute(sym::test_case), attr_sp, ecx.parse_sess.edition,
- &[sym::test, sym::rustc_attrs],
- ));
- attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
- };
-
- let mut item = anno_item.expect_item();
-
- item = item.map(|mut item| {
- item.vis = respan(item.vis.span, ast::VisibilityKind::Public);
- item.ident = item.ident.gensym();
- item.attrs.push(
- ecx.attribute(sp,
- ecx.meta_word(sp, sym::rustc_test_marker))
- );
- item
- });
-
- return vec![Annotatable::Item(item)]
-}
diff --git a/src/libsyntax_ext/test_harness.rs b/src/libsyntax_ext/test_harness.rs
new file mode 100644
index 0000000..eec8a3f
--- /dev/null
+++ b/src/libsyntax_ext/test_harness.rs
@@ -0,0 +1,390 @@
+// Code that generates a test runner to run all the tests in a crate
+
+use log::debug;
+use smallvec::{smallvec, SmallVec};
+use syntax::ast::{self, Ident};
+use syntax::attr;
+use syntax::entry::{self, EntryPointType};
+use syntax::ext::base::{ExtCtxt, Resolver};
+use syntax::ext::expand::ExpansionConfig;
+use syntax::ext::hygiene::{ExpnId, MacroKind};
+use syntax::feature_gate::Features;
+use syntax::mut_visit::{*, ExpectOne};
+use syntax::parse::ParseSess;
+use syntax::ptr::P;
+use syntax::source_map::{ExpnInfo, ExpnKind, dummy_spanned};
+use syntax::symbol::{kw, sym, Symbol};
+use syntax_pos::{Span, DUMMY_SP};
+
+use std::{iter, mem};
+
+struct Test {
+ span: Span,
+ path: Vec<Ident>,
+}
+
+struct TestCtxt<'a> {
+ span_diagnostic: &'a errors::Handler,
+ path: Vec<Ident>,
+ ext_cx: ExtCtxt<'a>,
+ test_cases: Vec<Test>,
+ reexport_test_harness_main: Option<Symbol>,
+ test_runner: Option<ast::Path>,
+ // top-level re-export submodule, filled out after folding is finished
+ toplevel_reexport: Option<Ident>,
+}
+
+// Traverse the crate, collecting all the test functions, eliding any
+// existing main functions, and synthesizing a main test harness
+pub fn inject(
+ sess: &ParseSess,
+ resolver: &mut dyn Resolver,
+ should_test: bool,
+ krate: &mut ast::Crate,
+ span_diagnostic: &errors::Handler,
+ features: &Features,
+) {
+ // Check for #[reexport_test_harness_main = "some_name"] which
+ // creates a `use __test::main as some_name;`. This needs to be
+ // unconditional, so that the attribute is still marked as used in
+ // non-test builds.
+ let reexport_test_harness_main =
+ attr::first_attr_value_str_by_name(&krate.attrs, sym::reexport_test_harness_main);
+
+ // Do this here so that the test_runner crate attribute gets marked as used
+ // even in non-test builds
+ let test_runner = get_test_runner(span_diagnostic, &krate);
+
+ if should_test {
+ generate_test_harness(sess, resolver, reexport_test_harness_main,
+ krate, span_diagnostic, features, test_runner)
+ }
+}
+
+struct TestHarnessGenerator<'a> {
+ cx: TestCtxt<'a>,
+ tests: Vec<Ident>,
+
+ // submodule name, gensym'd identifier for re-exports
+ tested_submods: Vec<(Ident, Ident)>,
+}
+
+impl<'a> MutVisitor for TestHarnessGenerator<'a> {
+ fn visit_crate(&mut self, c: &mut ast::Crate) {
+ noop_visit_crate(c, self);
+
+ // Create a main function to run our tests
+ let test_main = {
+ let unresolved = mk_main(&mut self.cx);
+ self.cx.ext_cx.monotonic_expander().flat_map_item(unresolved).pop().unwrap()
+ };
+
+ c.module.items.push(test_main);
+ }
+
+ fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+ let ident = i.ident;
+ if ident.name != kw::Invalid {
+ self.cx.path.push(ident);
+ }
+ debug!("current path: {}", path_name_i(&self.cx.path));
+
+ let mut item = i.into_inner();
+ if is_test_case(&item) {
+ debug!("this is a test item");
+
+ let test = Test {
+ span: item.span,
+ path: self.cx.path.clone(),
+ };
+ self.cx.test_cases.push(test);
+ self.tests.push(item.ident);
+ }
+
+ // We don't want to recurse into anything other than mods, since
+ // mods or tests inside of functions will break things
+ if let ast::ItemKind::Mod(mut module) = item.node {
+ let tests = mem::take(&mut self.tests);
+ let tested_submods = mem::take(&mut self.tested_submods);
+ noop_visit_mod(&mut module, self);
+ let tests = mem::replace(&mut self.tests, tests);
+ let tested_submods = mem::replace(&mut self.tested_submods, tested_submods);
+
+ if !tests.is_empty() || !tested_submods.is_empty() {
+ let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods);
+ module.items.push(it);
+
+ if !self.cx.path.is_empty() {
+ self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym));
+ } else {
+ debug!("pushing nothing, sym: {:?}", sym);
+ self.cx.toplevel_reexport = Some(sym);
+ }
+ }
+ item.node = ast::ItemKind::Mod(module);
+ }
+ if ident.name != kw::Invalid {
+ self.cx.path.pop();
+ }
+ smallvec![P(item)]
+ }
+
+ fn visit_mac(&mut self, _mac: &mut ast::Mac) {
+ // Do nothing.
+ }
+}
+
+/// A folder used to remove any entry points (like fn main) because the harness
+/// generator will provide its own
+struct EntryPointCleaner {
+ // Current depth in the ast
+ depth: usize,
+}
+
+impl MutVisitor for EntryPointCleaner {
+ fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+ self.depth += 1;
+ let item = noop_flat_map_item(i, self).expect_one("noop did something");
+ self.depth -= 1;
+
+ // Remove any #[main] or #[start] from the AST so it doesn't
+ // clash with the one we're going to add, but mark it as
+ // #[allow(dead_code)] to avoid printing warnings.
+ let item = match entry::entry_point_type(&item, self.depth) {
+ EntryPointType::MainNamed |
+ EntryPointType::MainAttr |
+ EntryPointType::Start =>
+ item.map(|ast::Item {id, ident, attrs, node, vis, span, tokens}| {
+ let allow_ident = Ident::with_empty_ctxt(sym::allow);
+ let dc_nested = attr::mk_nested_word_item(Ident::from_str("dead_code"));
+ let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]);
+ let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item);
+
+ ast::Item {
+ id,
+ ident,
+ attrs: attrs.into_iter()
+ .filter(|attr| {
+ !attr.check_name(sym::main) && !attr.check_name(sym::start)
+ })
+ .chain(iter::once(allow_dead_code))
+ .collect(),
+ node,
+ vis,
+ span,
+ tokens,
+ }
+ }),
+ EntryPointType::None |
+ EntryPointType::OtherMain => item,
+ };
+
+ smallvec![item]
+ }
+
+ fn visit_mac(&mut self, _mac: &mut ast::Mac) {
+ // Do nothing.
+ }
+}
+
+/// Creates an item (specifically a module) that "pub use"s the tests passed in.
+/// Each tested submodule will contain a similar reexport module that we will export
+/// under the name of the original module. That is, `submod::__test_reexports` is
+/// reexported like so `pub use submod::__test_reexports as submod`.
+fn mk_reexport_mod(cx: &mut TestCtxt<'_>,
+ parent: ast::NodeId,
+ tests: Vec<Ident>,
+ tested_submods: Vec<(Ident, Ident)>)
+ -> (P<ast::Item>, Ident) {
+ let super_ = Ident::with_empty_ctxt(kw::Super);
+
+ let items = tests.into_iter().map(|r| {
+ cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public),
+ cx.ext_cx.path(DUMMY_SP, vec![super_, r]))
+ }).chain(tested_submods.into_iter().map(|(r, sym)| {
+ let path = cx.ext_cx.path(DUMMY_SP, vec![super_, r, sym]);
+ cx.ext_cx.item_use_simple_(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public),
+ Some(r), path)
+ })).collect();
+
+ let reexport_mod = ast::Mod {
+ inline: true,
+ inner: DUMMY_SP,
+ items,
+ };
+
+ let name = Ident::from_str("__test_reexports").gensym();
+ let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent };
+ cx.ext_cx.current_expansion.id = cx.ext_cx.resolver.get_module_scope(parent);
+ let it = cx.ext_cx.monotonic_expander().flat_map_item(P(ast::Item {
+ ident: name,
+ attrs: Vec::new(),
+ id: ast::DUMMY_NODE_ID,
+ node: ast::ItemKind::Mod(reexport_mod),
+ vis: dummy_spanned(ast::VisibilityKind::Public),
+ span: DUMMY_SP,
+ tokens: None,
+ })).pop().unwrap();
+
+ (it, name)
+}
+
+/// Crawl over the crate, inserting test reexports and the test main function
+fn generate_test_harness(sess: &ParseSess,
+ resolver: &mut dyn Resolver,
+ reexport_test_harness_main: Option<Symbol>,
+ krate: &mut ast::Crate,
+ sd: &errors::Handler,
+ features: &Features,
+ test_runner: Option<ast::Path>) {
+ // Remove the entry points
+ let mut cleaner = EntryPointCleaner { depth: 0 };
+ cleaner.visit_crate(krate);
+
+ let mut econfig = ExpansionConfig::default("test".to_string());
+ econfig.features = Some(features);
+
+ let cx = TestCtxt {
+ span_diagnostic: sd,
+ ext_cx: ExtCtxt::new(sess, econfig, resolver),
+ path: Vec::new(),
+ test_cases: Vec::new(),
+ reexport_test_harness_main,
+ toplevel_reexport: None,
+ test_runner
+ };
+
+ TestHarnessGenerator {
+ cx,
+ tests: Vec::new(),
+ tested_submods: Vec::new(),
+ }.visit_crate(krate);
+}
+
+/// Creates a function item for use as the main function of a test build.
+/// This function will call the `test_runner` as specified by the crate attribute
+fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
+ // Writing this out by hand:
+ // pub fn main() {
+ // #![main]
+ // test::test_main_static(&[..tests]);
+ // }
+ let sp = DUMMY_SP.fresh_expansion(ExpnId::root(), ExpnInfo::allow_unstable(
+ ExpnKind::Macro(MacroKind::Attr, sym::test_case), DUMMY_SP, cx.ext_cx.parse_sess.edition,
+ [sym::main, sym::test, sym::rustc_attrs][..].into(),
+ ));
+ let ecx = &cx.ext_cx;
+ let test_id = Ident::with_empty_ctxt(sym::test);
+
+ // test::test_main_static(...)
+ let mut test_runner = cx.test_runner.clone().unwrap_or(
+ ecx.path(sp, vec![
+ test_id, ecx.ident_of("test_main_static")
+ ]));
+
+ test_runner.span = sp;
+
+ let test_main_path_expr = ecx.expr_path(test_runner);
+ let call_test_main = ecx.expr_call(sp, test_main_path_expr,
+ vec![mk_tests_slice(cx)]);
+ let call_test_main = ecx.stmt_expr(call_test_main);
+
+ // #![main]
+ let main_meta = ecx.meta_word(sp, sym::main);
+ let main_attr = ecx.attribute(main_meta);
+
+ // extern crate test as test_gensym
+ let test_extern_stmt = ecx.stmt_item(sp, ecx.item(sp,
+ test_id,
+ vec![],
+ ast::ItemKind::ExternCrate(None)
+ ));
+
+ // pub fn main() { ... }
+ let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));
+
+ // If no test runner is provided we need to import the test crate
+ let main_body = if cx.test_runner.is_none() {
+ ecx.block(sp, vec![test_extern_stmt, call_test_main])
+ } else {
+ ecx.block(sp, vec![call_test_main])
+ };
+
+ let main = ast::ItemKind::Fn(ecx.fn_decl(vec![], ast::FunctionRetTy::Ty(main_ret_ty)),
+ ast::FnHeader::default(),
+ ast::Generics::default(),
+ main_body);
+
+ // Honor the reexport_test_harness_main attribute
+ let main_id = match cx.reexport_test_harness_main {
+ Some(sym) => Ident::new(sym, sp),
+ None => Ident::from_str_and_span("main", sp).gensym(),
+ };
+
+ P(ast::Item {
+ ident: main_id,
+ attrs: vec![main_attr],
+ id: ast::DUMMY_NODE_ID,
+ node: main,
+ vis: dummy_spanned(ast::VisibilityKind::Public),
+ span: sp,
+ tokens: None,
+ })
+
+}
+
+fn path_name_i(idents: &[Ident]) -> String {
+ let mut path_name = "".to_string();
+ let mut idents_iter = idents.iter().peekable();
+ while let Some(ident) = idents_iter.next() {
+ path_name.push_str(&ident.as_str());
+ if idents_iter.peek().is_some() {
+ path_name.push_str("::")
+ }
+ }
+ path_name
+}
+
+/// Creates a slice containing every test like so:
+/// &[path::to::test1, path::to::test2]
+fn mk_tests_slice(cx: &TestCtxt<'_>) -> P<ast::Expr> {
+ debug!("building test vector from {} tests", cx.test_cases.len());
+ let ref ecx = cx.ext_cx;
+
+ ecx.expr_vec_slice(DUMMY_SP,
+ cx.test_cases.iter().map(|test| {
+ ecx.expr_addr_of(test.span,
+ ecx.expr_path(ecx.path(test.span, visible_path(cx, &test.path))))
+ }).collect())
+}
+
+/// Creates a path from the top-level __test module to the test via __test_reexports
+fn visible_path(cx: &TestCtxt<'_>, path: &[Ident]) -> Vec<Ident>{
+ let mut visible_path = vec![];
+ match cx.toplevel_reexport {
+ Some(id) => visible_path.push(id),
+ None => {
+ cx.span_diagnostic.bug("expected to find top-level re-export name, but found None");
+ }
+ }
+ visible_path.extend_from_slice(path);
+ visible_path
+}
+
+fn is_test_case(i: &ast::Item) -> bool {
+ attr::contains_name(&i.attrs, sym::rustc_test_marker)
+}
+
+fn get_test_runner(sd: &errors::Handler, krate: &ast::Crate) -> Option<ast::Path> {
+ let test_attr = attr::find_by_name(&krate.attrs, sym::test_runner)?;
+ test_attr.meta_item_list().map(|meta_list| {
+ if meta_list.len() != 1 {
+ sd.span_fatal(test_attr.span,
+ "`#![test_runner(..)]` accepts exactly 1 argument").raise()
+ }
+ match meta_list[0].meta_item() {
+ Some(meta_item) if meta_item.is_word() => meta_item.path.clone(),
+ _ => sd.span_fatal(test_attr.span, "`test_runner` argument must be a path").raise()
+ }
+ })
+}
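The re-export chain described in `mk_reexport_mod`'s doc comment can be shown with a self-contained sketch; identifiers are gensym'd in the real output, and `deep_test` is a made-up test name:

    mod submod {
        pub fn deep_test() {}

        // Built per tested module by mk_reexport_mod.
        pub mod __test_reexports {
            pub use super::deep_test;
        }
    }

    // Built at the crate root; each tested submodule's re-export module is
    // re-exported under the submodule's own name, so mk_tests_slice can name
    // every test starting from the root.
    pub mod __test_reexports {
        pub use super::submod::__test_reexports as submod;
    }

    fn main() {
        // The generated harness would put `&__test_reexports::submod::deep_test`
        // into the slice handed to the test runner.
        let _ = &__test_reexports::submod::deep_test;
    }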
diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs
index 512513e..0dce8a3 100644
--- a/src/libsyntax_ext/trace_macros.rs
+++ b/src/libsyntax_ext/trace_macros.rs
@@ -1,6 +1,5 @@
use syntax::ext::base::{self, ExtCtxt};
-use syntax::feature_gate;
-use syntax::symbol::{kw, sym};
+use syntax::symbol::kw;
use syntax_pos::Span;
use syntax::tokenstream::TokenTree;
@@ -8,14 +7,6 @@
sp: Span,
tt: &[TokenTree])
-> Box<dyn base::MacResult + 'static> {
- if !cx.ecfg.enable_trace_macros() {
- feature_gate::emit_feature_err(&cx.parse_sess,
- sym::trace_macros,
- sp,
- feature_gate::GateIssue::Language,
- feature_gate::EXPLAIN_TRACE_MACROS);
- }
-
match tt {
[TokenTree::Token(token)] if token.is_keyword(kw::True) => {
cx.set_trace_macros(true);