diff --git a/RELEASES.md b/RELEASES.md index 7a9d256be282f..64e2145e0f37b 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -29,7 +29,7 @@ Libraries - [Copied `AsciiExt` methods onto `char`][46077] - [Remove `T: Sized` requirement on `ptr::is_null()`][46094] - [impl `From` for `{TryRecvError, RecvTimeoutError}`][45506] -- [Optimised `f32::{min, max}` to generate more efficent x86 assembly][47080] +- [Optimised `f32::{min, max}` to generate more efficient x86 assembly][47080] - [`[u8]::contains` now uses memchr which provides a 3x speed improvement][46713] Stabilized APIs diff --git a/config.toml.example b/config.toml.example index f153562a53894..8d1fa3eec5cf2 100644 --- a/config.toml.example +++ b/config.toml.example @@ -151,8 +151,8 @@ # default. #extended = false -# Installs choosen set of extended tools if enables. By default builds all. -# If choosen tool failed to build the installation fails. +# Installs chosen set of extended tools if enables. By default builds all. +# If chosen tool failed to build the installation fails. #tools = ["cargo", "rls", "rustfmt", "analysis", "src"] # Verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose diff --git a/src/bootstrap/flags.rs b/src/bootstrap/flags.rs index 42b949527e09d..eb5c3b8ce147f 100644 --- a/src/bootstrap/flags.rs +++ b/src/bootstrap/flags.rs @@ -60,6 +60,7 @@ pub enum Subcommand { test_args: Vec<String>, rustc_args: Vec<String>, fail_fast: bool, + doc_tests: bool, }, Bench { paths: Vec<PathBuf>, @@ -164,6 +165,7 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`"); "extra options to pass the compiler when running tests", "ARGS", ); + opts.optflag("", "doc", "run doc tests"); }, "bench" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); }, "clean" => { opts.optflag("", "all", "clean all build artifacts"); }, @@ -320,6 +322,7 @@ Arguments: test_args: matches.opt_strs("test-args"), rustc_args: matches.opt_strs("rustc-args"), fail_fast: !matches.opt_present("no-fail-fast"), + doc_tests: matches.opt_present("doc"), } } "bench" => { @@ -410,6 +413,13 @@ impl Subcommand { _ => false, } } + + pub fn doc_tests(&self) -> bool { + match *self { + Subcommand::Test { doc_tests, .. } => doc_tests, + _ => false, + } + } } fn split(s: Vec<String>) -> Vec<String> { diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index afd740ce54845..90f50275b6bb4 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -226,6 +226,7 @@ pub struct Build { rustfmt_info: channel::GitInfo, local_rebuild: bool, fail_fast: bool, + doc_tests: bool, verbosity: usize, // Targets for which to build. 
@@ -326,6 +327,7 @@ impl Build { initial_cargo: config.initial_cargo.clone(), local_rebuild: config.local_rebuild, fail_fast: config.cmd.fail_fast(), + doc_tests: config.cmd.doc_tests(), verbosity: config.verbose, build: config.build, diff --git a/src/bootstrap/test.rs b/src/bootstrap/test.rs index 64ede4f4ecc88..bd8c36a296c09 100644 --- a/src/bootstrap/test.rs +++ b/src/bootstrap/test.rs @@ -935,7 +935,7 @@ impl Step for Compiletest { } } if suite == "run-make" && !build.config.llvm_enabled { - println!("Ignoring run-make test suite as they generally dont work without LLVM"); + println!("Ignoring run-make test suite as they generally don't work without LLVM"); return; } @@ -1355,6 +1355,9 @@ impl Step for Crate { if test_kind.subcommand() == "test" && !build.fail_fast { cargo.arg("--no-fail-fast"); } + if build.doc_tests { + cargo.arg("--doc"); + } cargo.arg("-p").arg(krate); diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs index 3e9d799c08948..dfd42f3e73301 100644 --- a/src/libcore/iter/sources.rs +++ b/src/libcore/iter/sources.rs @@ -159,7 +159,7 @@ unsafe impl A> TrustedLen for RepeatWith {} /// [`repeat`]: fn.repeat.html /// /// An iterator produced by `repeat_with()` is a `DoubleEndedIterator`. -/// It is important to not that reversing `repeat_with(f)` will produce +/// It is important to note that reversing `repeat_with(f)` will produce /// the exact same sequence as the non-reversed iterator. In other words, /// `repeat_with(f).rev().collect::>()` is equivalent to /// `repeat_with(f).collect::>()`. diff --git a/src/librustc/diagnostics.rs b/src/librustc/diagnostics.rs index 4c256556191fa..287516474d49a 100644 --- a/src/librustc/diagnostics.rs +++ b/src/librustc/diagnostics.rs @@ -1891,7 +1891,7 @@ is a function pointer, which is not zero-sized. This pattern should be rewritten. There are a few possible ways to do this: - change the original fn declaration to match the expected signature, - and do the cast in the fn body (the prefered option) + and do the cast in the fn body (the preferred option) - cast the fn item fo a fn pointer before calling transmute, as shown here: ``` diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 55dcb16c3c95f..e3af285053805 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -3362,10 +3362,10 @@ impl<'a> LoweringContext<'a> { v: &Visibility, explicit_owner: Option) -> hir::Visibility { - match *v { - Visibility::Public => hir::Public, - Visibility::Crate(..) => hir::Visibility::Crate, - Visibility::Restricted { ref path, id } => { + match v.node { + VisibilityKind::Public => hir::Public, + VisibilityKind::Crate(..) => hir::Visibility::Crate, + VisibilityKind::Restricted { ref path, id, .. 
} => { hir::Visibility::Restricted { path: P(self.lower_path(id, path, ParamMode::Explicit, true)), id: if let Some(owner) = explicit_owner { @@ -3375,7 +3375,7 @@ impl<'a> LoweringContext<'a> { } } } - Visibility::Inherited => hir::Inherited, + VisibilityKind::Inherited => hir::Inherited, } } diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 43cc437e1e7e3..61a58a6030623 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -72,6 +72,10 @@ impl DefPathTable { index } + pub fn next_id(&self, address_space: DefIndexAddressSpace) -> DefIndex { + DefIndex::from_array_index(self.index_to_key[address_space.index()].len(), address_space) + } + #[inline(always)] pub fn def_key(&self, index: DefIndex) -> DefKey { self.index_to_key[index.address_space().index()] diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 2854b9da1476f..bc03f7ead8187 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -543,7 +543,7 @@ impl Generics { } /// Synthetic Type Parameters are converted to an other form during lowering, this allows -/// to track the original form they had. Usefull for error messages. +/// to track the original form they had. Useful for error messages. #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum SyntheticTyParamKind { ImplTrait diff --git a/src/librustc/infer/error_reporting/mod.rs b/src/librustc/infer/error_reporting/mod.rs index 03fc40b2e39fc..700d06acf11a4 100644 --- a/src/librustc/infer/error_reporting/mod.rs +++ b/src/librustc/infer/error_reporting/mod.rs @@ -734,7 +734,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } } - // When finding T != &T, hightlight only the borrow + // When finding T != &T, highlight only the borrow (&ty::TyRef(r1, ref tnm1), _) if equals(&tnm1.ty, &t2) => { let mut values = (DiagnosticStyledString::new(), DiagnosticStyledString::new()); push_ty_ref(&r1, tnm1, &mut values.0); @@ -946,7 +946,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let type_param = generics.type_param(param, self.tcx); let hir = &self.tcx.hir; hir.as_local_node_id(type_param.def_id).map(|id| { - // Get the `hir::TyParam` to verify wether it already has any bounds. + // Get the `hir::TyParam` to verify whether it already has any bounds. // We do this to avoid suggesting code that ends up as `T: 'a'b`, // instead we suggest `T: 'a + 'b` in that case. let has_lifetimes = if let hir_map::NodeTyParam(ref p) = hir.get(id) { diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 07c5b319970f8..7a386c144b738 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -180,7 +180,7 @@ pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { // for each body-id in this map, which will process the // obligations within. This is expected to be done 'late enough' // that all type inference variables have been bound and so forth. 
- region_obligations: RefCell<Vec<(ast::NodeId, RegionObligation<'tcx>)>>, + pub region_obligations: RefCell<Vec<(ast::NodeId, RegionObligation<'tcx>)>>, } /// A map returned by `skolemize_late_bound_regions()` indicating the skolemized @@ -1555,11 +1555,20 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { InferOk { value, obligations } } - fn borrow_region_constraints(&self) -> RefMut<'_, RegionConstraintCollector<'tcx>> { + pub fn borrow_region_constraints(&self) -> RefMut<'_, RegionConstraintCollector<'tcx>> { RefMut::map( self.region_constraints.borrow_mut(), |c| c.as_mut().expect("region constraints already solved")) } + + /// Clears the selection, evaluation, and projection caches. This is useful when + /// repeatedly attempting to select an Obligation while changing only + /// its ParamEnv, since FulfillmentContext doesn't use 'probe'. + pub fn clear_caches(&self) { + self.selection_cache.clear(); + self.evaluation_cache.clear(); + self.projection_cache.borrow_mut().clear(); + } } impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> { diff --git a/src/librustc/infer/outlives/obligations.rs b/src/librustc/infer/outlives/obligations.rs index eda2e1f7b4ef4..36e657f78b4b2 100644 --- a/src/librustc/infer/outlives/obligations.rs +++ b/src/librustc/infer/outlives/obligations.rs @@ -106,7 +106,7 @@ impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> { /// done (or else an assert will fire). /// /// See the `region_obligations` field of `InferCtxt` for some - /// comments about how this funtion fits into the overall expected + /// comments about how this function fits into the overall expected /// flow of the inferencer. The key point is that it is /// invoked after all type-inference variables have been bound -- /// towards the end of regionck. This also ensures that the diff --git a/src/librustc/infer/region_constraints/README.md b/src/librustc/infer/region_constraints/README.md index 67ad08c753033..95f9c8c835398 100644 --- a/src/librustc/infer/region_constraints/README.md +++ b/src/librustc/infer/region_constraints/README.md @@ -19,7 +19,7 @@ The constraints are always of one of three possible forms: a subregion of Rj - `ConstrainRegSubVar(R, Ri)` states that the concrete region R (which must not be a variable) must be a subregion of the variable Ri -- `ConstrainVarSubReg(Ri, R)` states the variable Ri shoudl be less +- `ConstrainVarSubReg(Ri, R)` states the variable Ri should be less than the concrete region R. This is kind of deprecated and ought to be replaced with a verify (they essentially play the same role). diff --git a/src/librustc/infer/region_constraints/mod.rs b/src/librustc/infer/region_constraints/mod.rs index 68d81a2dee352..be196192371fd 100644 --- a/src/librustc/infer/region_constraints/mod.rs +++ b/src/librustc/infer/region_constraints/mod.rs @@ -82,7 +82,7 @@ pub type VarOrigins = IndexVec<RegionVid, RegionVariableOrigin>; /// Describes constraints between the region variables and other /// regions, as well as other conditions that must be verified, or /// assumptions that can be made. -#[derive(Debug, Default)] +#[derive(Debug, Default, Clone)] pub struct RegionConstraintData<'tcx> { /// Constraints of the form `A <= B`, where either `A` or `B` can /// be a region variable (or neither, as it happens). @@ -142,7 +142,7 @@ pub enum Constraint<'tcx> { /// outlive `RS`. Therefore verify that `R <= RS[i]` for some /// `i`. Inference variables may be involved (but this verification /// step doesn't influence inference). 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Verify<'tcx> { pub kind: GenericKind<'tcx>, pub origin: SubregionOrigin<'tcx>, @@ -159,7 +159,7 @@ pub enum GenericKind<'tcx> { /// When we introduce a verification step, we wish to test that a /// particular region (let's call it `'min`) meets some bound. /// The bound is described the by the following grammar: -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum VerifyBound<'tcx> { /// B = exists {R} --> some 'r in {R} must outlive 'min /// @@ -288,6 +288,10 @@ impl<'tcx> RegionConstraintCollector<'tcx> { &self.var_origins } + pub fn region_constraint_data(&self) -> &RegionConstraintData<'tcx> { + &self.data + } + /// Once all the constraints have been gathered, extract out the final data. /// /// Not legal during a snapshot. diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index e5619f469e774..3ce4ab04777ca 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -886,7 +886,7 @@ fn resolve_block<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, blk: // // Each of the statements within the block is a terminating // scope, and thus a temporary (e.g. the result of calling - // `bar()` in the initalizer expression for `let inner = ...;`) + // `bar()` in the initializer expression for `let inner = ...;`) // will be cleaned up immediately after its corresponding // statement (i.e. `let inner = ...;`) executes. // diff --git a/src/librustc/mir/interpret/mod.rs b/src/librustc/mir/interpret/mod.rs index 8ffea62f6be51..a80695ec9b987 100644 --- a/src/librustc/mir/interpret/mod.rs +++ b/src/librustc/mir/interpret/mod.rs @@ -56,7 +56,7 @@ pub struct GlobalId<'tcx> { //////////////////////////////////////////////////////////////////////////////// pub trait PointerArithmetic: layout::HasDataLayout { - // These are not supposed to be overriden. + // These are not supposed to be overridden. //// Trunace the given value to the pointer size; also return whether there was an overflow fn truncate_to_ptr(self, val: u128) -> (u64, bool) { diff --git a/src/librustc/mir/mod.rs b/src/librustc/mir/mod.rs index 439be667861a2..b88dea871ce67 100644 --- a/src/librustc/mir/mod.rs +++ b/src/librustc/mir/mod.rs @@ -1950,7 +1950,7 @@ pub struct GeneratorLayout<'tcx> { /// ``` /// /// here, there is one unique free region (`'a`) but it appears -/// twice. We would "renumber" each occurence to a unique vid, as follows: +/// twice. We would "renumber" each occurrence to a unique vid, as follows: /// /// ```text /// ClosureSubsts = [ diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index 9de18612d816c..7311b47974ac5 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -277,7 +277,7 @@ pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, /// is bad, because the only local type with `T` as a subtree is /// `LocalType`, and `Vec<->` is between it and the type parameter. /// - similarly, `FundamentalPair, T>` is bad, because -/// the second occurence of `T` is not a subtree of *any* local type. +/// the second occurrence of `T` is not a subtree of *any* local type. /// - however, `LocalType>` is OK, because `T` is a subtree of /// `LocalType>`, which is local and has no types between it and /// the type parameter. 
diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index 80819a86b7c46..31836f7e3c57b 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -32,8 +32,8 @@ use syntax_pos::{Span, DUMMY_SP}; pub use self::coherence::{orphan_check, overlapping_impls, OrphanCheckErr, OverlapResult}; pub use self::fulfill::FulfillmentContext; pub use self::project::MismatchedProjectionTypes; -pub use self::project::{normalize, normalize_projection_type, Normalized}; -pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal}; +pub use self::project::{normalize, normalize_projection_type, poly_project_and_unify_type}; +pub use self::project::{ProjectionCache, ProjectionCacheSnapshot, Reveal, Normalized}; pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; pub use self::on_unimplemented::{OnUnimplementedDirective, OnUnimplementedNote}; @@ -621,7 +621,7 @@ pub fn fully_normalize<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, // FIXME (@jroesch) ISSUE 26721 // I'm not sure if this is a bug or not, needs further investigation. // It appears that by reusing the fulfillment_cx here we incur more - // obligations and later trip an asssertion on regionck.rs line 337. + // obligations and later trip an assertion on regionck.rs line 337. // // The two possibilities I see is: // - normalization is not actually fully happening and we diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index ae539f07336d5..0d0476e7c21dd 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -1596,6 +1596,10 @@ impl<'tcx> ProjectionCache<'tcx> { } } + pub fn clear(&mut self) { + self.map.clear(); + } + pub fn snapshot(&mut self) -> ProjectionCacheSnapshot { ProjectionCacheSnapshot { snapshot: self.map.snapshot() } } diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 4ed25646d436d..cfeb456acefe6 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -93,6 +93,11 @@ pub struct SelectionContext<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { inferred_obligations: SnapshotVec>, intercrate_ambiguity_causes: Option>, + + /// Controls whether or not to filter out negative impls when selecting. 
+ /// This is used in librustdoc to distinguish between the lack of an impl + /// and a negative impl + allow_negative_impls: bool } #[derive(Clone, Debug)] @@ -424,6 +429,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { intercrate: None, inferred_obligations: SnapshotVec::new(), intercrate_ambiguity_causes: None, + allow_negative_impls: false, } } @@ -436,6 +442,20 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { intercrate: Some(mode), inferred_obligations: SnapshotVec::new(), intercrate_ambiguity_causes: None, + allow_negative_impls: false, + } + } + + pub fn with_negative(infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, + allow_negative_impls: bool) -> SelectionContext<'cx, 'gcx, 'tcx> { + debug!("with_negative({:?})", allow_negative_impls); + SelectionContext { + infcx, + freshener: infcx.freshener(), + intercrate: None, + inferred_obligations: SnapshotVec::new(), + intercrate_ambiguity_causes: None, + allow_negative_impls, } } @@ -1086,7 +1106,8 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { fn filter_negative_impls(&self, candidate: SelectionCandidate<'tcx>) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { if let ImplCandidate(def_id) = candidate { - if self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative { + if !self.allow_negative_impls && + self.tcx().impl_polarity(def_id) == hir::ImplPolarity::Negative { return Err(Unimplemented) } } @@ -3337,6 +3358,10 @@ impl<'tcx> SelectionCache<'tcx> { hashmap: RefCell::new(FxHashMap()) } } + + pub fn clear(&self) { + *self.hashmap.borrow_mut() = FxHashMap() + } } impl<'tcx> EvaluationCache<'tcx> { @@ -3345,6 +3370,10 @@ impl<'tcx> EvaluationCache<'tcx> { hashmap: RefCell::new(FxHashMap()) } } + + pub fn clear(&self) { + *self.hashmap.borrow_mut() = FxHashMap() + } } impl<'o,'tcx> TraitObligationStack<'o,'tcx> { diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 63b91ff110161..c3cd65230bd86 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -2059,7 +2059,7 @@ impl<'a, 'tcx> LayoutOf> for LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> { // can however trigger recursive invocations of `layout_of`. // Therefore, we execute it *after* the main query has // completed, to avoid problems around recursive structures - // and the like. (Admitedly, I wasn't able to reproduce a problem + // and the like. (Admittedly, I wasn't able to reproduce a problem // here, but it seems like the right thing to do. -nmatsakis) self.record_layout_for_printing(layout); @@ -2085,7 +2085,7 @@ impl<'a, 'tcx> LayoutOf> for LayoutCx<'tcx, ty::maps::TyCtxtAt<'a, 'tcx // can however trigger recursive invocations of `layout_of`. // Therefore, we execute it *after* the main query has // completed, to avoid problems around recursive structures - // and the like. (Admitedly, I wasn't able to reproduce a problem + // and the like. (Admittedly, I wasn't able to reproduce a problem // here, but it seems like the right thing to do. 
-nmatsakis) let cx = LayoutCx { tcx: *self.tcx, diff --git a/src/librustc_allocator/expand.rs b/src/librustc_allocator/expand.rs index 352184c1efa76..c088458c3557c 100644 --- a/src/librustc_allocator/expand.rs +++ b/src/librustc_allocator/expand.rs @@ -13,9 +13,9 @@ use rustc_errors; use syntax::abi::Abi; use syntax::ast::{Crate, Attribute, LitKind, StrStyle, ExprKind}; use syntax::ast::{Unsafety, Constness, Generics, Mutability, Ty, Mac, Arg}; -use syntax::ast::{self, Ident, Item, ItemKind, TyKind, Visibility, Expr}; +use syntax::ast::{self, Ident, Item, ItemKind, TyKind, VisibilityKind, Expr}; use syntax::attr; -use syntax::codemap::dummy_spanned; +use syntax::codemap::{dummy_spanned, respan}; use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute}; use syntax::ext::base::ExtCtxt; use syntax::ext::base::Resolver; @@ -97,7 +97,11 @@ impl<'a> Folder for ExpandAllocatorDirectives<'a> { ]); let mut items = vec![ f.cx.item_extern_crate(f.span, f.alloc), - f.cx.item_use_simple(f.span, Visibility::Inherited, super_path), + f.cx.item_use_simple( + f.span, + respan(f.span.empty(), VisibilityKind::Inherited), + super_path, + ), ]; for method in ALLOCATOR_METHODS { items.push(f.allocator_fn(method)); diff --git a/src/librustc_apfloat/tests/ieee.rs b/src/librustc_apfloat/tests/ieee.rs index aff2076e03833..ff46ee79c31d0 100644 --- a/src/librustc_apfloat/tests/ieee.rs +++ b/src/librustc_apfloat/tests/ieee.rs @@ -2201,12 +2201,12 @@ fn is_finite_non_zero() { assert!(!Single::ZERO.is_finite_non_zero()); assert!(!(-Single::ZERO).is_finite_non_zero()); - // Test +/- qNaN. +/- dont mean anything with qNaN but paranoia can't hurt in + // Test +/- qNaN. +/- don't mean anything with qNaN but paranoia can't hurt in // this instance. assert!(!Single::NAN.is_finite_non_zero()); assert!(!(-Single::NAN).is_finite_non_zero()); - // Test +/- sNaN. +/- dont mean anything with sNaN but paranoia can't hurt in + // Test +/- sNaN. +/- don't mean anything with sNaN but paranoia can't hurt in // this instance. assert!(!Single::snan(None).is_finite_non_zero()); assert!(!(-Single::snan(None)).is_finite_non_zero()); diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 738c0d82ee1b5..58818d0ce8033 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -1111,7 +1111,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { /// Given a type, if it is an immutable reference, return a suggestion to make it mutable fn suggest_mut_for_immutable(&self, pty: &hir::Ty, is_implicit_self: bool) -> Option { - // Check wether the argument is an immutable reference + // Check whether the argument is an immutable reference debug!("suggest_mut_for_immutable({:?}, {:?})", pty, is_implicit_self); if let hir::TyRptr(lifetime, hir::MutTy { mutbl: hir::Mutability::MutImmutable, diff --git a/src/librustc_const_eval/_match.rs b/src/librustc_const_eval/_match.rs index a7c382eba5091..e30f5cb4f1293 100644 --- a/src/librustc_const_eval/_match.rs +++ b/src/librustc_const_eval/_match.rs @@ -607,7 +607,7 @@ pub fn is_useful<'p, 'a: 'p, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>, // be able to observe whether the types of the struct's fields are // inhabited. // - // If the field is truely inaccessible, then all the patterns + // If the field is truly inaccessible, then all the patterns // matching against it must be wildcard patterns, so its type // does not matter. 
// diff --git a/src/librustc_data_structures/indexed_vec.rs b/src/librustc_data_structures/indexed_vec.rs index 753f12f400bf9..b11ca107af7dd 100644 --- a/src/librustc_data_structures/indexed_vec.rs +++ b/src/librustc_data_structures/indexed_vec.rs @@ -204,7 +204,7 @@ macro_rules! newtype_index { $($tokens)*); ); - // The case where no derives are added, but encodable is overriden. Don't + // The case where no derives are added, but encodable is overridden. Don't // derive serialization traits (@pub [$($pub:tt)*] @type [$type:ident] diff --git a/src/librustc_data_structures/snapshot_map/mod.rs b/src/librustc_data_structures/snapshot_map/mod.rs index cd7143ad3ce84..cede6f147821b 100644 --- a/src/librustc_data_structures/snapshot_map/mod.rs +++ b/src/librustc_data_structures/snapshot_map/mod.rs @@ -45,6 +45,11 @@ impl SnapshotMap } } + pub fn clear(&mut self) { + self.map.clear(); + self.undo_log.clear(); + } + pub fn insert(&mut self, key: K, value: V) -> bool { match self.map.insert(key.clone(), value) { None => { diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index d73e968a82760..c1340d0a28a44 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -34,6 +34,7 @@ use std::rc::Rc; use syntax::ast; use syntax::attr; +use syntax::codemap; use syntax::ext::base::SyntaxExtension; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; @@ -496,7 +497,7 @@ impl CrateStore for cstore::CStore { tokens: body.into(), legacy: def.legacy, }), - vis: ast::Visibility::Inherited, + vis: codemap::respan(local_span.empty(), ast::VisibilityKind::Inherited), tokens: None, }) } diff --git a/src/librustc_mir/borrow_check/mod.rs b/src/librustc_mir/borrow_check/mod.rs index 650f99828ae48..c6ed971f767cf 100644 --- a/src/librustc_mir/borrow_check/mod.rs +++ b/src/librustc_mir/borrow_check/mod.rs @@ -117,7 +117,7 @@ fn do_mir_borrowck<'a, 'gcx, 'tcx>( for move_error in move_errors { let (span, kind): (Span, IllegalMoveOriginKind) = match move_error { MoveError::UnionMove { .. } => { - unimplemented!("dont know how to report union move errors yet.") + unimplemented!("don't know how to report union move errors yet.") } MoveError::IllegalMove { cannot_move_out_of: o, @@ -1424,7 +1424,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { /// tracked in the MoveData. /// /// An Err result includes a tag indicated why the search failed. - /// Currenly this can only occur if the place is built off of a + /// Currently this can only occur if the place is built off of a /// static variable, as we do not track those in the MoveData. fn move_path_closest_to( &mut self, @@ -1439,7 +1439,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { } match *last_prefix { Place::Local(_) => panic!("should have move path for every Local"), - Place::Projection(_) => panic!("PrefixSet::All meant dont stop for Projection"), + Place::Projection(_) => panic!("PrefixSet::All meant don't stop for Projection"), Place::Static(_) => return Err(NoMovePathFound::ReachedStatic), } } @@ -1484,7 +1484,7 @@ impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> { { } ProjectionElem::Subslice { .. 
} => { - panic!("we dont allow assignments to subslices, context: {:?}", + panic!("we don't allow assignments to subslices, context: {:?}", context); } diff --git a/src/librustc_mir/borrow_check/nll/mod.rs b/src/librustc_mir/borrow_check/nll/mod.rs index 66ca74b0139a3..07e5091da9c1e 100644 --- a/src/librustc_mir/borrow_check/nll/mod.rs +++ b/src/librustc_mir/borrow_check/nll/mod.rs @@ -278,7 +278,7 @@ fn for_each_region_constraint( /// Right now, we piggy back on the `ReVar` to store our NLL inference /// regions. These are indexed with `RegionVid`. This method will -/// assert that the region is a `ReVar` and extract its interal index. +/// assert that the region is a `ReVar` and extract its internal index. /// This is reasonable because in our MIR we replace all universal regions /// with inference variables. pub trait ToRegionVid { diff --git a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs index 9a338947f4772..33c012dfad829 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/mod.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/mod.rs @@ -964,7 +964,7 @@ impl<'tcx> RegionInferenceContext<'tcx> { debug!("check_universal_region: fr_minus={:?}", fr_minus); // Grow `shorter_fr` until we find a non-local - // regon. (We always will.) We'll call that + // region. (We always will.) We'll call that // `shorter_fr+` -- it's ever so slightly larger than // `fr`. let shorter_fr_plus = self.universal_regions.non_local_upper_bound(shorter_fr); diff --git a/src/librustc_mir/borrow_check/nll/region_infer/values.rs b/src/librustc_mir/borrow_check/nll/region_infer/values.rs index b2b2ca1182d03..45236bbc4aae2 100644 --- a/src/librustc_mir/borrow_check/nll/region_infer/values.rs +++ b/src/librustc_mir/borrow_check/nll/region_infer/values.rs @@ -150,7 +150,7 @@ pub(super) enum RegionElement { /// A point in the control-flow graph. Location(Location), - /// An in-scope, universally quantified region (e.g., a liftime parameter). + /// An in-scope, universally quantified region (e.g., a lifetime parameter). UniversalRegion(RegionVid), } diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 8053a0a69484f..58ce572ae8d88 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Code related to match expresions. These are sufficiently complex +//! Code related to match expressions. These are sufficiently complex //! to warrant their own module and submodules. :) This main module //! includes the high-level algorithm, the submodules contain the //! details. diff --git a/src/librustc_mir/dataflow/impls/borrows.rs b/src/librustc_mir/dataflow/impls/borrows.rs index e798cc93cb09a..8ab4035cf4aec 100644 --- a/src/librustc_mir/dataflow/impls/borrows.rs +++ b/src/librustc_mir/dataflow/impls/borrows.rs @@ -80,14 +80,14 @@ pub struct Borrows<'a, 'gcx: 'tcx, 'tcx: 'a> { /// tracking (phased) borrows. It computes where a borrow is reserved; /// i.e. where it can reach in the control flow starting from its /// initial `assigned = &'rgn borrowed` statement, and ending -/// whereever `'rgn` itself ends. +/// wherever `'rgn` itself ends. pub(crate) struct Reservations<'a, 'gcx: 'tcx, 'tcx: 'a>(pub(crate) Borrows<'a, 'gcx, 'tcx>); /// The `ActiveBorrows` analysis is the second of the two flow /// analyses tracking (phased) borrows. 
It computes where any given /// borrow `&assigned = &'rgn borrowed` is *active*, which starts at /// the first use of `assigned` after the reservation has started, and -/// ends whereever `'rgn` itself ends. +/// ends wherever `'rgn` itself ends. pub(crate) struct ActiveBorrows<'a, 'gcx: 'tcx, 'tcx: 'a>(pub(crate) Borrows<'a, 'gcx, 'tcx>); impl<'a, 'gcx, 'tcx> Reservations<'a, 'gcx, 'tcx> { diff --git a/src/librustc_mir/diagnostics.rs b/src/librustc_mir/diagnostics.rs index 619c0dc847ebc..3491faf9cdac0 100644 --- a/src/librustc_mir/diagnostics.rs +++ b/src/librustc_mir/diagnostics.rs @@ -365,7 +365,7 @@ with `#[derive(Clone)]`. Some types have no ownership semantics at all and are trivial to duplicate. An example is `i32` and the other number types. We don't have to call `.clone()` to clone them, because they are marked `Copy` in addition to `Clone`. Implicit -cloning is more convienient in this case. We can mark our own types `Copy` if +cloning is more convenient in this case. We can mark our own types `Copy` if all their members also are marked `Copy`. In the example below, we implement a `Point` type. Because it only stores two diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs index 52b87282180c4..3578164feb7c5 100644 --- a/src/librustc_mir/interpret/eval_context.rs +++ b/src/librustc_mir/interpret/eval_context.rs @@ -84,7 +84,7 @@ pub struct Frame<'tcx> { /// return). pub block: mir::BasicBlock, - /// The index of the currently evaluated statment. + /// The index of the currently evaluated statement. pub stmt: usize, } diff --git a/src/librustc_mir/monomorphize/item.rs b/src/librustc_mir/monomorphize/item.rs index 86a4dd4a31f8c..a5078187a57e3 100644 --- a/src/librustc_mir/monomorphize/item.rs +++ b/src/librustc_mir/monomorphize/item.rs @@ -68,7 +68,7 @@ pub enum InstantiationMode { /// however, our local copy may conflict with other crates also /// inlining the same function. /// - /// This flag indicates that this situation is occuring, and informs + /// This flag indicates that this situation is occurring, and informs /// symbol name calculation that some extra mangling is needed to /// avoid conflicts. 
Note that this may eventually go away entirely if /// ThinLTO enables us to *always* have a globally shared instance of a diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 6971033c8994b..bb6dbe632e316 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -58,14 +58,14 @@ impl<'a> AstValidator<'a> { } } - fn invalid_visibility(&self, vis: &Visibility, span: Span, note: Option<&str>) { - if vis != &Visibility::Inherited { + fn invalid_visibility(&self, vis: &Visibility, note: Option<&str>) { + if vis.node != VisibilityKind::Inherited { let mut err = struct_span_err!(self.session, - span, + vis.span, E0449, "unnecessary visibility qualifier"); - if vis == &Visibility::Public { - err.span_label(span, "`pub` not needed here"); + if vis.node == VisibilityKind::Public { + err.span_label(vis.span, "`pub` not needed here"); } if let Some(note) = note { err.note(note); @@ -216,7 +216,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { fn visit_item(&mut self, item: &'a Item) { match item.node { ItemKind::Impl(unsafety, polarity, _, _, Some(..), ref ty, ref impl_items) => { - self.invalid_visibility(&item.vis, item.span, None); + self.invalid_visibility(&item.vis, None); if ty.node == TyKind::Err { self.err_handler() .struct_span_err(item.span, "`impl Trait for .. {}` is an obsolete syntax") @@ -226,7 +226,7 @@ impl<'a> Visitor<'a> for AstValidator<'a> { span_err!(self.session, item.span, E0198, "negative impls cannot be unsafe"); } for impl_item in impl_items { - self.invalid_visibility(&impl_item.vis, impl_item.span, None); + self.invalid_visibility(&impl_item.vis, None); if let ImplItemKind::Method(ref sig, _) = impl_item.node { self.check_trait_fn_not_const(sig.constness); } @@ -234,7 +234,6 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } ItemKind::Impl(unsafety, polarity, defaultness, _, None, _, _) => { self.invalid_visibility(&item.vis, - item.span, Some("place qualifiers on individual impl items instead")); if unsafety == Unsafety::Unsafe { span_err!(self.session, item.span, E0197, "inherent impls cannot be unsafe"); @@ -247,16 +246,16 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } } ItemKind::ForeignMod(..) => { - self.invalid_visibility(&item.vis, - item.span, - Some("place qualifiers on individual foreign items \ - instead")); + self.invalid_visibility( + &item.vis, + Some("place qualifiers on individual foreign items instead"), + ); } ItemKind::Enum(ref def, _) => { for variant in &def.variants { self.invalid_non_exhaustive_attribute(variant); for field in variant.node.data.fields() { - self.invalid_visibility(&field.vis, field.span, None); + self.invalid_visibility(&field.vis, None); } } } @@ -359,8 +358,8 @@ impl<'a> Visitor<'a> for AstValidator<'a> { } fn visit_vis(&mut self, vis: &'a Visibility) { - match *vis { - Visibility::Restricted { ref path, .. } => { + match vis.node { + VisibilityKind::Restricted { ref path, .. 
} => { path.segments.iter().find(|segment| segment.parameters.is_some()).map(|segment| { self.err_handler().span_err(segment.parameters.as_ref().unwrap().span(), "generic arguments in visibility path"); diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index b46882f054df9..6ae047609535e 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -781,7 +781,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypePrivacyVisitor<'a, 'tcx> { // Additionally, until better reachability analysis for macros 2.0 is available, // we prohibit access to private statics from other crates, this allows to give // more code internal visibility at link time. (Access to private functions - // is already prohibited by type privacy for funciton types.) + // is already prohibited by type privacy for function types.) fn visit_qpath(&mut self, qpath: &'tcx hir::QPath, id: ast::NodeId, span: Span) { let def = match *qpath { hir::QPath::Resolved(_, ref path) => match path.def { diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index 5a321053b7ae8..163f6a64010b5 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -17,7 +17,7 @@ // `use` directives. // // Unused trait imports can't be checked until the method resolution. We save -// candidates here, and do the acutal check in librustc_typeck/check_unused.rs. +// candidates here, and do the actual check in librustc_typeck/check_unused.rs. use std::ops::{Deref, DerefMut}; @@ -86,7 +86,7 @@ impl<'a, 'b> Visitor<'a> for UnusedImportCheckVisitor<'a, 'b> { // because this means that they were generated in some fashion by the // compiler and we don't need to consider them. if let ast::ItemKind::Use(..) = item.node { - if item.vis == ast::Visibility::Public || item.span.source_equal(&DUMMY_SP) { + if item.vis.node == ast::VisibilityKind::Public || item.span.source_equal(&DUMMY_SP) { return; } } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 2da4bfedd3a17..74dfd3843878e 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -1440,7 +1440,7 @@ impl<'a> Resolver<'a> { /// Rustdoc uses this to resolve things in a recoverable way. ResolutionError<'a> /// isn't something that can be returned because it can't be made to live that long, /// and also it's a private type. Fortunately rustdoc doesn't need to know the error, - /// just that an error occured. + /// just that an error occurred. pub fn resolve_str_path_error(&mut self, span: Span, path_str: &str, is_value: bool) -> Result { use std::iter; @@ -3796,13 +3796,15 @@ impl<'a> Resolver<'a> { } fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility { - match *vis { - ast::Visibility::Public => ty::Visibility::Public, - ast::Visibility::Crate(..) => ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)), - ast::Visibility::Inherited => { + match vis.node { + ast::VisibilityKind::Public => ty::Visibility::Public, + ast::VisibilityKind::Crate(..) => { + ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)) + } + ast::VisibilityKind::Inherited => { ty::Visibility::Restricted(self.current_module.normal_ancestor_id) } - ast::Visibility::Restricted { ref path, id } => { + ast::VisibilityKind::Restricted { ref path, id, .. 
} => { let def = self.smart_resolve_path(id, None, path, PathSource::Visibility).base_def(); if def == Def::Err { diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index a8070c553bdbc..438ab3a3513fc 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -186,7 +186,7 @@ impl<'a> Resolver<'a> { } let check_usable = |this: &mut Self, binding: &'a NameBinding<'a>| { - // `extern crate` are always usable for backwards compatability, see issue #37020. + // `extern crate` are always usable for backwards compatibility, see issue #37020. let usable = this.is_accessible(binding.vis) || binding.is_extern_crate(); if usable { Ok(binding) } else { Err(Determined) } }; diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 47530c4208520..bf82b0774238b 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -43,7 +43,7 @@ use syntax::print::pprust::{ ty_to_string }; use syntax::ptr::P; -use syntax::codemap::{Spanned, DUMMY_SP}; +use syntax::codemap::{Spanned, DUMMY_SP, respan}; use syntax_pos::*; use {escape, generated_code, lower_attributes, PathCollector, SaveContext}; @@ -65,12 +65,19 @@ macro_rules! down_cast_data { } macro_rules! access_from { + ($save_ctxt:expr, $vis:expr, $id:expr) => { + Access { + public: $vis.node == ast::VisibilityKind::Public, + reachable: $save_ctxt.analysis.access_levels.is_reachable($id), + } + }; + ($save_ctxt:expr, $item:expr) => { Access { - public: $item.vis == ast::Visibility::Public, + public: $item.vis.node == ast::VisibilityKind::Public, reachable: $save_ctxt.analysis.access_levels.is_reachable($item.id), } - } + }; } pub struct DumpVisitor<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> { @@ -405,12 +412,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { method_data.value = sig_str; method_data.sig = sig::method_signature(id, name, generics, sig, &self.save_ctxt); - self.dumper.dump_def( - &Access { - public: vis == ast::Visibility::Public, - reachable: self.save_ctxt.analysis.access_levels.is_reachable(id), - }, - method_data); + self.dumper.dump_def(&access_from!(self.save_ctxt, vis, id), method_data); } // walk arg and return types @@ -543,10 +545,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { let span = self.span_from_span(sub_span.expect("No span found for variable")); self.dumper.dump_def( - &Access { - public: vis == ast::Visibility::Public, - reachable: self.save_ctxt.analysis.access_levels.is_reachable(id), - }, + &access_from!(self.save_ctxt, vis, id), Def { kind: DefKind::Const, id: ::id_from_node_id(id, &self.save_ctxt), @@ -597,7 +596,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { .iter() .enumerate() .filter_map(|(i, f)| { - if include_priv_fields || f.vis == ast::Visibility::Public { + if include_priv_fields || f.vis.node == ast::VisibilityKind::Public { f.ident .map(|i| i.to_string()) .or_else(|| Some(i.to_string())) @@ -1135,6 +1134,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { fn process_trait_item(&mut self, trait_item: &'l ast::TraitItem, trait_id: DefId) { self.process_macro_use(trait_item.span); + let vis_span = trait_item.span.empty(); match trait_item.node { ast::TraitItemKind::Const(ref ty, ref expr) => { self.process_assoc_const( @@ -1144,7 +1144,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 
'll, O> { &ty, expr.as_ref().map(|e| &**e), trait_id, - ast::Visibility::Public, + respan(vis_span, ast::VisibilityKind::Public), &trait_item.attrs, ); } @@ -1155,7 +1155,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { trait_item.id, trait_item.ident, &trait_item.generics, - ast::Visibility::Public, + respan(vis_span, ast::VisibilityKind::Public), trait_item.span, ); } @@ -1259,10 +1259,7 @@ impl<'l, 'tcx: 'l, 'll, O: DumpOutput + 'll> DumpVisitor<'l, 'tcx, 'll, O> { // The access is calculated using the current tree ID, but with the root tree's visibility // (since nested trees don't have their own visibility). - let access = Access { - public: root_item.vis == ast::Visibility::Public, - reachable: self.save_ctxt.analysis.access_levels.is_reachable(id), - }; + let access = access_from!(self.save_ctxt, root_item.vis, id); // The parent def id of a given use tree is always the enclosing item. let parent = self.save_ctxt.tcx.hir.opt_local_def_id(id) diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index a33270380196f..ab354a30d4151 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -84,7 +84,7 @@ impl LtoModuleTranslation { } } - /// A "guage" of how costly it is to optimize this module, used to sort + /// A "gauge" of how costly it is to optimize this module, used to sort /// biggest modules first. pub fn cost(&self) -> u64 { match *self { @@ -726,7 +726,7 @@ impl ThinModule { // which was basically a resurgence of #45511 after LLVM's bug 35212 was // fixed. // - // This function below is a huge hack around tihs problem. The function + // This function below is a huge hack around this problem. The function // below is defined in `PassWrapper.cpp` and will basically "merge" // all `DICompileUnit` instances in a module. Basically it'll take all // the objects, rewrite all pointers of `DISubprogram` to point to the diff --git a/src/librustc_trans/builder.rs b/src/librustc_trans/builder.rs index 5ab8d03b8c718..d4e05a18e3a50 100644 --- a/src/librustc_trans/builder.rs +++ b/src/librustc_trans/builder.rs @@ -1240,7 +1240,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// on), and `ptr` is nonzero-sized, then extracts the size of `ptr` /// and the intrinsic for `lt` and passes them to `emit`, which is in /// charge of generating code to call the passed intrinsic on whatever - /// block of generated code is targetted for the intrinsic. + /// block of generated code is targeted for the intrinsic. /// /// If LLVM lifetime intrinsic support is disabled (i.e. optimizations /// off) or `ptr` is zero-sized, then no-op (does not call `emit`). diff --git a/src/librustc_trans/mir/rvalue.rs b/src/librustc_trans/mir/rvalue.rs index 2e876ec118d57..34ac44cec025a 100644 --- a/src/librustc_trans/mir/rvalue.rs +++ b/src/librustc_trans/mir/rvalue.rs @@ -844,7 +844,7 @@ fn cast_float_to_int(bx: &Builder, // They are exactly equal to int_ty::{MIN,MAX} if float_ty has enough significand bits. // Otherwise, int_ty::MAX must be rounded towards zero, as it is one less than a power of two. // int_ty::MIN, however, is either zero or a negative power of two and is thus exactly - // representable. Note that this only works if float_ty's exponent range is sufficently large. + // representable. Note that this only works if float_ty's exponent range is sufficiently large. // f16 or 256 bit integers would break this property. 
Right now the smallest float type is f32 // with exponents ranging up to 127, which is barely enough for i128::MIN = -2^127. // On the other hand, f_max works even if int_ty::MAX is greater than float_ty::MAX. Because diff --git a/src/librustc_trans_utils/trans_crate.rs b/src/librustc_trans_utils/trans_crate.rs index e14abdff33918..9943a9bd398aa 100644 --- a/src/librustc_trans_utils/trans_crate.rs +++ b/src/librustc_trans_utils/trans_crate.rs @@ -151,7 +151,7 @@ impl MetadataLoader for NoLlvmMetadataLoader { } } - Err("Couldnt find metadata section".to_string()) + Err("Couldn't find metadata section".to_string()) } fn get_dylib_metadata( diff --git a/src/librustdoc/clean/auto_trait.rs b/src/librustdoc/clean/auto_trait.rs new file mode 100644 index 0000000000000..5951af6d70e80 --- /dev/null +++ b/src/librustdoc/clean/auto_trait.rs @@ -0,0 +1,1447 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use rustc::ty::TypeFoldable; + +use super::*; + +pub struct AutoTraitFinder<'a, 'tcx: 'a, 'rcx: 'a> { + pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>, +} + +impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> { + pub fn get_with_def_id(&self, def_id: DefId) -> Vec { + let ty = self.cx.tcx.type_of(def_id); + + let def_ctor: fn(DefId) -> Def = match ty.sty { + ty::TyAdt(adt, _) => match adt.adt_kind() { + AdtKind::Struct => Def::Struct, + AdtKind::Enum => Def::Enum, + AdtKind::Union => Def::Union, + }, + _ => panic!("Unexpected type {:?}", def_id), + }; + + self.get_auto_trait_impls(def_id, def_ctor, None) + } + + pub fn get_with_node_id(&self, id: ast::NodeId, name: String) -> Vec { + let item = &self.cx.tcx.hir.expect_item(id).node; + let did = self.cx.tcx.hir.local_def_id(id); + + let def_ctor = match *item { + hir::ItemStruct(_, _) => Def::Struct, + hir::ItemUnion(_, _) => Def::Union, + hir::ItemEnum(_, _) => Def::Enum, + _ => panic!("Unexpected type {:?} {:?}", item, id), + }; + + self.get_auto_trait_impls(did, def_ctor, Some(name)) + } + + pub fn get_auto_trait_impls( + &self, + def_id: DefId, + def_ctor: fn(DefId) -> Def, + name: Option, + ) -> Vec { + if self.cx + .tcx + .get_attrs(def_id) + .lists("doc") + .has_word("hidden") + { + debug!( + "get_auto_trait_impls(def_id={:?}, def_ctor={:?}): item has doc('hidden'), \ + aborting", + def_id, def_ctor + ); + return Vec::new(); + } + + let tcx = self.cx.tcx; + let generics = self.cx.tcx.generics_of(def_id); + + debug!( + "get_auto_trait_impls(def_id={:?}, def_ctor={:?}, generics={:?}", + def_id, def_ctor, generics + ); + let auto_traits: Vec<_> = self.cx + .send_trait + .and_then(|send_trait| { + self.get_auto_trait_impl_for( + def_id, + name.clone(), + generics.clone(), + def_ctor, + send_trait, + ) + }) + .into_iter() + .chain(self.get_auto_trait_impl_for( + def_id, + name.clone(), + generics.clone(), + def_ctor, + tcx.require_lang_item(lang_items::SyncTraitLangItem), + ).into_iter()) + .collect(); + + debug!( + "get_auto_traits: type {:?} auto_traits {:?}", + def_id, auto_traits + ); + auto_traits + } + + fn get_auto_trait_impl_for( + &self, + def_id: DefId, + name: Option, + generics: ty::Generics, + def_ctor: fn(DefId) -> Def, + trait_def_id: DefId, + ) -> Option { + if !self.cx + .generated_synthetics + .borrow_mut() + 
.insert((def_id, trait_def_id)) + { + debug!( + "get_auto_trait_impl_for(def_id={:?}, generics={:?}, def_ctor={:?}, \ + trait_def_id={:?}): already generated, aborting", + def_id, generics, def_ctor, trait_def_id + ); + return None; + } + + let result = self.find_auto_trait_generics(def_id, trait_def_id, &generics); + + if result.is_auto() { + let trait_ = hir::TraitRef { + path: get_path_for_type(self.cx.tcx, trait_def_id, hir::def::Def::Trait), + ref_id: ast::DUMMY_NODE_ID, + }; + + let polarity; + + let new_generics = match result { + AutoTraitResult::PositiveImpl(new_generics) => { + polarity = None; + new_generics + } + AutoTraitResult::NegativeImpl => { + polarity = Some(ImplPolarity::Negative); + + // For negative impls, we use the generic params, but *not* the predicates, + // from the original type. Otherwise, the displayed impl appears to be a + // conditional negative impl, when it's really unconditional. + // + // For example, consider the struct Foo(*mut T). Using + // the original predicates in our impl would cause us to generate + // `impl !Send for Foo`, which makes it appear that Foo + // implements Send where T is not copy. + // + // Instead, we generate `impl !Send for Foo`, which better + // expresses the fact that `Foo` never implements `Send`, + // regardless of the choice of `T`. + let real_generics = (&generics, &Default::default()); + + // Clean the generics, but ignore the '?Sized' bounds generated + // by the `Clean` impl + let clean_generics = real_generics.clean(self.cx); + + Generics { + params: clean_generics.params, + where_predicates: Vec::new(), + } + } + _ => unreachable!(), + }; + + let path = get_path_for_type(self.cx.tcx, def_id, def_ctor); + let mut segments = path.segments.into_vec(); + let last = segments.pop().unwrap(); + + let real_name = name.as_ref().map(|n| Symbol::from(n.as_str())); + + segments.push(hir::PathSegment::new( + real_name.unwrap_or(last.name), + self.generics_to_path_params(generics.clone()), + false, + )); + + let new_path = hir::Path { + span: path.span, + def: path.def, + segments: HirVec::from_vec(segments), + }; + + let ty = hir::Ty { + id: ast::DUMMY_NODE_ID, + node: hir::Ty_::TyPath(hir::QPath::Resolved(None, P(new_path))), + span: DUMMY_SP, + hir_id: hir::DUMMY_HIR_ID, + }; + + return Some(Item { + source: Span::empty(), + name: None, + attrs: Default::default(), + visibility: None, + def_id: self.next_def_id(def_id.krate), + stability: None, + deprecation: None, + inner: ImplItem(Impl { + unsafety: hir::Unsafety::Normal, + generics: new_generics, + provided_trait_methods: FxHashSet(), + trait_: Some(trait_.clean(self.cx)), + for_: ty.clean(self.cx), + items: Vec::new(), + polarity, + synthetic: true, + }), + }); + } + None + } + + fn generics_to_path_params(&self, generics: ty::Generics) -> hir::PathParameters { + let lifetimes = HirVec::from_vec( + generics + .regions + .iter() + .map(|p| { + let name = if p.name == "" { + hir::LifetimeName::Static + } else { + hir::LifetimeName::Name(p.name) + }; + + hir::Lifetime { + id: ast::DUMMY_NODE_ID, + span: DUMMY_SP, + name, + } + }) + .collect(), + ); + let types = HirVec::from_vec( + generics + .types + .iter() + .map(|p| P(self.ty_param_to_ty(p.clone()))) + .collect(), + ); + + hir::PathParameters { + lifetimes: lifetimes, + types: types, + bindings: HirVec::new(), + parenthesized: false, + } + } + + fn ty_param_to_ty(&self, param: ty::TypeParameterDef) -> hir::Ty { + debug!("ty_param_to_ty({:?}) {:?}", param, param.def_id); + hir::Ty { + id: ast::DUMMY_NODE_ID, + node: 
hir::Ty_::TyPath(hir::QPath::Resolved( + None, + P(hir::Path { + span: DUMMY_SP, + def: Def::TyParam(param.def_id), + segments: HirVec::from_vec(vec![hir::PathSegment::from_name(param.name)]), + }), + )), + span: DUMMY_SP, + hir_id: hir::DUMMY_HIR_ID, + } + } + + fn find_auto_trait_generics( + &self, + did: DefId, + trait_did: DefId, + generics: &ty::Generics, + ) -> AutoTraitResult { + let tcx = self.cx.tcx; + let ty = self.cx.tcx.type_of(did); + + let orig_params = tcx.param_env(did); + + let trait_ref = ty::TraitRef { + def_id: trait_did, + substs: tcx.mk_substs_trait(ty, &[]), + }; + + let trait_pred = ty::Binder(trait_ref); + + let bail_out = tcx.infer_ctxt().enter(|infcx| { + let mut selcx = SelectionContext::with_negative(&infcx, true); + let result = selcx.select(&Obligation::new( + ObligationCause::dummy(), + orig_params, + trait_pred.to_poly_trait_predicate(), + )); + match result { + Ok(Some(Vtable::VtableImpl(_))) => { + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): \ + manual impl found, bailing out", + did, trait_did, generics + ); + return true; + } + _ => return false, + }; + }); + + // If an explicit impl exists, it always takes priority over an auto impl + if bail_out { + return AutoTraitResult::ExplicitImpl; + } + + return tcx.infer_ctxt().enter(|mut infcx| { + let mut fresh_preds = FxHashSet(); + + // Due to the way projections are handled by SelectionContext, we need to run + // evaluate_predicates twice: once on the original param env, and once on the result of + // the first evaluate_predicates call. + // + // The problem is this: most of rustc, including SelectionContext and traits::project, + // are designed to work with a concrete usage of a type (e.g. Vec + // fn() { Vec }. This information will generally never change - given + // the 'T' in fn() { ... }, we'll never know anything else about 'T'. + // If we're unable to prove that 'T' implements a particular trait, we're done - + // there's nothing left to do but error out. + // + // However, synthesizing an auto trait impl works differently. Here, we start out with + // a set of initial conditions - the ParamEnv of the struct/enum/union we're dealing + // with - and progressively discover the conditions we need to fulfill for it to + // implement a certain auto trait. This ends up breaking two assumptions made by trait + // selection and projection: + // + // * We can always cache the result of a particular trait selection for the lifetime of + // an InfCtxt + // * Given a projection bound such as '::SomeItem = K', if 'T: + // SomeTrait' doesn't hold, then we don't need to care about the 'SomeItem = K' + // + // We fix the first assumption by manually clearing out all of the InferCtxt's caches + // in between calls to SelectionContext.select. This allows us to keep all of the + // intermediate types we create bound to the 'tcx lifetime, rather than needing to lift + // them between calls. + // + // We fix the second assumption by reprocessing the result of our first call to + // evaluate_predicates. Using the example of '::SomeItem = K', our first + // pass will pick up 'T: SomeTrait', but not 'SomeItem = K'. On our second pass, + // traits::project will see that 'T: SomeTrait' is in our ParamEnv, allowing + // SelectionContext to return it back to us. 
+ + let (new_env, user_env) = match self.evaluate_predicates( + &mut infcx, + did, + trait_did, + ty, + orig_params.clone(), + orig_params, + &mut fresh_preds, + false, + ) { + Some(e) => e, + None => return AutoTraitResult::NegativeImpl, + }; + + let (full_env, full_user_env) = self.evaluate_predicates( + &mut infcx, + did, + trait_did, + ty, + new_env.clone(), + user_env, + &mut fresh_preds, + true, + ).unwrap_or_else(|| { + panic!( + "Failed to fully process: {:?} {:?} {:?}", + ty, trait_did, orig_params + ) + }); + + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): fulfilling \ + with {:?}", + did, trait_did, generics, full_env + ); + infcx.clear_caches(); + + // At this point, we already have all of the bounds we need. FulfillmentContext is used + // to store all of the necessary region/lifetime bounds in the InferContext, as well as + // an additional sanity check. + let mut fulfill = FulfillmentContext::new(); + fulfill.register_bound( + &infcx, + full_env, + ty, + trait_did, + ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID), + ); + fulfill.select_all_or_error(&infcx).unwrap_or_else(|e| { + panic!( + "Unable to fulfill trait {:?} for '{:?}': {:?}", + trait_did, ty, e + ) + }); + + let names_map: FxHashMap = generics + .regions + .iter() + .map(|l| (l.name.as_str().to_string(), l.clean(self.cx))) + .collect(); + + let body_ids: FxHashSet<_> = infcx + .region_obligations + .borrow() + .iter() + .map(|&(id, _)| id) + .collect(); + + for id in body_ids { + infcx.process_registered_region_obligations(&[], None, full_env.clone(), id); + } + + let region_data = infcx + .borrow_region_constraints() + .region_constraint_data() + .clone(); + + let lifetime_predicates = self.handle_lifetimes(®ion_data, &names_map); + let vid_to_region = self.map_vid_to_region(®ion_data); + + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): computed \ + lifetime information '{:?}' '{:?}'", + did, trait_did, generics, lifetime_predicates, vid_to_region + ); + + let new_generics = self.param_env_to_generics( + infcx.tcx, + did, + full_user_env, + generics.clone(), + lifetime_predicates, + vid_to_region, + ); + debug!( + "find_auto_trait_generics(did={:?}, trait_did={:?}, generics={:?}): finished with \ + {:?}", + did, trait_did, generics, new_generics + ); + return AutoTraitResult::PositiveImpl(new_generics); + }); + } + + fn clean_pred<'c, 'd, 'cx>( + &self, + infcx: &InferCtxt<'c, 'd, 'cx>, + p: ty::Predicate<'cx>, + ) -> ty::Predicate<'cx> { + infcx.freshen(p) + } + + fn evaluate_nested_obligations<'b, 'c, 'd, 'cx, + T: Iterator>>>( + &self, + ty: ty::Ty, + nested: T, + computed_preds: &'b mut FxHashSet>, + fresh_preds: &'b mut FxHashSet>, + predicates: &'b mut VecDeque>, + select: &mut traits::SelectionContext<'c, 'd, 'cx>, + only_projections: bool, + ) -> bool { + let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID); + + for (obligation, predicate) in nested + .filter(|o| o.recursion_depth == 1) + .map(|o| (o.clone(), o.predicate.clone())) + { + let is_new_pred = + fresh_preds.insert(self.clean_pred(select.infcx(), predicate.clone())); + + match &predicate { + &ty::Predicate::Trait(ref p) => { + let substs = &p.skip_binder().trait_ref.substs; + + if self.is_of_param(substs) && !only_projections && is_new_pred { + computed_preds.insert(predicate); + } + predicates.push_back(p.clone()); + } + &ty::Predicate::Projection(p) => { + // If the projection isn't all type vars, then + // we don't want to add it as a bound + if 
self.is_of_param(p.skip_binder().projection_ty.substs) && is_new_pred { + computed_preds.insert(predicate); + } else { + match traits::poly_project_and_unify_type( + select, + &obligation.with(p.clone()), + ) { + Err(e) => { + debug!( + "evaluate_nested_obligations: Unable to unify predicate \ + '{:?}' '{:?}', bailing out", + ty, e + ); + return false; + } + Ok(Some(v)) => { + if !self.evaluate_nested_obligations( + ty, + v.clone().iter().cloned(), + computed_preds, + fresh_preds, + predicates, + select, + only_projections, + ) { + return false; + } + } + Ok(None) => { + panic!("Unexpected result when selecting {:?} {:?}", ty, obligation) + } + } + } + } + &ty::Predicate::RegionOutlives(ref binder) => { + if let Err(_) = select + .infcx() + .region_outlives_predicate(&dummy_cause, binder) + { + return false; + } + } + &ty::Predicate::TypeOutlives(ref binder) => { + match ( + binder.no_late_bound_regions(), + binder.map_bound_ref(|pred| pred.0).no_late_bound_regions(), + ) { + (None, Some(t_a)) => { + select.infcx().register_region_obligation( + ast::DUMMY_NODE_ID, + RegionObligation { + sup_type: t_a, + sub_region: select.infcx().tcx.types.re_static, + cause: dummy_cause.clone(), + }, + ); + } + (Some(ty::OutlivesPredicate(t_a, r_b)), _) => { + select.infcx().register_region_obligation( + ast::DUMMY_NODE_ID, + RegionObligation { + sup_type: t_a, + sub_region: r_b, + cause: dummy_cause.clone(), + }, + ); + } + _ => {} + }; + } + _ => panic!("Unexpected predicate {:?} {:?}", ty, predicate), + }; + } + return true; + } + + // The core logic responsible for computing the bounds for our synthesized impl. + // + // To calculate the bounds, we call SelectionContext.select in a loop. Like FulfillmentContext, + // we recursively select the nested obligations of predicates we encounter. However, whenever we + // encounter an UnimplementedError involving a type parameter, we add it to our ParamEnv. Since + // our goal is to determine when a particular type implements an auto trait, Unimplemented + // errors tell us what conditions need to be met. + // + // This method ends up working somewhat similarly to FulfillmentContext, but with a few key + // differences. FulfillmentContext works under the assumption that it's dealing with concrete + // user code. Accordingly, it considers all possible ways that a Predicate could be met - which + // isn't always what we want for a synthesized impl. For example, given the predicate 'T: + // Iterator', FulfillmentContext can end up reporting an Unimplemented error for T: + // IntoIterator - since there's an implementation of Iterator where T: IntoIterator, + // FulfillmentContext will drive SelectionContext to consider that impl before giving up. If we + // were to rely on FulfillmentContext's decision, we might end up synthesizing an impl like + // this: + // 'impl Send for Foo where T: IntoIterator' + // + // While it might be technically true that Foo implements Send where T: IntoIterator, + // the bound is overly restrictive - it's really only necessary that T: Iterator. + // + // For this reason, evaluate_predicates handles predicates with type variables specially. When + // we encounter an Unimplemented error for a bound such as 'T: Iterator', we immediately add it + // to our ParamEnv, and add it to our stack for recursive evaluation. When we later select it, + // we'll pick up any nested bounds, without ever inferring that 'T: IntoIterator' needs to + // hold. + // + // One additional consideration is supertrait bounds.
Normally, a ParamEnv is only ever + constructed once for a given type. As part of the construction process, the ParamEnv will + have any supertrait bounds normalized - e.g. if we have a type 'struct Foo', the + ParamEnv will contain 'T: Copy' and 'T: Clone', since 'Copy: Clone'. When we construct our + own ParamEnv, we need to do this ourselves, through traits::elaborate_predicates, or else + SelectionContext will choke on the missing predicates. However, this should never show up in + the final synthesized generics: we don't want our generated docs page to contain something + like 'T: Copy + Clone', as that's redundant. Therefore, we keep track of a separate + 'user_env', which only holds the predicates that will actually be displayed to the user. + fn evaluate_predicates<'b, 'gcx, 'c>( + &self, + infcx: &mut InferCtxt<'b, 'tcx, 'c>, + ty_did: DefId, + trait_did: DefId, + ty: ty::Ty<'c>, + param_env: ty::ParamEnv<'c>, + user_env: ty::ParamEnv<'c>, + fresh_preds: &mut FxHashSet>, + only_projections: bool, + ) -> Option<(ty::ParamEnv<'c>, ty::ParamEnv<'c>)> { + let tcx = infcx.tcx; + + let mut select = traits::SelectionContext::new(&infcx); + + let mut already_visited = FxHashSet(); + let mut predicates = VecDeque::new(); + predicates.push_back(ty::Binder(ty::TraitPredicate { + trait_ref: ty::TraitRef { + def_id: trait_did, + substs: infcx.tcx.mk_substs_trait(ty, &[]), + }, + })); + + let mut computed_preds: FxHashSet<_> = param_env.caller_bounds.iter().cloned().collect(); + let mut user_computed_preds: FxHashSet<_> = + user_env.caller_bounds.iter().cloned().collect(); + + let mut new_env = param_env.clone(); + let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID); + + while let Some(pred) = predicates.pop_front() { + infcx.clear_caches(); + + if !already_visited.insert(pred.clone()) { + continue; + } + + let result = select.select(&Obligation::new(dummy_cause.clone(), new_env, pred)); + + match &result { + &Ok(Some(ref vtable)) => { + let obligations = vtable.clone().nested_obligations().into_iter(); + + if !self.evaluate_nested_obligations( + ty, + obligations, + &mut user_computed_preds, + fresh_preds, + &mut predicates, + &mut select, + only_projections, + ) { + return None; + } + } + &Ok(None) => {} + &Err(SelectionError::Unimplemented) => { + if self.is_of_param(pred.skip_binder().trait_ref.substs) { + already_visited.remove(&pred); + user_computed_preds.insert(ty::Predicate::Trait(pred.clone())); + predicates.push_back(pred); + } else { + debug!( + "evaluate_nested_obligations: Unimplemented found, bailing: {:?} {:?} \ + {:?}", + ty, + pred, + pred.skip_binder().trait_ref.substs + ); + return None; + } + } + _ => panic!("Unexpected error for '{:?}': {:?}", ty, result), + }; + + computed_preds.extend(user_computed_preds.iter().cloned()); + let normalized_preds = + traits::elaborate_predicates(tcx, computed_preds.clone().into_iter().collect()); + new_env = ty::ParamEnv::new(tcx.mk_predicates(normalized_preds), param_env.reveal); + } + + let final_user_env = ty::ParamEnv::new( + tcx.mk_predicates(user_computed_preds.into_iter()), + user_env.reveal, + ); + debug!( + "evaluate_nested_obligations(ty_did={:?}, trait_did={:?}): succeeded with '{:?}' \ + '{:?}'", + ty_did, trait_did, new_env, final_user_env + ); + + return Some((new_env, final_user_env)); + } + + fn is_of_param(&self, substs: &Substs) -> bool { + if substs.is_noop() { + return false; + } + + return match substs.type_at(0).sty { + ty::TyParam(_) => true, + ty::TyProjection(p) =>
self.is_of_param(p.substs), + _ => false, + }; + } + + fn get_lifetime(&self, region: Region, names_map: &FxHashMap) -> Lifetime { + self.region_name(region) + .map(|name| { + names_map.get(&name).unwrap_or_else(|| { + panic!("Missing lifetime with name {:?} for {:?}", name, region) + }) + }) + .unwrap_or(&Lifetime::statik()) + .clone() + } + + fn region_name(&self, region: Region) -> Option { + match region { + &ty::ReEarlyBound(r) => Some(r.name.as_str().to_string()), + _ => None, + } + } + + // This is very similar to handle_lifetimes. However, instead of matching ty::Region's + // to each other, we match ty::RegionVid's to ty::Region's + fn map_vid_to_region<'cx>( + &self, + regions: &RegionConstraintData<'cx>, + ) -> FxHashMap> { + let mut vid_map: FxHashMap, RegionDeps<'cx>> = FxHashMap(); + let mut finished_map = FxHashMap(); + + for constraint in regions.constraints.keys() { + match constraint { + &Constraint::VarSubVar(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::RegionVid(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::RegionVid(r1)); + } + &Constraint::RegSubVar(region, vid) => { + { + let deps1 = vid_map + .entry(RegionTarget::Region(region)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(vid)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::Region(region)); + } + &Constraint::VarSubReg(vid, region) => { + finished_map.insert(vid, region); + } + &Constraint::RegSubReg(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::Region(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::Region(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::Region(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::Region(r1)); + } + } + } + + while !vid_map.is_empty() { + let target = vid_map.keys().next().expect("Keys somehow empty").clone(); + let deps = vid_map.remove(&target).expect("Entry somehow missing"); + + for smaller in deps.smaller.iter() { + for larger in deps.larger.iter() { + match (smaller, larger) { + (&RegionTarget::Region(_), &RegionTarget::Region(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + (&RegionTarget::RegionVid(v1), &RegionTarget::Region(r1)) => { + finished_map.insert(v1, r1); + } + (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => { + // Do nothing - we don't care about regions that are smaller than vids + } + (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + } + } + } + } + finished_map + } + + // This method calculates two things: Lifetime constraints of the form 'a: 
'b, + // and region constraints of the form ReVar: 'a + // + // This is essentially a simplified version of lexical_region_resolve. However, + // handle_lifetimes determines what *needs to be* true in order for an impl to hold. + // lexical_region_resolve, along with much of the rest of the compiler, is concerned + // with determining if a given set of constraints/predicates *are* met, given some + // starting conditions (e.g. user-provided code). For this reason, it's easier + // to perform the calculations we need on our own, rather than trying to make + // existing inference/solver code do what we want. + fn handle_lifetimes<'cx>( + &self, + regions: &RegionConstraintData<'cx>, + names_map: &FxHashMap, + ) -> Vec { + // Our goal is to 'flatten' the list of constraints by eliminating + // all intermediate RegionVids. At the end, all constraints should + // be between Regions (aka region variables). This gives us the information + // we need to create the Generics. + let mut finished = FxHashMap(); + + let mut vid_map: FxHashMap = FxHashMap(); + + // Flattening is done in two parts. First, we insert all of the constraints + // into a map. Each RegionTarget (either a RegionVid or a Region) maps + // to its smaller and larger regions. Note that 'larger' regions correspond + // to sub-regions in Rust code (e.g. in 'a: 'b, 'a is the larger region). + for constraint in regions.constraints.keys() { + match constraint { + &Constraint::VarSubVar(r1, r2) => { + { + let deps1 = vid_map + .entry(RegionTarget::RegionVid(r1)) + .or_insert_with(|| Default::default()); + deps1.larger.insert(RegionTarget::RegionVid(r2)); + } + + let deps2 = vid_map + .entry(RegionTarget::RegionVid(r2)) + .or_insert_with(|| Default::default()); + deps2.smaller.insert(RegionTarget::RegionVid(r1)); + } + &Constraint::RegSubVar(region, vid) => { + let deps = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps.smaller.insert(RegionTarget::Region(region)); + } + &Constraint::VarSubReg(vid, region) => { + let deps = vid_map + .entry(RegionTarget::RegionVid(vid)) + .or_insert_with(|| Default::default()); + deps.larger.insert(RegionTarget::Region(region)); + } + &Constraint::RegSubReg(r1, r2) => { + // The constraint is already in the form that we want, so we're done with it + // Desired order is 'larger, smaller', so flip them + if self.region_name(r1) != self.region_name(r2) { + finished + .entry(self.region_name(r2).unwrap()) + .or_insert_with(|| Vec::new()) + .push(r1); + } + } + } + } + + // Here, we 'flatten' the map one element at a time. + // All of the element's sub and super regions are connected + // to each other. For example, if we have a graph that looks like this: + // + // (A, B) - C - (D, E) + // Where (A, B) are subregions, and (D,E) are super-regions + // + // then after deleting 'C', the graph will look like this: + // ... - A - (D, E ...) + // ... - B - (D, E, ...) + // (A, B, ...) - D - ... + // (A, B, ...) - E - ... + // + // where '...' signifies the existing sub and super regions of an entry + // When two adjacent ty::Regions are encountered, we've computed a final + // constraint, and we add it to our list. Since we make sure to never re-add + // deleted items, this process will always finish.
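As a rough illustration of the flattening loop described above, here is a self-contained toy version (not the patch's code) that works on plain strings instead of `RegionVid`/`Region`: names starting with a tick stand for named regions, anything else is an inference variable to eliminate. The edge encoding and function name are invented for this sketch.

```rust
// Illustrative sketch only (not rustc types): flatten "sub-region" edges by
// eliminating variable nodes, keeping constraints between named regions.
use std::collections::{HashMap, HashSet};

type Deps = (HashSet<&'static str>, HashSet<&'static str>); // (smaller, larger)

// An edge (a, b) means "a is a sub-region of b"; a result pair (l, s) means "l: s".
fn flatten(edges: &[(&'static str, &'static str)]) -> HashSet<(&'static str, &'static str)> {
    let mut deps: HashMap<&'static str, Deps> = HashMap::new();
    let mut finished = HashSet::new();
    for &(small, large) in edges {
        deps.entry(small).or_default().1.insert(large);
        deps.entry(large).or_default().0.insert(small);
    }
    // Repeatedly delete a variable node, connecting its sub- and super-regions.
    loop {
        let target = match deps.keys().copied().find(|n| !n.starts_with('\'')) {
            Some(t) => t,
            None => break,
        };
        let (smaller, larger) = deps.remove(target).unwrap();
        for &s in &smaller {
            for &l in &larger {
                if s.starts_with('\'') && l.starts_with('\'') {
                    if s != l {
                        finished.insert((l, s)); // two named regions met: record `l: s`
                    }
                } else {
                    // At least one side is still a variable: rewire it around `target`.
                    if let Some(e) = deps.get_mut(s) {
                        e.1.insert(l);
                        e.1.remove(target);
                    }
                    if let Some(e) = deps.get_mut(l) {
                        e.0.insert(s);
                        e.0.remove(target);
                    }
                }
            }
        }
    }
    // Direct named-to-named edges that never passed through a variable.
    for (&node, (smaller, _)) in &deps {
        for &s in smaller {
            if node.starts_with('\'') && s.starts_with('\'') && node != s {
                finished.insert((node, s));
            }
        }
    }
    finished
}

fn main() {
    // 'a <= v0 <= 'b collapses to the single constraint 'b: 'a.
    let out = flatten(&[("'a", "v0"), ("v0", "'b")]);
    assert!(out.contains(&("'b", "'a")));
}
```

Deleting a variable rewires its sub-regions to its super-regions, and a constraint is only recorded when two named regions meet, mirroring the `finished` map built in the loop that follows.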
+ while !vid_map.is_empty() { + let target = vid_map.keys().next().expect("Keys somehow empty").clone(); + let deps = vid_map.remove(&target).expect("Entry somehow missing"); + + for smaller in deps.smaller.iter() { + for larger in deps.larger.iter() { + match (smaller, larger) { + (&RegionTarget::Region(r1), &RegionTarget::Region(r2)) => { + if self.region_name(r1) != self.region_name(r2) { + finished + .entry(self.region_name(r2).unwrap()) + .or_insert_with(|| Vec::new()) + .push(r1) // Larger, smaller + } + } + (&RegionTarget::RegionVid(_), &RegionTarget::Region(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + } + (&RegionTarget::Region(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let deps = v.into_mut(); + deps.smaller.insert(*smaller); + deps.smaller.remove(&target); + } + } + (&RegionTarget::RegionVid(_), &RegionTarget::RegionVid(_)) => { + if let Entry::Occupied(v) = vid_map.entry(*smaller) { + let smaller_deps = v.into_mut(); + smaller_deps.larger.insert(*larger); + smaller_deps.larger.remove(&target); + } + + if let Entry::Occupied(v) = vid_map.entry(*larger) { + let larger_deps = v.into_mut(); + larger_deps.smaller.insert(*smaller); + larger_deps.smaller.remove(&target); + } + } + } + } + } + } + + let lifetime_predicates = names_map + .iter() + .flat_map(|(name, lifetime)| { + let empty = Vec::new(); + let bounds: FxHashSet = finished + .get(name) + .unwrap_or(&empty) + .iter() + .map(|region| self.get_lifetime(region, names_map)) + .collect(); + + if bounds.is_empty() { + return None; + } + Some(WherePredicate::RegionPredicate { + lifetime: lifetime.clone(), + bounds: bounds.into_iter().collect(), + }) + }) + .collect(); + + lifetime_predicates + } + + fn extract_for_generics<'b, 'c, 'd>( + &self, + tcx: TyCtxt<'b, 'c, 'd>, + pred: ty::Predicate<'d>, + ) -> FxHashSet { + pred.walk_tys() + .flat_map(|t| { + let mut regions = FxHashSet(); + tcx.collect_regions(&t, &mut regions); + + regions.into_iter().flat_map(|r| { + match r { + // We only care about late bound regions, as we need to add them + // to the 'for<>' section + &ty::ReLateBound(_, ty::BoundRegion::BrNamed(_, name)) => { + Some(GenericParam::Lifetime(Lifetime(name.as_str().to_string()))) + } + &ty::ReVar(_) | &ty::ReEarlyBound(_) => None, + _ => panic!("Unexpected region type {:?}", r), + } + }) + }) + .collect() + } + + fn make_final_bounds<'b, 'c, 'cx>( + &self, + ty_to_bounds: FxHashMap>, + ty_to_fn: FxHashMap, Option)>, + lifetime_to_bounds: FxHashMap>, + ) -> Vec { + ty_to_bounds + .into_iter() + .flat_map(|(ty, mut bounds)| { + if let Some(data) = ty_to_fn.get(&ty) { + let (poly_trait, output) = + (data.0.as_ref().unwrap().clone(), data.1.as_ref().cloned()); + let new_ty = match &poly_trait.trait_ { + &Type::ResolvedPath { + ref path, + ref typarams, + ref did, + ref is_generic, + } => { + let mut new_path = path.clone(); + let last_segment = new_path.segments.pop().unwrap(); + + let (old_input, old_output) = match last_segment.params { + PathParameters::AngleBracketed { types, .. } => (types, None), + PathParameters::Parenthesized { inputs, output, .. 
} => { + (inputs, output) + } + }; + + if old_output.is_some() && old_output != output { + panic!( + "Output mismatch for {:?} {:?} {:?}", + ty, old_output, data.1 + ); + } + + let new_params = PathParameters::Parenthesized { + inputs: old_input, + output, + }; + + new_path.segments.push(PathSegment { + name: last_segment.name, + params: new_params, + }); + + Type::ResolvedPath { + path: new_path, + typarams: typarams.clone(), + did: did.clone(), + is_generic: *is_generic, + } + } + _ => panic!("Unexpected data: {:?}, {:?}", ty, data), + }; + bounds.insert(TyParamBound::TraitBound( + PolyTrait { + trait_: new_ty, + generic_params: poly_trait.generic_params, + }, + hir::TraitBoundModifier::None, + )); + } + if bounds.is_empty() { + return None; + } + + Some(WherePredicate::BoundPredicate { + ty, + bounds: bounds.into_iter().collect(), + }) + }) + .chain( + lifetime_to_bounds + .into_iter() + .filter(|&(_, ref bounds)| !bounds.is_empty()) + .map(|(lifetime, bounds)| WherePredicate::RegionPredicate { + lifetime, + bounds: bounds.into_iter().collect(), + }), + ) + .collect() + } + + // Converts the calculated ParamEnv and lifetime information to a clean::Generics, suitable for + // display on the docs page. Cleaning the Predicates produces sub-optimal WherePredicate's, + // so we fix them up: + // + // * Multiple bounds for the same type are coalesced into one: e.g. 'T: Copy', 'T: Debug' + // becomes 'T: Copy + Debug' + // * Fn bounds are handled specially - instead of leaving it as 'T: Fn(), = + // K', we use the dedicated syntax 'T: Fn() -> K' + // * We explicitly add a '?Sized' bound if we didn't find any 'Sized' predicates for a type + fn param_env_to_generics<'b, 'c, 'cx>( + &self, + tcx: TyCtxt<'b, 'c, 'cx>, + did: DefId, + param_env: ty::ParamEnv<'cx>, + type_generics: ty::Generics, + mut existing_predicates: Vec, + vid_to_region: FxHashMap>, + ) -> Generics { + debug!( + "param_env_to_generics(did={:?}, param_env={:?}, type_generics={:?}, \ + existing_predicates={:?})", + did, param_env, type_generics, existing_predicates + ); + + // The `Sized` trait must be handled specially, since we only display it when + // it is *not* required (i.e. '?Sized') + let sized_trait = self.cx + .tcx + .require_lang_item(lang_items::SizedTraitLangItem); + + let mut replacer = RegionReplacer { + vid_to_region: &vid_to_region, + tcx, + }; + + let orig_bounds: FxHashSet<_> = self.cx.tcx.param_env(did).caller_bounds.iter().collect(); + let clean_where_predicates = param_env + .caller_bounds + .iter() + .filter(|p| { + !orig_bounds.contains(p) || match p { + &&ty::Predicate::Trait(pred) => pred.def_id() == sized_trait, + _ => false, + } + }) + .map(|p| { + let replaced = p.fold_with(&mut replacer); + (replaced.clone(), replaced.clean(self.cx)) + }); + + let full_generics = (&type_generics, &tcx.predicates_of(did)); + let Generics { + params: mut generic_params, + .. + } = full_generics.clean(self.cx); + + let mut has_sized = FxHashSet(); + let mut ty_to_bounds = FxHashMap(); + let mut lifetime_to_bounds = FxHashMap(); + let mut ty_to_traits: FxHashMap> = FxHashMap(); + + let mut ty_to_fn: FxHashMap, Option)> = FxHashMap(); + + for (orig_p, p) in clean_where_predicates { + match p { + WherePredicate::BoundPredicate { ty, mut bounds } => { + // Writing a projection trait bound of the form + // ::Name : ?Sized + // is illegal, because ?Sized bounds can only + // be written in the (here, nonexistent) definition + // of the type.
+ // Therefore, we make sure that we never add a ?Sized + // bound for projections + match &ty { + &Type::QPath { .. } => { + has_sized.insert(ty.clone()); + } + _ => {} + } + + if bounds.is_empty() { + continue; + } + + let mut for_generics = self.extract_for_generics(tcx, orig_p.clone()); + + assert!(bounds.len() == 1); + let mut b = bounds.pop().unwrap(); + + if b.is_sized_bound(self.cx) { + has_sized.insert(ty.clone()); + } else if !b.get_trait_type() + .and_then(|t| { + ty_to_traits + .get(&ty) + .map(|bounds| bounds.contains(&strip_type(t.clone()))) + }) + .unwrap_or(false) + { + // If we've already added a projection bound for the same type, don't add + // this, as it would be a duplicate + + // Handle any 'Fn/FnOnce/FnMut' bounds specially, + // as we want to combine them with any 'Output' qpaths + // later + + let is_fn = match &mut b { + &mut TyParamBound::TraitBound(ref mut p, _) => { + // Insert regions into the for_generics hash map first, to ensure + // that we don't end up with duplicate bounds (e.g. for<'b, 'b>) + for_generics.extend(p.generic_params.clone()); + p.generic_params = for_generics.into_iter().collect(); + self.is_fn_ty(&tcx, &p.trait_) + } + _ => false, + }; + + let poly_trait = b.get_poly_trait().unwrap(); + + if is_fn { + ty_to_fn + .entry(ty.clone()) + .and_modify(|e| *e = (Some(poly_trait.clone()), e.1.clone())) + .or_insert(((Some(poly_trait.clone())), None)); + + ty_to_bounds + .entry(ty.clone()) + .or_insert_with(|| FxHashSet()); + } else { + ty_to_bounds + .entry(ty.clone()) + .or_insert_with(|| FxHashSet()) + .insert(b.clone()); + } + } + } + WherePredicate::RegionPredicate { lifetime, bounds } => { + lifetime_to_bounds + .entry(lifetime) + .or_insert_with(|| FxHashSet()) + .extend(bounds); + } + WherePredicate::EqPredicate { lhs, rhs } => { + match &lhs { + &Type::QPath { + name: ref left_name, + ref self_type, + ref trait_, + } => { + let ty = &*self_type; + match **trait_ { + Type::ResolvedPath { + path: ref trait_path, + ref typarams, + ref did, + ref is_generic, + } => { + let mut new_trait_path = trait_path.clone(); + + if self.is_fn_ty(&tcx, trait_) && left_name == FN_OUTPUT_NAME { + ty_to_fn + .entry(*ty.clone()) + .and_modify(|e| *e = (e.0.clone(), Some(rhs.clone()))) + .or_insert((None, Some(rhs))); + continue; + } + + // FIXME: Remove this scope when NLL lands + { + let params = + &mut new_trait_path.segments.last_mut().unwrap().params; + + match params { + // Convert somethiung like ' = u8' + // to 'T: Iterator' + &mut PathParameters::AngleBracketed { + ref mut bindings, + .. + } => { + bindings.push(TypeBinding { + name: left_name.clone(), + ty: rhs, + }); + } + &mut PathParameters::Parenthesized { .. } => { + existing_predicates.push( + WherePredicate::EqPredicate { + lhs: lhs.clone(), + rhs, + }, + ); + continue; // If something other than a Fn ends up + // with parenthesis, leave it alone + } + } + } + + let bounds = ty_to_bounds + .entry(*ty.clone()) + .or_insert_with(|| FxHashSet()); + + bounds.insert(TyParamBound::TraitBound( + PolyTrait { + trait_: Type::ResolvedPath { + path: new_trait_path, + typarams: typarams.clone(), + did: did.clone(), + is_generic: *is_generic, + }, + generic_params: Vec::new(), + }, + hir::TraitBoundModifier::None, + )); + + // Remove any existing 'plain' bound (e.g. 'T: Iterator`) so + // that we don't see a + // duplicate bound like `T: Iterator + Iterator` + // on the docs page. 
+ bounds.remove(&TyParamBound::TraitBound( + PolyTrait { + trait_: *trait_.clone(), + generic_params: Vec::new(), + }, + hir::TraitBoundModifier::None, + )); + // Avoid creating any new duplicate bounds later in the outer + // loop + ty_to_traits + .entry(*ty.clone()) + .or_insert_with(|| FxHashSet()) + .insert(*trait_.clone()); + } + _ => panic!("Unexpected trait {:?} for {:?}", trait_, did), + } + } + _ => panic!("Unexpected LHS {:?} for {:?}", lhs, did), + } + } + }; + } + + let final_bounds = self.make_final_bounds(ty_to_bounds, ty_to_fn, lifetime_to_bounds); + + existing_predicates.extend(final_bounds); + + for p in generic_params.iter_mut() { + match p { + &mut GenericParam::Type(ref mut ty) => { + // We never want something like 'impl' + ty.default.take(); + + let generic_ty = Type::Generic(ty.name.clone()); + + if !has_sized.contains(&generic_ty) { + ty.bounds.insert(0, TyParamBound::maybe_sized(self.cx)); + } + } + _ => {} + } + } + + Generics { + params: generic_params, + where_predicates: existing_predicates, + } + } + + fn is_fn_ty(&self, tcx: &TyCtxt, ty: &Type) -> bool { + match &ty { + &&Type::ResolvedPath { ref did, .. } => { + *did == tcx.require_lang_item(lang_items::FnTraitLangItem) + || *did == tcx.require_lang_item(lang_items::FnMutTraitLangItem) + || *did == tcx.require_lang_item(lang_items::FnOnceTraitLangItem) + } + _ => false, + } + } + + // This is an ugly hack, but it's the simplest way to handle synthetic impls without greatly + // refactoring either librustdoc or librustc. In particular, allowing new DefIds to be + // registered after the AST is constructed would require storing the defid mapping in a + // RefCell, decreasing the performance for normal compilation for very little gain. + // + // Instead, we construct 'fake' def ids, which start immediately after the last DefId in + // DefIndexAddressSpace::Low. 
In the Debug impl for clean::Item, we explicitly check for fake + // def ids, as we'll end up with a panic if we use the DefId Debug impl for fake DefIds + fn next_def_id(&self, crate_num: CrateNum) -> DefId { + let start_def_id = { + let next_id = if crate_num == LOCAL_CRATE { + self.cx + .tcx + .hir + .definitions() + .def_path_table() + .next_id(DefIndexAddressSpace::Low) + } else { + self.cx + .cstore + .def_path_table(crate_num) + .next_id(DefIndexAddressSpace::Low) + }; + + DefId { + krate: crate_num, + index: next_id, + } + }; + + let mut fake_ids = self.cx.fake_def_ids.borrow_mut(); + + let def_id = fake_ids.entry(crate_num).or_insert(start_def_id).clone(); + fake_ids.insert( + crate_num, + DefId { + krate: crate_num, + index: DefIndex::from_array_index( + def_id.index.as_array_index() + 1, + def_id.index.address_space(), + ), + }, + ); + + MAX_DEF_ID.with(|m| { + m.borrow_mut() + .entry(def_id.krate.clone()) + .or_insert(start_def_id); + }); + + self.cx.all_fake_def_ids.borrow_mut().insert(def_id); + + def_id.clone() + } +} + +// Replaces all ReVars in a type with ty::Region's, using the provided map +struct RegionReplacer<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { + vid_to_region: &'a FxHashMap>, + tcx: TyCtxt<'a, 'gcx, 'tcx>, +} + +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { + self.tcx + } + + fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> { + (match r { + &ty::ReVar(vid) => self.vid_to_region.get(&vid).cloned(), + _ => None, + }).unwrap_or_else(|| r.super_fold_with(self)) + } +} diff --git a/src/librustdoc/clean/cfg.rs b/src/librustdoc/clean/cfg.rs index 5eb3e38d5b371..a769771f8aa88 100644 --- a/src/librustdoc/clean/cfg.rs +++ b/src/librustdoc/clean/cfg.rs @@ -25,7 +25,7 @@ use syntax_pos::Span; use html::escape::Escape; -#[derive(Clone, RustcEncodable, RustcDecodable, Debug, PartialEq)] +#[derive(Clone, RustcEncodable, RustcDecodable, Debug, PartialEq, Eq, Hash)] pub enum Cfg { /// Accepts all configurations. 
True, diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 9aba399b3b09f..d4233309627f5 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -12,8 +12,8 @@ use std::collections::BTreeMap; use std::io; -use std::iter::once; use std::rc::Rc; +use std::iter::once; use syntax::ast; use rustc::hir; @@ -25,7 +25,7 @@ use rustc::util::nodemap::FxHashSet; use core::{DocContext, DocAccessLevels}; use doctree; -use clean::{self, GetDefId}; +use clean::{self, GetDefId, get_auto_traits_with_def_id}; use super::Clean; @@ -50,7 +50,7 @@ pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name) let inner = match def { Def::Trait(did) => { record_extern_fqn(cx, did, clean::TypeKind::Trait); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::TraitItem(build_external_trait(cx, did)) } Def::Fn(did) => { @@ -59,27 +59,27 @@ pub fn try_inline(cx: &DocContext, def: Def, name: ast::Name) } Def::Struct(did) => { record_extern_fqn(cx, did, clean::TypeKind::Struct); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::StructItem(build_struct(cx, did)) } Def::Union(did) => { record_extern_fqn(cx, did, clean::TypeKind::Union); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::UnionItem(build_union(cx, did)) } Def::TyAlias(did) => { record_extern_fqn(cx, did, clean::TypeKind::Typedef); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::TypedefItem(build_type_alias(cx, did), false) } Def::Enum(did) => { record_extern_fqn(cx, did, clean::TypeKind::Enum); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, true)); clean::EnumItem(build_enum(cx, did)) } Def::TyForeign(did) => { record_extern_fqn(cx, did, clean::TypeKind::Foreign); - ret.extend(build_impls(cx, did)); + ret.extend(build_impls(cx, did, false)); clean::ForeignTypeItem } // Never inline enum variants but leave them shown as re-exports. @@ -125,6 +125,11 @@ pub fn load_attrs(cx: &DocContext, did: DefId) -> clean::Attributes { /// These names are used later on by HTML rendering to generate things like /// source links back to the original item. 
pub fn record_extern_fqn(cx: &DocContext, did: DefId, kind: clean::TypeKind) { + if did.is_local() { + debug!("record_extern_fqn(did={:?}, kind+{:?}): def_id is local, aborting", did, kind); + return; + } + let crate_name = cx.tcx.crate_name(did.krate).to_string(); let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| { // extern blocks have an empty name @@ -144,6 +149,7 @@ pub fn record_extern_fqn(cx: &DocContext, did: DefId, kind: clean::TypeKind) { } pub fn build_external_trait(cx: &DocContext, did: DefId) -> clean::Trait { + let auto_trait = cx.tcx.trait_def(did).has_auto_impl; let trait_items = cx.tcx.associated_items(did).map(|item| item.clean(cx)).collect(); let predicates = cx.tcx.predicates_of(did); let generics = (cx.tcx.generics_of(did), &predicates).clean(cx); @@ -152,6 +158,7 @@ pub fn build_external_trait(cx: &DocContext, did: DefId) -> clean::Trait { let is_spotlight = load_attrs(cx, did).has_doc_flag("spotlight"); let is_auto = cx.tcx.trait_is_auto(did); clean::Trait { + auto: auto_trait, unsafety: cx.tcx.trait_def(did).unsafety, generics, items: trait_items, @@ -227,7 +234,7 @@ fn build_type_alias(cx: &DocContext, did: DefId) -> clean::Typedef { } } -pub fn build_impls(cx: &DocContext, did: DefId) -> Vec { +pub fn build_impls(cx: &DocContext, did: DefId, auto_traits: bool) -> Vec { let tcx = cx.tcx; let mut impls = Vec::new(); @@ -235,6 +242,16 @@ pub fn build_impls(cx: &DocContext, did: DefId) -> Vec { build_impl(cx, did, &mut impls); } + if auto_traits { + let auto_impls = get_auto_traits_with_def_id(cx, did); + let mut renderinfo = cx.renderinfo.borrow_mut(); + + let new_impls: Vec = auto_impls.into_iter() + .filter(|i| renderinfo.inlined.insert(i.def_id)).collect(); + + impls.extend(new_impls); + } + // If this is the first time we've inlined something from another crate, then // we inline *all* impls from all the crates into this crate. 
Note that there's // currently no way for us to filter this based on type, and we likely need @@ -347,13 +364,14 @@ pub fn build_impl(cx: &DocContext, did: DefId, ret: &mut Vec) { ret.push(clean::Item { inner: clean::ImplItem(clean::Impl { - unsafety: hir::Unsafety::Normal, // FIXME: this should be decoded + unsafety: hir::Unsafety::Normal, + generics: (tcx.generics_of(did), &predicates).clean(cx), provided_trait_methods: provided, trait_, for_, - generics: (tcx.generics_of(did), &predicates).clean(cx), items: trait_items, polarity: Some(polarity.clean(cx)), + synthetic: false, }), source: tcx.def_span(did).clean(cx), name: None, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index 66b5f3b5ea366..0d7b66cfc54c5 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -26,31 +26,41 @@ use syntax::codemap::Spanned; use syntax::feature_gate::UnstableFeatures; use syntax::ptr::P; use syntax::symbol::keywords; +use syntax::symbol::Symbol; use syntax_pos::{self, DUMMY_SP, Pos, FileName}; use rustc::middle::const_val::ConstVal; use rustc::middle::privacy::AccessLevels; use rustc::middle::resolve_lifetime as rl; +use rustc::ty::fold::TypeFolder; use rustc::middle::lang_items; -use rustc::hir::def::{Def, CtorKind}; -use rustc::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc::hir::{self, HirVec}; +use rustc::hir::def::{self, Def, CtorKind}; +use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX, LOCAL_CRATE}; +use rustc::hir::def_id::DefIndexAddressSpace; +use rustc::traits; use rustc::ty::subst::Substs; -use rustc::ty::{self, Ty, AdtKind}; +use rustc::ty::{self, TyCtxt, Region, RegionVid, Ty, AdtKind}; use rustc::middle::stability; use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_typeck::hir_ty_to_ty; - -use rustc::hir; +use rustc::infer::{InferCtxt, RegionObligation}; +use rustc::infer::region_constraints::{RegionConstraintData, Constraint}; +use rustc::traits::*; +use std::collections::hash_map::Entry; +use std::collections::VecDeque; +use std::fmt; use rustc_const_math::ConstInt; use std::default::Default; use std::{mem, slice, vec}; -use std::iter::FromIterator; +use std::iter::{FromIterator, once}; use std::rc::Rc; +use std::cell::RefCell; use std::sync::Arc; use std::u32; -use core::DocContext; +use core::{self, DocContext}; use doctree; use visit_ast; use html::item_type::ItemType; @@ -59,8 +69,14 @@ use html::markdown::markdown_links; pub mod inline; pub mod cfg; mod simplify; +mod auto_trait; use self::cfg::Cfg; +use self::auto_trait::AutoTraitFinder; + +thread_local!(static MAX_DEF_ID: RefCell> = RefCell::new(FxHashMap())); + +const FN_OUTPUT_NAME: &'static str = "Output"; // extract the stability index for a node from tcx, if possible fn get_stability(cx: &DocContext, def_id: DefId) -> Option { @@ -282,7 +298,7 @@ impl Clean for CrateNum { /// Anything with a source location and set of attributes and, optionally, a /// name. That is, anything that can be documented. This doesn't correspond /// directly to the AST's concept of an item; it's a strict superset. 
-#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable)] pub struct Item { /// Stringified span pub source: Span, @@ -296,6 +312,26 @@ pub struct Item { pub deprecation: Option, } +impl fmt::Debug for Item { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + + let fake = MAX_DEF_ID.with(|m| m.borrow().get(&self.def_id.krate) + .map(|id| self.def_id >= *id).unwrap_or(false)); + let def_id: &fmt::Debug = if fake { &"**FAKE**" } else { &self.def_id }; + + fmt.debug_struct("Item") + .field("source", &self.source) + .field("name", &self.name) + .field("attrs", &self.attrs) + .field("inner", &self.inner) + .field("visibility", &self.visibility) + .field("def_id", def_id) + .field("stability", &self.stability) + .field("deprecation", &self.deprecation) + .finish() + } +} + impl Item { /// Finds the `doc` attribute as a NameValue and returns the corresponding /// value found. @@ -492,9 +528,9 @@ impl Clean for doctree::Module { let mut items: Vec = vec![]; items.extend(self.extern_crates.iter().map(|x| x.clean(cx))); items.extend(self.imports.iter().flat_map(|x| x.clean(cx))); - items.extend(self.structs.iter().map(|x| x.clean(cx))); - items.extend(self.unions.iter().map(|x| x.clean(cx))); - items.extend(self.enums.iter().map(|x| x.clean(cx))); + items.extend(self.structs.iter().flat_map(|x| x.clean(cx))); + items.extend(self.unions.iter().flat_map(|x| x.clean(cx))); + items.extend(self.enums.iter().flat_map(|x| x.clean(cx))); items.extend(self.fns.iter().map(|x| x.clean(cx))); items.extend(self.foreigns.iter().flat_map(|x| x.clean(cx))); items.extend(self.mods.iter().map(|x| x.clean(cx))); @@ -601,7 +637,7 @@ impl> NestedAttributesExt for I { /// Included files are kept separate from inline doc comments so that proper line-number /// information can be given when a doctest fails. Sugared doc comments and "raw" doc comments are /// kept separate because of issue #42760. -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum DocFragment { // FIXME #44229 (misdreavus): sugared and raw doc comments can be brought back together once // hoedown is completely removed from rustdoc. @@ -653,7 +689,7 @@ impl<'a> FromIterator<&'a DocFragment> for String { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug, Default)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)] pub struct Attributes { pub doc_strings: Vec, pub other_attrs: Vec, @@ -1177,7 +1213,7 @@ impl Clean for [ast::Attribute] { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct TyParam { pub name: String, pub did: DefId, @@ -1212,7 +1248,7 @@ impl<'tcx> Clean for ty::TypeParameterDef { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum TyParamBound { RegionBound(Lifetime), TraitBound(PolyTrait, hir::TraitBoundModifier) @@ -1245,6 +1281,21 @@ impl TyParamBound { } false } + + fn get_poly_trait(&self) -> Option { + if let TyParamBound::TraitBound(ref p, _) = *self { + return Some(p.clone()) + } + None + } + + fn get_trait_type(&self) -> Option { + + if let TyParamBound::TraitBound(PolyTrait { ref trait_, .. 
}, _) = *self { + return Some(trait_.clone()); + } + None + } } impl Clean for hir::TyParamBound { @@ -1363,7 +1414,7 @@ impl<'tcx> Clean>> for Substs<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Lifetime(String); impl Lifetime { @@ -1380,17 +1431,19 @@ impl Lifetime { impl Clean for hir::Lifetime { fn clean(&self, cx: &DocContext) -> Lifetime { - let hir_id = cx.tcx.hir.node_to_hir_id(self.id); - let def = cx.tcx.named_region(hir_id); - match def { - Some(rl::Region::EarlyBound(_, node_id, _)) | - Some(rl::Region::LateBound(_, node_id, _)) | - Some(rl::Region::Free(_, node_id)) => { - if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() { - return lt; + if self.id != ast::DUMMY_NODE_ID { + let hir_id = cx.tcx.hir.node_to_hir_id(self.id); + let def = cx.tcx.named_region(hir_id); + match def { + Some(rl::Region::EarlyBound(_, node_id, _)) | + Some(rl::Region::LateBound(_, node_id, _)) | + Some(rl::Region::Free(_, node_id)) => { + if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() { + return lt; + } } + _ => {} } - _ => {} } Lifetime(self.name.name().to_string()) } @@ -1437,7 +1490,7 @@ impl Clean> for ty::RegionKind { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum WherePredicate { BoundPredicate { ty: Type, bounds: Vec }, RegionPredicate { lifetime: Lifetime, bounds: Vec}, @@ -1562,7 +1615,7 @@ impl<'tcx> Clean for ty::ProjectionTy<'tcx> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum GenericParam { Lifetime(Lifetime), Type(TyParam), @@ -1577,7 +1630,8 @@ impl Clean for hir::GenericParam { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug, Default)] +// maybe use a Generic enum and use Vec? 
+#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Default, Hash)] pub struct Generics { pub params: Vec, pub where_predicates: Vec, @@ -1747,7 +1801,7 @@ impl Clean for doctree::Function { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct FnDecl { pub inputs: Arguments, pub output: FunctionRetTy, @@ -1765,7 +1819,7 @@ impl FnDecl { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Arguments { pub values: Vec, } @@ -1840,7 +1894,7 @@ impl<'a, 'tcx> Clean for (DefId, ty::PolyFnSig<'tcx>) { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Argument { pub type_: Type, pub name: String, @@ -1870,7 +1924,7 @@ impl Argument { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum FunctionRetTy { Return(Type), DefaultReturn, @@ -1896,6 +1950,7 @@ impl GetDefId for FunctionRetTy { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Trait { + pub auto: bool, pub unsafety: hir::Unsafety, pub items: Vec, pub generics: Generics, @@ -1917,6 +1972,7 @@ impl Clean for doctree::Trait { stability: self.stab.clean(cx), deprecation: self.depr.clean(cx), inner: TraitItem(Trait { + auto: self.is_auto.clean(cx), unsafety: self.unsafety, items: self.items.clean(cx), generics: self.generics.clean(cx), @@ -2158,7 +2214,7 @@ impl<'tcx> Clean for ty::AssociatedItem { } /// A trait reference, which may have higher ranked lifetimes. -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct PolyTrait { pub trait_: Type, pub generic_params: Vec, @@ -2167,7 +2223,7 @@ pub struct PolyTrait { /// A representation of a Type suitable for hyperlinking purposes. Ideally one can get the original /// type out of the AST/TyCtxt given one of these, if more information is needed. Most importantly /// it does not preserve mutability or boxes. 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum Type { /// structs/enums/traits (most that'd be an hir::TyPath) ResolvedPath { @@ -2782,10 +2838,13 @@ pub struct Union { pub fields_stripped: bool, } -impl Clean for doctree::Struct { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean> for doctree::Struct { + fn clean(&self, cx: &DocContext) -> Vec { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2798,14 +2857,19 @@ impl Clean for doctree::Struct { fields: self.fields.clean(cx), fields_stripped: false, }), - } + }); + + ret } } -impl Clean for doctree::Union { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean> for doctree::Union { + fn clean(&self, cx: &DocContext) -> Vec { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2818,7 +2882,9 @@ impl Clean for doctree::Union { fields: self.fields.clean(cx), fields_stripped: false, }), - } + }); + + ret } } @@ -2849,10 +2915,13 @@ pub struct Enum { pub variants_stripped: bool, } -impl Clean for doctree::Enum { - fn clean(&self, cx: &DocContext) -> Item { - Item { - name: Some(self.name.clean(cx)), +impl Clean> for doctree::Enum { + fn clean(&self, cx: &DocContext) -> Vec { + let name = self.name.clean(cx); + let mut ret = get_auto_traits_with_node_id(cx, self.id, name.clone()); + + ret.push(Item { + name: Some(name), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.tcx.hir.local_def_id(self.id), @@ -2864,7 +2933,9 @@ impl Clean for doctree::Enum { generics: self.generics.clean(cx), variants_stripped: false, }), - } + }); + + ret } } @@ -2989,7 +3060,7 @@ impl Clean for syntax_pos::Span { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct Path { pub global: bool, pub def: Def, @@ -3006,7 +3077,7 @@ impl Path { params: PathParameters::AngleBracketed { lifetimes: Vec::new(), types: Vec::new(), - bindings: Vec::new() + bindings: Vec::new(), } }] } @@ -3027,7 +3098,7 @@ impl Clean for hir::Path { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub enum PathParameters { AngleBracketed { lifetimes: Vec, @@ -3062,7 +3133,7 @@ impl Clean for hir::PathParameters { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct PathSegment { pub name: String, pub params: PathParameters, @@ -3077,6 +3148,50 @@ impl Clean for hir::PathSegment { } } +fn strip_type(ty: Type) -> Type { + match ty { + Type::ResolvedPath { path, typarams, did, is_generic } => { + Type::ResolvedPath { path: strip_path(&path), typarams, did, is_generic } + } + Type::Tuple(inner_tys) => { + Type::Tuple(inner_tys.iter().map(|t| strip_type(t.clone())).collect()) + } + Type::Slice(inner_ty) => Type::Slice(Box::new(strip_type(*inner_ty))), + 
Type::Array(inner_ty, s) => Type::Array(Box::new(strip_type(*inner_ty)), s), + Type::Unique(inner_ty) => Type::Unique(Box::new(strip_type(*inner_ty))), + Type::RawPointer(m, inner_ty) => Type::RawPointer(m, Box::new(strip_type(*inner_ty))), + Type::BorrowedRef { lifetime, mutability, type_ } => { + Type::BorrowedRef { lifetime, mutability, type_: Box::new(strip_type(*type_)) } + } + Type::QPath { name, self_type, trait_ } => { + Type::QPath { + name, + self_type: Box::new(strip_type(*self_type)), trait_: Box::new(strip_type(*trait_)) + } + } + _ => ty + } +} + +fn strip_path(path: &Path) -> Path { + let segments = path.segments.iter().map(|s| { + PathSegment { + name: s.name.clone(), + params: PathParameters::AngleBracketed { + lifetimes: Vec::new(), + types: Vec::new(), + bindings: Vec::new(), + } + } + }).collect(); + + Path { + global: path.global, + def: path.def.clone(), + segments, + } +} + fn qpath_to_string(p: &hir::QPath) -> String { let segments = match *p { hir::QPath::Resolved(_, ref path) => &path.segments, @@ -3125,7 +3240,7 @@ impl Clean for doctree::Typedef { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)] pub struct BareFunctionDecl { pub unsafety: hir::Unsafety, pub generic_params: Vec, @@ -3198,7 +3313,7 @@ impl Clean for doctree::Constant { } } -#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Copy)] +#[derive(Debug, Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Hash)] pub enum Mutability { Mutable, Immutable, @@ -3213,7 +3328,7 @@ impl Clean for hir::Mutability { } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Copy, Debug)] +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Copy, Debug, Hash)] pub enum ImplPolarity { Positive, Negative, @@ -3237,6 +3352,20 @@ pub struct Impl { pub for_: Type, pub items: Vec, pub polarity: Option, + pub synthetic: bool, +} + +pub fn get_auto_traits_with_node_id(cx: &DocContext, id: ast::NodeId, name: String) -> Vec { + let finder = AutoTraitFinder { cx }; + finder.get_with_node_id(id, name) +} + +pub fn get_auto_traits_with_def_id(cx: &DocContext, id: DefId) -> Vec { + let finder = AutoTraitFinder { + cx, + }; + + finder.get_with_def_id(id) } impl Clean> for doctree::Impl { @@ -3274,7 +3403,8 @@ impl Clean> for doctree::Impl { for_: self.for_.clean(cx), items, polarity: Some(self.polarity.clean(cx)), - }), + synthetic: false, + }) }); ret } @@ -3294,7 +3424,7 @@ fn build_deref_target_impls(cx: &DocContext, let primitive = match *target { ResolvedPath { did, .. } if did.is_local() => continue, ResolvedPath { did, .. } => { - ret.extend(inline::build_impls(cx, did)); + ret.extend(inline::build_impls(cx, did, true)); continue } _ => match target.primitive_type() { @@ -3514,7 +3644,11 @@ fn print_const_expr(cx: &DocContext, body: hir::BodyId) -> String { fn resolve_type(cx: &DocContext, path: Path, id: ast::NodeId) -> Type { - debug!("resolve_type({:?},{:?})", path, id); + if id == ast::DUMMY_NODE_ID { + debug!("resolve_type({:?})", path); + } else { + debug!("resolve_type({:?},{:?})", path, id); + } let is_generic = match path.def { Def::PrimTy(p) => match p { @@ -3669,7 +3803,7 @@ impl Clean for attr::Deprecation { } /// An equality constraint on an associated type, e.g. 
`A=Bar` in `Foo` -#[derive(Clone, PartialEq, RustcDecodable, RustcEncodable, Debug)] +#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)] pub struct TypeBinding { pub name: String, pub ty: Type @@ -3683,3 +3817,182 @@ impl Clean for hir::TypeBinding { } } } + +pub fn def_id_to_path(cx: &DocContext, did: DefId, name: Option) -> Vec { + let crate_name = name.unwrap_or_else(|| cx.tcx.crate_name(did.krate).to_string()); + let relative = cx.tcx.def_path(did).data.into_iter().filter_map(|elem| { + // extern blocks have an empty name + let s = elem.data.to_string(); + if !s.is_empty() { + Some(s) + } else { + None + } + }); + once(crate_name).chain(relative).collect() +} + +// Start of code copied from rust-clippy + +pub fn get_trait_def_id(tcx: &TyCtxt, path: &[&str], use_local: bool) -> Option { + if use_local { + path_to_def_local(tcx, path) + } else { + path_to_def(tcx, path) + } +} + +pub fn path_to_def_local(tcx: &TyCtxt, path: &[&str]) -> Option { + let krate = tcx.hir.krate(); + let mut items = krate.module.item_ids.clone(); + let mut path_it = path.iter().peekable(); + + loop { + let segment = match path_it.next() { + Some(segment) => segment, + None => return None, + }; + + for item_id in mem::replace(&mut items, HirVec::new()).iter() { + let item = tcx.hir.expect_item(item_id.id); + if item.name == *segment { + if path_it.peek().is_none() { + return Some(tcx.hir.local_def_id(item_id.id)) + } + + items = match &item.node { + &hir::ItemMod(ref m) => m.item_ids.clone(), + _ => panic!("Unexpected item {:?} in path {:?} path") + }; + break; + } + } + } +} + +pub fn path_to_def(tcx: &TyCtxt, path: &[&str]) -> Option { + let crates = tcx.crates(); + + let krate = crates + .iter() + .find(|&&krate| tcx.crate_name(krate) == path[0]); + + if let Some(krate) = krate { + let krate = DefId { + krate: *krate, + index: CRATE_DEF_INDEX, + }; + let mut items = tcx.item_children(krate); + let mut path_it = path.iter().skip(1).peekable(); + + loop { + let segment = match path_it.next() { + Some(segment) => segment, + None => return None, + }; + + for item in mem::replace(&mut items, Rc::new(vec![])).iter() { + if item.ident.name == *segment { + if path_it.peek().is_none() { + return match item.def { + def::Def::Trait(did) => Some(did), + _ => None, + } + } + + items = tcx.item_children(item.def.def_id()); + break; + } + } + } + } else { + None + } +} + +fn get_path_for_type(tcx: TyCtxt, def_id: DefId, def_ctor: fn(DefId) -> Def) -> hir::Path { + struct AbsolutePathBuffer { + names: Vec, + } + + impl ty::item_path::ItemPathBuffer for AbsolutePathBuffer { + fn root_mode(&self) -> &ty::item_path::RootMode { + const ABSOLUTE: &'static ty::item_path::RootMode = &ty::item_path::RootMode::Absolute; + ABSOLUTE + } + + fn push(&mut self, text: &str) { + self.names.push(text.to_owned()); + } + } + + let mut apb = AbsolutePathBuffer { names: vec![] }; + + tcx.push_item_path(&mut apb, def_id); + + hir::Path { + span: DUMMY_SP, + def: def_ctor(def_id), + segments: hir::HirVec::from_vec(apb.names.iter().map(|s| hir::PathSegment { + name: ast::Name::intern(&s), + parameters: None, + infer_types: false, + }).collect()) + } +} + +// End of code copied from rust-clippy + + +#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)] +enum RegionTarget<'tcx> { + Region(Region<'tcx>), + RegionVid(RegionVid) +} + +#[derive(Default, Debug, Clone)] +struct RegionDeps<'tcx> { + larger: FxHashSet>, + smaller: FxHashSet> +} + +#[derive(Eq, PartialEq, Hash, Debug)] +enum SimpleBound { + RegionBound(Lifetime), + 
TraitBound(Vec, Vec, Vec, hir::TraitBoundModifier) +} + +enum AutoTraitResult { + ExplicitImpl, + PositiveImpl(Generics), + NegativeImpl, +} + +impl AutoTraitResult { + fn is_auto(&self) -> bool { + match *self { + AutoTraitResult::PositiveImpl(_) | AutoTraitResult::NegativeImpl => true, + _ => false, + } + } +} + +impl From for SimpleBound { + fn from(bound: TyParamBound) -> Self { + match bound.clone() { + TyParamBound::RegionBound(l) => SimpleBound::RegionBound(l), + TyParamBound::TraitBound(t, mod_) => match t.trait_ { + Type::ResolvedPath { path, typarams, .. } => { + SimpleBound::TraitBound(path.segments, + typarams + .map_or_else(|| Vec::new(), |v| v.iter() + .map(|p| SimpleBound::from(p.clone())) + .collect()), + t.generic_params, + mod_) + } + _ => panic!("Unexpected bound {:?}", bound), + } + } + } +} diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 0674a0b5a3b10..c95d818b75010 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -11,13 +11,14 @@ use rustc_lint; use rustc_driver::{self, driver, target_features, abort_on_err}; use rustc::session::{self, config}; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{DefId, CrateNum}; use rustc::hir::def::Def; +use rustc::middle::cstore::CrateStore; use rustc::middle::privacy::AccessLevels; use rustc::ty::{self, TyCtxt, AllArenas}; use rustc::hir::map as hir_map; use rustc::lint; -use rustc::util::nodemap::FxHashMap; +use rustc::util::nodemap::{FxHashMap, FxHashSet}; use rustc_resolve as resolve; use rustc_metadata::creader::CrateLoader; use rustc_metadata::cstore::CStore; @@ -49,6 +50,8 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> { pub resolver: &'a RefCell>, /// The stack of module NodeIds up till this point pub mod_ids: RefCell>, + pub crate_name: Option, + pub cstore: Rc, pub populated_all_crate_impls: Cell, // Note that external items for which `doc(hidden)` applies to are shown as // non-reachable while local items aren't. 
This is because we're reusing @@ -69,6 +72,11 @@ pub struct DocContext<'a, 'tcx: 'a, 'rcx: 'a> { pub ty_substs: RefCell>, /// Table node id of lifetime parameter definition -> substituted lifetime pub lt_substs: RefCell>, + pub send_trait: Option, + pub fake_def_ids: RefCell>, + pub all_fake_def_ids: RefCell>, + /// Maps (type_id, trait_id) -> auto trait impl + pub generated_synthetics: RefCell> } impl<'a, 'tcx, 'rcx> DocContext<'a, 'tcx, 'rcx> { @@ -111,6 +119,7 @@ pub fn run_core(search_paths: SearchPaths, triple: Option, maybe_sysroot: Option, allow_warnings: bool, + crate_name: Option, force_unstable_if_unmarked: bool, render_type: RenderType) -> (clean::Crate, RenderInfo) { @@ -235,9 +244,17 @@ pub fn run_core(search_paths: SearchPaths, .collect() }; + let send_trait = if crate_name == Some("core".to_string()) { + clean::get_trait_def_id(&tcx, &["marker", "Send"], true) + } else { + clean::get_trait_def_id(&tcx, &["core", "marker", "Send"], false) + }; + let ctxt = DocContext { tcx, resolver: &resolver, + crate_name, + cstore: cstore.clone(), populated_all_crate_impls: Cell::new(false), access_levels: RefCell::new(access_levels), external_traits: Default::default(), @@ -246,6 +263,10 @@ pub fn run_core(search_paths: SearchPaths, ty_substs: Default::default(), lt_substs: Default::default(), mod_ids: Default::default(), + send_trait: send_trait, + fake_def_ids: RefCell::new(FxHashMap()), + all_fake_def_ids: RefCell::new(FxHashSet()), + generated_synthetics: RefCell::new(FxHashSet()), }; debug!("crate: {:?}", tcx.hir.krate()); diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 430236f30c4ef..413e5623118ac 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -210,6 +210,7 @@ pub struct Trait { pub depr: Option, } +#[derive(Debug)] pub struct Impl { pub unsafety: hir::Unsafety, pub polarity: hir::ImplPolarity, diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 1fb8f106cac03..45d0cf09f0030 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -37,7 +37,7 @@ pub use self::ExternalLocation::*; use std::borrow::Cow; use std::cell::RefCell; use std::cmp::Ordering; -use std::collections::{BTreeMap, HashSet}; +use std::collections::{BTreeMap, HashSet, VecDeque}; use std::default::Default; use std::error; use std::fmt::{self, Display, Formatter, Write as FmtWrite}; @@ -276,6 +276,18 @@ pub struct Cache { /// generating explicit hyperlinks to other crates. pub external_paths: FxHashMap, ItemType)>, + /// Maps local def ids of exported types to fully qualified paths. + /// Unlike 'paths', this mapping ignores any renames that occur + /// due to 'use' statements. + /// + /// This map is used when writing out the special 'implementors' + /// javascript file. By using the exact path that the type + /// is declared with, we ensure that each path will be identical + /// to the path used if the corresponding type is inlined. By + /// doing this, we can detect duplicate impls on a trait page, and only display + /// the impl for the inlined type. + pub exact_paths: FxHashMap>, + /// This map contains information about all known traits of this crate. 
/// Implementations of a crate should inherit the documentation of the /// parent trait if no extra documentation is specified, and default methods @@ -328,6 +340,7 @@ pub struct RenderInfo { pub inlined: FxHashSet, pub external_paths: ::core::ExternalPaths, pub external_typarams: FxHashMap, + pub exact_paths: FxHashMap>, pub deref_trait_did: Option, pub deref_mut_trait_did: Option, pub owned_box_did: Option, @@ -456,7 +469,9 @@ fn init_ids() -> FxHashMap { "required-methods", "provided-methods", "implementors", + "synthetic-implementors", "implementors-list", + "synthetic-implementors-list", "methods", "deref-methods", "implementations", @@ -580,6 +595,7 @@ pub fn run(mut krate: clean::Crate, inlined: _, external_paths, external_typarams, + exact_paths, deref_trait_did, deref_mut_trait_did, owned_box_did, @@ -592,6 +608,7 @@ pub fn run(mut krate: clean::Crate, let mut cache = Cache { impls: FxHashMap(), external_paths, + exact_paths, paths: FxHashMap(), implementors: FxHashMap(), stack: Vec::new(), @@ -1030,7 +1047,10 @@ themePicker.onclick = function() {{ // should add it. if !imp.impl_item.def_id.is_local() { continue } have_impls = true; - write!(implementors, "{},", as_json(&imp.inner_impl().to_string())).unwrap(); + write!(implementors, "{{text:{},synthetic:{},types:{}}},", + as_json(&imp.inner_impl().to_string()), + imp.inner_impl().synthetic, + as_json(&collect_paths_for_type(imp.inner_impl().for_.clone()))).unwrap(); } implementors.push_str("];"); @@ -2050,8 +2070,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, items: &[clean::Item]) -> fmt::Result { document(w, cx, item)?; - let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()) - .collect::>(); + let mut indices = (0..items.len()).filter(|i| !items[*i].is_stripped()).collect::>(); // the order of item types in the listing fn reorder(ty: ItemType) -> u8 { @@ -2401,6 +2420,50 @@ fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, document(w, cx, it) } +fn render_implementor(cx: &Context, implementor: &Impl, w: &mut fmt::Formatter, + implementor_dups: &FxHashMap<&str, (DefId, bool)>) -> Result<(), fmt::Error> { + write!(w, "
  • ")?; + if let Some(l) = (Item { cx, item: &implementor.impl_item }).src_href() { + write!(w, "
    ")?; + write!(w, "[src]", + l, "goto source code")?; + write!(w, "
    ")?; + } + write!(w, "")?; + // If there's already another implementor that has the same abbridged name, use the + // full path, for example in `std::iter::ExactSizeIterator` + let use_absolute = match implementor.inner_impl().for_ { + clean::ResolvedPath { ref path, is_generic: false, .. } | + clean::BorrowedRef { + type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, + .. + } => implementor_dups[path.last_name()].1, + _ => false, + }; + fmt_impl_for_trait_page(&implementor.inner_impl(), w, use_absolute)?; + for it in &implementor.inner_impl().items { + if let clean::TypedefItem(ref tydef, _) = it.inner { + write!(w, " ")?; + assoc_type(w, it, &vec![], Some(&tydef.type_), AssocItemLink::Anchor(None))?; + write!(w, ";")?; + } + } + writeln!(w, "
  • ")?; + Ok(()) +} + +fn render_impls(cx: &Context, w: &mut fmt::Formatter, + traits: Vec<&&Impl>, + containing_item: &clean::Item) -> Result<(), fmt::Error> { + for i in &traits { + let did = i.trait_did().unwrap(); + let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); + render_impl(w, cx, i, assoc_link, + RenderMode::Normal, containing_item.stable_since(), true)?; + } + Ok(()) +} + fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, t: &clean::Trait) -> fmt::Result { let mut bounds = String::new(); @@ -2580,6 +2643,16 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
      "; + + let synthetic_impl_header = " +

      + Auto implementors +

      +
        + "; + + let mut synthetic_types = Vec::new(); + if let Some(implementors) = cache.implementors.get(&it.def_id) { // The DefId is for the first Type found with that name. The bool is // if any Types with the same name but different DefId have been found. @@ -2605,6 +2678,11 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, .partition::, _>(|i| i.inner_impl().for_.def_id() .map_or(true, |d| cache.paths.contains_key(&d))); + + let (synthetic, concrete) = local.iter() + .partition::, _>(|i| i.inner_impl().synthetic); + + if !foreign.is_empty() { write!(w, "

        @@ -2622,42 +2700,35 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, } write!(w, "{}", impl_header)?; + for implementor in concrete { + render_implementor(cx, implementor, w, &implementor_dups)?; + } + write!(w, "

      ")?; - for implementor in local { - write!(w, "
    • ")?; - if let Some(l) = (Item { cx, item: &implementor.impl_item }).src_href() { - write!(w, "
      ")?; - write!(w, "[src]", - l, "goto source code")?; - write!(w, "
      ")?; - } - write!(w, "")?; - // If there's already another implementor that has the same abbridged name, use the - // full path, for example in `std::iter::ExactSizeIterator` - let use_absolute = match implementor.inner_impl().for_ { - clean::ResolvedPath { ref path, is_generic: false, .. } | - clean::BorrowedRef { - type_: box clean::ResolvedPath { ref path, is_generic: false, .. }, - .. - } => implementor_dups[path.last_name()].1, - _ => false, - }; - fmt_impl_for_trait_page(&implementor.inner_impl(), w, use_absolute)?; - for it in &implementor.inner_impl().items { - if let clean::TypedefItem(ref tydef, _) = it.inner { - write!(w, " ")?; - assoc_type(w, it, &vec![], Some(&tydef.type_), AssocItemLink::Anchor(None))?; - write!(w, ";")?; - } + if t.auto { + write!(w, "{}", synthetic_impl_header)?; + for implementor in synthetic { + synthetic_types.extend( + collect_paths_for_type(implementor.inner_impl().for_.clone()) + ); + render_implementor(cx, implementor, w, &implementor_dups)?; } - writeln!(w, "
    • ")?; + write!(w, "
    ")?; } } else { // even without any implementations to write in, we still want the heading and list, so the // implementors javascript file pulled in below has somewhere to write the impls into write!(w, "{}", impl_header)?; + write!(w, "")?; + + if t.auto { + write!(w, "{}", synthetic_impl_header)?; + write!(w, "")?; + } } - write!(w, "")?; + write!(w, r#""#, + as_json(&synthetic_types))?; + write!(w, r#""#, @@ -3275,17 +3346,28 @@ fn render_assoc_items(w: &mut fmt::Formatter, }).is_some(); render_deref_methods(w, cx, impl_, containing_item, has_deref_mut)?; } + + let (synthetic, concrete) = traits + .iter() + .partition::, _>(|t| t.inner_impl().synthetic); + write!(w, "

    Trait Implementations

    +
    ")?; - for i in &traits { - let did = i.trait_did().unwrap(); - let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); - render_impl(w, cx, i, assoc_link, - RenderMode::Normal, containing_item.stable_since(), true)?; - } + render_impls(cx, w, concrete, containing_item)?; + write!(w, "
    ")?; + + write!(w, " +

    + Auto Trait Implementations +

    +
    + ")?; + render_impls(cx, w, synthetic, containing_item)?; + write!(w, "
    ")?; } Ok(()) } @@ -3786,32 +3868,48 @@ fn sidebar_assoc_items(it: &clean::Item) -> String { } } } - let mut links = HashSet::new(); - let ret = v.iter() - .filter_map(|i| { - let is_negative_impl = is_negative_impl(i.inner_impl()); - if let Some(ref i) = i.inner_impl().trait_ { - let i_display = format!("{:#}", i); - let out = Escape(&i_display); - let encoded = small_url_encode(&format!("{:#}", i)); - let generated = format!("{}{}", - encoded, - if is_negative_impl { "!" } else { "" }, - out); - if !links.contains(&generated) && links.insert(generated.clone()) { - Some(generated) + let format_impls = |impls: Vec<&Impl>| { + let mut links = HashSet::new(); + impls.iter() + .filter_map(|i| { + let is_negative_impl = is_negative_impl(i.inner_impl()); + if let Some(ref i) = i.inner_impl().trait_ { + let i_display = format!("{:#}", i); + let out = Escape(&i_display); + let encoded = small_url_encode(&format!("{:#}", i)); + let generated = format!("{}{}", + encoded, + if is_negative_impl { "!" } else { "" }, + out); + if links.insert(generated.clone()) { + Some(generated) + } else { + None + } } else { None } - } else { - None - } - }) - .collect::(); - if !ret.is_empty() { + }) + .collect::() + }; + + let (synthetic, concrete) = v + .iter() + .partition::, _>(|i| i.inner_impl().synthetic); + + let concrete_format = format_impls(concrete); + let synthetic_format = format_impls(synthetic); + + if !concrete_format.is_empty() { out.push_str("\ Trait Implementations"); - out.push_str(&format!("
    {}
    ", ret)); + out.push_str(&format!("
    {}
    ", concrete_format)); + } + + if !synthetic_format.is_empty() { + out.push_str("\ + Auto Trait Implementations"); + out.push_str(&format!("
    {}
    ", synthetic_format)); } } } @@ -3934,7 +4032,7 @@ fn sidebar_trait(fmt: &mut fmt::Formatter, it: &clean::Item, if let Some(implementors) = c.implementors.get(&it.def_id) { let res = implementors.iter() .filter(|i| i.inner_impl().for_.def_id() - .map_or(false, |d| !c.paths.contains_key(&d))) + .map_or(false, |d| !c.paths.contains_key(&d))) .filter_map(|i| { match extract_for_impl_name(&i.impl_item) { Some((ref name, ref url)) => { @@ -3955,6 +4053,10 @@ fn sidebar_trait(fmt: &mut fmt::Formatter, it: &clean::Item, } sidebar.push_str("Implementors"); + if t.auto { + sidebar.push_str("Auto Implementors"); + } sidebar.push_str(&sidebar_assoc_items(it)); @@ -4169,6 +4271,66 @@ fn get_index_type(clean_type: &clean::Type) -> Type { t } +/// Returns a list of all paths used in the type. +/// This is used to help deduplicate imported impls +/// for reexported types. If any of the contained +/// types are re-exported, we don't use the corresponding +/// entry from the js file, as inlining will have already +/// picked up the impl +fn collect_paths_for_type(first_ty: clean::Type) -> Vec { + let mut out = Vec::new(); + let mut visited = FxHashSet(); + let mut work = VecDeque::new(); + let cache = cache(); + + work.push_back(first_ty); + + while let Some(ty) = work.pop_front() { + if !visited.insert(ty.clone()) { + continue; + } + + match ty { + clean::Type::ResolvedPath { did, .. } => { + let get_extern = || cache.external_paths.get(&did).map(|s| s.0.clone()); + let fqp = cache.exact_paths.get(&did).cloned().or_else(get_extern); + + match fqp { + Some(path) => { + out.push(path.join("::")); + }, + _ => {} + }; + + }, + clean::Type::Tuple(tys) => { + work.extend(tys.into_iter()); + }, + clean::Type::Slice(ty) => { + work.push_back(*ty); + } + clean::Type::Array(ty, _) => { + work.push_back(*ty); + }, + clean::Type::Unique(ty) => { + work.push_back(*ty); + }, + clean::Type::RawPointer(_, ty) => { + work.push_back(*ty); + }, + clean::Type::BorrowedRef { type_, .. } => { + work.push_back(*type_); + }, + clean::Type::QPath { self_type, trait_, .. } => { + work.push_back(*self_type); + work.push_back(*trait_); + }, + _ => {} + } + }; + out +} + fn get_index_type_name(clean_type: &clean::Type, accept_generic: bool) -> Option { match *clean_type { clean::ResolvedPath { ref path, .. } => { diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index f688be89beebc..5c674cabde5d8 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -1563,14 +1563,31 @@ window.initSidebarItems = initSidebarItems; window.register_implementors = function(imp) { - var list = document.getElementById('implementors-list'); + var implementors = document.getElementById('implementors-list'); + var synthetic_implementors = document.getElementById('synthetic-implementors-list'); + var libs = Object.getOwnPropertyNames(imp); for (var i = 0; i < libs.length; ++i) { if (libs[i] === currentCrate) { continue; } var structs = imp[libs[i]]; + + struct_loop: for (var j = 0; j < structs.length; ++j) { + var struct = structs[j]; + + var list = struct.synthetic ? 
synthetic_implementors : implementors; + + if (struct.synthetic) { + for (var k = 0; k < struct.types.length; k++) { + if (window.inlined_types.has(struct.types[k])) { + continue struct_loop; + } + window.inlined_types.add(struct.types[k]); + } + } + var code = document.createElement('code'); - code.innerHTML = structs[j]; + code.innerHTML = struct.text; var x = code.getElementsByTagName('a'); for (var k = 0; k < x.length; k++) { diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index a72026c7d6b27..50c4977f80d76 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -25,6 +25,7 @@ #![feature(test)] #![feature(unicode)] #![feature(vec_remove_item)] +#![feature(entry_and_modify)] extern crate arena; extern crate getopts; @@ -578,7 +579,8 @@ where R: 'static + Send, F: 'static + Send + FnOnce(Output) -> R { let (mut krate, renderinfo) = core::run_core(paths, cfgs, externs, Input::File(cratefile), triple, maybe_sysroot, - display_warnings, force_unstable_if_unmarked, render_type); + display_warnings, crate_name.clone(), + force_unstable_if_unmarked, render_type); info!("finished with rustc"); diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 3b882827c6147..f692e05d6a259 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -24,12 +24,12 @@ use rustc::hir::def_id::{DefId, LOCAL_CRATE}; use rustc::middle::cstore::{LoadedMacro, CrateStore}; use rustc::middle::privacy::AccessLevel; use rustc::ty::Visibility; -use rustc::util::nodemap::FxHashSet; +use rustc::util::nodemap::{FxHashSet, FxHashMap}; use rustc::hir; use core; -use clean::{self, AttributesExt, NestedAttributesExt}; +use clean::{self, AttributesExt, NestedAttributesExt, def_id_to_path}; use doctree::*; // looks to me like the first two of these are actually @@ -41,7 +41,7 @@ use doctree::*; // framework from syntax? pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> { - cstore: &'a CrateStore, + pub cstore: &'a CrateStore, pub module: Module, pub attrs: hir::HirVec, pub cx: &'a core::DocContext<'a, 'tcx, 'rcx>, @@ -50,6 +50,7 @@ pub struct RustdocVisitor<'a, 'tcx: 'a, 'rcx: 'a> { /// Is the current module and all of its parents public? 
inside_public_path: bool, reexported_macros: FxHashSet, + exact_paths: Option>>, } impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { @@ -66,10 +67,21 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { inlining: false, inside_public_path: true, reexported_macros: FxHashSet(), + exact_paths: Some(FxHashMap()), cstore, } } + fn store_path(&mut self, did: DefId) { + // We can't use the entry api, as that keeps the mutable borrow of self active + // when we try to use cx + let exact_paths = self.exact_paths.as_mut().unwrap(); + if exact_paths.get(&did).is_none() { + let path = def_id_to_path(self.cx, did, self.cx.crate_name.clone()); + exact_paths.insert(did, path); + } + } + fn stability(&self, id: ast::NodeId) -> Option { self.cx.tcx.hir.opt_local_def_id(id) .and_then(|def_id| self.cx.tcx.lookup_stability(def_id)).cloned() @@ -94,6 +106,8 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { krate.exported_macros.iter().map(|def| self.visit_local_macro(def)).collect(); self.module.macros.extend(macro_exports); self.module.is_crate = true; + + self.cx.renderinfo.borrow_mut().exact_paths = self.exact_paths.take().unwrap(); } pub fn visit_variant_data(&mut self, item: &hir::Item, @@ -371,6 +385,12 @@ impl<'a, 'tcx, 'rcx> RustdocVisitor<'a, 'tcx, 'rcx> { renamed: Option, om: &mut Module) { debug!("Visiting item {:?}", item); let name = renamed.unwrap_or(item.name); + + if item.vis == hir::Public { + let def_id = self.cx.tcx.hir.local_def_id(item.id); + self.store_path(def_id); + } + match item.node { hir::ItemForeignMod(ref fm) => { // If inlining we only want to include public functions. diff --git a/src/libstd/fs.rs b/src/libstd/fs.rs index 5cea389531f94..292a78278ab0a 100644 --- a/src/libstd/fs.rs +++ b/src/libstd/fs.rs @@ -81,9 +81,18 @@ use time::SystemTime; /// # } /// ``` /// +/// Note that, although read and write methods require a `&mut File`, because +/// of the interfaces for [`Read`] and [`Write`], the holder of a `&File` can +/// still modify the file, either through methods that take `&File` or by +/// retrieving the underlying OS object and modifying the file that way. +/// Additionally, many operating systems allow concurrent modification of files +/// by different processes. Avoid assuming that holding a `&File` means that the +/// file will not change. +/// /// [`Seek`]: ../io/trait.Seek.html /// [`String`]: ../string/struct.String.html /// [`Read`]: ../io/trait.Read.html +/// [`Write`]: ../io/trait.Write.html /// [`BufReader`]: ../io/struct.BufReader.html #[stable(feature = "rust1", since = "1.0.0")] pub struct File { @@ -459,6 +468,9 @@ impl File { /// # Ok(()) /// # } /// ``` + /// + /// Note that this method alters the content of the underlying file, even + /// though it takes `&self` rather than `&mut self`. #[stable(feature = "rust1", since = "1.0.0")] pub fn set_len(&self, size: u64) -> io::Result<()> { self.inner.truncate(size) @@ -557,6 +569,9 @@ impl File { /// # Ok(()) /// # } /// ``` + /// + /// Note that this method alters the permissions of the underlying file, + /// even though it takes `&self` rather than `&mut self`. 
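[Editorial note, not part of the patch: the new `std::fs::File` documentation above (the `&File` note and the `set_len`/`set_permissions` additions) describes behaviour that is easy to miss, so here is a minimal illustrative sketch of what it means in practice; the file name is made up for illustration.]

```rust
use std::fs::File;
use std::io::Write;

fn main() -> std::io::Result<()> {
    let file = File::create("scratch.txt")?;

    // `Write` is implemented for `&File`, so code holding only a shared
    // reference can still change the file's contents.
    let mut shared = &file;
    shared.write_all(b"written through a &File")?;

    // `set_len` and `set_permissions` take `&self` as well.
    file.set_len(0)?;
    let mut perms = file.metadata()?.permissions();
    perms.set_readonly(true);
    file.set_permissions(perms)?;
    Ok(())
}
```

The point is simply that `&File` is not a read-only handle; if exclusive access matters, it has to be coordinated some other way (for example with a lock).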
#[stable(feature = "set_permissions_atomic", since = "1.16.0")] pub fn set_permissions(&self, perm: Permissions) -> io::Result<()> { self.inner.set_permissions(perm.0) diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 4e7db5f08261f..9250c1c437b2a 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -293,7 +293,7 @@ impl Seek for BufReader { /// where `n` minus the internal buffer length overflows an `i64`, two /// seeks will be performed instead of one. If the second seek returns /// `Err`, the underlying reader will be left at the same position it would - /// have if you seeked to `SeekFrom::Current(0)`. + /// have if you called `seek` with `SeekFrom::Current(0)`. /// /// [`seek_relative`]: #method.seek_relative fn seek(&mut self, pos: SeekFrom) -> io::Result { diff --git a/src/libstd/sync/rwlock.rs b/src/libstd/sync/rwlock.rs index 2edf02efc477c..f7fdedc0d2179 100644 --- a/src/libstd/sync/rwlock.rs +++ b/src/libstd/sync/rwlock.rs @@ -24,8 +24,8 @@ use sys_common::rwlock as sys; /// typically allows for read-only access (shared access). /// /// In comparison, a [`Mutex`] does not distinguish between readers or writers -/// that aquire the lock, therefore blocking any threads waiting for the lock to -/// become available. An `RwLock` will allow any number of readers to aquire the +/// that acquire the lock, therefore blocking any threads waiting for the lock to +/// become available. An `RwLock` will allow any number of readers to acquire the /// lock as long as a writer is not holding the lock. /// /// The priority policy of the lock is dependent on the underlying operating diff --git a/src/libstd/sys_common/backtrace.rs b/src/libstd/sys_common/backtrace.rs index a364a0392b399..1955f3ec9a28f 100644 --- a/src/libstd/sys_common/backtrace.rs +++ b/src/libstd/sys_common/backtrace.rs @@ -136,7 +136,7 @@ pub fn __rust_begin_short_backtrace(f: F) -> T f() } -/// Controls how the backtrace should be formated. +/// Controls how the backtrace should be formatted. #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum PrintFormat { /// Show all the frames with absolute path for files. diff --git a/src/libstd/sys_common/poison.rs b/src/libstd/sys_common/poison.rs index 934ac3edbf1f1..e74c40ae04b5d 100644 --- a/src/libstd/sys_common/poison.rs +++ b/src/libstd/sys_common/poison.rs @@ -98,7 +98,7 @@ pub struct PoisonError { } /// An enumeration of possible errors associated with a [`TryLockResult`] which -/// can occur while trying to aquire a lock, from the [`try_lock`] method on a +/// can occur while trying to acquire a lock, from the [`try_lock`] method on a /// [`Mutex`] or the [`try_read`] and [`try_write`] methods on an [`RwLock`]. /// /// [`Mutex`]: struct.Mutex.html diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index c7ab6158256ba..c7ce7fffaa21b 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -918,7 +918,7 @@ pub struct Expr { } impl Expr { - /// Wether this expression would be valid somewhere that expects a value, for example, an `if` + /// Whether this expression would be valid somewhere that expects a value, for example, an `if` /// condition. 
pub fn returns(&self) -> bool { if let ExprKind::Block(ref block) = self.node { @@ -1937,10 +1937,12 @@ pub enum CrateSugar { JustCrate, } +pub type Visibility = Spanned; + #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub enum Visibility { +pub enum VisibilityKind { Public, - Crate(Span, CrateSugar), + Crate(CrateSugar), Restricted { path: P, id: NodeId }, Inherited, } diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index fc82357455b91..aa360ed1bf5bf 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -114,7 +114,7 @@ impl<'a> StripUnconfigured<'a> { } } - // Determine if a node with the given attributes should be included in this configuation. + // Determine if a node with the given attributes should be included in this configuration. pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool { attrs.iter().all(|attr| { // When not compiling with --test we should not compile the #[test] functions diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 5224f52c49629..dd27dea4f0d97 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -14,6 +14,7 @@ use std::env; use ast; use ast::{Ident, Name}; +use codemap; use syntax_pos::Span; use ext::base::{ExtCtxt, MacEager, MacResult}; use ext::build::AstBuilder; @@ -234,7 +235,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, ty, expr, ), - vis: ast::Visibility::Public, + vis: codemap::respan(span.empty(), ast::VisibilityKind::Public), span, tokens: None, }) diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 2e6de96d65a6d..7681f55bd8ccb 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -987,7 +987,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { attrs, id: ast::DUMMY_NODE_ID, node, - vis: ast::Visibility::Inherited, + vis: respan(span.empty(), ast::VisibilityKind::Inherited), span, tokens: None, }) @@ -1033,7 +1033,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { span: ty.span, ty, ident: None, - vis: ast::Visibility::Inherited, + vis: respan(span.empty(), ast::VisibilityKind::Inherited), attrs: Vec::new(), id: ast::DUMMY_NODE_ID, } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 44a073545a730..d4d9dfb01da2c 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -11,7 +11,7 @@ use ast::{self, Block, Ident, NodeId, PatKind, Path}; use ast::{MacStmtStyle, StmtKind, ItemKind}; use attr::{self, HasAttrs}; -use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute, dummy_spanned}; +use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute, dummy_spanned, respan}; use config::{is_test_or_bench, StripUnconfigured}; use errors::FatalError; use ext::base::*; @@ -238,7 +238,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { node: ast::ItemKind::Mod(krate.module), ident: keywords::Invalid.ident(), id: ast::DUMMY_NODE_ID, - vis: ast::Visibility::Public, + vis: respan(krate.span.empty(), ast::VisibilityKind::Public), tokens: None, }))); @@ -1022,7 +1022,10 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { // Ensure that test functions are accessible from the test harness. ast::ItemKind::Fn(..) 
if self.cx.ecfg.should_test => { if item.attrs.iter().any(|attr| is_test_or_bench(attr)) { - item = item.map(|mut item| { item.vis = ast::Visibility::Public; item }); + item = item.map(|mut item| { + item.vis = respan(item.vis.span, ast::VisibilityKind::Public); + item + }); } noop_fold_item(item, self) } diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index 2f5b386346bc8..9c2c22476e9d9 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -33,7 +33,7 @@ pub fn placeholder(kind: ExpansionKind, id: ast::NodeId) -> Expansion { let ident = keywords::Invalid.ident(); let attrs = Vec::new(); let generics = ast::Generics::default(); - let vis = ast::Visibility::Inherited; + let vis = dummy_spanned(ast::VisibilityKind::Inherited); let span = DUMMY_SP; let expr_placeholder = || P(ast::Expr { id, span, diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 7fcd88c94ca6f..7a024dbad8830 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -9,6 +9,7 @@ // except according to those terms. use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty}; +use codemap::respan; use syntax_pos::Span; use ext::base::ExtCtxt; use ext::base; @@ -855,7 +856,12 @@ fn expand_wrapper(cx: &ExtCtxt, let mut stmts = imports.iter().map(|path| { // make item: `use ...;` let path = path.iter().map(|s| s.to_string()).collect(); - cx.stmt_item(sp, cx.item_use_glob(sp, ast::Visibility::Inherited, ids_ext(path))) + let use_item = cx.item_use_glob( + sp, + respan(sp.empty(), ast::VisibilityKind::Inherited), + ids_ext(path), + ); + cx.stmt_item(sp, use_item) }).chain(Some(stmt_let_ext_cx)).collect::>(); stmts.push(cx.stmt_expr(expr)); diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 3b137f9570a39..c0fde71d086f4 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -1816,8 +1816,8 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> { } fn visit_vis(&mut self, vis: &'a ast::Visibility) { - if let ast::Visibility::Crate(span, ast::CrateSugar::JustCrate) = *vis { - gate_feature_post!(&self, crate_visibility_modifier, span, + if let ast::VisibilityKind::Crate(ast::CrateSugar::JustCrate) = vis.node { + gate_feature_post!(&self, crate_visibility_modifier, vis.span, "`crate` visibility modifier is experimental"); } visit::walk_vis(self, vis); diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 921ed3565a471..1a2025b073b2b 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -1018,7 +1018,7 @@ pub fn noop_fold_crate(Crate {module, attrs, span}: Crate, ident: keywords::Invalid.ident(), attrs, id: ast::DUMMY_NODE_ID, - vis: ast::Visibility::Public, + vis: respan(span.empty(), ast::VisibilityKind::Public), span, node: ast::ItemKind::Mod(module), tokens: None, @@ -1367,11 +1367,13 @@ pub fn noop_fold_stmt_kind(node: StmtKind, folder: &mut T) -> SmallVe } pub fn noop_fold_vis(vis: Visibility, folder: &mut T) -> Visibility { - match vis { - Visibility::Restricted { path, id } => Visibility::Restricted { - path: path.map(|path| folder.fold_path(path)), - id: folder.new_id(id) - }, + match vis.node { + VisibilityKind::Restricted { path, id } => { + respan(vis.span, VisibilityKind::Restricted { + path: path.map(|path| folder.fold_path(path)), + id: folder.new_id(id), + }) + } _ => vis, } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index b671f81c2a84b..06eb64e157cd5 100644 --- a/src/libsyntax/parse/mod.rs +++ 
b/src/libsyntax/parse/mod.rs @@ -664,7 +664,7 @@ pub fn integer_lit(s: &str, suffix: Option, diag: Option<(Span, &Handler mod tests { use super::*; use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION}; - use codemap::Spanned; + use codemap::{respan, Spanned}; use ast::{self, Ident, PatKind}; use abi::Abi; use attr::first_attr_value_str_by_name; @@ -932,7 +932,7 @@ mod tests { span: sp(15,21), recovered: false, })), - vis: ast::Visibility::Inherited, + vis: respan(sp(0, 0), ast::VisibilityKind::Inherited), span: sp(0,21)}))); } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index ac582627f88fd..74daa5179d381 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -36,7 +36,7 @@ use ast::StrStyle; use ast::SelfKind; use ast::{TraitItem, TraitRef, TraitObjectSyntax}; use ast::{Ty, TyKind, TypeBinding, TyParam, TyParamBounds}; -use ast::{Visibility, WhereClause, CrateSugar}; +use ast::{Visibility, VisibilityKind, WhereClause, CrateSugar}; use ast::{UseTree, UseTreeKind}; use ast::{BinOpKind, UnOp}; use ast::{RangeEnd, RangeSyntax}; @@ -3912,7 +3912,7 @@ impl<'a> Parser<'a> { "use `=` if you meant to assign", "=".to_string()); err.emit(); - // As this was parsed successfuly, continue as if the code has been fixed for the + // As this was parsed successfully, continue as if the code has been fixed for the // rest of the file. It will still fail due to the emitted error, but we avoid // extra noise. init @@ -4132,7 +4132,7 @@ impl<'a> Parser<'a> { token::Ident(ident) if ident.name == "macro_rules" && self.look_ahead(1, |t| *t == token::Not) => { let prev_span = self.prev_span; - self.complain_if_pub_macro(vis, prev_span); + self.complain_if_pub_macro(&vis.node, prev_span); self.bump(); self.bump(); @@ -4169,7 +4169,11 @@ impl<'a> Parser<'a> { node: StmtKind::Local(self.parse_local(attrs.into())?), span: lo.to(self.prev_span), } - } else if let Some(macro_def) = self.eat_macro_def(&attrs, &Visibility::Inherited, lo)? { + } else if let Some(macro_def) = self.eat_macro_def( + &attrs, + &codemap::respan(lo, VisibilityKind::Inherited), + lo, + )? 
{ Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Item(macro_def), @@ -4296,7 +4300,7 @@ impl<'a> Parser<'a> { self.mk_item( span, id /*id is good here*/, ItemKind::Mac(respan(span, Mac_ { path: pth, tts: tts })), - Visibility::Inherited, + respan(lo, VisibilityKind::Inherited), attrs) }), } @@ -5213,15 +5217,15 @@ impl<'a> Parser<'a> { }) } - fn complain_if_pub_macro(&mut self, vis: &Visibility, sp: Span) { + fn complain_if_pub_macro(&mut self, vis: &VisibilityKind, sp: Span) { if let Err(mut err) = self.complain_if_pub_macro_diag(vis, sp) { err.emit(); } } - fn complain_if_pub_macro_diag(&mut self, vis: &Visibility, sp: Span) -> PResult<'a, ()> { + fn complain_if_pub_macro_diag(&mut self, vis: &VisibilityKind, sp: Span) -> PResult<'a, ()> { match *vis { - Visibility::Inherited => Ok(()), + VisibilityKind::Inherited => Ok(()), _ => { let is_macro_rules: bool = match self.token { token::Ident(sid) => sid.name == Symbol::intern("macro_rules"), @@ -5283,7 +5287,7 @@ impl<'a> Parser<'a> { self.expect(&token::Not)?; } - self.complain_if_pub_macro(vis, prev_span); + self.complain_if_pub_macro(&vis.node, prev_span); // eat a matched-delimiter token tree: *at_end = true; @@ -5686,12 +5690,13 @@ impl<'a> Parser<'a> { self.expected_tokens.push(TokenType::Keyword(keywords::Crate)); if self.is_crate_vis() { self.bump(); // `crate` - return Ok(Visibility::Crate(self.prev_span, CrateSugar::JustCrate)); + return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate))); } if !self.eat_keyword(keywords::Pub) { - return Ok(Visibility::Inherited) + return Ok(respan(self.prev_span, VisibilityKind::Inherited)) } + let lo = self.prev_span; if self.check(&token::OpenDelim(token::Paren)) { // We don't `self.bump()` the `(` yet because this might be a struct definition where @@ -5702,25 +5707,35 @@ impl<'a> Parser<'a> { // `pub(crate)` self.bump(); // `(` self.bump(); // `crate` - let vis = Visibility::Crate(self.prev_span, CrateSugar::PubCrate); self.expect(&token::CloseDelim(token::Paren))?; // `)` + let vis = respan( + lo.to(self.prev_span), + VisibilityKind::Crate(CrateSugar::PubCrate), + ); return Ok(vis) } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) { // `pub(in path)` self.bump(); // `(` self.bump(); // `in` let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `path` - let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; self.expect(&token::CloseDelim(token::Paren))?; // `)` + let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted { + path: P(path), + id: ast::DUMMY_NODE_ID, + }); return Ok(vis) } else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) && self.look_ahead(1, |t| t.is_keyword(keywords::Super) || - t.is_keyword(keywords::SelfValue)) { + t.is_keyword(keywords::SelfValue)) + { // `pub(self)` or `pub(super)` self.bump(); // `(` let path = self.parse_path(PathStyle::Mod)?.default_to_global(); // `super`/`self` - let vis = Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }; self.expect(&token::CloseDelim(token::Paren))?; // `)` + let vis = respan(lo.to(self.prev_span), VisibilityKind::Restricted { + path: P(path), + id: ast::DUMMY_NODE_ID, + }); return Ok(vis) } else if !can_take_tuple { // Provide this diagnostic if this is not a tuple struct // `pub(something) fn ...` or `struct X { pub(something) y: Z }` @@ -5740,7 +5755,7 @@ impl<'a> Parser<'a> { } } - Ok(Visibility::Public) + Ok(respan(lo, VisibilityKind::Public)) } /// Parse defaultness: `default` or nothing. 
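[Editorial note, not part of the patch: the parser changes in the hunks above and below all stem from the `ast.rs` change earlier in this diff, where `Visibility` became a `Spanned<VisibilityKind>`. The sketch below is a simplified, self-contained model of that pattern, using local stand-in types rather than the real `syntax::ast`/`codemap` items, to show why call sites now construct visibilities with `respan(...)` and match on `vis.node`.]

```rust
// Simplified stand-ins; these are not the real compiler types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

impl Span {
    // Mirrors the new `Span::empty` helper added in this patch: a
    // zero-width span at the start of `self`, used for synthesized items.
    fn empty(self) -> Span { Span { lo: self.lo, hi: self.lo } }
}

#[derive(Debug)]
struct Spanned<T> { node: T, span: Span }

fn respan<T>(span: Span, node: T) -> Spanned<T> { Spanned { node, span } }

#[derive(Debug, PartialEq)]
enum VisibilityKind { Public, Inherited }

// After the refactor, the visibility carries its own span...
type Visibility = Spanned<VisibilityKind>;

fn is_public(vis: &Visibility) -> bool {
    // ...so consumers match on `vis.node` and report diagnostics at `vis.span`.
    vis.node == VisibilityKind::Public
}

fn main() {
    let item_span = Span { lo: 10, hi: 42 };
    // Synthesized items get a zero-width visibility span, like the
    // `respan(span.empty(), VisibilityKind::Inherited)` calls in the patch.
    let vis = respan(item_span.empty(), VisibilityKind::Inherited);
    assert!(!is_public(&vis));
    println!("{:?}", vis);
}
```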
@@ -6571,9 +6586,9 @@ impl<'a> Parser<'a> { return Ok(Some(macro_def)); } - // Verify wether we have encountered a struct or method definition where the user forgot to + // Verify whether we have encountered a struct or method definition where the user forgot to // add the `struct` or `fn` keyword after writing `pub`: `pub S {}` - if visibility == Visibility::Public && + if visibility.node == VisibilityKind::Public && self.check_ident() && self.look_ahead(1, |t| *t != token::Not) { @@ -6681,7 +6696,7 @@ impl<'a> Parser<'a> { // MACRO INVOCATION ITEM let prev_span = self.prev_span; - self.complain_if_pub_macro(&visibility, prev_span); + self.complain_if_pub_macro(&visibility.node, prev_span); let mac_lo = self.span; @@ -6715,8 +6730,8 @@ impl<'a> Parser<'a> { } // FAILURE TO PARSE ITEM - match visibility { - Visibility::Inherited => {} + match visibility.node { + VisibilityKind::Inherited => {} _ => { return Err(self.span_fatal(self.prev_span, "unmatched visibility `pub`")); } diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index ae459c668aae4..3dfe3c9e5b990 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -377,7 +377,7 @@ pub fn fun_to_string(decl: &ast::FnDecl, to_string(|s| { s.head("")?; s.print_fn(decl, unsafety, constness, Abi::Rust, Some(name), - generics, &ast::Visibility::Inherited)?; + generics, &codemap::dummy_spanned(ast::VisibilityKind::Inherited))?; s.end()?; // Close the head box s.end() // Close the outer box }) @@ -1458,13 +1458,13 @@ impl<'a> State<'a> { } pub fn print_visibility(&mut self, vis: &ast::Visibility) -> io::Result<()> { - match *vis { - ast::Visibility::Public => self.word_nbsp("pub"), - ast::Visibility::Crate(_, sugar) => match sugar { + match vis.node { + ast::VisibilityKind::Public => self.word_nbsp("pub"), + ast::VisibilityKind::Crate(sugar) => match sugar { ast::CrateSugar::PubCrate => self.word_nbsp("pub(crate)"), ast::CrateSugar::JustCrate => self.word_nbsp("crate") } - ast::Visibility::Restricted { ref path, .. } => { + ast::VisibilityKind::Restricted { ref path, .. 
} => { let path = to_string(|s| s.print_path(path, false, 0, true)); if path == "self" || path == "super" { self.word_nbsp(&format!("pub({})", path)) @@ -1472,7 +1472,7 @@ impl<'a> State<'a> { self.word_nbsp(&format!("pub(in {})", path)) } } - ast::Visibility::Inherited => Ok(()) + ast::VisibilityKind::Inherited => Ok(()) } } @@ -1569,15 +1569,23 @@ impl<'a> State<'a> { self.print_outer_attributes(&ti.attrs)?; match ti.node { ast::TraitItemKind::Const(ref ty, ref default) => { - self.print_associated_const(ti.ident, ty, - default.as_ref().map(|expr| &**expr), - &ast::Visibility::Inherited)?; + self.print_associated_const( + ti.ident, + ty, + default.as_ref().map(|expr| &**expr), + &codemap::respan(ti.span.empty(), ast::VisibilityKind::Inherited), + )?; } ast::TraitItemKind::Method(ref sig, ref body) => { if body.is_some() { self.head("")?; } - self.print_method_sig(ti.ident, &ti.generics, sig, &ast::Visibility::Inherited)?; + self.print_method_sig( + ti.ident, + &ti.generics, + sig, + &codemap::respan(ti.span.empty(), ast::VisibilityKind::Inherited), + )?; if let Some(ref body) = *body { self.nbsp()?; self.print_block_with_attrs(body, &ti.attrs)?; @@ -3055,7 +3063,7 @@ impl<'a> State<'a> { abi, name, &generics, - &ast::Visibility::Inherited)?; + &codemap::dummy_spanned(ast::VisibilityKind::Inherited))?; self.end() } diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 00546400bb542..da24107f4c33b 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -14,7 +14,7 @@ use std::cell::Cell; use ext::hygiene::{Mark, SyntaxContext}; use symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; -use codemap::{ExpnInfo, NameAndSpan, MacroAttribute}; +use codemap::{ExpnInfo, NameAndSpan, MacroAttribute, dummy_spanned, respan}; use ptr::P; use tokenstream::TokenStream; @@ -60,7 +60,7 @@ pub fn maybe_inject_crates_ref(mut krate: ast::Crate, alt_std_name: Option P { prefix: path_node(vec![id_test]), kind: ast::UseTreeKind::Simple(id_test), })), - ast::Visibility::Public, keywords::Invalid.ident()) + ast::VisibilityKind::Public, keywords::Invalid.ident()) } else { - (ast::ItemKind::ExternCrate(None), ast::Visibility::Inherited, id_test) + (ast::ItemKind::ExternCrate(None), ast::VisibilityKind::Inherited, id_test) }; P(ast::Item { id: ast::DUMMY_NODE_ID, ident, node: vi, attrs: vec![], - vis, + vis: dummy_spanned(vis), span: sp, tokens: None, }) @@ -513,7 +513,7 @@ fn mk_main(cx: &mut TestCtxt) -> P { attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, - vis: ast::Visibility::Public, + vis: dummy_spanned(ast::VisibilityKind::Public), span: sp, tokens: None, }) @@ -543,7 +543,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { ident: mod_ident, attrs: vec![], node: item_, - vis: ast::Visibility::Public, + vis: dummy_spanned(ast::VisibilityKind::Public), span: DUMMY_SP, tokens: None, })).pop().unwrap(); @@ -562,7 +562,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { ident: keywords::Invalid.ident(), attrs: vec![], node: ast::ItemKind::Use(P(use_path)), - vis: ast::Visibility::Inherited, + vis: dummy_spanned(ast::VisibilityKind::Inherited), span: DUMMY_SP, tokens: None, })).pop().unwrap() diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index bbb123dab2868..4691ddafa36e8 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -811,7 +811,7 @@ pub fn walk_arm<'a, V: Visitor<'a>>(visitor: &mut V, arm: &'a Arm) { } pub fn walk_vis<'a, V: Visitor<'a>>(visitor: &mut V, vis: &'a Visibility) { - if let 
Visibility::Restricted { ref path, id } = *vis { + if let VisibilityKind::Restricted { ref path, id } = vis.node { visitor.visit_path(path, id); } } diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 0dfe9cb970efb..1b3917efdd1e7 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -530,7 +530,7 @@ impl<'a> TraitDef<'a> { id: ast::DUMMY_NODE_ID, span: self.span, ident, - vis: ast::Visibility::Inherited, + vis: respan(self.span.empty(), ast::VisibilityKind::Inherited), defaultness: ast::Defaultness::Final, attrs: Vec::new(), generics: Generics::default(), @@ -977,7 +977,7 @@ impl<'a> MethodDef<'a> { attrs: self.attributes.clone(), generics: fn_generics, span: trait_.span, - vis: ast::Visibility::Inherited, + vis: respan(trait_.span.empty(), ast::VisibilityKind::Inherited), defaultness: ast::Defaultness::Final, ident: method_ident, node: ast::ImplItemKind::Method(ast::MethodSig { diff --git a/src/libsyntax_ext/global_asm.rs b/src/libsyntax_ext/global_asm.rs index 81226ba599ae6..9605f6b5c5a9d 100644 --- a/src/libsyntax_ext/global_asm.rs +++ b/src/libsyntax_ext/global_asm.rs @@ -19,6 +19,7 @@ /// therefore apply. use syntax::ast; +use syntax::codemap::respan; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; @@ -59,7 +60,7 @@ pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt, asm, ctxt: cx.backtrace(), })), - vis: ast::Visibility::Inherited, + vis: respan(sp.empty(), ast::VisibilityKind::Inherited), span: sp, tokens: None, }))) diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 0ba21e6b366cf..e623779ce63ba 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -14,7 +14,7 @@ use errors; use syntax::ast::{self, Ident, NodeId}; use syntax::attr; -use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute}; +use syntax::codemap::{ExpnInfo, NameAndSpan, MacroAttribute, respan}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; @@ -103,7 +103,7 @@ impl<'a> CollectProcMacros<'a> { fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) { if self.is_proc_macro_crate && self.in_root && - *vis == ast::Visibility::Public { + vis.node == ast::VisibilityKind::Public { self.handler.span_err(sp, "`proc-macro` crate types cannot \ export any items other than functions \ @@ -181,7 +181,7 @@ impl<'a> CollectProcMacros<'a> { Vec::new() }; - if self.in_root && item.vis == ast::Visibility::Public { + if self.in_root && item.vis.node == ast::VisibilityKind::Public { self.derives.push(ProcMacroDerive { span: item.span, trait_name, @@ -206,7 +206,7 @@ impl<'a> CollectProcMacros<'a> { return; } - if self.in_root && item.vis == ast::Visibility::Public { + if self.in_root && item.vis.node == ast::VisibilityKind::Public { self.attr_macros.push(ProcMacroDef { span: item.span, function_name: item.ident, @@ -229,7 +229,7 @@ impl<'a> CollectProcMacros<'a> { return; } - if self.in_root && item.vis == ast::Visibility::Public { + if self.in_root && item.vis.node == ast::VisibilityKind::Public { self.bang_macros.push(ProcMacroDef { span: item.span, function_name: item.ident, @@ -439,12 +439,12 @@ fn mk_registrar(cx: &mut ExtCtxt, let derive_registrar = cx.attribute(span, derive_registrar); let func = func.map(|mut i| { i.attrs.push(derive_registrar); - i.vis = ast::Visibility::Public; + i.vis = respan(span, 
ast::VisibilityKind::Public); i }); let ident = ast::Ident::with_empty_ctxt(Symbol::gensym("registrar")); let module = cx.item_mod(span, span, ident, Vec::new(), vec![krate, func]).map(|mut i| { - i.vis = ast::Visibility::Public; + i.vis = respan(span, ast::VisibilityKind::Public); i }); diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index 294506625bc05..0f6dbc39e217a 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -216,6 +216,12 @@ impl Span { self.data().with_ctxt(ctxt) } + /// Returns a new span representing an empty span at the beginning of this span + #[inline] + pub fn empty(self) -> Span { + self.with_hi(self.lo()) + } + /// Returns `self` if `self` is not the dummy span, and `other` otherwise. pub fn substitute_dummy(self, other: Span) -> Span { if self.source_equal(&DUMMY_SP) { other } else { self } diff --git a/src/test/compile-fail/coerce-to-bang.rs b/src/test/compile-fail/coerce-to-bang.rs index 2cf568777d475..b804bb2981ba6 100644 --- a/src/test/compile-fail/coerce-to-bang.rs +++ b/src/test/compile-fail/coerce-to-bang.rs @@ -14,7 +14,7 @@ fn foo(x: usize, y: !, z: usize) { } fn call_foo_a() { - // FIXME(#40800) -- accepted beacuse divergence happens **before** + // FIXME(#40800) -- accepted because divergence happens **before** // the coercion to `!`, but within same expression. Not clear that // these are the rules we want. foo(return, 22, 44); diff --git a/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/bar.rs b/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/bar.rs index 9177dcba0d7a3..4ef92981314fd 100644 --- a/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/bar.rs +++ b/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/bar.rs @@ -8,4 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-test not a test, auxillary +// ignore-test not a test, auxiliary diff --git a/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/mod.rs b/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/mod.rs index e29c985b983a2..41a8c288e7cdb 100644 --- a/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/mod.rs +++ b/src/test/compile-fail/directory_ownership/macro_expanded_mod_helper/foo/mod.rs @@ -8,6 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-test not a test, auxillary +// ignore-test not a test, auxiliary mod_decl!(bar); diff --git a/src/test/compile-fail/hr-subtype.rs b/src/test/compile-fail/hr-subtype.rs index c88d74d53ce94..86df2382732b5 100644 --- a/src/test/compile-fail/hr-subtype.rs +++ b/src/test/compile-fail/hr-subtype.rs @@ -84,7 +84,7 @@ check! { free_inv_x_vs_free_inv_y: (fn(Inv<'x>), fn(Inv<'y>)) } // Somewhat surprisingly, a fn taking two distinct bound lifetimes and -// a fn taking one bound lifetime can be interchangable, but only if +// a fn taking one bound lifetime can be interchangeable, but only if // we are co- or contra-variant with respect to both lifetimes. // // The reason is: @@ -100,7 +100,7 @@ check! { bound_contra_a_contra_b_ret_co_a: (for<'a,'b> fn(Contra<'a>, Contra<'b> check! { bound_co_a_co_b_ret_contra_a: (for<'a,'b> fn(Co<'a>, Co<'b>) -> Contra<'a>, for<'a> fn(Co<'a>, Co<'a>) -> Contra<'a>) } -// If we make those lifetimes invariant, then the two types are not interchangable. 
+// If we make those lifetimes invariant, then the two types are not interchangeable. check! { bound_inv_a_b_vs_bound_inv_a: (for<'a,'b> fn(Inv<'a>, Inv<'b>), for<'a> fn(Inv<'a>, Inv<'a>)) } check! { bound_a_b_ret_a_vs_bound_a_ret_a: (for<'a,'b> fn(&'a u32, &'b u32) -> &'a u32, diff --git a/src/test/compile-fail/issue-20616-1.rs b/src/test/compile-fail/issue-20616-1.rs index a1949df661a34..3e29383d62cf8 100644 --- a/src/test/compile-fail/issue-20616-1.rs +++ b/src/test/compile-fail/issue-20616-1.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-2.rs b/src/test/compile-fail/issue-20616-2.rs index 87b836d687274..1ec7a74559a6e 100644 --- a/src/test/compile-fail/issue-20616-2.rs +++ b/src/test/compile-fail/issue-20616-2.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-3.rs b/src/test/compile-fail/issue-20616-3.rs index e5ed46d2cb3b0..885fd24654731 100644 --- a/src/test/compile-fail/issue-20616-3.rs +++ b/src/test/compile-fail/issue-20616-3.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-4.rs b/src/test/compile-fail/issue-20616-4.rs index 9b731289e138b..0dbe92fc1bcb3 100644 --- a/src/test/compile-fail/issue-20616-4.rs +++ b/src/test/compile-fail/issue-20616-4.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-5.rs b/src/test/compile-fail/issue-20616-5.rs index 5e3b024da9a07..794e5178f4b2c 100644 --- a/src/test/compile-fail/issue-20616-5.rs +++ b/src/test/compile-fail/issue-20616-5.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-6.rs b/src/test/compile-fail/issue-20616-6.rs index b6ee26f9f62b8..fe91751a4a06b 100644 --- a/src/test/compile-fail/issue-20616-6.rs +++ b/src/test/compile-fail/issue-20616-6.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-7.rs b/src/test/compile-fail/issue-20616-7.rs index fef3dd4e31d5a..184ad02710268 100644 --- a/src/test/compile-fail/issue-20616-7.rs +++ b/src/test/compile-fail/issue-20616-7.rs @@ -9,7 +9,7 @@ // except according to those terms. 
// We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-8.rs b/src/test/compile-fail/issue-20616-8.rs index b7bef47c4f442..5cdec33e94b92 100644 --- a/src/test/compile-fail/issue-20616-8.rs +++ b/src/test/compile-fail/issue-20616-8.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/issue-20616-9.rs b/src/test/compile-fail/issue-20616-9.rs index 5c16d24cef854..7995addb692cc 100644 --- a/src/test/compile-fail/issue-20616-9.rs +++ b/src/test/compile-fail/issue-20616-9.rs @@ -9,7 +9,7 @@ // except according to those terms. // We need all these 9 issue-20616-N.rs files -// becase we can only catch one parsing error at a time +// because we can only catch one parsing error at a time diff --git a/src/test/compile-fail/no_crate_type.rs b/src/test/compile-fail/no_crate_type.rs index bef909917d22a..b2cc5cae69750 100644 --- a/src/test/compile-fail/no_crate_type.rs +++ b/src/test/compile-fail/no_crate_type.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// regresion test for issue 11256 +// regression test for issue 11256 #![crate_type] //~ ERROR `crate_type` requires a value fn main() { diff --git a/src/test/mir-opt/README.md b/src/test/mir-opt/README.md index b00b35aa29ff4..ad4932b9fb945 100644 --- a/src/test/mir-opt/README.md +++ b/src/test/mir-opt/README.md @@ -26,7 +26,7 @@ other non-matched lines before and after, but not between $expected_lines, should you want to skip lines, you must include an elision comment, of the form (as a regex) `//\s*...\s*`. The lines will be skipped lazily, that is, if there are two identical lines in the output that match the line after the elision -comment, the first one wil be matched. +comment, the first one will be matched. Examples: diff --git a/src/test/pretty/stmt_expr_attributes.rs b/src/test/pretty/stmt_expr_attributes.rs index 1c443020d2e93..17e6119f968ac 100644 --- a/src/test/pretty/stmt_expr_attributes.rs +++ b/src/test/pretty/stmt_expr_attributes.rs @@ -255,7 +255,7 @@ fn _11() { while true { let _ = #[attr] break ; } || #[attr] return; let _ = #[attr] expr_mac!(); - /* FIXME: pp bug, loosing delimiter styles + /* FIXME: pp bug, losing delimiter styles let _ = #[attr] expr_mac![]; let _ = #[attr] expr_mac!{}; */ diff --git a/src/test/run-make/hotplug_codegen_backend/Makefile b/src/test/run-make/hotplug_codegen_backend/Makefile index 9a216d1d81ff8..2ddf3aa5439f6 100644 --- a/src/test/run-make/hotplug_codegen_backend/Makefile +++ b/src/test/run-make/hotplug_codegen_backend/Makefile @@ -6,4 +6,4 @@ all: -o $(TMPDIR)/the_backend.dylib $(RUSTC) some_crate.rs --crate-name some_crate --crate-type bin -o $(TMPDIR)/some_crate \ -Z codegen-backend=$(TMPDIR)/the_backend.dylib -Z unstable-options - grep -x "This has been \"compiled\" succesfully." $(TMPDIR)/some_crate + grep -x "This has been \"compiled\" successfully." 
$(TMPDIR)/some_crate diff --git a/src/test/run-make/hotplug_codegen_backend/the_backend.rs b/src/test/run-make/hotplug_codegen_backend/the_backend.rs index 5972149590c23..9e87268e6999d 100644 --- a/src/test/run-make/hotplug_codegen_backend/the_backend.rs +++ b/src/test/run-make/hotplug_codegen_backend/the_backend.rs @@ -69,7 +69,7 @@ impl TransCrate for TheBackend { let output_name = out_filename(sess, crate_type, &outputs, &*crate_name.as_str()); let mut out_file = ::std::fs::File::create(output_name).unwrap(); - write!(out_file, "This has been \"compiled\" succesfully.").unwrap(); + write!(out_file, "This has been \"compiled\" successfully.").unwrap(); } Ok(()) } diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-reexport.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-reexport.rs index 24865ea270982..cfaf913216a5c 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-reexport.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-reexport.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-test not a test, auxillary +// ignore-test not a test, auxiliary #![feature(macro_reexport)] diff --git a/src/test/run-pass/issue-29746.rs b/src/test/run-pass/issue-29746.rs index 61c601ac6a903..d4463fed1a674 100644 --- a/src/test/run-pass/issue-29746.rs +++ b/src/test/run-pass/issue-29746.rs @@ -17,7 +17,7 @@ macro_rules! zip { }; // Intermediate steps to build the zipped expression, the match pattern, and - // and the output tuple of the closure, using macro hygene to repeatedly + // and the output tuple of the closure, using macro hygiene to repeatedly // introduce new variables named 'x'. ([$a:expr, $($rest:expr),*], $zip:expr, $pat:pat, [$($flat:expr),*]) => { zip!([$($rest),*], $zip.zip($a), ($pat,x), [$($flat),*, x]) diff --git a/src/test/run-pass/issue-32008.rs b/src/test/run-pass/issue-32008.rs index cb489acf1d919..95890d2e1b47b 100644 --- a/src/test/run-pass/issue-32008.rs +++ b/src/test/run-pass/issue-32008.rs @@ -9,7 +9,7 @@ // except according to those terms. // Tests that binary operators allow subtyping on both the LHS and RHS, -// and as such do not introduce unnecesarily strict lifetime constraints. +// and as such do not introduce unnecessarily strict lifetime constraints. 
use std::ops::Add; diff --git a/src/test/run-pass/rfc1857-drop-order.rs b/src/test/run-pass/rfc1857-drop-order.rs index b2e5ff62eb86e..94b2a586ddfa0 100644 --- a/src/test/run-pass/rfc1857-drop-order.rs +++ b/src/test/run-pass/rfc1857-drop-order.rs @@ -67,7 +67,7 @@ fn test_drop_tuple() { panic::catch_unwind(|| { (PushOnDrop::new(2, cloned.clone()), PushOnDrop::new(1, cloned.clone()), - panic!("this panic is catched :D")); + panic!("this panic is caught :D")); }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); } @@ -99,7 +99,7 @@ fn test_drop_struct() { TestStruct { x: PushOnDrop::new(2, cloned.clone()), y: PushOnDrop::new(1, cloned.clone()), - z: panic!("this panic is catched :D") + z: panic!("this panic is caught :D") }; }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); @@ -111,7 +111,7 @@ fn test_drop_struct() { TestStruct { y: PushOnDrop::new(2, cloned.clone()), x: PushOnDrop::new(1, cloned.clone()), - z: panic!("this panic is catched :D") + z: panic!("this panic is caught :D") }; }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); @@ -122,7 +122,7 @@ fn test_drop_struct() { panic::catch_unwind(|| { TestTupleStruct(PushOnDrop::new(2, cloned.clone()), PushOnDrop::new(1, cloned.clone()), - panic!("this panic is catched :D")); + panic!("this panic is caught :D")); }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); } @@ -154,7 +154,7 @@ fn test_drop_enum() { TestEnum::Struct { x: PushOnDrop::new(2, cloned.clone()), y: PushOnDrop::new(1, cloned.clone()), - z: panic!("this panic is catched :D") + z: panic!("this panic is caught :D") }; }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); @@ -166,7 +166,7 @@ fn test_drop_enum() { TestEnum::Struct { y: PushOnDrop::new(2, cloned.clone()), x: PushOnDrop::new(1, cloned.clone()), - z: panic!("this panic is catched :D") + z: panic!("this panic is caught :D") }; }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); @@ -177,7 +177,7 @@ fn test_drop_enum() { panic::catch_unwind(|| { TestEnum::Tuple(PushOnDrop::new(2, cloned.clone()), PushOnDrop::new(1, cloned.clone()), - panic!("this panic is catched :D")); + panic!("this panic is caught :D")); }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); } @@ -207,7 +207,7 @@ fn test_drop_list() { vec![ PushOnDrop::new(2, cloned.clone()), PushOnDrop::new(1, cloned.clone()), - panic!("this panic is catched :D") + panic!("this panic is caught :D") ]; }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); @@ -219,7 +219,7 @@ fn test_drop_list() { [ PushOnDrop::new(2, cloned.clone()), PushOnDrop::new(1, cloned.clone()), - panic!("this panic is catched :D") + panic!("this panic is caught :D") ]; }).err().unwrap(); assert_eq!(*dropped_fields.borrow(), &[1, 2]); diff --git a/src/test/run-pass/simd-target-feature-mixup.rs b/src/test/run-pass/simd-target-feature-mixup.rs index 2c9ef59709dbf..3c54921ac6e02 100644 --- a/src/test/run-pass/simd-target-feature-mixup.rs +++ b/src/test/run-pass/simd-target-feature-mixup.rs @@ -30,7 +30,7 @@ fn main() { // We don't actually know if our computer has the requisite target features // for the test below. Testing for that will get added to libstd later so - // for now just asume sigill means this is a machine that can't run this test. + // for now just assume sigill means this is a machine that can't run this test. 
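[Editorial note, not part of the patch: the comment above treats a child process that dies with SIGILL as a machine lacking the required target features. The test's actual `is_sigill` helper is not shown in this hunk, so the snippet below is only an assumed sketch of how such a check can be written on Unix, using `ExitStatusExt::signal`.]

```rust
use std::process::{Command, ExitStatus};

// Hypothetical helper; the real `is_sigill` used by the test is not shown in
// this hunk, so this implementation is assumed.
#[cfg(unix)]
fn is_sigill(status: ExitStatus) -> bool {
    use std::os::unix::process::ExitStatusExt;
    // SIGILL is signal number 4 on the common Unix platforms.
    status.signal() == Some(4)
}

#[cfg(not(unix))]
fn is_sigill(_status: ExitStatus) -> bool {
    false
}

fn main() -> std::io::Result<()> {
    // Spawn some child and check whether it was killed by an illegal instruction.
    let status = Command::new("true").status()?;
    println!("died from SIGILL: {}", is_sigill(status));
    Ok(())
}
```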
if is_sigill(status) { println!("sigill with {}, assuming spurious", level); continue diff --git a/src/test/rustdoc/duplicate_impls/issue-33054.rs b/src/test/rustdoc/duplicate_impls/issue-33054.rs index df6ebcae10756..43a425d4c5e4f 100644 --- a/src/test/rustdoc/duplicate_impls/issue-33054.rs +++ b/src/test/rustdoc/duplicate_impls/issue-33054.rs @@ -11,7 +11,8 @@ // @has issue_33054/impls/struct.Foo.html // @has - '//code' 'impl Foo' // @has - '//code' 'impl Bar for Foo' -// @count - '//*[@class="impl"]' 2 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 +// @count - '//*[@id="main"]/*[@class="impl"]' 1 // @has issue_33054/impls/bar/trait.Bar.html // @has - '//code' 'impl Bar for Foo' // @count - '//*[@class="struct"]' 1 diff --git a/src/test/rustdoc/impl-parts-crosscrate.rs b/src/test/rustdoc/impl-parts-crosscrate.rs index 5fa2e03e0a884..1d055ccbeadef 100644 --- a/src/test/rustdoc/impl-parts-crosscrate.rs +++ b/src/test/rustdoc/impl-parts-crosscrate.rs @@ -17,7 +17,7 @@ extern crate rustdoc_impl_parts_crosscrate; pub struct Bar { t: T } -// The output file is html embeded in javascript, so the html tags +// The output file is html embedded in javascript, so the html tags // aren't stripped by the processing script and we can't check for the // full impl string. Instead, just make sure something from each part // is mentioned. diff --git a/src/test/rustdoc/issue-21474.rs b/src/test/rustdoc/issue-21474.rs index 36f160acf1cf8..553bbeb0cff39 100644 --- a/src/test/rustdoc/issue-21474.rs +++ b/src/test/rustdoc/issue-21474.rs @@ -17,5 +17,5 @@ mod inner { pub trait Blah { } // @count issue_21474/struct.What.html \ -// '//*[@class="impl"]' 1 +// '//*[@id="implementations-list"]/*[@class="impl"]' 1 pub struct What; diff --git a/src/test/rustdoc/issue-45584.rs b/src/test/rustdoc/issue-45584.rs index 6d6ae3dc94a21..b0e64557be253 100644 --- a/src/test/rustdoc/issue-45584.rs +++ b/src/test/rustdoc/issue-45584.rs @@ -14,12 +14,12 @@ pub trait Bar {} // @has 'foo/struct.Foo1.html' pub struct Foo1; -// @count - '//*[@class="impl"]' 1 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 // @has - '//*[@class="impl"]' "impl Bar for Foo1" impl Bar for Foo1 {} // @has 'foo/struct.Foo2.html' pub struct Foo2; -// @count - '//*[@class="impl"]' 1 +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 // @has - '//*[@class="impl"]' "impl Bar<&'static Foo2, Foo2> for u8" impl Bar<&'static Foo2, Foo2> for u8 {} diff --git a/src/test/rustdoc/synthetic_auto/basic.rs b/src/test/rustdoc/synthetic_auto/basic.rs new file mode 100644 index 0000000000000..8ff84d11a5009 --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/basic.rs @@ -0,0 +1,18 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// @has basic/struct.Foo.html +// @has - '//code' 'impl<T> Send for Foo<T> where T: Send' +// @has - '//code' 'impl<T> Sync for Foo<T> where T: Sync' +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 0 +// @count - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]' 2 +pub struct Foo<T> { + field: T, +} diff --git a/src/test/rustdoc/synthetic_auto/complex.rs b/src/test/rustdoc/synthetic_auto/complex.rs new file mode 100644 index 0000000000000..896d49ca79a5e --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/complex.rs @@ -0,0 +1,52 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +mod foo { + pub trait MyTrait<'a> { + type MyItem: ?Sized; + } + + pub struct Inner<'a, Q, R: ?Sized> { + field: Q, + field3: &'a u8, + my_foo: Foo<Q>, + field2: R, + } + + pub struct Outer<'a, T, K: ?Sized> { + my_inner: Inner<'a, T, K>, + } + + pub struct Foo<T> { + myfield: T, + } +} + +// @has complex/struct.NotOuter.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'a, T, K: \ +// ?Sized> Send for NotOuter<'a, T, K> where 'a: 'static, K: for<'b> Fn((&'b bool, &'a u8)) \ +// -> &'b i8, <T as MyTrait<'a>>::MyItem: Copy, T: MyTrait<'a>" + +pub use foo::{Foo, Inner as NotInner, MyTrait as NotMyTrait, Outer as NotOuter}; + +unsafe impl<T> Send for Foo<T> +where + T: NotMyTrait<'static>, +{ +} + +unsafe impl<'a, Q, R: ?Sized> Send for NotInner<'a, Q, R> +where + Q: NotMyTrait<'a>, + <Q as NotMyTrait<'a>>::MyItem: Copy, + /* for<'b> */ R: for<'b> Fn((&'b bool, &'a u8)) -> &'b i8, + Foo<Q>: Send, +{ +} diff --git a/src/test/rustdoc/synthetic_auto/lifetimes.rs b/src/test/rustdoc/synthetic_auto/lifetimes.rs new file mode 100644 index 0000000000000..2f92627f9546c --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/lifetimes.rs @@ -0,0 +1,28 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +pub struct Inner<'a, T: 'a> { + field: &'a T, +} + +unsafe impl<'a, T> Send for Inner<'a, T> +where + 'a: 'static, + T: for<'b> Fn(&'b bool) -> &'a u8, +{} + +// @has lifetimes/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Send \ +// for Foo<'c, K> where 'c: 'static, K: for<'b> Fn(&'b bool) -> &'c u8" +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Sync \ +// for Foo<'c, K> where K: Sync" +pub struct Foo<'c, K: 'c> { + inner_field: Inner<'c, K>, +} diff --git a/src/test/rustdoc/synthetic_auto/manual.rs b/src/test/rustdoc/synthetic_auto/manual.rs new file mode 100644 index 0000000000000..d81e6309dff61 --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/manual.rs @@ -0,0 +1,24 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option.
This file may not be copied, modified, or distributed +// except according to those terms. + +// @has manual/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' 'impl<T> Sync for \ +// Foo<T> where T: Sync' +// +// @has - '//*[@id="implementations-list"]/*[@class="impl"]/*/code' \ +// 'impl<T> Send for Foo<T>' +// +// @count - '//*[@id="implementations-list"]/*[@class="impl"]' 1 +// @count - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]' 1 +pub struct Foo<T> { + field: T, +} + +unsafe impl<T> Send for Foo<T> {} diff --git a/src/test/rustdoc/synthetic_auto/negative.rs b/src/test/rustdoc/synthetic_auto/negative.rs new file mode 100644 index 0000000000000..ec9cb710f1f8c --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/negative.rs @@ -0,0 +1,23 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct Inner<T> { + field: *mut T, +} + +// @has negative/struct.Outer.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> !Send for \ +// Outer<T>" +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> \ +// !Sync for Outer<T>" +pub struct Outer<T> { + inner_field: Inner<T>, +} diff --git a/src/test/rustdoc/synthetic_auto/nested.rs b/src/test/rustdoc/synthetic_auto/nested.rs new file mode 100644 index 0000000000000..1f33a8b13cbf8 --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/nested.rs @@ -0,0 +1,28 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +pub struct Inner<T> { + field: T, +} + +unsafe impl<T> Send for Inner<T> +where + T: Copy, +{ +} + +// @has nested/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' 'impl<T> Send for \ +// Foo<T> where T: Copy' +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' \ +// 'impl<T> Sync for Foo<T> where T: Sync' +pub struct Foo<T> { + inner_field: Inner<T>, +} diff --git a/src/test/rustdoc/synthetic_auto/no-redundancy.rs b/src/test/rustdoc/synthetic_auto/no-redundancy.rs new file mode 100644 index 0000000000000..0b37f2ed31790 --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/no-redundancy.rs @@ -0,0 +1,26 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms.
+ +pub struct Inner<T> { + field: T, +} + +unsafe impl<T> Send for Inner<T> +where + T: Copy + Send, +{ +} + +// @has no_redundancy/struct.Outer.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<T> Send for \ +// Outer<T> where T: Copy + Send" +pub struct Outer<T> { + inner_field: Inner<T>, +} diff --git a/src/test/rustdoc/synthetic_auto/project.rs b/src/test/rustdoc/synthetic_auto/project.rs new file mode 100644 index 0000000000000..e1b8621ff6dcc --- /dev/null +++ b/src/test/rustdoc/synthetic_auto/project.rs @@ -0,0 +1,43 @@ +// Copyright 2018 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct Inner<'a, T: 'a> { + field: &'a T, +} + +trait MyTrait { + type MyItem; +} + +trait OtherTrait {} + +unsafe impl<'a, T> Send for Inner<'a, T> +where + 'a: 'static, + T: MyTrait, +{ +} +unsafe impl<'a, T> Sync for Inner<'a, T> +where + 'a: 'static, + T: MyTrait, + <T as MyTrait>::MyItem: OtherTrait, +{ +} + +// @has project/struct.Foo.html +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Send \ +// for Foo<'c, K> where 'c: 'static, K: MyTrait" +// +// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]/*/code' "impl<'c, K> Sync \ +// for Foo<'c, K> where 'c: 'static, K: MyTrait, <K as MyTrait>::MyItem: OtherTrait" +pub struct Foo<'c, K: 'c> { + inner_field: Inner<'c, K>, +} diff --git a/src/test/ui/error-codes/E0449.stderr b/src/test/ui/error-codes/E0449.stderr index 2270167303a80..3587319ed0cbc 100644 --- a/src/test/ui/error-codes/E0449.stderr +++ b/src/test/ui/error-codes/E0449.stderr @@ -2,23 +2,21 @@ error[E0449]: unnecessary visibility qualifier --> $DIR/E0449.rs:17:1 | 17 | pub impl Bar {} //~ ERROR E0449 - | ^^^^^^^^^^^^^^^ `pub` not needed here + | ^^^ `pub` not needed here | = note: place qualifiers on individual impl items instead error[E0449]: unnecessary visibility qualifier --> $DIR/E0449.rs:19:1 | -19 | / pub impl Foo for Bar { //~ ERROR E0449 -20 | | pub fn foo() {} //~ ERROR E0449 -21 | | } - | |_^ `pub` not needed here +19 | pub impl Foo for Bar { //~ ERROR E0449 + | ^^^ `pub` not needed here error[E0449]: unnecessary visibility qualifier --> $DIR/E0449.rs:20:5 | 20 | pub fn foo() {} //~ ERROR E0449 - | ^^^^^^^^^^^^^^^ `pub` not needed here + | ^^^ `pub` not needed here error: aborting due to 3 previous errors diff --git a/src/test/ui/explain.stdout b/src/test/ui/explain.stdout index 0bbbd95320a8c..411cdfb335b34 100644 --- a/src/test/ui/explain.stdout +++ b/src/test/ui/explain.stdout @@ -45,7 +45,7 @@ is a function pointer, which is not zero-sized. This pattern should be rewritten.
There are a few possible ways to do this: - change the original fn declaration to match the expected signature, - and do the cast in the fn body (the prefered option) + and do the cast in the fn body (the preferred option) - cast the fn item fo a fn pointer before calling transmute, as shown here: ``` diff --git a/src/test/ui/feature-gate/issue-43106-gating-of-builtin-attrs.rs b/src/test/ui/feature-gate/issue-43106-gating-of-builtin-attrs.rs index 029949b26047b..21950402c8c41 100644 --- a/src/test/ui/feature-gate/issue-43106-gating-of-builtin-attrs.rs +++ b/src/test/ui/feature-gate/issue-43106-gating-of-builtin-attrs.rs @@ -509,7 +509,7 @@ mod reexport_test_harness_main { //~^ WARN unused attribute } -// Cannnot feed "2700" to `#[macro_escape]` without signaling an error. +// Cannot feed "2700" to `#[macro_escape]` without signaling an error. #[macro_escape] //~^ WARN macro_escape is a deprecated synonym for macro_use mod macro_escape { diff --git a/src/test/ui/lifetime-errors/liveness-assign-imm-local-notes.rs b/src/test/ui/lifetime-errors/liveness-assign-imm-local-notes.rs index d4ef87cdd7681..20a2cbfd3aa78 100644 --- a/src/test/ui/lifetime-errors/liveness-assign-imm-local-notes.rs +++ b/src/test/ui/lifetime-errors/liveness-assign-imm-local-notes.rs @@ -9,7 +9,7 @@ // except according to those terms. // FIXME: Change to UI Test -// Check notes are placed on an assignment that can actually preceed the current assigmnent +// Check notes are placed on an assignment that can actually precede the current assignment // Don't emmit a first assignment for assignment in a loop. // compile-flags: -Zborrowck=compare