Auto merge of #92070 - rukai:replace_vec_into_iter_with_array_into_iter, r=Mark-Simulacrum

Replace usages of vec![].into_iter with [].into_iter

`[].into_iter` is idiomatic over `vec![].into_iter` because it's simpler and faster (unless the `Vec` is optimized away, in which case the two are equivalent).

So we should change all of the implementation, documentation, and tests to use it.
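
As a minimal, hypothetical sketch (not part of this diff, and assuming the 2021 edition, where arrays are iterated by value): both forms yield the same items, but the array form avoids the heap allocation for the source.

```rust
fn main() {
    // Allocates a Vec on the heap, then consumes it.
    let doubled_via_vec: Vec<i32> = vec![1, 2, 3].into_iter().map(|x| x * 2).collect();

    // Iterates the array by value directly; no heap allocation for the source.
    let doubled_via_array: Vec<i32> = [1, 2, 3].into_iter().map(|x| x * 2).collect();

    assert_eq!(doubled_via_vec, doubled_via_array);
}
```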

I skipped:
* `src/tools` - those are copied in from upstream
* `src/test/ui` - it is hard to tell whether `vec![].into_iter` was used intentionally here, and there is not much benefit to changing it
* any case where `vec![].into_iter` was used because we specifically needed a `Vec::IntoIter<T>` (see the sketch after this list)
* any case where it looked like we were intentionally using `vec![].into_iter` to test it
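
To illustrate the third point, here is a hypothetical sketch (not taken from this diff, again assuming the 2021 edition): the two forms produce different concrete iterator types, so call sites that specifically require a `std::vec::IntoIter<T>` cannot switch to an array literal.

```rust
use std::{array, vec};

// A hypothetical function that requires the Vec-based iterator specifically.
fn takes_vec_iter(iter: vec::IntoIter<u32>) -> usize {
    iter.len()
}

fn main() {
    // `vec![..].into_iter()` yields a `std::vec::IntoIter<u32>`.
    let from_vec: vec::IntoIter<u32> = vec![1, 2, 3].into_iter();

    // `[..].into_iter()` yields a `std::array::IntoIter<u32, 3>` instead.
    let from_array: array::IntoIter<u32, 3> = [1, 2, 3].into_iter();

    assert_eq!(takes_vec_iter(from_vec), 3);
    assert_eq!(from_array.len(), 3);
}
```
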
bors committed Jan 11, 2022
2 parents e4b1d58 + 0882985 commit 2e2c86e
Showing 36 changed files with 116 additions and 122 deletions.
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_llvm/src/intrinsic.rs
@@ -791,7 +791,7 @@ fn get_rust_try_fn<'ll, 'tcx>(
)));
// `unsafe fn(unsafe fn(*mut i8) -> (), *mut i8, unsafe fn(*mut i8, *mut i8) -> ()) -> i32`
let rust_fn_sig = ty::Binder::dummy(cx.tcx.mk_fn_sig(
vec![try_fn_ty, i8p, catch_fn_ty].into_iter(),
[try_fn_ty, i8p, catch_fn_ty].into_iter(),
tcx.types.i32,
false,
hir::Unsafety::Unsafe,
4 changes: 2 additions & 2 deletions compiler/rustc_data_structures/src/thin_vec/tests.rs
@@ -10,8 +10,8 @@ impl<T> ThinVec<T> {
fn test_from_iterator() {
assert_eq!(std::iter::empty().collect::<ThinVec<String>>().into_vec(), Vec::<String>::new());
assert_eq!(std::iter::once(42).collect::<ThinVec<_>>().into_vec(), vec![42]);
assert_eq!(vec![1, 2].into_iter().collect::<ThinVec<_>>().into_vec(), vec![1, 2]);
assert_eq!(vec![1, 2, 3].into_iter().collect::<ThinVec<_>>().into_vec(), vec![1, 2, 3]);
assert_eq!([1, 2].into_iter().collect::<ThinVec<_>>().into_vec(), vec![1, 2]);
assert_eq!([1, 2, 3].into_iter().collect::<ThinVec<_>>().into_vec(), vec![1, 2, 3]);
}

#[test]
4 changes: 2 additions & 2 deletions compiler/rustc_data_structures/src/vec_map/tests.rs
@@ -14,7 +14,7 @@ fn test_from_iterator() {
);
assert_eq!(std::iter::once((42, true)).collect::<VecMap<_, _>>().into_vec(), vec![(42, true)]);
assert_eq!(
vec![(1, true), (2, false)].into_iter().collect::<VecMap<_, _>>().into_vec(),
[(1, true), (2, false)].into_iter().collect::<VecMap<_, _>>().into_vec(),
vec![(1, true), (2, false)]
);
}
@@ -41,7 +41,7 @@ fn test_insert() {

#[test]
fn test_get() {
let v = vec![(1, true), (2, false)].into_iter().collect::<VecMap<_, _>>();
let v = [(1, true), (2, false)].into_iter().collect::<VecMap<_, _>>();
assert_eq!(v.get(&1), Some(&true));
assert_eq!(v.get(&2), Some(&false));
assert_eq!(v.get(&3), None);
2 changes: 1 addition & 1 deletion compiler/rustc_errors/src/json.rs
@@ -455,7 +455,7 @@ impl DiagnosticSpan {
let backtrace_step = backtrace.next().map(|bt| {
let call_site = Self::from_span_full(bt.call_site, false, None, None, backtrace, je);
let def_site_span =
Self::from_span_full(bt.def_site, false, None, None, vec![].into_iter(), je);
Self::from_span_full(bt.def_site, false, None, None, [].into_iter(), je);
Box::new(DiagnosticSpanMacroExpansion {
span: call_site,
macro_decl_name: bt.kind.descr(),
6 changes: 3 additions & 3 deletions compiler/rustc_expand/src/proc_macro_server.rs
@@ -158,7 +158,7 @@ impl FromInternal<(TreeAndSpacing, &'_ mut Vec<Self>, &mut Rustc<'_, '_>)>
for ch in data.as_str().chars() {
escaped.extend(ch.escape_debug());
}
let stream = vec![
let stream = [
Ident(sym::doc, false),
Eq,
TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
@@ -221,7 +221,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
let integer = TokenKind::lit(token::Integer, symbol, suffix);
let a = tokenstream::TokenTree::token(minus, span);
let b = tokenstream::TokenTree::token(integer, span);
return vec![a, b].into_iter().collect();
return [a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal {
lit: token::Lit { kind: token::Float, symbol, suffix },
@@ -232,7 +232,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
let float = TokenKind::lit(token::Float, symbol, suffix);
let a = tokenstream::TokenTree::token(minus, span);
let b = tokenstream::TokenTree::token(float, span);
return vec![a, b].into_iter().collect();
return [a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal { lit, span }) => {
return tokenstream::TokenTree::token(Literal(lit), span).into();
2 changes: 1 addition & 1 deletion compiler/rustc_graphviz/src/tests.rs
@@ -56,7 +56,7 @@ impl NodeLabels<&'static str> {
match self {
UnlabelledNodes(len) => vec![None; len],
AllNodesLabelled(lbls) => lbls.into_iter().map(Some).collect(),
SomeNodesLabelled(lbls) => lbls.into_iter().collect(),
SomeNodesLabelled(lbls) => lbls,
}
}

10 changes: 5 additions & 5 deletions compiler/rustc_incremental/src/persist/fs/tests.rs
@@ -13,7 +13,7 @@ fn test_all_except_most_recent() {
.keys()
.cloned()
.collect::<FxHashSet<PathBuf>>(),
vec![PathBuf::from("1"), PathBuf::from("2"), PathBuf::from("3"), PathBuf::from("4"),]
[PathBuf::from("1"), PathBuf::from("2"), PathBuf::from("3"), PathBuf::from("4"),]
.into_iter()
.collect::<FxHashSet<PathBuf>>()
);
@@ -40,7 +40,7 @@ fn test_find_source_directory_in_iter() {
// Find newest
assert_eq!(
find_source_directory_in_iter(
vec![
[
PathBuf::from("crate-dir/s-3234-0000-svh"),
PathBuf::from("crate-dir/s-2234-0000-svh"),
PathBuf::from("crate-dir/s-1234-0000-svh")
@@ -54,7 +54,7 @@ fn test_find_source_directory_in_iter() {
// Filter out "-working"
assert_eq!(
find_source_directory_in_iter(
vec![
[
PathBuf::from("crate-dir/s-3234-0000-working"),
PathBuf::from("crate-dir/s-2234-0000-svh"),
PathBuf::from("crate-dir/s-1234-0000-svh")
@@ -66,12 +66,12 @@ fn test_find_source_directory_in_iter() {
);

// Handle empty
assert_eq!(find_source_directory_in_iter(vec![].into_iter(), &already_visited), None);
assert_eq!(find_source_directory_in_iter([].into_iter(), &already_visited), None);

// Handle only working
assert_eq!(
find_source_directory_in_iter(
vec![
[
PathBuf::from("crate-dir/s-3234-0000-working"),
PathBuf::from("crate-dir/s-2234-0000-working"),
PathBuf::from("crate-dir/s-1234-0000-working")
9 changes: 3 additions & 6 deletions compiler/rustc_infer/src/infer/error_reporting/mod.rs
@@ -771,7 +771,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
self.suggest_boxing_for_return_impl_trait(
err,
ret_sp,
vec![then, else_sp].into_iter(),
[then, else_sp].into_iter(),
);
}
}
@@ -807,11 +807,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
);
let sugg = arm_spans
.flat_map(|sp| {
vec![
(sp.shrink_to_lo(), "Box::new(".to_string()),
(sp.shrink_to_hi(), ")".to_string()),
]
.into_iter()
[(sp.shrink_to_lo(), "Box::new(".to_string()), (sp.shrink_to_hi(), ")".to_string())]
.into_iter()
})
.collect::<Vec<_>>();
err.multipart_suggestion(
@@ -540,8 +540,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
// error[E0284]: type annotations needed
// --> file.rs:2:5
// |
// 2 | vec![Ok(2)].into_iter().collect()?;
// | ^^^^^^^ cannot infer type
// 2 | [Ok(2)].into_iter().collect()?;
// | ^^^^^^^ cannot infer type
// |
// = note: cannot resolve `<_ as std::ops::Try>::Ok == _`
if span.contains(*call_span) { *call_span } else { span }
4 changes: 2 additions & 2 deletions compiler/rustc_parse/src/parser/diagnostics.rs
@@ -550,8 +550,8 @@ impl<'a> Parser<'a> {
/// a diagnostic to suggest removing them.
///
/// ```ignore (diagnostic)
/// let _ = vec![1, 2, 3].into_iter().collect::<Vec<usize>>>>();
/// ^^ help: remove extra angle brackets
/// let _ = [1, 2, 3].into_iter().collect::<Vec<usize>>>>();
/// ^^ help: remove extra angle brackets
/// ```
///
/// If `true` is returned, then trailing brackets were recovered, tokens were consumed
6 changes: 2 additions & 4 deletions compiler/rustc_target/src/spec/avr_gnu_base.rs
@@ -17,12 +17,10 @@ pub fn target(target_cpu: String) -> Target {
linker: Some("avr-gcc".to_owned()),
executables: true,
eh_frame_header: false,
pre_link_args: vec![(LinkerFlavor::Gcc, vec![format!("-mmcu={}", target_cpu)])]
.into_iter()
.collect(),
late_link_args: vec![(LinkerFlavor::Gcc, vec!["-lgcc".to_owned()])]
pre_link_args: [(LinkerFlavor::Gcc, vec![format!("-mmcu={}", target_cpu)])]
.into_iter()
.collect(),
late_link_args: [(LinkerFlavor::Gcc, vec!["-lgcc".to_owned()])].into_iter().collect(),
max_atomic_width: Some(0),
atomic_cas: false,
..TargetOptions::default()
8 changes: 4 additions & 4 deletions compiler/rustc_target/src/spec/mod.rs
@@ -574,15 +574,15 @@ impl ToJson for StackProbeType {
fn to_json(&self) -> Json {
Json::Object(match self {
StackProbeType::None => {
vec![(String::from("kind"), "none".to_json())].into_iter().collect()
[(String::from("kind"), "none".to_json())].into_iter().collect()
}
StackProbeType::Inline => {
vec![(String::from("kind"), "inline".to_json())].into_iter().collect()
[(String::from("kind"), "inline".to_json())].into_iter().collect()
}
StackProbeType::Call => {
vec![(String::from("kind"), "call".to_json())].into_iter().collect()
[(String::from("kind"), "call".to_json())].into_iter().collect()
}
StackProbeType::InlineOrCall { min_llvm_version_for_inline } => vec![
StackProbeType::InlineOrCall { min_llvm_version_for_inline } => [
(String::from("kind"), "inline-or-call".to_json()),
(
String::from("min-llvm-version-for-inline"),
@@ -2247,7 +2247,7 @@ pub fn recursive_type_with_infinite_size_error(
spans
.iter()
.flat_map(|&span| {
vec![
[
(span.shrink_to_lo(), "Box<".to_string()),
(span.shrink_to_hi(), ">".to_string()),
]
@@ -1226,7 +1226,7 @@ impl<'a, 'tcx> InferCtxtExt<'tcx> for InferCtxt<'a, 'tcx> {
.returns
.iter()
.flat_map(|expr| {
vec![
[
(expr.span.shrink_to_lo(), "Box::new(".to_string()),
(expr.span.shrink_to_hi(), ")".to_string()),
]
2 changes: 1 addition & 1 deletion compiler/rustc_trait_selection/src/traits/select/mod.rs
@@ -1953,7 +1953,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ty::Generator(_, ref substs, _) => {
let ty = self.infcx.shallow_resolve(substs.as_generator().tupled_upvars_ty());
let witness = substs.as_generator().witness();
t.rebind(vec![ty].into_iter().chain(iter::once(witness)).collect())
t.rebind([ty].into_iter().chain(iter::once(witness)).collect())
}

ty::GeneratorWitness(types) => {
4 changes: 2 additions & 2 deletions compiler/rustc_typeck/src/check/coercion.rs
@@ -1667,10 +1667,10 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
],
Applicability::MachineApplicable,
);
let sugg = vec![sp, cause.span]
let sugg = [sp, cause.span]
.into_iter()
.flat_map(|sp| {
vec![
[
(sp.shrink_to_lo(), "Box::new(".to_string()),
(sp.shrink_to_hi(), ")".to_string()),
]
2 changes: 1 addition & 1 deletion compiler/rustc_typeck/src/variance/terms.rs
@@ -86,7 +86,7 @@ pub fn determine_parameters_to_be_inferred<'a, 'tcx>(

fn lang_items(tcx: TyCtxt<'_>) -> Vec<(hir::HirId, Vec<ty::Variance>)> {
let lang_items = tcx.lang_items();
let all = vec![
let all = [
(lang_items.phantom_data(), vec![ty::Covariant]),
(lang_items.unsafe_cell_type(), vec![ty::Invariant]),
];
4 changes: 2 additions & 2 deletions library/alloc/src/collections/btree/map/tests.rs
@@ -728,7 +728,7 @@ fn test_range_large() {
#[test]
fn test_range_inclusive_max_value() {
let max = usize::MAX;
let map: BTreeMap<_, _> = vec![(max, 0)].into_iter().collect();
let map: BTreeMap<_, _> = [(max, 0)].into_iter().collect();

assert_eq!(map.range(max..=max).collect::<Vec<_>>(), &[(&max, &0)]);
}
@@ -2128,7 +2128,7 @@ fn test_into_iter_drop_leak_height_1() {

#[test]
fn test_into_keys() {
let vec = vec![(1, 'a'), (2, 'b'), (3, 'c')];
let vec = [(1, 'a'), (2, 'b'), (3, 'c')];
let map: BTreeMap<_, _> = vec.into_iter().collect();
let keys: Vec<_> = map.into_keys().collect();

16 changes: 8 additions & 8 deletions library/alloc/src/collections/vec_deque/mod.rs
@@ -669,7 +669,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
/// let mut buf: VecDeque<i32> = [1].into_iter().collect();
/// buf.reserve_exact(10);
/// assert!(buf.capacity() >= 11);
/// ```
@@ -692,7 +692,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf: VecDeque<i32> = vec![1].into_iter().collect();
/// let mut buf: VecDeque<i32> = [1].into_iter().collect();
/// buf.reserve(10);
/// assert!(buf.capacity() >= 11);
/// ```
@@ -1153,7 +1153,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
/// use std::collections::VecDeque;
///
/// let v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
/// let v: VecDeque<_> = [1, 2, 3].into_iter().collect();
/// let range = v.range(2..).copied().collect::<VecDeque<_>>();
/// assert_eq!(range, [3]);
///
@@ -1188,7 +1188,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
/// use std::collections::VecDeque;
///
/// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
/// let mut v: VecDeque<_> = [1, 2, 3].into_iter().collect();
/// for v in v.range_mut(2..) {
/// *v *= 2;
/// }
@@ -1235,7 +1235,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
/// use std::collections::VecDeque;
///
/// let mut v: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
/// let mut v: VecDeque<_> = [1, 2, 3].into_iter().collect();
/// let drained = v.drain(2..).collect::<VecDeque<_>>();
/// assert_eq!(drained, [3]);
/// assert_eq!(v, [1, 2]);
@@ -2025,7 +2025,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf: VecDeque<_> = vec![1, 2, 3].into_iter().collect();
/// let mut buf: VecDeque<_> = [1, 2, 3].into_iter().collect();
/// let buf2 = buf.split_off(1);
/// assert_eq!(buf, [1]);
/// assert_eq!(buf2, [2, 3]);
@@ -2091,8 +2091,8 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
/// use std::collections::VecDeque;
///
/// let mut buf: VecDeque<_> = vec![1, 2].into_iter().collect();
/// let mut buf2: VecDeque<_> = vec![3, 4].into_iter().collect();
/// let mut buf: VecDeque<_> = [1, 2].into_iter().collect();
/// let mut buf2: VecDeque<_> = [3, 4].into_iter().collect();
/// buf.append(&mut buf2);
/// assert_eq!(buf, [1, 2, 3, 4]);
/// assert_eq!(buf2, []);