Skip to content

Commit 8db435e

Browse files
committed
layout: Store inverse memory index in FieldsShape::Arbitrary
All usages of `memory_index` start by calling `invert_bijective_mapping`, so storing the inverted mapping directly saves some work and simplifies the code.
1 parent f794a08 commit 8db435e

36 files changed

+244
-291
lines changed

compiler/rustc_abi/src/layout.rs

Lines changed: 17 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -714,7 +714,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
714714
},
715715
fields: FieldsShape::Arbitrary {
716716
offsets: [niche_offset].into(),
717-
memory_index: [0].into(),
717+
inverse_memory_index: [FieldIdx::new(0)].into(),
718718
},
719719
backend_repr: abi,
720720
largest_niche,
@@ -1008,8 +1008,8 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
10081008
let pair =
10091009
LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, tag, prim_scalar);
10101010
let pair_offsets = match pair.fields {
1011-
FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
1012-
assert_eq!(memory_index.raw, [0, 1]);
1011+
FieldsShape::Arbitrary { ref offsets, ref inverse_memory_index } => {
1012+
assert_eq!(inverse_memory_index.raw, [FieldIdx::new(0), FieldIdx::new(1)]);
10131013
offsets
10141014
}
10151015
_ => panic!("encountered a non-arbitrary layout during enum layout"),
@@ -1061,7 +1061,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
10611061
},
10621062
fields: FieldsShape::Arbitrary {
10631063
offsets: [Size::ZERO].into(),
1064-
memory_index: [0].into(),
1064+
inverse_memory_index: [FieldIdx::new(0)].into(),
10651065
},
10661066
largest_niche,
10671067
uninhabited,
@@ -1252,8 +1252,6 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
12521252
// That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
12531253
// We now write field offsets to the corresponding offset slot;
12541254
// field 5 with offset 0 puts 0 in offsets[5].
1255-
// At the bottom of this function, we invert `inverse_memory_index` to
1256-
// produce `memory_index` (see `invert_mapping`).
12571255
let mut unsized_field = None::<&F>;
12581256
let mut offsets = IndexVec::from_elem(Size::ZERO, fields);
12591257
let mut offset = Size::ZERO;
@@ -1322,18 +1320,6 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
13221320

13231321
debug!("univariant min_size: {:?}", offset);
13241322
let min_size = offset;
1325-
// As stated above, inverse_memory_index holds field indices by increasing offset.
1326-
// This makes it an already-sorted view of the offsets vec.
1327-
// To invert it, consider:
1328-
// If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
1329-
// Field 5 would be the first element, so memory_index is i:
1330-
// Note: if we didn't optimize, it's already right.
1331-
let memory_index = if optimize_field_order {
1332-
inverse_memory_index.invert_bijective_mapping()
1333-
} else {
1334-
debug_assert!(inverse_memory_index.iter().copied().eq(fields.indices()));
1335-
inverse_memory_index.into_iter().map(|it| it.index() as u32).collect()
1336-
};
13371323
let size = min_size.align_to(align);
13381324
// FIXME(oli-obk): deduplicate and harden these checks
13391325
if size.bytes() >= dl.obj_size_bound() {
@@ -1389,8 +1375,14 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
13891375
let pair =
13901376
LayoutData::<FieldIdx, VariantIdx>::scalar_pair(&self.cx, a, b);
13911377
let pair_offsets = match pair.fields {
1392-
FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
1393-
assert_eq!(memory_index.raw, [0, 1]);
1378+
FieldsShape::Arbitrary {
1379+
ref offsets,
1380+
ref inverse_memory_index,
1381+
} => {
1382+
assert_eq!(
1383+
inverse_memory_index.raw,
1384+
[FieldIdx::new(0), FieldIdx::new(1)]
1385+
);
13941386
offsets
13951387
}
13961388
FieldsShape::Primitive
@@ -1434,7 +1426,7 @@ impl<Cx: HasDataLayout> LayoutCalculator<Cx> {
14341426

14351427
Ok(LayoutData {
14361428
variants: Variants::Single { index: VariantIdx::new(0) },
1437-
fields: FieldsShape::Arbitrary { offsets, memory_index },
1429+
fields: FieldsShape::Arbitrary { offsets, inverse_memory_index },
14381430
backend_repr: abi,
14391431
largest_niche,
14401432
uninhabited,
@@ -1530,7 +1522,10 @@ where
15301522

15311523
Ok(LayoutData {
15321524
variants: Variants::Single { index: VariantIdx::new(0) },
1533-
fields: FieldsShape::Arbitrary { offsets: [Size::ZERO].into(), memory_index: [0].into() },
1525+
fields: FieldsShape::Arbitrary {
1526+
offsets: [Size::ZERO].into(),
1527+
inverse_memory_index: [FieldIdx::new(0)].into(),
1528+
},
15341529
backend_repr: repr,
15351530
largest_niche: elt.largest_niche,
15361531
uninhabited: false,

compiler/rustc_abi/src/layout/coroutine.rs

Lines changed: 20 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -182,9 +182,7 @@ pub(super) fn layout<
182182
// CoroutineLayout.
183183
debug!("prefix = {:#?}", prefix);
184184
let (outer_fields, promoted_offsets, promoted_memory_index) = match prefix.fields {
185-
FieldsShape::Arbitrary { mut offsets, memory_index } => {
186-
let mut inverse_memory_index = memory_index.invert_bijective_mapping();
187-
185+
FieldsShape::Arbitrary { mut offsets, inverse_memory_index } => {
188186
// "a" (`0..b_start`) and "b" (`b_start..`) correspond to
189187
// "outer" and "promoted" fields respectively.
190188
let b_start = tag_index.plus(1);
@@ -194,21 +192,21 @@ pub(super) fn layout<
194192
// Disentangle the "a" and "b" components of `inverse_memory_index`
195193
// by preserving the order but keeping only one disjoint "half" each.
196194
// FIXME(eddyb) build a better abstraction for permutations, if possible.
197-
let inverse_memory_index_b: IndexVec<u32, FieldIdx> = inverse_memory_index
198-
.iter()
199-
.filter_map(|&i| i.index().checked_sub(b_start.index()).map(FieldIdx::new))
200-
.collect();
201-
inverse_memory_index.raw.retain(|&i| i.index() < b_start.index());
202-
let inverse_memory_index_a = inverse_memory_index;
203-
204-
// Since `inverse_memory_index_{a,b}` each only refer to their
205-
// respective fields, they can be safely inverted
206-
let memory_index_a = inverse_memory_index_a.invert_bijective_mapping();
207-
let memory_index_b = inverse_memory_index_b.invert_bijective_mapping();
195+
let mut inverse_memory_index_a = IndexVec::<u32, FieldIdx>::new();
196+
let mut inverse_memory_index_b = IndexVec::<u32, FieldIdx>::new();
197+
for i in inverse_memory_index {
198+
if let Some(j) = i.index().checked_sub(b_start.index()) {
199+
inverse_memory_index_b.push(FieldIdx::new(j));
200+
} else {
201+
inverse_memory_index_a.push(i);
202+
}
203+
}
208204

209-
let outer_fields =
210-
FieldsShape::Arbitrary { offsets: offsets_a, memory_index: memory_index_a };
211-
(outer_fields, offsets_b, memory_index_b)
205+
let outer_fields = FieldsShape::Arbitrary {
206+
offsets: offsets_a,
207+
inverse_memory_index: inverse_memory_index_a,
208+
};
209+
(outer_fields, offsets_b, inverse_memory_index_b.invert_bijective_mapping())
212210
}
213211
_ => unreachable!(),
214212
};
@@ -236,7 +234,7 @@ pub(super) fn layout<
236234
)?;
237235
variant.variants = Variants::Single { index };
238236

239-
let FieldsShape::Arbitrary { offsets, memory_index } = variant.fields else {
237+
let FieldsShape::Arbitrary { offsets, inverse_memory_index } = variant.fields else {
240238
unreachable!();
241239
};
242240

@@ -249,6 +247,7 @@ pub(super) fn layout<
249247
// promoted fields were being used, but leave the elements not in the
250248
// subset as `invalid_field_idx`, which we can filter out later to
251249
// obtain a valid (bijective) mapping.
250+
let memory_index = inverse_memory_index.invert_bijective_mapping();
252251
let invalid_field_idx = promoted_memory_index.len() + memory_index.len();
253252
let mut combined_inverse_memory_index =
254253
IndexVec::from_elem_n(FieldIdx::new(invalid_field_idx), invalid_field_idx);
@@ -273,14 +272,13 @@ pub(super) fn layout<
273272
})
274273
.collect();
275274

276-
// Remove the unused slots and invert the mapping to obtain the
277-
// combined `memory_index` (also see previous comment).
275+
// Remove the unused slots to obtain the combined `inverse_memory_index`
276+
// (also see previous comment).
278277
combined_inverse_memory_index.raw.retain(|&i| i.index() != invalid_field_idx);
279-
let combined_memory_index = combined_inverse_memory_index.invert_bijective_mapping();
280278

281279
variant.fields = FieldsShape::Arbitrary {
282280
offsets: combined_offsets,
283-
memory_index: combined_memory_index,
281+
inverse_memory_index: combined_inverse_memory_index,
284282
};
285283

286284
size = size.max(variant.size);

compiler/rustc_abi/src/layout/simple.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
1616
variants: Variants::Single { index: VariantIdx::new(0) },
1717
fields: FieldsShape::Arbitrary {
1818
offsets: IndexVec::new(),
19-
memory_index: IndexVec::new(),
19+
inverse_memory_index: IndexVec::new(),
2020
},
2121
backend_repr: BackendRepr::Memory { sized },
2222
largest_niche: None,
@@ -108,7 +108,7 @@ impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
108108
variants: Variants::Single { index: VariantIdx::new(0) },
109109
fields: FieldsShape::Arbitrary {
110110
offsets: [Size::ZERO, b_offset].into(),
111-
memory_index: [0, 1].into(),
111+
inverse_memory_index: [FieldIdx::new(0), FieldIdx::new(1)].into(),
112112
},
113113
backend_repr: BackendRepr::ScalarPair(a, b),
114114
largest_niche,
@@ -133,7 +133,7 @@ impl<FieldIdx: Idx, VariantIdx: Idx> LayoutData<FieldIdx, VariantIdx> {
133133
Some(fields) => FieldsShape::Union(fields),
134134
None => FieldsShape::Arbitrary {
135135
offsets: IndexVec::new(),
136-
memory_index: IndexVec::new(),
136+
inverse_memory_index: IndexVec::new(),
137137
},
138138
},
139139
backend_repr: BackendRepr::Memory { sized: true },

compiler/rustc_abi/src/lib.rs

Lines changed: 5 additions & 42 deletions
Original file line numberDiff line numberDiff line change
@@ -1636,19 +1636,14 @@ pub enum FieldsShape<FieldIdx: Idx> {
16361636
// FIXME(eddyb) use small vector optimization for the common case.
16371637
offsets: IndexVec<FieldIdx, Size>,
16381638

1639-
/// Maps source order field indices to memory order indices,
1639+
/// Maps memory order field indices to source order indices,
16401640
/// depending on how the fields were reordered (if at all).
16411641
/// This is a permutation, with both the source order and the
16421642
/// memory order using the same (0..n) index ranges.
16431643
///
1644-
/// Note that during computation of `memory_index`, sometimes
1645-
/// it is easier to operate on the inverse mapping (that is,
1646-
/// from memory order to source order), and that is usually
1647-
/// named `inverse_memory_index`.
1648-
///
16491644
// FIXME(eddyb) build a better abstraction for permutations, if possible.
16501645
// FIXME(camlorn) also consider small vector optimization here.
1651-
memory_index: IndexVec<FieldIdx, u32>,
1646+
inverse_memory_index: IndexVec<u32, FieldIdx>,
16521647
},
16531648
}
16541649

@@ -1682,50 +1677,18 @@ impl<FieldIdx: Idx> FieldsShape<FieldIdx> {
16821677
}
16831678
}
16841679

1685-
#[inline]
1686-
pub fn memory_index(&self, i: usize) -> usize {
1687-
match *self {
1688-
FieldsShape::Primitive => {
1689-
unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
1690-
}
1691-
FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
1692-
FieldsShape::Arbitrary { ref memory_index, .. } => {
1693-
memory_index[FieldIdx::new(i)].try_into().unwrap()
1694-
}
1695-
}
1696-
}
1697-
16981680
/// Gets source indices of the fields by increasing offsets.
16991681
#[inline]
17001682
pub fn index_by_increasing_offset(&self) -> impl ExactSizeIterator<Item = usize> {
1701-
let mut inverse_small = [0u8; 64];
1702-
let mut inverse_big = IndexVec::new();
1703-
let use_small = self.count() <= inverse_small.len();
1704-
1705-
// We have to write this logic twice in order to keep the array small.
1706-
if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
1707-
if use_small {
1708-
for (field_idx, &mem_idx) in memory_index.iter_enumerated() {
1709-
inverse_small[mem_idx as usize] = field_idx.index() as u8;
1710-
}
1711-
} else {
1712-
inverse_big = memory_index.invert_bijective_mapping();
1713-
}
1714-
}
1715-
17161683
// Primitives don't really have fields in the way that structs do,
17171684
// but having this return an empty iterator for them is unhelpful
17181685
// since that makes them look kinda like ZSTs, which they're not.
17191686
let pseudofield_count = if let FieldsShape::Primitive = self { 1 } else { self.count() };
17201687

1721-
(0..pseudofield_count).map(move |i| match *self {
1688+
(0..pseudofield_count).map(move |i| match self {
17221689
FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
1723-
FieldsShape::Arbitrary { .. } => {
1724-
if use_small {
1725-
inverse_small[i] as usize
1726-
} else {
1727-
inverse_big[i as u32].index()
1728-
}
1690+
FieldsShape::Arbitrary { inverse_memory_index, .. } => {
1691+
inverse_memory_index[i as u32].index()
17291692
}
17301693
})
17311694
}

compiler/rustc_const_eval/src/interpret/visitor.rs

Lines changed: 10 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
use std::num::NonZero;
55

66
use rustc_abi::{FieldIdx, FieldsShape, VariantIdx, Variants};
7-
use rustc_index::IndexVec;
7+
use rustc_index::{Idx as _, IndexVec};
88
use rustc_middle::mir::interpret::InterpResult;
99
use rustc_middle::ty::{self, Ty};
1010
use tracing::trace;
@@ -27,13 +27,15 @@ pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized {
2727
/// This function provides the chance to reorder the order in which fields are visited for
2828
/// `FieldsShape::Aggregate`.
2929
///
30-
/// The default means we iterate in source declaration order; alternatively this can do some
31-
/// work with `memory_index` to iterate in memory order.
30+
/// The default means we iterate in source declaration order; alternatively this can use
31+
/// `inverse_memory_index` to iterate in memory order.
3232
#[inline(always)]
3333
fn aggregate_field_iter(
34-
memory_index: &IndexVec<FieldIdx, u32>,
35-
) -> impl Iterator<Item = FieldIdx> + 'static {
36-
memory_index.indices()
34+
inverse_memory_index: &IndexVec<u32, FieldIdx>,
35+
) -> impl Iterator<Item = FieldIdx> {
36+
// Allow the optimizer to elide the bounds checking when creating each index.
37+
let _ = FieldIdx::new(inverse_memory_index.len());
38+
(0..inverse_memory_index.len()).map(FieldIdx::new)
3739
}
3840

3941
// Recursive actions, ready to be overloaded.
@@ -168,8 +170,8 @@ pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized {
168170
&FieldsShape::Union(fields) => {
169171
self.visit_union(v, fields)?;
170172
}
171-
FieldsShape::Arbitrary { memory_index, .. } => {
172-
for idx in Self::aggregate_field_iter(memory_index) {
173+
FieldsShape::Arbitrary { inverse_memory_index, .. } => {
174+
for idx in Self::aggregate_field_iter(inverse_memory_index) {
173175
let field = self.ecx().project_field(v, idx)?;
174176
self.visit_field(v, idx.as_usize(), &field)?;
175177
}

compiler/rustc_index/src/slice.rs

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -181,9 +181,6 @@ impl<I: Idx, J: Idx> IndexSlice<I, J> {
181181
/// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`,
182182
/// assuming the values in `self` are a permutation of `0..self.len()`.
183183
///
184-
/// This is used to go between `memory_index` (source field order to memory order)
185-
/// and `inverse_memory_index` (memory order to source field order).
186-
/// See also `FieldsShape::Arbitrary::memory_index` for more details.
187184
// FIXME(eddyb) build a better abstraction for permutations, if possible.
188185
pub fn invert_bijective_mapping(&self) -> IndexVec<J, I> {
189186
debug_assert_eq!(

compiler/rustc_transmute/src/layout/tree.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -491,7 +491,7 @@ pub(crate) mod rustc {
491491
) -> Result<Self, Err> {
492492
// This constructor does not support non-`FieldsShape::Arbitrary`
493493
// layouts.
494-
let FieldsShape::Arbitrary { offsets, memory_index } = layout.fields() else {
494+
let FieldsShape::Arbitrary { offsets, inverse_memory_index } = layout.fields() else {
495495
return Err(Err::NotYetSupported);
496496
};
497497

@@ -519,7 +519,6 @@ pub(crate) mod rustc {
519519
}
520520

521521
// Append the fields, in memory order, to the layout.
522-
let inverse_memory_index = memory_index.invert_bijective_mapping();
523522
for &field_idx in inverse_memory_index.iter() {
524523
// Add interfield padding.
525524
let padding_needed = offsets[field_idx] - size;

compiler/rustc_ty_utils/src/layout.rs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ use rustc_abi::{
99
use rustc_hashes::Hash64;
1010
use rustc_hir::attrs::AttributeKind;
1111
use rustc_hir::find_attr;
12-
use rustc_index::IndexVec;
12+
use rustc_index::{Idx as _, IndexVec};
1313
use rustc_middle::bug;
1414
use rustc_middle::query::Providers;
1515
use rustc_middle::traits::ObligationCause;
@@ -374,7 +374,7 @@ fn layout_of_uncached<'tcx>(
374374
// specifically care about pattern types will have to handle it.
375375
layout.fields = FieldsShape::Arbitrary {
376376
offsets: [Size::ZERO].into_iter().collect(),
377-
memory_index: [0].into_iter().collect(),
377+
inverse_memory_index: [FieldIdx::new(0)].into_iter().collect(),
378378
};
379379
tcx.mk_layout(layout)
380380
}

src/tools/miri/src/helpers.rs

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -584,10 +584,9 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
584584
}
585585

586586
fn aggregate_field_iter(
587-
memory_index: &IndexVec<FieldIdx, u32>,
588-
) -> impl Iterator<Item = FieldIdx> + 'static {
589-
let inverse_memory_index = memory_index.invert_bijective_mapping();
590-
inverse_memory_index.into_iter()
587+
inverse_memory_index: &IndexVec<u32, FieldIdx>,
588+
) -> impl Iterator<Item = FieldIdx> {
589+
inverse_memory_index.iter().copied()
591590
}
592591

593592
// Hook to detect `UnsafeCell`.

tests/ui/abi/c-zst.aarch64-darwin.stderr

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ error: fn_abi_of(pass_zst) = FnAbi {
1313
},
1414
fields: Arbitrary {
1515
offsets: [],
16-
memory_index: [],
16+
inverse_memory_index: [],
1717
},
1818
largest_niche: None,
1919
uninhabited: false,
@@ -41,7 +41,7 @@ error: fn_abi_of(pass_zst) = FnAbi {
4141
},
4242
fields: Arbitrary {
4343
offsets: [],
44-
memory_index: [],
44+
inverse_memory_index: [],
4545
},
4646
largest_niche: None,
4747
uninhabited: false,

0 commit comments

Comments (0)