@@ -16019,7 +16019,7 @@ bool GenTree::IsPartialLclFld(Compiler* comp)
             (comp->lvaTable[this->AsLclVarCommon()->GetLclNum()].lvExactSize != genTypeSize(gtType)));
 }

-bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
+bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset)
 {
     GenTreeBlk* blkNode = nullptr;
     if (OperIs(GT_ASG))
@@ -16039,12 +16039,17 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
                     *pIsEntire = true;
                 }
             }
+            if (pOffset != nullptr)
+            {
+                *pOffset = AsOp()->gtOp1->AsLclVarCommon()->GetLclOffs();
+            }
             return true;
         }
         else if (AsOp()->gtOp1->OperGet() == GT_IND)
         {
             GenTree* indArg = AsOp()->gtOp1->AsOp()->gtOp1;
-            return indArg->DefinesLocalAddr(comp, genTypeSize(AsOp()->gtOp1->TypeGet()), pLclVarTree, pIsEntire);
+            return indArg->DefinesLocalAddr(comp, genTypeSize(AsOp()->gtOp1->TypeGet()), pLclVarTree, pIsEntire,
+                                            pOffset);
         }
         else if (AsOp()->gtOp1->OperIsBlk())
         {
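
// [Editor's sketch] How a caller might consume the new out-parameter; "asgNode" and
// "comp" are hypothetical names, not part of this change. For a LCL_FLD destination,
// *pOffset now reports the byte at which the store begins within the local.
GenTreeLclVarCommon* lclVarTree = nullptr;
bool                 isEntire   = false;
ssize_t              offset     = 0;
if (asgNode->DefinesLocal(comp, &lclVarTree, &isEntire, &offset))
{
    // offset == lclVarTree->GetLclOffs() for a direct local store.
}
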
@@ -16060,7 +16065,7 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
         }

         unsigned size = comp->typGetObjLayout(AsCall()->gtRetClsHnd)->GetSize();
-        return retBufArg->DefinesLocalAddr(comp, size, pLclVarTree, pIsEntire);
+        return retBufArg->DefinesLocalAddr(comp, size, pLclVarTree, pIsEntire, pOffset);
     }
     else if (OperIsBlk())
     {
@@ -16086,14 +16091,14 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
             }
         }

-        return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
+        return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset);
     }
     // Otherwise...
     return false;
 }

-// Returns true if this GenTree defines a result which is based on the address of a local.
-bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
+bool GenTree::DefinesLocalAddr(
+    Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset)
 {
     if (OperIs(GT_ADDR, GT_LCL_VAR_ADDR, GT_LCL_FLD_ADDR))
     {
@@ -16107,10 +16112,10 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
         {
             GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon();
             *pLclVarTree                    = addrArgLcl;
+            unsigned lclOffset              = addrArgLcl->GetLclOffs();
+
             if (pIsEntire != nullptr)
             {
-                unsigned lclOffset = addrArgLcl->GetLclOffs();
-
                 if (lclOffset != 0)
                 {
                     // We aren't updating the bytes at [0..lclOffset-1] so *pIsEntire should be set to false
@@ -16129,29 +16134,45 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
                     *pIsEntire = (varWidth == width);
                 }
             }
+
+            if (pOffset != nullptr)
+            {
+                *pOffset += lclOffset;
+            }
+
             return true;
         }
         else if (addrArg->OperGet() == GT_IND)
         {
             // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND
-            return addrArg->AsOp()->gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
+            return addrArg->AsOp()->gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset);
         }
     }
     else if (OperGet() == GT_ADD)
     {
         if (AsOp()->gtOp1->IsCnsIntOrI())
         {
+            if (pOffset != nullptr)
+            {
+                *pOffset += AsOp()->gtOp1->AsIntCon()->IconValue();
+            }
+
             // If we just adding a zero then we allow an IsEntire match against width
             // otherwise we change width to zero to disallow an IsEntire Match
             return AsOp()->gtOp2->DefinesLocalAddr(comp, AsOp()->gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree,
-                                                   pIsEntire);
+                                                   pIsEntire, pOffset);
         }
         else if (AsOp()->gtOp2->IsCnsIntOrI())
         {
+            if (pOffset != nullptr)
+            {
+                *pOffset += AsOp()->gtOp2->AsIntCon()->IconValue();
+            }
+
             // If we just adding a zero then we allow an IsEntire match against width
             // otherwise we change width to zero to disallow an IsEntire Match
             return AsOp()->gtOp1->DefinesLocalAddr(comp, AsOp()->gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree,
-                                                   pIsEntire);
+                                                   pIsEntire, pOffset);
         }
     }
     // Post rationalization we could have GT_IND(GT_LEA(..)) trees.
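
// [Editor's sketch] A self-contained model (plain C++, not JIT code) of the offset
// accumulation above: every constant operand met on the way down an ADD chain is
// summed into *pOffset before recursing into the address operand.
#include <cassert>
#include <cstddef>

struct Addr
{
    const Addr* base; // nullptr => this node is the local's own address
    ptrdiff_t   cns;  // constant displacement when base != nullptr
};

static bool DefinesLocalAddrSketch(const Addr* a, ptrdiff_t* pOffset)
{
    if (a->base == nullptr)
    {
        return true; // reached the local; *pOffset holds the total displacement
    }
    if (pOffset != nullptr)
    {
        *pOffset += a->cns; // mirrors "*pOffset += ...->IconValue()" above
    }
    return DefinesLocalAddrSketch(a->base, pOffset);
}

int main()
{
    // Models ADD(ADD(ADDR(V00), 8), 4): the walk reports offset 12 into V00.
    Addr      lcl{nullptr, 0};
    Addr      inner{&lcl, 8};
    Addr      outer{&inner, 4};
    ptrdiff_t offset = 0;
    assert(DefinesLocalAddrSketch(&outer, &offset) && offset == 12);
    return 0;
}
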
@@ -16167,20 +16188,20 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
         GenTree* index = AsOp()->gtOp2;
         if (index != nullptr)
         {
-            assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
+            assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset));
         }
#endif // DEBUG

         // base
-        GenTree* base = AsOp()->gtOp1;
+        GenTree* base = AsAddrMode()->Base();
         if (base != nullptr)
         {
-            // Lea could have an Indir as its base.
-            if (base->OperGet() == GT_IND)
+            if (pOffset != nullptr)
             {
-                base = base->AsOp()->gtOp1->gtEffectiveVal(/*commas only*/ true);
+                *pOffset += AsAddrMode()->Offset();
             }
-            return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
+
+            return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset);
         }
     }
     // Otherwise...
@@ -16626,6 +16647,12 @@ ssize_t GenTreeIndir::Offset()
     }
 }

+unsigned GenTreeIndir::Size() const
+{
+    assert(isIndir() || OperIsBlk());
+    return OperIsBlk() ? AsBlk()->Size() : genTypeSize(TypeGet());
+}
+
 //------------------------------------------------------------------------
 // GenTreeIntConCommon::ImmedValNeedsReloc: does this immediate value needs recording a relocation with the VM?
 //
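
// [Editor's sketch] Hypothetical use of the new GenTreeIndir::Size() helper ("indir"
// is an assumed GenTreeIndir* in scope): one query that returns the layout size for
// block nodes and the type size for scalar indirections.
unsigned accessSize = indir->Size();
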
@@ -16753,6 +16780,7 @@ bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
 //    comp - the Compiler object
 //    pBaseAddr - [out] parameter for "the base address"
 //    pFldSeq - [out] parameter for the field sequence
+//    pOffset - [out] parameter for the offset of the component struct fields
 //
 // Return Value:
 //    If "this" matches patterns denoted above, and the FldSeq found is "full",
@@ -16764,7 +16792,7 @@ bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
 //    reference, for statics - the address to which the field offset with the
 //    field sequence is added, see "impImportStaticFieldAccess" and "fgMorphField".
 //
-bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pFldSeq)
+bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pFldSeq, ssize_t* pOffset)
 {
     assert(TypeIs(TYP_I_IMPL, TYP_BYREF, TYP_REF));
@@ -16773,6 +16801,7 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF

     GenTree*      baseAddr = nullptr;
     FieldSeqNode* fldSeq   = FieldSeqStore::NotAField();
+    ssize_t       offset   = 0;

     if (OperIs(GT_ADD))
     {
@@ -16781,14 +16810,16 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
         if (AsOp()->gtOp1->IsCnsIntOrI() && AsOp()->gtOp1->IsIconHandle())
         {
             assert(!AsOp()->gtOp2->IsCnsIntOrI() || !AsOp()->gtOp2->IsIconHandle());
-            fldSeq = AsOp()->gtOp1->AsIntCon()->gtFieldSeq;
             baseAddr = AsOp()->gtOp2;
+            fldSeq   = AsOp()->gtOp1->AsIntCon()->gtFieldSeq;
+            offset   = AsOp()->gtOp1->AsIntCon()->IconValue();
         }
         else if (AsOp()->gtOp2->IsCnsIntOrI())
         {
             assert(!AsOp()->gtOp1->IsCnsIntOrI() || !AsOp()->gtOp1->IsIconHandle());
-            fldSeq = AsOp()->gtOp2->AsIntCon()->gtFieldSeq;
             baseAddr = AsOp()->gtOp1;
+            fldSeq   = AsOp()->gtOp2->AsIntCon()->gtFieldSeq;
+            offset   = AsOp()->gtOp2->AsIntCon()->IconValue();
         }
         else
         {
@@ -16800,12 +16831,15 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
     else if (IsCnsIntOrI() && IsIconHandle(GTF_ICON_STATIC_HDL))
     {
         assert(!comp->GetZeroOffsetFieldMap()->Lookup(this) && (AsIntCon()->gtFieldSeq != nullptr));
-        fldSeq = AsIntCon()->gtFieldSeq;
         baseAddr = this;
+        fldSeq   = AsIntCon()->gtFieldSeq;
+        offset   = AsIntCon()->IconValue();
     }
     else if (comp->GetZeroOffsetFieldMap()->Lookup(this, &fldSeq))
     {
+        assert((fldSeq == FieldSeqStore::NotAField()) || (fldSeq->GetOffset() == 0));
         baseAddr = this;
+        offset   = 0;
     }
     else
     {
@@ -16819,6 +16853,9 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
         return false;
     }

+    // Subtract from the offset such that the portion remaining is relative to the field itself.
+    offset -= fldSeq->GetOffset();
+
     // The above screens out obviously invalid cases, but we have more checks to perform. The
     // sequence returned from this method *must* start with either a class (NOT struct) field
     // or a static field. To avoid the expense of calling "getFieldClass" here, we will instead
@@ -16833,6 +16870,7 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
     }

     *pFldSeq = fldSeq;
+    *pOffset = offset;
     return true;
 }

@@ -16842,6 +16880,7 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF

     *pBaseAddr = baseAddr;
     *pFldSeq   = fldSeq;
+    *pOffset   = offset;
     return true;
 }

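
// [Editor's sketch] Consuming the extended IsFieldAddr; "addr" and "comp" are
// hypothetical names. On success, *pOffset is what remains after the field
// sequence's own offset was subtracted, so the original address re-derives as
// baseAddr + fldSeq->GetOffset() + offset.
GenTree*      baseAddr = nullptr;
FieldSeqNode* fldSeq   = nullptr;
ssize_t       offset   = 0;
if (addr->IsFieldAddr(comp, &baseAddr, &fldSeq, &offset))
{
    // "offset" is relative to the first field in "fldSeq".
}
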
@@ -18110,16 +18149,18 @@ bool GenTree::IsArrayAddr(GenTreeArrAddr** pArrAddr)
 // Note that the value of the below field doesn't matter; it exists only to provide a distinguished address.
 //
 // static
-FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr, FieldSeqNode::FieldKind::Instance);
+FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr, 0, FieldSeqNode::FieldKind::Instance);

 // FieldSeqStore methods.
 FieldSeqStore::FieldSeqStore(CompAllocator alloc) : m_alloc(alloc), m_canonMap(new (alloc) FieldSeqNodeCanonMap(alloc))
 {
 }

-FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode::FieldKind fieldKind)
+FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE    fieldHnd,
+                                             size_t                  offset,
+                                             FieldSeqNode::FieldKind fieldKind)
 {
-    FieldSeqNode fsn(fieldHnd, nullptr, fieldKind);
+    FieldSeqNode fsn(fieldHnd, nullptr, offset, fieldKind);
     FieldSeqNode* res = nullptr;
     if (m_canonMap->Lookup(fsn, &res))
     {
@@ -18158,7 +18199,7 @@ FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
         assert(a != b);

         FieldSeqNode* tmp = Append(a->GetNext(), b);
-        FieldSeqNode fsn(a->GetFieldHandleValue(), tmp, a->GetKind());
+        FieldSeqNode fsn(a->GetFieldHandleValue(), tmp, a->GetOffset(), a->GetKind());
         FieldSeqNode* res = nullptr;
         if (m_canonMap->Lookup(fsn, &res))
         {
@@ -18174,7 +18215,8 @@ FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
     }
 }

-FieldSeqNode::FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next, FieldKind fieldKind) : m_next(next)
+FieldSeqNode::FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next, size_t offset, FieldKind fieldKind)
+    : m_next(next), m_offset(offset)
 {
     uintptr_t handleValue = reinterpret_cast<uintptr_t>(fieldHnd);

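
// [Editor's sketch] Illustrative use of the widened factory; "store", "outerHnd",
// and "innerHnd" are made-up names. Each node now records the offset of its own
// field, and Append() carries the per-node offsets through canonicalization.
FieldSeqNode* outerFld = store->CreateSingleton(outerHnd, /* offset */ 8, FieldSeqNode::FieldKind::Instance);
FieldSeqNode* innerFld = store->CreateSingleton(innerHnd, /* offset */ 4, FieldSeqNode::FieldKind::Instance);
FieldSeqNode* fullSeq  = store->Append(outerFld, innerFld); // sequence [outer@8, inner@4]
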
@@ -18184,6 +18226,7 @@ FieldSeqNode::FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next, Fi
     if (fieldHnd != NO_FIELD_HANDLE)
     {
         assert(JitTls::GetCompiler()->eeIsFieldStatic(fieldHnd) == IsStaticField());
+        // TODO-PhysicalVN: assert that "offset" is correct.
     }
     else
     {
@@ -23173,6 +23216,11 @@ unsigned GenTreeHWIntrinsic::GetResultOpNumForFMA(GenTree* use, GenTree* op1, Ge
 }
 #endif // TARGET_XARCH && FEATURE_HW_INTRINSICS

+unsigned GenTreeLclFld::GetSize() const
+{
+    return genTypeSize(TypeGet());
+}
+
 #ifdef TARGET_ARM
 //------------------------------------------------------------------------
 // IsOffsetMisaligned: check if the field needs a special handling on arm.