@@ -16019,7 +16019,7 @@ bool GenTree::IsPartialLclFld(Compiler* comp)
1601916019 (comp->lvaTable[this->AsLclVarCommon()->GetLclNum()].lvExactSize != genTypeSize(gtType)));
1602016020}
1602116021
16022- bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
16022+ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset)
1602316023{
1602416024 GenTreeBlk* blkNode = nullptr;
1602516025 if (OperIs(GT_ASG))
@@ -16039,12 +16039,17 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
1603916039 *pIsEntire = true;
1604016040 }
1604116041 }
16042+ if (pOffset != nullptr)
16043+ {
16044+ *pOffset = AsOp()->gtOp1->AsLclVarCommon()->GetLclOffs();
16045+ }
1604216046 return true;
1604316047 }
1604416048 else if (AsOp()->gtOp1->OperGet() == GT_IND)
1604516049 {
1604616050 GenTree* indArg = AsOp()->gtOp1->AsOp()->gtOp1;
16047- return indArg->DefinesLocalAddr(comp, genTypeSize(AsOp()->gtOp1->TypeGet()), pLclVarTree, pIsEntire);
16051+ return indArg->DefinesLocalAddr(comp, genTypeSize(AsOp()->gtOp1->TypeGet()), pLclVarTree, pIsEntire,
16052+ pOffset);
1604816053 }
1604916054 else if (AsOp()->gtOp1->OperIsBlk())
1605016055 {
@@ -16060,7 +16065,7 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
1606016065 }
1606116066
1606216067 unsigned size = comp->typGetObjLayout(AsCall()->gtRetClsHnd)->GetSize();
16063- return retBufArg->DefinesLocalAddr(comp, size, pLclVarTree, pIsEntire);
16068+ return retBufArg->DefinesLocalAddr(comp, size, pLclVarTree, pIsEntire, pOffset);
1606416069 }
1606516070 else if (OperIsBlk())
1606616071 {
@@ -16086,14 +16091,14 @@ bool GenTree::DefinesLocal(Compiler* comp, GenTreeLclVarCommon** pLclVarTree, bo
1608616091 }
1608716092 }
1608816093
16089- return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
16094+ return destAddr->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset);
1609016095 }
1609116096 // Otherwise...
1609216097 return false;
1609316098}
1609416099
16095- // Returns true if this GenTree defines a result which is based on the address of a local.
16096- bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire)
16100+ bool GenTree::DefinesLocalAddr(
16101+ Compiler* comp, unsigned width, GenTreeLclVarCommon** pLclVarTree, bool* pIsEntire, ssize_t* pOffset)
1609716102{
1609816103 if (OperIs(GT_ADDR, GT_LCL_VAR_ADDR, GT_LCL_FLD_ADDR))
1609916104 {
@@ -16107,10 +16112,10 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
1610716112 {
1610816113 GenTreeLclVarCommon* addrArgLcl = addrArg->AsLclVarCommon();
1610916114 *pLclVarTree = addrArgLcl;
16115+ unsigned lclOffset = addrArgLcl->GetLclOffs();
16116+
1611016117 if (pIsEntire != nullptr)
1611116118 {
16112- unsigned lclOffset = addrArgLcl->GetLclOffs();
16113-
1611416119 if (lclOffset != 0)
1611516120 {
1611616121 // We aren't updating the bytes at [0..lclOffset-1] so *pIsEntire should be set to false
@@ -16129,29 +16134,45 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
1612916134 *pIsEntire = (varWidth == width);
1613016135 }
1613116136 }
16137+
16138+ if (pOffset != nullptr)
16139+ {
16140+ *pOffset += lclOffset;
16141+ }
16142+
1613216143 return true;
1613316144 }
1613416145 else if (addrArg->OperGet() == GT_IND)
1613516146 {
1613616147 // A GT_ADDR of a GT_IND can both be optimized away, recurse using the child of the GT_IND
16137- return addrArg->AsOp()->gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
16148+ return addrArg->AsOp()->gtOp1->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset);
1613816149 }
1613916150 }
1614016151 else if (OperGet() == GT_ADD)
1614116152 {
1614216153 if (AsOp()->gtOp1->IsCnsIntOrI())
1614316154 {
16155+ if (pOffset != nullptr)
16156+ {
16157+ *pOffset += AsOp()->gtOp1->AsIntCon()->IconValue();
16158+ }
16159+
1614416160 // If we just adding a zero then we allow an IsEntire match against width
1614516161 // otherwise we change width to zero to disallow an IsEntire Match
1614616162 return AsOp()->gtOp2->DefinesLocalAddr(comp, AsOp()->gtOp1->IsIntegralConst(0) ? width : 0, pLclVarTree,
16147- pIsEntire);
16163+ pIsEntire, pOffset);
1614816164 }
1614916165 else if (AsOp()->gtOp2->IsCnsIntOrI())
1615016166 {
16167+ if (pOffset != nullptr)
16168+ {
16169+ *pOffset += AsOp()->gtOp2->AsIntCon()->IconValue();
16170+ }
16171+
1615116172 // If we just adding a zero then we allow an IsEntire match against width
1615216173 // otherwise we change width to zero to disallow an IsEntire Match
1615316174 return AsOp()->gtOp1->DefinesLocalAddr(comp, AsOp()->gtOp2->IsIntegralConst(0) ? width : 0, pLclVarTree,
16154- pIsEntire);
16175+ pIsEntire, pOffset);
1615516176 }
1615616177 }
1615716178 // Post rationalization we could have GT_IND(GT_LEA(..)) trees.
@@ -16167,20 +16188,20 @@ bool GenTree::DefinesLocalAddr(Compiler* comp, unsigned width, GenTreeLclVarComm
1616716188 GenTree* index = AsOp()->gtOp2;
1616816189 if (index != nullptr)
1616916190 {
16170- assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire));
16191+ assert(!index->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset));
1617116192 }
1617216193#endif // DEBUG
1617316194
1617416195 // base
16175- GenTree* base = AsOp()->gtOp1;
16196+ GenTree* base = AsAddrMode()->Base();
1617616197 if (base != nullptr)
1617716198 {
16178- // Lea could have an Indir as its base.
16179- if (base->OperGet() == GT_IND)
16199+ if (pOffset != nullptr)
1618016200 {
16181- base = base->AsOp()->gtOp1->gtEffectiveVal(/*commas only*/ true);
16201+ *pOffset += AsAddrMode()->Offset();
1618216202 }
16183- return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire);
16203+
16204+ return base->DefinesLocalAddr(comp, width, pLclVarTree, pIsEntire, pOffset);
1618416205 }
1618516206 }
1618616207 // Otherwise...
@@ -16626,6 +16647,12 @@ ssize_t GenTreeIndir::Offset()
1662616647 }
1662716648}
1662816649
16650+ unsigned GenTreeIndir::Size() const
16651+ {
16652+ assert(isIndir() || OperIsBlk());
16653+ return OperIsBlk() ? AsBlk()->Size() : genTypeSize(TypeGet());
16654+ }
16655+
1662916656//------------------------------------------------------------------------
1663016657// GenTreeIntConCommon::ImmedValNeedsReloc: does this immediate value needs recording a relocation with the VM?
1663116658//
@@ -16753,6 +16780,7 @@ bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
1675316780// comp - the Compiler object
1675416781// pBaseAddr - [out] parameter for "the base address"
1675516782// pFldSeq - [out] parameter for the field sequence
16783+ // pOffset - [out] parameter for the offset of the component struct fields
1675616784//
1675716785// Return Value:
1675816786// If "this" matches patterns denoted above, and the FldSeq found is "full",
@@ -16764,7 +16792,7 @@ bool GenTreeIntConCommon::AddrNeedsReloc(Compiler* comp)
1676416792// reference, for statics - the address to which the field offset with the
1676516793// field sequence is added, see "impImportStaticFieldAccess" and "fgMorphField".
1676616794//
16767- bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pFldSeq)
16795+ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pFldSeq, ssize_t* pOffset)
1676816796{
1676916797 assert(TypeIs(TYP_I_IMPL, TYP_BYREF, TYP_REF));
1677016798
@@ -16773,6 +16801,7 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
1677316801
1677416802 GenTree* baseAddr = nullptr;
1677516803 FieldSeqNode* fldSeq = FieldSeqStore::NotAField();
16804+ ssize_t offset = 0;
1677616805
1677716806 if (OperIs(GT_ADD))
1677816807 {
@@ -16781,14 +16810,16 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
1678116810 if (AsOp()->gtOp1->IsCnsIntOrI() && AsOp()->gtOp1->IsIconHandle())
1678216811 {
1678316812 assert(!AsOp()->gtOp2->IsCnsIntOrI() || !AsOp()->gtOp2->IsIconHandle());
16784- fldSeq = AsOp()->gtOp1->AsIntCon()->gtFieldSeq;
1678516813 baseAddr = AsOp()->gtOp2;
16814+ fldSeq = AsOp()->gtOp1->AsIntCon()->gtFieldSeq;
16815+ offset = AsOp()->gtOp1->AsIntCon()->IconValue();
1678616816 }
1678716817 else if (AsOp()->gtOp2->IsCnsIntOrI())
1678816818 {
1678916819 assert(!AsOp()->gtOp1->IsCnsIntOrI() || !AsOp()->gtOp1->IsIconHandle());
16790- fldSeq = AsOp()->gtOp2->AsIntCon()->gtFieldSeq;
1679116820 baseAddr = AsOp()->gtOp1;
16821+ fldSeq = AsOp()->gtOp2->AsIntCon()->gtFieldSeq;
16822+ offset = AsOp()->gtOp2->AsIntCon()->IconValue();
1679216823 }
1679316824 else
1679416825 {
@@ -16800,12 +16831,15 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
1680016831 else if (IsCnsIntOrI() && IsIconHandle(GTF_ICON_STATIC_HDL))
1680116832 {
1680216833 assert(!comp->GetZeroOffsetFieldMap()->Lookup(this) && (AsIntCon()->gtFieldSeq != nullptr));
16803- fldSeq = AsIntCon()->gtFieldSeq;
1680416834 baseAddr = this;
16835+ fldSeq = AsIntCon()->gtFieldSeq;
16836+ offset = AsIntCon()->IconValue();
1680516837 }
1680616838 else if (comp->GetZeroOffsetFieldMap()->Lookup(this, &fldSeq))
1680716839 {
16840+ assert((fldSeq == FieldSeqStore::NotAField()) || (fldSeq->GetOffset() == 0));
1680816841 baseAddr = this;
16842+ offset = 0;
1680916843 }
1681016844 else
1681116845 {
@@ -16819,6 +16853,9 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
1681916853 return false;
1682016854 }
1682116855
16856+ // Subtract from the offset such that the portion remaining is relative to the field itself.
16857+ offset -= fldSeq->GetOffset();
16858+
1682216859 // The above screens out obviously invalid cases, but we have more checks to perform. The
1682316860 // sequence returned from this method *must* start with either a class (NOT struct) field
1682416861 // or a static field. To avoid the expense of calling "getFieldClass" here, we will instead
@@ -16833,6 +16870,7 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
1683316870 }
1683416871
1683516872 *pFldSeq = fldSeq;
16873+ *pOffset = offset;
1683616874 return true;
1683716875 }
1683816876
@@ -16842,6 +16880,7 @@ bool GenTree::IsFieldAddr(Compiler* comp, GenTree** pBaseAddr, FieldSeqNode** pF
1684216880
1684316881 *pBaseAddr = baseAddr;
1684416882 *pFldSeq = fldSeq;
16883+ *pOffset = offset;
1684516884 return true;
1684616885 }
1684716886
@@ -18110,16 +18149,18 @@ bool GenTree::IsArrayAddr(GenTreeArrAddr** pArrAddr)
1811018149// Note that the value of the below field doesn't matter; it exists only to provide a distinguished address.
1811118150//
1811218151// static
18113- FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr, FieldSeqNode::FieldKind::Instance);
18152+ FieldSeqNode FieldSeqStore::s_notAField(nullptr, nullptr, 0, FieldSeqNode::FieldKind::Instance);
1811418153
1811518154// FieldSeqStore methods.
1811618155FieldSeqStore::FieldSeqStore(CompAllocator alloc) : m_alloc(alloc), m_canonMap(new (alloc) FieldSeqNodeCanonMap(alloc))
1811718156{
1811818157}
1811918158
18120- FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode::FieldKind fieldKind)
18159+ FieldSeqNode* FieldSeqStore::CreateSingleton(CORINFO_FIELD_HANDLE fieldHnd,
18160+ size_t offset,
18161+ FieldSeqNode::FieldKind fieldKind)
1812118162{
18122- FieldSeqNode fsn(fieldHnd, nullptr, fieldKind);
18163+ FieldSeqNode fsn(fieldHnd, nullptr, offset, fieldKind);
1812318164 FieldSeqNode* res = nullptr;
1812418165 if (m_canonMap->Lookup(fsn, &res))
1812518166 {
@@ -18158,7 +18199,7 @@ FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
1815818199 assert(a != b);
1815918200
1816018201 FieldSeqNode* tmp = Append(a->GetNext(), b);
18161- FieldSeqNode fsn(a->GetFieldHandleValue(), tmp, a->GetKind());
18202+ FieldSeqNode fsn(a->GetFieldHandleValue(), tmp, a->GetOffset(), a->GetKind());
1816218203 FieldSeqNode* res = nullptr;
1816318204 if (m_canonMap->Lookup(fsn, &res))
1816418205 {
@@ -18174,7 +18215,8 @@ FieldSeqNode* FieldSeqStore::Append(FieldSeqNode* a, FieldSeqNode* b)
1817418215 }
1817518216}
1817618217
18177- FieldSeqNode::FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next, FieldKind fieldKind) : m_next(next)
18218+ FieldSeqNode::FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next, size_t offset, FieldKind fieldKind)
18219+ : m_next(next), m_offset(offset)
1817818220{
1817918221 uintptr_t handleValue = reinterpret_cast<uintptr_t>(fieldHnd);
1818018222
@@ -18184,6 +18226,7 @@ FieldSeqNode::FieldSeqNode(CORINFO_FIELD_HANDLE fieldHnd, FieldSeqNode* next, Fi
1818418226 if (fieldHnd != NO_FIELD_HANDLE)
1818518227 {
1818618228 assert(JitTls::GetCompiler()->eeIsFieldStatic(fieldHnd) == IsStaticField());
18229+ // TODO-PhysicalVN: assert that "offset" is correct.
1818718230 }
1818818231 else
1818918232 {
@@ -23173,6 +23216,11 @@ unsigned GenTreeHWIntrinsic::GetResultOpNumForFMA(GenTree* use, GenTree* op1, Ge
2317323216}
2317423217#endif // TARGET_XARCH && FEATURE_HW_INTRINSICS
2317523218
23219+ unsigned GenTreeLclFld::GetSize() const
23220+ {
23221+ return genTypeSize(TypeGet());
23222+ }
23223+
2317623224#ifdef TARGET_ARM
2317723225//------------------------------------------------------------------------
2317823226// IsOffsetMisaligned: check if the field needs a special handling on arm.
0 commit comments