Skip to content

Commit 284fd87

Browse files
Register funcptr precodes in backpatching table for proper tiered compilation lifecycle
Funcptr precodes for backpatchable methods are now registered in the entry point slot backpatching table at creation time, replacing the ad-hoc funcptr precode lookup and patching that was previously done in TryBackpatchEntryPointSlots.

This ensures:
- During non-final tiers, funcptr precode targets point to the method's precode (temporary entry point), so calls flow through the same path as vtable calls.
- At final tier, SetBackpatchableEntryPoint calls Backpatch_Locked to update all registered slots (including funcptr precodes) to the final code.
- During rejit, BackpatchToResetEntryPointSlots resets funcptr targets via the backpatching table, ensuring proper re-discovery through the prestub.

The change adds Precode::GetTargetSlot() and StubPrecode::GetTargetSlot() to expose the writable target field address for registration as a SlotType_Normal entry point slot.

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
1 parent 375b663 commit 284fd87

5 files changed

Lines changed: 76 additions & 37 deletions

File tree

src/coreclr/vm/fptrstubs.cpp

Lines changed: 25 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -76,6 +76,9 @@ PCODE FuncPtrStubs::GetFuncPtrStub(MethodDesc * pMD, PrecodeType type)
7676

7777
if (pPrecode != NULL)
7878
{
79+
LOG((LF_TIEREDCOMPILATION, LL_INFO10000,
80+
"FuncPtrStubs::GetFuncPtrStub pMD=%p type=%d - found existing stub, entryPoint=" FMT_ADDR "\n",
81+
pMD, type, DBG_ADDR(pPrecode->GetEntryPoint())));
7982
return pPrecode->GetEntryPoint();
8083
}
8184

@@ -141,6 +144,10 @@ PCODE FuncPtrStubs::GetFuncPtrStub(MethodDesc * pMD, PrecodeType type)
141144
pPrecode = pNewPrecode;
142145
m_hashTable.Add(pPrecode);
143146
amt.SuppressRelease();
147+
LOG((LF_TIEREDCOMPILATION, LL_INFO10000,
148+
"FuncPtrStubs::GetFuncPtrStub pMD=%p type=%d - created new stub,"
149+
" target=" FMT_ADDR " setTargetAfter=%d\n",
150+
pMD, type, DBG_ADDR(target), setTargetAfterAddingToHashTable));
144151
}
145152
else
146153
{
@@ -155,18 +162,26 @@ PCODE FuncPtrStubs::GetFuncPtrStub(MethodDesc * pMD, PrecodeType type)
155162

156163
_ASSERTE(pMD->IsVersionableWithVtableSlotBackpatch());
157164

158-
PCODE temporaryEntryPoint = pMD->GetTemporaryEntryPoint();
165+
LoaderAllocator *mdLoaderAllocator = pMD->GetLoaderAllocator();
159166
MethodDescBackpatchInfoTracker::ConditionalLockHolder slotBackpatchLockHolder;
160167

161-
// Set the funcptr stub's entry point to the current entry point inside the lock and after the funcptr stub is exposed,
162-
// to synchronize with backpatching in MethodDesc::BackpatchEntryPointSlots()
163-
PCODE entryPoint = pMD->GetMethodEntryPoint();
164-
if (entryPoint != temporaryEntryPoint)
165-
{
166-
// Need only patch the precode from the prestub, since if someone else managed to patch the precode already then its
167-
// target would already be up-to-date
168-
pPrecode->SetTargetInterlocked(entryPoint, TRUE /* fOnlyRedirectFromPrestub */);
169-
}
168+
// Register the funcptr precode's target slot in the backpatching table. This records the slot and
169+
// immediately backpatches it to the current entry point. During non-final tiers, GetMethodEntryPoint()
170+
// returns the temporary entry point (method's precode), so calls through the funcptr stub will flow
171+
// through the method's precode to the current code. At final tier, the slot is updated to point
172+
// directly to the final code along with all other registered entry point slots.
173+
PCODE currentEntryPoint = pMD->GetMethodEntryPoint();
174+
LOG((LF_TIEREDCOMPILATION, LL_INFO10000,
175+
"FuncPtrStubs::GetFuncPtrStub pMD=%p (%s::%s) - registering funcptr precode target in backpatch table,"
176+
" currentEntryPoint=" FMT_ADDR "\n",
177+
pMD, pMD->m_pszDebugClassName, pMD->m_pszDebugMethodName, DBG_ADDR(currentEntryPoint)));
178+
MethodDescBackpatchInfoTracker *backpatchTracker = mdLoaderAllocator->GetMethodDescBackpatchInfoTracker();
179+
backpatchTracker->AddSlotAndPatch_Locked(
180+
pMD,
181+
mdLoaderAllocator,
182+
(TADDR)pPrecode->GetTargetSlot(),
183+
EntryPointSlots::SlotType_Normal,
184+
currentEntryPoint);
170185
}
171186

172187
return pPrecode->GetEntryPoint();

src/coreclr/vm/method.cpp

Lines changed: 16 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -3226,27 +3226,9 @@ bool MethodDesc::TryBackpatchEntryPointSlots(
32263226
this, m_pszDebugClassName, m_pszDebugMethodName, DBG_ADDR(entryPoint), DBG_ADDR(previousEntryPoint),
32273227
IsVersionableWithVtableSlotBackpatch()));
32283228

3229-
if (IsVersionableWithVtableSlotBackpatch())
3230-
{
3231-
// Backpatch the func ptr stub if it was created
3232-
FuncPtrStubs *funcPtrStubs = mdLoaderAllocator->GetFuncPtrStubsNoCreate();
3233-
if (funcPtrStubs != nullptr)
3234-
{
3235-
Precode *funcPtrPrecode = funcPtrStubs->Lookup(this);
3236-
if (funcPtrPrecode != nullptr)
3237-
{
3238-
if (isPrestubEntryPoint)
3239-
{
3240-
funcPtrPrecode->ResetTargetInterlocked();
3241-
}
3242-
else
3243-
{
3244-
funcPtrPrecode->SetTargetInterlocked(entryPoint, FALSE /* fOnlyRedirectFromPrestub */);
3245-
}
3246-
}
3247-
}
3248-
}
3249-
3229+
// Backpatch all registered entry point slots. For methods versionable with vtable slot backpatch,
3230+
// this also handles funcptr precodes which are registered in the backpatch table at creation time
3231+
// (see FuncPtrStubs::GetFuncPtrStub).
32503232
backpatchInfoTracker->Backpatch_Locked(this, entryPoint);
32513233

32523234
// Set the entry point to backpatch inside the lock to synchronize with backpatching in MethodDesc::DoBackpatch(), and set
@@ -3359,8 +3341,9 @@ void MethodDesc::ResetCodeEntryPoint()
33593341
}
33603342

33613343
// Sets the entry point for a backpatchable method during tiered compilation.
3362-
// For final tier: sets the owning vtable slot to codeEntryPoint and resets the precode to prestub,
3363-
// enabling lazy vtable slot discovery via DoBackpatch().
3344+
// For final tier: sets the owning vtable slot to codeEntryPoint, backpatches all registered entry point
3345+
// slots (e.g. funcptr precodes) to codeEntryPoint, and resets the method's precode to prestub to enable
3346+
// lazy vtable slot discovery via DoBackpatch().
33643347
// For non-final tier: redirects the precode target to codeEntryPoint without modifying the vtable slot.
33653348
// The vtable slot stays at the temporary entry point (precode), preventing DoBackpatch() from
33663349
// recording vtable slots during non-final tiers.
@@ -3370,16 +3353,25 @@ void MethodDesc::SetBackpatchableEntryPoint(PCODE codeEntryPoint, bool isFinalTi
33703353
_ASSERTE(MayHaveEntryPointSlotsToBackpatch());
33713354
_ASSERTE(codeEntryPoint != (PCODE)NULL);
33723355
_ASSERTE(!isFinalTier || !fOnlyRedirectFromPrestub);
3356+
_ASSERTE(!isFinalTier || MethodDescBackpatchInfoTracker::IsLockOwnedByCurrentThread());
33733357

33743358
Precode *precode = Precode::GetPrecodeFromEntryPoint(GetTemporaryEntryPoint());
33753359
if (isFinalTier)
33763360
{
33773361
LOG((LF_TIEREDCOMPILATION, LL_INFO10000,
33783362
"MethodDesc::SetBackpatchableEntryPoint pMD=%p (%s::%s) entryPoint=" FMT_ADDR
33793363
" isFinalTier=true - setting owning vtable slot to final code,"
3380-
" resetting precode to prestub for lazy DoBackpatch\n",
3364+
" backpatching registered slots, resetting precode to prestub for lazy DoBackpatch\n",
33813365
this, m_pszDebugClassName, m_pszDebugMethodName, DBG_ADDR(codeEntryPoint)));
33823366
SetMethodEntryPoint(codeEntryPoint);
3367+
3368+
// Backpatch all registered entry point slots (e.g. funcptr precodes) to the final code.
3369+
// Vtable slots are not yet registered at this point — they are lazily discovered and recorded
3370+
// by DoBackpatch() when the next call goes through the precode → prestub path.
3371+
LoaderAllocator *mdLoaderAllocator = GetLoaderAllocator();
3372+
MethodDescBackpatchInfoTracker *backpatchTracker = mdLoaderAllocator->GetMethodDescBackpatchInfoTracker();
3373+
backpatchTracker->Backpatch_Locked(this, codeEntryPoint);
3374+
33833375
precode->ResetTargetInterlocked();
33843376
}
33853377
else

src/coreclr/vm/precode.cpp

Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -126,6 +126,28 @@ PCODE Precode::GetTarget()
126126
return target;
127127
}
128128

129+
#ifndef DACCESS_COMPILE
130+
PTR_PCODE Precode::GetTargetSlot()
131+
{
132+
LIMITED_METHOD_CONTRACT;
133+
134+
PrecodeType precodeType = GetType();
135+
switch (precodeType)
136+
{
137+
case PRECODE_STUB:
138+
return AsStubPrecode()->GetTargetSlot();
139+
#ifdef HAS_FIXUP_PRECODE
140+
case PRECODE_FIXUP:
141+
return AsFixupPrecode()->GetTargetSlot();
142+
#endif // HAS_FIXUP_PRECODE
143+
144+
default:
145+
UnexpectedPrecodeType("Precode::GetTargetSlot", precodeType);
146+
return NULL;
147+
}
148+
}
149+
#endif // !DACCESS_COMPILE
150+
129151
MethodDesc* Precode::GetMethodDesc(BOOL fSpeculative /*= FALSE*/)
130152
{
131153
CONTRACTL {

src/coreclr/vm/precode.h

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -151,6 +151,12 @@ struct StubPrecode
151151
return GetData()->Target;
152152
}
153153

154+
PCODE *GetTargetSlot()
155+
{
156+
LIMITED_METHOD_CONTRACT;
157+
return &GetData()->Target;
158+
}
159+
154160
BYTE GetType();
155161

156162
static BOOL IsStubPrecodeByASM(PCODE addr);

src/coreclr/vm/prestub.cpp

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -101,9 +101,13 @@ PCODE MethodDesc::DoBackpatch(MethodTable * pMT, MethodTable *pDispatchingMT, bo
101101
_ASSERTE(!(pMT->IsInterface() && !IsStatic()));
102102

103103
// Backpatching the funcptr stub:
104-
// For methods versionable with vtable slot backpatch, a funcptr stub is guaranteed to point to the at-the-time
105-
// current entry point shortly after creation, and backpatching it further is taken care of by
106-
// MethodDesc::BackpatchEntryPointSlots()
104+
// For methods versionable with vtable slot backpatch, funcptr precodes are registered in the
105+
// backpatching table (see FuncPtrStubs::GetFuncPtrStub). During non-final tiers, the funcptr
106+
// precode target is set to the temporary entry point (the method's precode), so calls through
107+
// the funcptr stub flow through the method's precode to the current code. At final tier, the
108+
// funcptr precode target is updated to the final code by Backpatch_Locked in
109+
// SetBackpatchableEntryPoint(). Because the funcptr precode is in the backpatching table, it is
110+
// also updated during rejit scenarios via BackpatchToResetEntryPointSlots().
107111

108112
// Backpatching the temporary entry point:
109113
// The temporary entry point is not directly backpatched for methods versionable with vtable slot backpatch.

0 commit comments

Comments (0)