@@ -45,8 +45,6 @@
 
 
 #ifndef DACCESS_COMPILE
-
-extern "C" VOID __cdecl StubRareEnable(Thread *pThread);
 #ifdef FEATURE_COMINTEROP
 extern "C" HRESULT __cdecl StubRareDisableHR(Thread *pThread);
 #endif // FEATURE_COMINTEROP
@@ -2615,6 +2613,8 @@ void StubLinkerCPU::EmitComMethodStubEpilog(TADDR pFrameVptr,
         PRECONDITION(rgRareLabels[0] != NULL && rgRareLabels[1] != NULL && rgRareLabels[2] != NULL);
         PRECONDITION(rgRejoinLabels != NULL);
         PRECONDITION(rgRejoinLabels[0] != NULL && rgRejoinLabels[1] != NULL && rgRejoinLabels[2] != NULL);
+        PRECONDITION(4 == sizeof(((Thread*)0)->m_State));
+        PRECONDITION(4 == sizeof(((Thread*)0)->m_fPreemptiveGCDisabled));
     }
     CONTRACTL_END;
 
@@ -2623,11 +2623,9 @@ void StubLinkerCPU::EmitComMethodStubEpilog(TADDR pFrameVptr,
     // mov [ebx + Thread.GetFrame()], edi        ;; restore previous frame
     X86EmitIndexRegStore(kEBX, Thread::GetOffsetOfCurrentFrame(), kEDI);
 
-    // -----------------------------------------------------------------------
-    // Generate the inline part of disabling preemptive GC
-    // -----------------------------------------------------------------------
-    EmitEnable(rgRareLabels[2]);        // rare gc
-    EmitLabel(rgRejoinLabels[2]);       // rejoin for rare gc
+    // move byte ptr [ebx + Thread.m_fPreemptiveGCDisabled], 0
+    X86EmitOffsetModRM(0xc6, (X86Reg)0, kEBX, Thread::GetOffsetOfGCFlag());
+    Emit8(0);
 
     // add esp, popstack
     X86EmitAddEsp(sizeof(GSCookie) + UnmanagedToManagedFrame::GetOffsetOfCalleeSavedRegisters());
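
Both epilogs now emit this enable sequence directly instead of routing through `EmitEnable`. For readers decoding the emitter calls: `0xc6` with a `/0` register-field extension is the x86 opcode for a byte store of an immediate, so the pair of calls above assembles `mov byte ptr [ebx + ofs], 0`. Below is a minimal sketch of the resulting bytes (`EncodeMovByteEbxZero` is illustrative, not CoreCLR code; it assumes the offset fits in a signed byte, while the real helper presumably widens to a disp32 form when it does not):

```cpp
#include <cstdint>
#include <vector>

// Sketch: the bytes that X86EmitOffsetModRM(0xc6, (X86Reg)0, kEBX, ofs)
// followed by Emit8(0) would produce for a small offset, i.e.
// "mov byte ptr [ebx + ofs], 0".
std::vector<uint8_t> EncodeMovByteEbxZero(int8_t ofs)
{
    std::vector<uint8_t> code;
    code.push_back(0xC6);                          // opcode: mov r/m8, imm8 (/0)
    code.push_back((0x1 << 6) | (0x0 << 3) | 0x3); // ModRM: mod=01 ([ebx]+disp8),
                                                   //        reg=000 (/0), rm=011 (ebx)
    code.push_back(static_cast<uint8_t>(ofs));     // disp8
    code.push_back(0x00);                          // imm8: store the value 0
    return code;                                   // e.g. ofs=8 -> C6 43 08 00
}
```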
@@ -2651,12 +2649,6 @@ void StubLinkerCPU::EmitComMethodStubEpilog(TADDR pFrameVptr,
     // keeps on going past the previous "jmp eax".
     X86EmitReturn(0);
 
-    // -----------------------------------------------------------------------
-    // The out-of-line portion of enabling preemptive GC - rarely executed
-    // -----------------------------------------------------------------------
-    EmitLabel(rgRareLabels[2]);         // label for rare enable gc
-    EmitRareEnable(rgRejoinLabels[2]);  // emit rare enable gc
-
     // -----------------------------------------------------------------------
     // The out-of-line portion of disabling preemptive GC - rarely executed
     // -----------------------------------------------------------------------
@@ -2736,6 +2728,8 @@ void StubLinkerCPU::EmitSharedComMethodStubEpilog(TADDR pFrameVptr,
         PRECONDITION(rgRareLabels[0] != NULL && rgRareLabels[1] != NULL && rgRareLabels[2] != NULL);
         PRECONDITION(rgRejoinLabels != NULL);
         PRECONDITION(rgRejoinLabels[0] != NULL && rgRejoinLabels[1] != NULL && rgRejoinLabels[2] != NULL);
+        PRECONDITION(4 == sizeof(((Thread*)0)->m_State));
+        PRECONDITION(4 == sizeof(((Thread*)0)->m_fPreemptiveGCDisabled));
     }
     CONTRACTL_END;
 
@@ -2748,12 +2742,13 @@ void StubLinkerCPU::EmitSharedComMethodStubEpilog(TADDR pFrameVptr,
     X86EmitIndexRegStore(kEBX, Thread::GetOffsetOfCurrentFrame(), kEDI);
 
     // -----------------------------------------------------------------------
-    // Generate the inline part of enabling preemptive GC
+    // Generate enabling preemptive GC
     // -----------------------------------------------------------------------
     EmitLabel(NoEntryLabel);    // need to enable preemp mode even when we fail the disable as rare disable will return in coop mode
 
-    EmitEnable(rgRareLabels[2]);        // rare enable gc
-    EmitLabel(rgRejoinLabels[2]);       // rejoin for rare enable gc
+    // move byte ptr [ebx + Thread.m_fPreemptiveGCDisabled], 0
+    X86EmitOffsetModRM(0xc6, (X86Reg)0, kEBX, Thread::GetOffsetOfGCFlag());
+    Emit8(0);
 
 #ifdef PROFILING_SUPPORTED
     // If profiling is active, emit code to notify profiler of transition
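
At runtime, the instruction this hunk inlines simply clears the thread's cooperative-mode flag. In C++ terms, a minimal sketch (hypothetical, simplified `Thread` layout; `EnablePreemptiveGC` is an illustrative name, not a CoreCLR function, though the PRECONDITIONs added above do assert both fields are 4 bytes wide in the real class):

```cpp
#include <cstdint>

// Simplified stand-in for the real Thread class.
struct Thread
{
    uint32_t m_State;
    uint32_t m_fPreemptiveGCDisabled;
};

// Runtime effect of "mov byte ptr [ebx + Thread.m_fPreemptiveGCDisabled], 0":
// mark the thread as running in preemptive mode.  The emitted instruction
// writes only the low byte of the 4-byte flag, which works on the assumption
// that the flag only ever holds 0 or 1.
inline void EnablePreemptiveGC(Thread* pThread)
{
    pThread->m_fPreemptiveGCDisabled = 0;
}
```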
@@ -2800,12 +2795,6 @@ void StubLinkerCPU::EmitSharedComMethodStubEpilog(TADDR pFrameVptr,
     // keeps on going past the previous "jmp ecx".
     X86EmitReturn(0);
 
-    // -----------------------------------------------------------------------
-    // The out-of-line portion of enabling preemptive GC - rarely executed
-    // -----------------------------------------------------------------------
-    EmitLabel(rgRareLabels[2]);         // label for rare enable gc
-    EmitRareEnable(rgRejoinLabels[2]);  // emit rare enable gc
-
     // -----------------------------------------------------------------------
     // The out-of-line portion of disabling preemptive GC - rarely executed
     // -----------------------------------------------------------------------
@@ -3335,77 +3324,6 @@ VOID StubLinkerCPU::EmitUnwindInfoCheckSubfunction()
 
 #if defined(FEATURE_COMINTEROP) && defined(TARGET_X86)
 
-// -----------------------------------------------------------------------
-// Generates the inline portion of the code to enable preemptive GC. Hopefully,
-// the inline code is all that will execute most of the time. If this code
-// path is entered at certain times, however, it will need to jump out to
-// a separate out-of-line path which is more expensive. The "pForwardRef"
-// label indicates the start of the out-of-line path.
-//
-// Assumptions:
-//      ebx = Thread
-// Preserves
-//      all registers except ecx.
-//
-// -----------------------------------------------------------------------
-VOID StubLinkerCPU::EmitEnable(CodeLabel *pForwardRef)
-{
-    CONTRACTL
-    {
-        STANDARD_VM_CHECK;
-
-        PRECONDITION(4 == sizeof(((Thread*)0)->m_State));
-        PRECONDITION(4 == sizeof(((Thread*)0)->m_fPreemptiveGCDisabled));
-    }
-    CONTRACTL_END;
-
-    // move byte ptr [ebx + Thread.m_fPreemptiveGCDisabled], 0
-    X86EmitOffsetModRM(0xc6, (X86Reg)0, kEBX, Thread::GetOffsetOfGCFlag());
-    Emit8(0);
-
-    _ASSERTE(FitsInI1(Thread::TS_CatchAtSafePoint));
-
-    // test byte ptr [ebx + Thread.m_State], TS_CatchAtSafePoint
-    X86EmitOffsetModRM(0xf6, (X86Reg)0, kEBX, Thread::GetOffsetOfState());
-    Emit8(Thread::TS_CatchAtSafePoint);
-
-    // jnz RarePath
-    X86EmitCondJump(pForwardRef, X86CondCode::kJNZ);
-
-#ifdef _DEBUG
-    X86EmitDebugTrashReg(kECX);
-#endif
-
-}
-
-
-// -----------------------------------------------------------------------
-// Generates the out-of-line portion of the code to enable preemptive GC.
-// After the work is done, the code jumps back to the "pRejoinPoint"
-// which should be emitted right after the inline part is generated.
-//
-// Assumptions:
-//      ebx = Thread
-// Preserves
-//      all registers except ecx.
-//
-// -----------------------------------------------------------------------
-VOID StubLinkerCPU::EmitRareEnable(CodeLabel *pRejoinPoint)
-{
-    STANDARD_VM_CONTRACT;
-
-    X86EmitCall(NewExternalCodeLabel((LPVOID) StubRareEnable), 0);
-#ifdef _DEBUG
-    X86EmitDebugTrashReg(kECX);
-#endif
-    if (pRejoinPoint)
-    {
-        X86EmitNearJump(pRejoinPoint);
-    }
-
-}
-
-
 // -----------------------------------------------------------------------
 // Generates the inline portion of the code to disable preemptive GC. Hopefully,
 // the inline code is all that will execute most of the time. If this code
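
For reference, the inline check that the deleted `EmitEnable` emitted after the store — `test byte ptr [ebx + Thread.m_State], TS_CatchAtSafePoint` followed by `jnz` to the rare path — encodes as `F6 /0` plus the two-byte `0F 85` near jump. A minimal sketch of those bytes (`EncodeRareEnableCheck` is illustrative, not CoreCLR code; the rel32 shown is a placeholder that the stub linker would fix up once the rare-path label is resolved):

```cpp
#include <cstdint>
#include <vector>

// Sketch: "test byte ptr [ebx + ofs], mask" (F6 /0) then "jnz rel32" (0F 85).
std::vector<uint8_t> EncodeRareEnableCheck(int8_t ofs, uint8_t mask)
{
    std::vector<uint8_t> code;
    code.push_back(0xF6);                          // opcode: test r/m8, imm8 (/0)
    code.push_back((0x1 << 6) | (0x0 << 3) | 0x3); // ModRM: [ebx]+disp8, /0
    code.push_back(static_cast<uint8_t>(ofs));     // disp8: offset of m_State
    code.push_back(mask);                          // imm8: e.g. TS_CatchAtSafePoint
    code.push_back(0x0F);                          // jnz rel32, two-byte opcode
    code.push_back(0x85);
    for (int i = 0; i < 4; ++i)
        code.push_back(0x00);                      // rel32 placeholder
    return code;
}
```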