// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*
 * CallHelpers.CPP: helpers to call managed code
 *
 */
9
10#include "common.h"
11#include "dbginterface.h"
12
13// To include declaration of "AppDomainTransitionExceptionFilter"
14#include "excep.h"
15
16// To include declaration of "SignatureNative"
17#include "runtimehandles.h"
18
19#include "invokeutil.h"
20#include "argdestination.h"
21
#if defined(FEATURE_MULTICOREJIT) && defined(_DEBUG)

// Debug check for multicore-JIT background threads: such threads may only call
// managed code that lives in the system module (e.g. to throw a managed
// exception). Asserts if pTarget resolves to code outside the system module.
void AssertMulticoreJitAllowedModule(PCODE pTarget)
{
    CONTRACTL
    {
        SO_NOT_MAINLINE;
    }
    CONTRACTL_END;

    // Map the code address back to its MethodDesc, then to the owning module.
    MethodDesc * pTargetMD     = Entry2MethodDesc(pTarget, NULL);
    Module *     pTargetModule = pTargetMD->GetModule_NoLogging();

    _ASSERTE(pTargetModule->IsSystem());
}

#endif
43
// For X86, INSTALL_COMPLUS_EXCEPTION_HANDLER grants us sufficient protection to call into
// managed code.
//
// But on 64-bit, the personality routine will not pop frames or trackers as exceptions unwind
// out of managed code. Instead, we rely on explicit cleanup like CLRException::HandlerState::CleanupTry
// or UMThunkUnwindFrameChainHandler.
//
// So most callers should call through CallDescrWorkerWithHandler (or a wrapper like MethodDesc::Call)
// and get the platform-appropriate exception handling. A few places try to optimize by calling directly
// to managed methods (see ArrayInitializeWorker or FastCallFinalize). This sort of thing is
// dangerous. You have to worry about marking yourself as a legal managed caller, and you have to
// worry about how exceptions will be handled on a WIN64EXCEPTIONS plan. It is generally only suitable
// for X86.

//*******************************************************************************
// Calls into managed code through CallDescrWorker, bracketed by the
// EE-to-managed transition macros that provide the platform-appropriate
// exception handling described in the comment above.
//
// pCallDescrData - fully populated call descriptor (target, args, return info).
// fCriticalCall  - selects the "critical call" flavor of the transition.
void CallDescrWorkerWithHandler(
                CallDescrData * pCallDescrData,
                BOOL fCriticalCall)
{
    STATIC_CONTRACT_SO_INTOLERANT;

#if defined(FEATURE_MULTICOREJIT) && defined(_DEBUG)

    // For multicore JITting, background thread should not call managed code, except when
    // calling system code (e.g. throwing managed exception)
    if (GetThread()->HasThreadStateNC(Thread::TSNC_CallingManagedCodeDisabled))
    {
        AssertMulticoreJitAllowedModule(pCallDescrData->pTarget);
    }

#endif

    BEGIN_CALL_TO_MANAGEDEX(fCriticalCall ? EEToManagedCriticalCall : EEToManagedDefault);

    CallDescrWorker(pCallDescrData);

    END_CALL_TO_MANAGED();
}
82
83
#if !defined(_WIN64) && defined(_DEBUG)

//*******************************************************************************
// Debug-only C++ wrapper for the real worker, which is
// assembly code, in i386/asmhelpers.asm. Validates thread state and preserves
// the dangerous-object-ref table across the managed call.
void CallDescrWorker(CallDescrData * pCallDescrData)
{
    //
    // This function must not have a contract ... its caller has pushed an FS:0 frame
    // (COMPlusFrameHandler) that must be the first handler on the stack. The contract causes,
    // at a minimum, a C++ exception handler to be pushed to handle the destruction of the
    // contract object. If there is an exception in the managed code called from here, and that
    // exception is handled in that same block of managed code, then the COMPlusFrameHandler
    // will actually unwind the C++ handler before branching to the catch clause in managed
    // code. That essentially causes an out-of-order destruction of the contract object,
    // resulting in very odd crashes later.
    //
#if 0
    CONTRACTL {
        THROWS;
        GC_TRIGGERS;
    } CONTRACTL_END;
#endif // 0
    STATIC_CONTRACT_THROWS;
    STATIC_CONTRACT_GC_TRIGGERS;
    STATIC_CONTRACT_SO_TOLERANT;

    _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");

    TRIGGERSGC_NOSTOMP(); // Can't stomp object refs because they are args to the function

    // Save a copy of dangerousObjRefs in table.
    Thread* curThread;
    DWORD_PTR ObjRefTable[OBJREF_TABSIZE];

    curThread = GetThread();
    _ASSERTE(curThread != NULL);

    static_assert_no_msg(sizeof(curThread->dangerousObjRefs) == sizeof(ObjRefTable));
    memcpy(ObjRefTable, curThread->dangerousObjRefs, sizeof(ObjRefTable));

#ifndef FEATURE_INTERPRETER
    // When the interpreter is used, this may be called from preemptive code.
    _ASSERTE(curThread->PreemptiveGCDisabled()); // Jitted code expects to be in cooperative mode
#endif

    // If the current thread owns spinlock or unbreakable lock, it cannot call managed code.
    _ASSERTE(!curThread->HasUnbreakableLock() &&
             (curThread->m_StateNC & Thread::TSNC_OwnsSpinLock) == 0);

#ifdef _TARGET_ARM_
    _ASSERTE(IsThumbCode(pCallDescrData->pTarget));
#endif

    CallDescrWorkerInternal(pCallDescrData);

    // Restore dangerousObjRefs when we return back to EE after call
    memcpy(curThread->dangerousObjRefs, ObjRefTable, sizeof(ObjRefTable));

    TRIGGERSGC();

    ENABLESTRESSHEAP();
}
#endif // !defined(_WIN64) && defined(_DEBUG)
145
// Calls CallDescrWorkerWithHandler under a PAL_TRY whose filter
// (AppDomainTransitionExceptionFilter) runs for exceptions crossing this frame
// — presumably to deliver the catch-handler-found / debugger notifications
// implied by NotifyOfCHFFilterWrapperParam (confirm against the filter's
// implementation). The filter always continues the search, so the __except
// body below can never execute.
void DispatchCallDebuggerWrapper(
    CallDescrData * pCallDescrData,
    ContextTransitionFrame* pFrame,
    BOOL fCriticalCall
)
{
    // Use static contracts b/c we have SEH.
    STATIC_CONTRACT_THROWS;
    STATIC_CONTRACT_GC_TRIGGERS;
    STATIC_CONTRACT_MODE_COOPERATIVE;

    // PAL_TRY passes only a single pointer into the guarded region, so bundle
    // everything the body and filter need into one param struct.
    struct Param : NotifyOfCHFFilterWrapperParam
    {
        CallDescrData * pCallDescrData;
        BOOL fCriticalCall;
    } param;

    param.pFrame = pFrame;
    param.pCallDescrData = pCallDescrData;
    param.fCriticalCall = fCriticalCall;

    PAL_TRY(Param *, pParam, &param)
    {
        CallDescrWorkerWithHandler(
            pParam->pCallDescrData,
            pParam->fCriticalCall);
    }
    PAL_EXCEPT_FILTER(AppDomainTransitionExceptionFilter)
    {
        // Should never reach here b/c handler should always continue search.
        _ASSERTE(!"Unreachable");
    }
    PAL_ENDTRY
}
180
// Helper for VM->managed calls with simple signatures.
//
// pSrc                      - argument slots laid out as the target expects;
//                             under CALLDESCR_ARGREGS the first
//                             NUM_ARGUMENT_REGISTERS slots are the register
//                             arguments and the stack arguments follow.
// numStackSlotsToCopy       - number of stack argument slots in pSrc.
// pTargetAddress            - managed code address to call.
// dwDispatchCallSimpleFlags - DispatchCallSimple_* flags.
//
// Returns the first pointer-sized chunk of the call's return value.
void * DispatchCallSimple(
    SIZE_T *pSrc,
    DWORD numStackSlotsToCopy,
    PCODE pTargetAddress,
    DWORD dwDispatchCallSimpleFlags)
{
    CONTRACTL
    {
        GC_TRIGGERS;
        THROWS;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

#ifdef DEBUGGING_SUPPORTED
    // Let an attached debugger trace into the callee.
    if (CORDebuggerTraceCall())
        g_pDebugInterface->TraceCall((const BYTE *)pTargetAddress);
#endif // DEBUGGING_SUPPORTED

    CallDescrData callDescrData;

#ifdef CALLDESCR_ARGREGS
    // Register arguments lead pSrc; stack arguments come after them.
    callDescrData.pSrc = pSrc + NUM_ARGUMENT_REGISTERS;
    callDescrData.numStackSlots = numStackSlotsToCopy;
    callDescrData.pArgumentRegisters = (ArgumentRegisters *)pSrc;
#else
    callDescrData.pSrc = pSrc;
    callDescrData.numStackSlots = numStackSlotsToCopy;
#endif

#ifdef CALLDESCR_RETBUFFARGREG
    // Simple signatures carry no return buffer; point the ret-buff register
    // argument at a dummy slot so the worker always loads something valid.
    UINT64 retBuffArgPlaceholder = 0;
    callDescrData.pRetBuffArg = &retBuffArgPlaceholder;
#endif

#ifdef CALLDESCR_FPARGREGS
    // NULL means no floating point argument registers need to be set up.
    callDescrData.pFloatArgumentRegisters = NULL;
#endif
#ifdef CALLDESCR_REGTYPEMAP
    callDescrData.dwRegTypeMap = 0;
#endif
    callDescrData.fpReturnSize = 0;
    callDescrData.pTarget = pTargetAddress;

    if ((dwDispatchCallSimpleFlags & DispatchCallSimple_CatchHandlerFoundNotification) != 0)
    {
        // Route through the debugger wrapper so notifications are delivered
        // for exceptions that cross this frame.
        DispatchCallDebuggerWrapper(
            &callDescrData,
            NULL,
            dwDispatchCallSimpleFlags & DispatchCallSimple_CriticalCall);
    }
    else
    {
        CallDescrWorkerWithHandler(&callDescrData, dwDispatchCallSimpleFlags & DispatchCallSimple_CriticalCall);
    }

    return *(void **)(&callDescrData.returnValue);
}
240
// This method performs the proper profiler and debugger callbacks before dispatching the
// call. The caller has the responsibility of furnishing the target address, register and
// stack arguments. Stack arguments should be in reverse order, and pSrc should point past
// the last argument.
//
// If the managed call throws, the exception object is caught and returned through
// *pRefException (and, when FEATURE_CORRUPTING_EXCEPTIONS, its corruption severity
// through *pSeverity); transient exceptions are rethrown by EX_END_CATCH.
void DispatchCall(
    CallDescrData * pCallDescrData,
    OBJECTREF *pRefException,
    ContextTransitionFrame* pFrame /* = NULL */
#ifdef FEATURE_CORRUPTING_EXCEPTIONS
    , CorruptionSeverity *pSeverity /*= NULL*/
#endif // FEATURE_CORRUPTING_EXCEPTIONS
    )
{
    CONTRACTL
    {
        GC_TRIGGERS;
        THROWS;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

#ifdef DEBUGGING_SUPPORTED
    // Let an attached debugger trace into the callee.
    if (CORDebuggerTraceCall())
        g_pDebugInterface->TraceCall((const BYTE *)pCallDescrData->pTarget);
#endif // DEBUGGING_SUPPORTED

#ifdef FEATURE_CORRUPTING_EXCEPTIONS
    if (pSeverity != NULL)
    {
        // By default, assume any exception that comes out is NotCorrupting
        *pSeverity = NotCorrupting;
    }
#endif // FEATURE_CORRUPTING_EXCEPTIONS

    EX_TRY
    {
        DispatchCallDebuggerWrapper(pCallDescrData,
                                    pFrame,
                                    FALSE);
    }
    EX_CATCH
    {
        // Hand the thrown object back to the caller.
        *pRefException = GET_THROWABLE();

#ifdef FEATURE_CORRUPTING_EXCEPTIONS
        if (pSeverity != NULL)
        {
            // Capture the corruption severity of the exception we just caught.
            *pSeverity = GetThread()->GetExceptionState()->GetLastActiveExceptionCorruptionSeverity();
        }
#endif // FEATURE_CORRUPTING_EXCEPTIONS

    }
    EX_END_CATCH(RethrowTransientExceptions);
}
296
#ifdef CALLDESCR_REGTYPEMAP
//*******************************************************************************
// Records the element type of one register-passed argument into the register
// type map consumed by CallDescrWorkerInternal when it loads arguments into
// general-purpose or floating point registers.
//
// The map holds the types of the first NUM_ARGUMENT_REGISTERS arguments, one
// byte each, ordered LSB to MSB so the worker can keep the whole map in a
// register, examine the low byte, and shift right for each successive arg.
void FillInRegTypeMap(int argOffset, CorElementType typ, BYTE * pMap)
{
    CONTRACTL
    {
        WRAPPER(THROWS);
        WRAPPER(GC_TRIGGERS);
        MODE_ANY;
        PRECONDITION(CheckPointer(pMap, NULL_NOT_OK));
    }
    CONTRACTL_END;

    const int iRegArg = TransitionBlock::GetArgumentIndexFromOffset(argOffset);

    // Stack arguments fall outside the register window and are not recorded.
    if (iRegArg >= NUM_ARGUMENT_REGISTERS)
        return;

    pMap[iRegArg] = typ;
}
#endif // CALLDESCR_REGTYPEMAP
326
//*******************************************************************************
// Dispatches a call to m_pCallTarget, marshaling the caller's ARG_SLOT array
// into a fake FramedMethodFrame argument area built with _alloca.
//
// pArguments    - one ARG_SLOT per logical argument, ordered: this pointer (if
//                 any), return buffer (if any), then the explicit arguments.
//                 Values wider than an ARG_SLOT are passed as pointers.
// pReturnValue  - optional buffer receiving the raw return value.
// cbReturnValue - size in bytes of *pReturnValue; must not exceed
//                 sizeof(CallDescrData::returnValue) (asserted below).
// transitionToPreemptive (FEATURE_INTERPRETER only) - make the call in
//                 preemptive GC mode, preserving the interpreter's last-error.
#ifdef FEATURE_INTERPRETER
void MethodDescCallSite::CallTargetWorker(const ARG_SLOT *pArguments, ARG_SLOT *pReturnValue, int cbReturnValue, bool transitionToPreemptive)
#else
void MethodDescCallSite::CallTargetWorker(const ARG_SLOT *pArguments, ARG_SLOT *pReturnValue, int cbReturnValue)
#endif
{
    //
    // WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING
    //
    // This method needs to have a GC_TRIGGERS contract because it
    // calls managed code. However, IT MAY NOT TRIGGER A GC ITSELF
    // because the argument array is not protected and may contain gc
    // refs.
    //
    // WARNING WARNING WARNING WARNING WARNING WARNING WARNING WARNING
    //
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        INJECT_FAULT(COMPlusThrowOM(););
        MODE_COOPERATIVE;
        PRECONDITION(GetAppDomain()->CheckCanExecuteManagedCode(m_pMD));
        PRECONDITION(m_pMD->CheckActivated()); // EnsureActive will trigger, so we must already be activated
    }
    CONTRACTL_END;

    _ASSERTE(!NingenEnabled() && "You cannot invoke managed code inside the ngen compilation process.");

    // If we're invoking an mscorlib method, lift the restriction on type load limits. Calls into mscorlib are
    // typically calls into specific and controlled helper methods for security checks and other linktime tasks.
    //
    // @todo: In an ideal world, we would require each of those sites to do the override rather than disabling
    // the assert broadly here. However, by limiting the override to mscorlib methods, we should still be able
    // to effectively enforce the more general rule about loader recursion.
    MAYBE_OVERRIDE_TYPE_LOAD_LEVEL_LIMIT(CLASS_LOADED, m_pMD->GetModule()->IsSystem());

    // Filled in inside the GCX_FORBID scope below; consumed afterwards when
    // the CallDescrData is assembled.
    LPBYTE pTransitionBlock;
    UINT nStackBytes;
    UINT fpReturnSize;
#ifdef CALLDESCR_REGTYPEMAP
    UINT64 dwRegTypeMap;
#endif
#ifdef CALLDESCR_FPARGREGS
    FloatArgumentRegisters *pFloatArgumentRegisters = NULL;
#endif
    void* pvRetBuff = NULL;

    {
        //
        // the incoming argument array is not gc-protected, so we
        // may not trigger a GC before we actually call managed code
        //
        GCX_FORBID();

        // Record this call if required
        g_IBCLogger.LogMethodDescAccess(m_pMD);

        //
        // All types must already be loaded. This macro also sets up a FAULT_FORBID region which is
        // also required for critical calls since we cannot inject any failure points between the
        // caller of MethodDesc::CallDescr and the actual transition to managed code.
        //
        ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE();

#ifdef FEATURE_INTERPRETER
        _ASSERTE(isCallConv(m_methodSig.GetCallingConvention(), IMAGE_CEE_CS_CALLCONV_DEFAULT)
            || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_C))
            || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_VARARG))
            || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_NATIVEVARARG))
            || isCallConv(m_methodSig.GetCallingConvention(), CorCallingConvention(IMAGE_CEE_CS_CALLCONV_STDCALL)));
#else
        _ASSERTE(isCallConv(m_methodSig.GetCallingConvention(), IMAGE_CEE_CS_CALLCONV_DEFAULT));
        _ASSERTE(!(m_methodSig.GetCallingConventionInfo() & CORINFO_CALLCONV_PARAMTYPE));
#endif

#ifdef DEBUGGING_SUPPORTED
        // Let an attached debugger trace into the callee.
        if (CORDebuggerTraceCall())
        {
            g_pDebugInterface->TraceCall((const BYTE *)m_pCallTarget);
        }
#endif // DEBUGGING_SUPPORTED

#ifdef _DEBUG
        {
#ifdef UNIX_AMD64_ABI
            // Validate that the return value is not too big for the buffer passed
            // NOTE(review): IsRegPassedStruct is queried on the *declaring*
            // type's MethodTable (m_pMD->GetMethodTable()), not on the return
            // type — confirm that is the intended check.
            if (m_pMD->GetMethodTable()->IsRegPassedStruct())
            {
                TypeHandle thReturnValueType;
                if (m_methodSig.GetReturnTypeNormalized(&thReturnValueType) == ELEMENT_TYPE_VALUETYPE)
                {
                    _ASSERTE(cbReturnValue >= thReturnValueType.GetSize());
                }
            }
#endif // UNIX_AMD64_ABI

            // The metasig should be reset
            _ASSERTE(m_methodSig.GetArgNum() == 0);

            // Check to see that any value type args have been loaded and restored.
            // This is because we may be calling a FramedMethodFrame which will use the sig
            // to trace the args, but if any are unloaded we will be stuck if a GC occurs.
            _ASSERTE(m_pMD->IsRestored_NoLogging());
            CorElementType argType;
            while ((argType = m_methodSig.NextArg()) != ELEMENT_TYPE_END)
            {
                if (argType == ELEMENT_TYPE_VALUETYPE)
                {
                    TypeHandle th = m_methodSig.GetLastTypeHandleThrowing(ClassLoader::DontLoadTypes);
                    CONSISTENCY_CHECK(th.CheckFullyLoaded());
                    CONSISTENCY_CHECK(th.IsRestored_NoLogging());
                }
            }
            m_methodSig.Reset();
        }
#endif // _DEBUG

        // Index of the next ARG_SLOT to consume from pArguments.
        DWORD arg = 0;

        nStackBytes = m_argIt.SizeOfFrameArgumentArray();

        // Create a fake FramedMethodFrame on the stack.

        // Note that SizeOfFrameArgumentArray does overflow checks with sufficient margin to prevent overflows here
        DWORD dwAllocaSize = TransitionBlock::GetNegSpaceSize() + sizeof(TransitionBlock) + nStackBytes;

        LPBYTE pAlloc = (LPBYTE)_alloca(dwAllocaSize);

        pTransitionBlock = pAlloc + TransitionBlock::GetNegSpaceSize();

#ifdef CALLDESCR_REGTYPEMAP
        dwRegTypeMap = 0;
        BYTE* pMap = (BYTE*)&dwRegTypeMap;
#endif // CALLDESCR_REGTYPEMAP

        // Hidden arguments precede the explicit ones and consume leading slots
        // of pArguments in this fixed order.
        if (m_argIt.HasThis())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetThisOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }

        if (m_argIt.HasRetBuffArg())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetRetBuffArgOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }
#ifdef FEATURE_HFA
#ifdef FEATURE_INTERPRETER
        // Something is necessary for HFA's, but what's below (in the FEATURE_INTERPRETER ifdef)
        // doesn't seem to do the proper test. It fires,
        // incorrectly, for a one-word struct that *doesn't* have a ret buff. So we'll try this, instead:
        // We're here because it doesn't have a ret buff. If it would, except that the struct being returned
        // is an HFA, *then* assume the invoker made this slot a ret buff pointer.
        // It's an HFA if the return type is a struct, but it has a non-zero FP return size.
        // (If it were an HFA, but had a ret buff because it was varargs, then we wouldn't be here.)
        // Also this test won't work for float enums.
        else if (m_methodSig.GetReturnType() == ELEMENT_TYPE_VALUETYPE
                 && m_argIt.GetFPReturnSize() > 0)
#else  // FEATURE_INTERPRETER
        else if (ELEMENT_TYPE_VALUETYPE == m_methodSig.GetReturnTypeNormalized())
#endif // FEATURE_INTERPRETER
        {
            pvRetBuff = ArgSlotToPtr(pArguments[arg++]);
        }
#endif // FEATURE_HFA

#ifdef FEATURE_INTERPRETER
        if (m_argIt.IsVarArg())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetVASigCookieOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }

        if (m_argIt.HasParamType())
        {
            *((LPVOID*)(pTransitionBlock + m_argIt.GetParamTypeArgOffset())) = ArgSlotToPtr(pArguments[arg++]);
        }
#endif

        // Copy each remaining explicit argument into its frame slot.
        int ofs;
        for (; TransitionBlock::InvalidOffset != (ofs = m_argIt.GetNextOffset()); arg++)
        {
#ifdef CALLDESCR_REGTYPEMAP
            FillInRegTypeMap(ofs, m_argIt.GetArgType(), pMap);
#endif

#ifdef CALLDESCR_FPARGREGS
            // Under CALLDESCR_FPARGREGS -ve offsets indicate arguments in floating point registers. If we
            // have at least one such argument we point the call worker at the floating point area of the
            // frame (we leave it null otherwise since the worker can perform a useful optimization if it
            // knows no floating point registers need to be set up).
            if (TransitionBlock::HasFloatRegister(ofs, m_argIt.GetArgLocDescForStructInRegs()) &&
                (pFloatArgumentRegisters == NULL))
            {
                pFloatArgumentRegisters = (FloatArgumentRegisters*)(pTransitionBlock +
                                                                    TransitionBlock::GetOffsetOfFloatArgumentRegisters());
            }
#endif

            ArgDestination argDest(pTransitionBlock, ofs, m_argIt.GetArgLocDescForStructInRegs());

            UINT32 stackSize = m_argIt.GetArgSize();
            // We need to pass in a pointer, but be careful of the ARG_SLOT calling convention. We might already have a pointer in the ARG_SLOT.
            PVOID pSrc = stackSize > sizeof(ARG_SLOT) ? (LPVOID)ArgSlotToPtr(pArguments[arg]) : (LPVOID)ArgSlotEndianessFixup((ARG_SLOT*)&pArguments[arg], stackSize);

#if defined(UNIX_AMD64_ABI)
            // Structs passed split across registers get a dedicated copy path.
            if (argDest.IsStructPassedInRegs())
            {
                TypeHandle th;
                m_argIt.GetArgType(&th);

                argDest.CopyStructToRegisters(pSrc, th.AsMethodTable()->GetNumInstanceFieldBytes(), 0);
            }
            else
#endif // UNIX_AMD64_ABI
            {
                PVOID pDest = argDest.GetDestinationAddress();

                switch (stackSize)
                {
                    case 1:
                    case 2:
                    case 4:
                        *((INT32*)pDest) = (INT32)pArguments[arg];
                        break;

                    case 8:
                        *((INT64*)pDest) = pArguments[arg];
                        break;

                    default:
                        // The ARG_SLOT contains a pointer to the value-type
#ifdef ENREGISTERED_PARAMTYPE_MAXSIZE
                        if (m_argIt.IsArgPassedByRef())
                        {
                            *(PVOID*)pDest = pSrc;
                        }
                        else
#endif // ENREGISTERED_PARAMTYPE_MAXSIZE
                        if (stackSize > sizeof(ARG_SLOT))
                        {
                            CopyMemory(pDest, ArgSlotToPtr(pArguments[arg]), stackSize);
                        }
                        else
                        {
                            CopyMemory(pDest, (LPVOID) (&pArguments[arg]), stackSize);
                        }
                        break;
                }
            }
        }

        fpReturnSize = m_argIt.GetFPReturnSize();

    } // END GCX_FORBID & ENABLE_FORBID_GC_LOADER_USE_IN_THIS_SCOPE

    // Assemble the call descriptor from the frame we just built.
    CallDescrData callDescrData;

    callDescrData.pSrc = pTransitionBlock + sizeof(TransitionBlock);
    callDescrData.numStackSlots = nStackBytes / STACK_ELEM_SIZE;
#ifdef CALLDESCR_ARGREGS
    callDescrData.pArgumentRegisters = (ArgumentRegisters*)(pTransitionBlock + TransitionBlock::GetOffsetOfArgumentRegisters());
#endif
#ifdef CALLDESCR_RETBUFFARGREG
    callDescrData.pRetBuffArg = (UINT64*)(pTransitionBlock + TransitionBlock::GetOffsetOfRetBuffArgReg());
#endif
#ifdef CALLDESCR_FPARGREGS
    callDescrData.pFloatArgumentRegisters = pFloatArgumentRegisters;
#endif
#ifdef CALLDESCR_REGTYPEMAP
    callDescrData.dwRegTypeMap = dwRegTypeMap;
#endif
    callDescrData.fpReturnSize = fpReturnSize;
    callDescrData.pTarget = m_pCallTarget;

#ifdef FEATURE_INTERPRETER
    if (transitionToPreemptive)
    {
        // Make the call in preemptive mode and stash the callee's last error
        // for the interpreter before anything can overwrite it.
        GCPreemp transitionIfILStub(transitionToPreemptive);
        DWORD* pLastError = &GetThread()->m_dwLastErrorInterp;
        CallDescrWorkerInternal(&callDescrData);
        *pLastError = GetLastError();
    }
    else
#endif // FEATURE_INTERPRETER
    {
        CallDescrWorkerWithHandler(&callDescrData);
    }

    // Propagate the raw return value to the HFA ret buff, if one was set up.
    if (pvRetBuff != NULL)
    {
        memcpyNoGCRefs(pvRetBuff, &callDescrData.returnValue, sizeof(callDescrData.returnValue));
    }

    if (pReturnValue != NULL)
    {
        _ASSERTE(cbReturnValue <= sizeof(callDescrData.returnValue));
        memcpyNoGCRefs(pReturnValue, &callDescrData.returnValue, cbReturnValue);

#if !defined(_WIN64) && BIGENDIAN
        {
            GCX_FORBID();

            // On 32-bit big-endian, narrow (<=32-bit) results land in the high
            // half of the first slot; shift them down for the caller.
            if (!m_methodSig.Is64BitReturn())
            {
                pReturnValue[0] >>= 32;
            }
        }
#endif // !defined(_WIN64) && BIGENDIAN
    }
}
638
// Invokes the parameterless instance constructor of ref's type on ref.
// Throws MissingMethodException if the type has no default constructor.
void CallDefaultConstructor(OBJECTREF ref)
{
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

    MethodTable *pMT = ref->GetMethodTable();

    PREFIX_ASSUME(pMT != NULL);

    if (!pMT->HasDefaultConstructor())
    {
        SString ctorMethodName(SString::Utf8, COR_CTOR_METHOD_NAME);
        COMPlusThrowNonLocalized(kMissingMethodException, ctorMethodName.GetUnicode());
    }

    // ref is an unprotected OBJECTREF argument; protect it across the
    // GC-triggering managed call below.
    GCPROTECT_BEGIN (ref);

    MethodDesc *pMD = pMT->GetDefaultConstructor();

    PREPARE_NONVIRTUAL_CALLSITE_USING_METHODDESC(pMD);
    DECLARE_ARGHOLDER_ARRAY(CtorArgs, 1);
    CtorArgs[ARGNUM_0]  = OBJECTREF_TO_ARGHOLDER(ref);

    // Call the ctor...
    CATCH_HANDLER_FOUND_NOTIFICATION_CALLSITE;
    CALL_MANAGED_METHOD_NORET(CtorArgs);

    GCPROTECT_END ();
}
673