forked from dotnet/runtime
-
Notifications
You must be signed in to change notification settings - Fork 0
/
class.cpp
3091 lines (2598 loc) · 96.1 KB
/
class.cpp
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
//
// File: CLASS.CPP
//
#include "common.h"
#include "dllimport.h"
#include "dllimportcallback.h"
#include "fieldmarshaler.h"
#include "customattribute.h"
#include "encee.h"
#include "typestring.h"
#include "dbginterface.h"
#ifdef FEATURE_COMINTEROP
#include "comcallablewrapper.h"
#include "clrtocomcall.h"
#include "runtimecallablewrapper.h"
#endif // FEATURE_COMINTEROP
//#define DEBUG_LAYOUT
#define SORT_BY_RID
#ifndef DACCESS_COMPILE
#include "methodtablebuilder.h"
#endif
#ifndef DACCESS_COMPILE
//*******************************************************************************
// Construct an EEClass. Only the fixed-field size is recorded here; every other
// member is left zero (the backing memory comes from zero-initialized loader-heap
// pages — see EEClass::operator new).
EEClass::EEClass(DWORD cbFixedEEClassFields)
{
    LIMITED_METHOD_CONTRACT;

    // Cache size of fixed fields (this instance also contains a set of packed fields whose final size isn't
    // determined until the end of class loading). We store the size into a spare byte made available by
    // compiler field alignment, so we need to ensure we never allocate a flavor of EEClass more than 255
    // bytes long.
    _ASSERTE(cbFixedEEClassFields <= 0xff);
    m_cbFixedEEClassFields = (BYTE)cbFixedEEClassFields;

    // All other members are initialized to zero
}
//*******************************************************************************
// Placement allocator for EEClass instances. The storage is carved out of the
// supplied loader heap and registered with the AllocMemTracker so the allocation
// can be backed out if class loading subsequently fails.
void *EEClass::operator new(
    size_t size,
    LoaderHeap *pHeap,
    AllocMemTracker *pamTracker)
{
    CONTRACTL
    {
        THROWS;
        GC_NOTRIGGER;
        INJECT_FAULT(COMPlusThrowOM());
    }
    CONTRACTL_END;

    // Every EEClass (or sub-type) is immediately followed by an EEClassPackedFields
    // structure (maximally sized at runtime), so pad the request by that amount.
    // S_SIZE_T keeps the addition overflow-checked.
    S_SIZE_T cbAlloc = S_SIZE_T(size) + S_SIZE_T(sizeof(EEClassPackedFields));
    void *pResult = pamTracker->Track(pHeap->AllocMem(cbAlloc));

    // The loader heap hands back VirtualAlloc'ed (zeroed) memory, so no memset
    // of the new instance is required here.
    return pResult;
}
//*******************************************************************************
// Tear down per-class resources when the owning MethodTable is unloaded.
// Bookends the work with profiler ClassUnloadStarted/ClassUnloadFinished
// callbacks, releases COM interop data, and drops references on delegate
// invocation stubs owned by this class. Must not throw (NOTHROW contract);
// profiler exceptions are swallowed deliberately.
void EEClass::Destruct(MethodTable * pOwningMT)
{
    CONTRACTL
    {
        NOTHROW;
        GC_TRIGGERS;
        FORBID_FAULT;
        PRECONDITION(pOwningMT != NULL);
    }
    CONTRACTL_END

#ifdef _DEBUG
    // Guard against double-destruction in checked builds.
    _ASSERTE(!IsDestroyed());
    SetDestroyed();
#endif

#ifdef PROFILING_SUPPORTED
    // If profiling, then notify the class is getting unloaded.
    {
        BEGIN_PROFILER_CALLBACK(CORProfilerTrackClasses());
        {
            // Calls to the profiler callback may throw, or otherwise fail, if
            // the profiler AVs/throws an unhandled exception/etc. We don't want
            // those failures to affect the runtime, so we'll ignore them.
            //
            // Note that the profiler callback may turn around and make calls into
            // the profiling runtime that may throw. This try/catch block doesn't
            // protect the profiler against such failures. To protect the profiler
            // against that, we will need try/catch blocks around all calls into the
            // profiling API.
            //
            // (Bug #26467)
            //
            FAULT_NOT_FATAL();

            EX_TRY
            {
                GCX_PREEMP();

                (&g_profControlBlock)->ClassUnloadStarted((ClassID) pOwningMT);
            }
            EX_CATCH
            {
                // The exception here came from the profiler itself. We'll just
                // swallow the exception, since we don't want the profiler to bring
                // down the runtime.
            }
            EX_END_CATCH(RethrowTerminalExceptions);
        }
        END_PROFILER_CALLBACK();
    }
#endif // PROFILING_SUPPORTED

#ifdef FEATURE_COMINTEROP
    // clean up any COM Data
    if (m_pccwTemplate)
    {
        m_pccwTemplate->Release();
        m_pccwTemplate = NULL;
    }

#ifdef FEATURE_COMINTEROP_UNMANAGED_ACTIVATION
    if (GetComClassFactory())
    {
        GetComClassFactory()->Cleanup();
    }
#endif // FEATURE_COMINTEROP_UNMANAGED_ACTIVATION
#endif // FEATURE_COMINTEROP

    if (IsDelegate())
    {
        DelegateEEClass* pDelegateEEClass = (DelegateEEClass*)this;

        if (pDelegateEEClass->m_pStaticCallStub)
        {
            // Stub memory is executable: take a writer holder before touching
            // its ref count.
            ExecutableWriterHolder<Stub> stubWriterHolder(pDelegateEEClass->m_pStaticCallStub, sizeof(Stub));
            BOOL fStubDeleted = stubWriterHolder.GetRW()->DecRef();
            if (fStubDeleted)
            {
                // Last reference gone: unregister the stub from its stub manager.
                DelegateInvokeStubManager::g_pManager->RemoveStub(pDelegateEEClass->m_pStaticCallStub);
            }
        }
        if (pDelegateEEClass->m_pInstRetBuffCallStub)
        {
            ExecutableWriterHolder<Stub> stubWriterHolder(pDelegateEEClass->m_pInstRetBuffCallStub, sizeof(Stub));
            stubWriterHolder.GetRW()->DecRef();
        }
        // While m_pMultiCastInvokeStub is also a member,
        // it is owned by the m_pMulticastStubCache, not by the class
        // - it is shared across classes. So we don't decrement
        // its ref count here
    }

#ifdef FEATURE_COMINTEROP
    if (GetSparseCOMInteropVTableMap() != NULL)
        delete GetSparseCOMInteropVTableMap();
#endif // FEATURE_COMINTEROP

#ifdef PROFILING_SUPPORTED
    // If profiling, then notify the class is getting unloaded.
    {
        BEGIN_PROFILER_CALLBACK(CORProfilerTrackClasses());
        {
            // See comments in the call to ClassUnloadStarted for details on this
            // FAULT_NOT_FATAL marker and exception swallowing.
            FAULT_NOT_FATAL();
            EX_TRY
            {
                GCX_PREEMP();
                (&g_profControlBlock)->ClassUnloadFinished((ClassID) pOwningMT, S_OK);
            }
            EX_CATCH
            {
            }
            EX_END_CATCH(RethrowTerminalExceptions);
        }
        END_PROFILER_CALLBACK();
    }
#endif // PROFILING_SUPPORTED
}
//*******************************************************************************
/*static*/ EEClass *
EEClass::CreateMinimalClass(LoaderHeap *pHeap, AllocMemTracker *pamTracker)
{
    CONTRACTL
    {
        THROWS;
        GC_NOTRIGGER;
    }
    CONTRACTL_END;

    // Allocate a bare EEClass (no sub-type, no extra fixed fields) on the given
    // loader heap; the placement operator new registers it with pamTracker.
    EEClass *pMinimalClass = new (pHeap, pamTracker) EEClass(sizeof(EEClass));
    return pMinimalClass;
}
//*******************************************************************************
//-----------------------------------------------------------------------------------
// Note: this only loads the type to CLASS_DEPENDENCIES_LOADED as this can be called
// indirectly from DoFullyLoad() as part of accessibility checking.
//-----------------------------------------------------------------------------------
MethodTable *MethodTable::LoadEnclosingMethodTable(ClassLoadLevel targetLevel)
{
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        INJECT_FAULT(COMPlusThrowOM(););
        MODE_ANY;
    }
    CONTRACTL_END

    // Nested types record the typedef of their enclosing type; a nil token
    // means this type is not nested and there is nothing to load.
    const mdTypeDef tkEnclosing = GetEnclosingCl();
    if (tkEnclosing == mdTypeDefNil)
    {
        return NULL;
    }

    // Load the enclosing typedef (uninstantiated defs permitted) up to the
    // requested level, then hand back its method table.
    TypeHandle thEnclosing = ClassLoader::LoadTypeDefThrowing(GetModule(),
                                                              tkEnclosing,
                                                              ClassLoader::ThrowIfNotFound,
                                                              ClassLoader::PermitUninstDefOrRef,
                                                              tdNoTypes,
                                                              targetLevel);
    return thEnclosing.GetMethodTable();
}
#ifdef EnC_SUPPORTED
//*******************************************************************************
// Re-run just enough of MethodTableBuilder::InitializeFieldDescs to (re)initialize
// a single EnC-added FieldDesc (pFD) for fieldDef on pMT. Builds a one-field
// simulated metadata environment, runs the initializer under GC protection, then
// restores the class's field counts (which the initializer clobbers).
VOID EEClass::FixupFieldDescForEnC(MethodTable * pMT, EnCFieldDesc *pFD, mdFieldDef fieldDef)
{
    CONTRACTL
    {
        THROWS;
        MODE_COOPERATIVE;
        WRAPPER(GC_TRIGGERS);
        INJECT_FAULT(COMPlusThrowOM(););
    }
    CONTRACTL_END

    Module * pModule = pMT->GetModule();
    IMDInternalImport *pImport = pModule->GetMDImport();

#ifdef LOGGING
    if (LoggingEnabled())
    {
        LPCSTR szFieldName;
        if (FAILED(pImport->GetNameOfFieldDef(fieldDef, &szFieldName)))
        {
            szFieldName = "Invalid FieldDef record";
        }
        LOG((LF_ENC, LL_INFO100, "EEClass::FixupFieldDescForEnC %08x %s\n", fieldDef, szFieldName));
    }
#endif //LOGGING

#ifdef _DEBUG
    // Optional debugger break when fixing up EnC fields, driven by config.
    BOOL shouldBreak = CLRConfig::GetConfigValue(CLRConfig::INTERNAL_EncFixupFieldBreak);
    if (shouldBreak > 0) {
        _ASSERTE(!"EncFixupFieldBreak");
    }
#endif // _DEBUG

    // MethodTableBuilder uses the stacking allocator for most of it's
    // working memory requirements, so this makes sure to free the memory
    // once this function is out of scope.
    ACQUIRE_STACKING_ALLOCATOR(pStackingAllocator);

    // Build a one-entry metadata view so InitializeFieldDescs sees only fieldDef.
    MethodTableBuilder::bmtMetaDataInfo bmtMetaData;
    bmtMetaData.cFields = 1;
    bmtMetaData.pFields = (mdToken*)_alloca(sizeof(mdToken));
    bmtMetaData.pFields[0] = fieldDef;
    bmtMetaData.pFieldAttrs = (DWORD*)_alloca(sizeof(DWORD));
    IfFailThrow(pImport->GetFieldDefProps(fieldDef, &bmtMetaData.pFieldAttrs[0]));

    MethodTableBuilder::bmtMethAndFieldDescs bmtMFDescs;
    // We need to alloc the memory, but don't have to fill it in. InitializeFieldDescs
    // will copy pFD (1st arg) into here.
    bmtMFDescs.ppFieldDescList = (FieldDesc**)_alloca(sizeof(FieldDesc*));

    MethodTableBuilder::bmtFieldPlacement bmtFP;

    // This simulates the environment that BuildMethodTableThrowing creates
    // just enough to run InitializeFieldDescs
    MethodTableBuilder::bmtErrorInfo bmtError;
    bmtError.pModule = pModule;
    bmtError.cl = pMT->GetCl();
    bmtError.dMethodDefInError = mdTokenNil;
    bmtError.szMethodNameForError = NULL;

    MethodTableBuilder::bmtInternalInfo bmtInternal;
    bmtInternal.pModule = pModule;
    bmtInternal.pInternalImport = pImport;
    bmtInternal.pParentMT = pMT->GetParentMethodTable();

    MethodTableBuilder::bmtProperties bmtProp;
    bmtProp.fIsValueClass = !!pMT->IsValueType();

    MethodTableBuilder::bmtEnumFieldInfo bmtEnumFields(bmtInternal.pInternalImport);

    // Tell the builder whether the single simulated field is static or instance.
    if (pFD->IsStatic())
    {
        bmtEnumFields.dwNumStaticFields = 1;
    }
    else
    {
        bmtEnumFields.dwNumInstanceFields = 1;
    }

    // We shouldn't have to fill this in b/c we're not allowed to EnC value classes, or
    // anything else with layout info associated with it.
    LayoutRawFieldInfo *pLayoutRawFieldInfos = (LayoutRawFieldInfo*)_alloca((2) * sizeof(LayoutRawFieldInfo));

    // If not NULL, it means there are some by-value fields, and this contains an entry for each instance or static field,
    // which is NULL if not a by value field, and points to the EEClass of the field if a by value field. Instance fields
    // come first, statics come second.
    MethodTable **pByValueClassCache = NULL;

    EEClass * pClass = pMT->GetClass();

    // InitializeFieldDescs are going to change these numbers to something wrong,
    // even though we already have the right numbers. Save & restore after.
    WORD wNumInstanceFields = pMT->GetNumInstanceFields();
    WORD wNumStaticFields = pMT->GetNumStaticFields();
    unsigned totalDeclaredFieldSize = 0;

    AllocMemTracker dummyAmTracker;

    BaseDomain * pDomain = pMT->GetDomain();
    MethodTableBuilder builder(pMT, pClass,
                               pStackingAllocator,
                               &dummyAmTracker);

    MethodTableBuilder::bmtGenericsInfo genericsInfo;

    // Protect the throwable captured by EX_CATCH_THROWABLE across GC.
    OBJECTREF pThrowable = NULL;
    GCPROTECT_BEGIN(pThrowable);

    builder.SetBMTData(pMT->GetLoaderAllocator(),
                       &bmtError,
                       &bmtProp,
                       NULL,
                       NULL,
                       NULL,
                       &bmtMetaData,
                       NULL,
                       &bmtMFDescs,
                       &bmtFP,
                       &bmtInternal,
                       NULL,
                       NULL,
                       &genericsInfo,
                       &bmtEnumFields);

    EX_TRY
    {
        GCX_PREEMP();
        builder.InitializeFieldDescs(pFD,
                                     pLayoutRawFieldInfos,
                                     &bmtInternal,
                                     &genericsInfo,
                                     &bmtMetaData,
                                     &bmtEnumFields,
                                     &bmtError,
                                     &pByValueClassCache,
                                     &bmtMFDescs,
                                     &bmtFP,
                                     &totalDeclaredFieldSize);
    }
    EX_CATCH_THROWABLE(&pThrowable);

    dummyAmTracker.SuppressRelease();

    // Restore now
    pClass->SetNumInstanceFields(wNumInstanceFields);
    pClass->SetNumStaticFields(wNumStaticFields);

    // PERF: For now, we turn off the fast equality check for valuetypes when a
    // a field is modified by EnC. Consider doing a check and setting the bit only when
    // necessary.
    if (pMT->IsValueType())
    {
        pClass->SetIsNotTightlyPacked();
    }

    // Re-raise any failure captured above, now that the counts are restored.
    if (pThrowable != NULL)
    {
        COMPlusThrow(pThrowable);
    }

    GCPROTECT_END();

    pFD->SetMethodTable(pMT);

    // We set this when we first created the FieldDesc, but initializing the FieldDesc
    // may have overwritten it so we need to set it again.
    pFD->SetEnCNew();

    return;
}
//---------------------------------------------------------------------------------------
//
// AddField - called when a new field is added by EnC
//
// Since instances of this class may already exist on the heap, we can't change the
// runtime layout of the object to accommodate the new field. Instead we hang the field
// off the syncblock (for instance fields) or in the FieldDesc for static fields.
//
// Here we just create the FieldDesc and link it to the class. The actual storage will
// be created lazily on demand.
//
// Create a FieldDesc for an EnC-added field and link it to the class's EnC data.
// Storage for the field's value is created lazily later; see the comment block above.
// Returns S_OK, E_OUTOFMEMORY, E_FAIL, or CORDBG_E_ENC_CANT_ADD_FIELD_TO_VALUE_OR_LAYOUT_CLASS.
HRESULT EEClass::AddField(MethodTable * pMT, mdFieldDef fieldDef, EnCFieldDesc **ppNewFD)
{
    CONTRACTL
    {
        THROWS;
        GC_NOTRIGGER;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

    Module * pModule = pMT->GetModule();
    IMDInternalImport *pImport = pModule->GetMDImport();

#ifdef LOGGING
    if (LoggingEnabled())
    {
        LPCSTR szFieldName;
        if (FAILED(pImport->GetNameOfFieldDef(fieldDef, &szFieldName)))
        {
            szFieldName = "Invalid FieldDef record";
        }
        LOG((LF_ENC, LL_INFO100, "EEClass::AddField %s\n", szFieldName));
    }
#endif //LOGGING

    // We can only add fields to normal classes
    if (pMT->HasLayout() || pMT->IsValueType())
    {
        return CORDBG_E_ENC_CANT_ADD_FIELD_TO_VALUE_OR_LAYOUT_CLASS;
    }

    // We only add private fields.
    // This may not be strictly necessary, but helps avoid any semantic confusion with
    // existing code etc.
    DWORD dwFieldAttrs;
    IfFailThrow(pImport->GetFieldDefProps(fieldDef, &dwFieldAttrs));

    LoaderAllocator* pAllocator = pMT->GetLoaderAllocator();

    // Here we allocate a FieldDesc and set just enough info to be able to fix it up later
    // when we're running in managed code.
    // Non-throwing allocation: an OOM here is reported via the HRESULT, not thrown.
    EnCAddedFieldElement *pAddedField = (EnCAddedFieldElement *)
        (void*)pAllocator->GetHighFrequencyHeap()->AllocMem_NoThrow(S_SIZE_T(sizeof(EnCAddedFieldElement)));
    if (!pAddedField)
    {
        return E_OUTOFMEMORY;
    }
    pAddedField->Init( fieldDef, IsFdStatic(dwFieldAttrs) );
    EnCFieldDesc *pNewFD = &pAddedField->m_fieldDesc;

    // Get the EnCEEClassData for this class
    // Don't adjust EEClass stats b/c EnC fields shouldn't touch EE data structures.
    // We'll just update our private EnC structures instead.
    EnCEEClassData *pEnCClass = ((EditAndContinueModule*)pModule)->GetEnCEEClassData(pMT);
    if (! pEnCClass)
        return E_FAIL;

    // Add the field element to the list of added fields for this class
    pEnCClass->AddField(pAddedField);

    // Store the FieldDesc into the module's field list
    {
        CONTRACT_VIOLATION(ThrowsViolation); // B#25680 (Fix Enc violations): Must handle OOM's from Ensure
        pModule->EnsureFieldDefCanBeStored(fieldDef);
    }
    pModule->EnsuredStoreFieldDef(fieldDef, pNewFD);
    pNewFD->SetMethodTable(pMT);

    // Success, return the new FieldDesc
    if (ppNewFD)
    {
        *ppNewFD = pNewFD;
    }
    return S_OK;
}
//---------------------------------------------------------------------------------------
//
// AddMethod - called when a new method is added by EnC
//
// The method has already been added to the metadata with token methodDef.
// Create a new MethodDesc for the method.
//
// Create a MethodDesc for an EnC-added method (token methodDef, IL at newRVA)
// and attach it to the class. Only plain IL methods are supported: P/Invokes,
// runtime-implemented methods, and non-static interface methods are rejected.
// Returns S_OK, COR_E_BADIMAGEFORMAT, CORDBG_E_ENC_EDIT_NOT_SUPPORTED, or a
// failure HRESULT from initialization.
HRESULT EEClass::AddMethod(MethodTable * pMT, mdMethodDef methodDef, RVA newRVA, MethodDesc **ppMethod)
{
    CONTRACTL
    {
        THROWS;
        GC_NOTRIGGER;
        MODE_COOPERATIVE;
    }
    CONTRACTL_END;

    Module * pModule = pMT->GetModule();
    IMDInternalImport *pImport = pModule->GetMDImport();

#ifdef LOGGING
    if (LoggingEnabled())
    {
        LPCSTR szMethodName;
        if (FAILED(pImport->GetNameOfMethodDef(methodDef, &szMethodName)))
        {
            szMethodName = "Invalid MethodDef record";
        }
        LOG((LF_ENC, LL_INFO100, "EEClass::AddMethod %s\n", szMethodName));
    }
#endif //LOGGING

    DWORD dwDescrOffset;
    DWORD dwImplFlags;
    HRESULT hr = S_OK;

    if (FAILED(pImport->GetMethodImplProps(methodDef, &dwDescrOffset, &dwImplFlags)))
    {
        return COR_E_BADIMAGEFORMAT;
    }

    DWORD dwMemberAttrs;
    IfFailThrow(pImport->GetMethodDefProps(methodDef, &dwMemberAttrs));

    // Refuse to add other special cases
    if (IsReallyMdPinvokeImpl(dwMemberAttrs) ||
        (pMT->IsInterface() && !IsMdStatic(dwMemberAttrs)) ||
        IsMiRuntime(dwImplFlags))
    {
        _ASSERTE(! "**Error** EEClass::AddMethod only IL private non-virtual methods are supported");
        LOG((LF_ENC, LL_INFO100, "**Error** EEClass::AddMethod only IL private non-virtual methods are supported\n"));
        return CORDBG_E_ENC_EDIT_NOT_SUPPORTED;
    }

#ifdef _DEBUG
    // Validate that this methodDef correctly has a parent typeDef
    mdTypeDef parentTypeDef;
    if (FAILED(hr = pImport->GetParentToken(methodDef, &parentTypeDef)))
    {
        _ASSERTE(! "**Error** EEClass::AddMethod parent token not found");
        LOG((LF_ENC, LL_INFO100, "**Error** EEClass::AddMethod parent token not found\n"));
        return E_FAIL;
    }
#endif // _DEBUG

    EEClass * pClass = pMT->GetClass();

    // @todo: OOM: InitMethodDesc will allocate loaderheap memory but leak it
    // on failure. This AllocMemTracker should be replaced with a real one.
    AllocMemTracker dummyAmTracker;

    LoaderAllocator* pAllocator = pMT->GetLoaderAllocator();

    DWORD classification = mcIL;

    // Create a new MethodDescChunk to hold the new MethodDesc
    // Create the chunk somewhere we'll know is within range of the VTable
    MethodDescChunk *pChunk = MethodDescChunk::CreateChunk(pAllocator->GetHighFrequencyHeap(),
                                                           1,               // methodDescCount
                                                           classification,
                                                           TRUE /* fNonVtableSlot */,
                                                           TRUE /* fNativeCodeSlot */,
                                                           pMT,
                                                           &dummyAmTracker);

    // Get the new MethodDesc (Note: The method desc memory is zero initialized)
    MethodDesc *pNewMD = pChunk->GetFirstMethodDesc();

    // Initialize the new MethodDesc

    // This method runs on a debugger thread. Debugger threads do not have Thread object that caches StackingAllocator.
    // Use a local StackingAllocator instead.
    StackingAllocator stackingAllocator;

    // Minimal builder environment: only bmtInternal is populated; the rest of
    // the SetBMTData slots are unused by InitMethodDesc.
    MethodTableBuilder::bmtInternalInfo bmtInternal;
    bmtInternal.pModule = pMT->GetModule();
    bmtInternal.pInternalImport = NULL;
    bmtInternal.pParentMT = NULL;

    MethodTableBuilder builder(pMT,
                               pClass,
                               &stackingAllocator,
                               &dummyAmTracker);
    builder.SetBMTData(pMT->GetLoaderAllocator(),
                       NULL,
                       NULL,
                       NULL,
                       NULL,
                       NULL,
                       NULL,
                       NULL,
                       NULL,
                       NULL,
                       &bmtInternal);
    EX_TRY
    {
        INDEBUG(LPCSTR debug_szFieldName);
        INDEBUG(if (FAILED(pImport->GetNameOfMethodDef(methodDef, &debug_szFieldName))) { debug_szFieldName = "Invalid MethodDef record"; });
        builder.InitMethodDesc(pNewMD,
                               classification,
                               methodDef,
                               dwImplFlags,
                               dwMemberAttrs,
                               TRUE,            // fEnC
                               newRVA,
                               pImport,
                               NULL
                               COMMA_INDEBUG(debug_szFieldName)
                               COMMA_INDEBUG(pMT->GetDebugClassName())
                               COMMA_INDEBUG(NULL)
                              );

        pNewMD->SetTemporaryEntryPoint(pAllocator, &dummyAmTracker);

        // [TODO] if an exception is thrown, asserts will fire in EX_CATCH_HRESULT()
        // during an EnC operation due to the debugger thread not being able to
        // transition to COOP mode.
    }
    EX_CATCH_HRESULT(hr);
    if (S_OK != hr)
        return hr;

    dummyAmTracker.SuppressRelease();

    _ASSERTE(pNewMD->IsEnCAddedMethod());

    pNewMD->SetSlot(MethodTable::NO_SLOT);    // we can't ever use the slot for EnC methods

    pClass->AddChunk(pChunk);

    // Store the new MethodDesc into the collection for this class
    pModule->EnsureMethodDefCanBeStored(methodDef);
    pModule->EnsuredStoreMethodDef(methodDef, pNewMD);

    LOG((LF_ENC, LL_INFO100, "EEClass::AddMethod new methoddesc %p for token %p\n", pNewMD, methodDef));

    // Success - return the new MethodDesc
    _ASSERTE( SUCCEEDED(hr) );
    if (ppMethod)
    {
        *ppMethod = pNewMD;
    }
    return S_OK;
}
#endif // EnC_SUPPORTED
//---------------------------------------------------------------------------------------
//
// Check that the class type parameters are used consistently in this signature blob
// in accordance with their variance annotations
// The signature is assumed to be well-formed but indices and arities might not be correct
//
// Walk one type signature, verifying that each occurrence of a class type
// parameter (ELEMENT_TYPE_VAR) is consistent with its declared variance given
// the variance of the surrounding context ('position').
//
// numGenericArgs - arity of the enclosing type
// pVarianceInfo  - per-parameter variance flags; NULL means all non-variant
// pModule        - module owning the signature (used for error reporting)
// psig           - signature to walk (by value; the caller's position is unaffected)
// position       - variance of the context this signature appears in
//
// Returns TRUE if the signature is variance-consistent (or the question is
// deferred to later signature validation), FALSE on a variance violation.
// Throws THROW_BAD_FORMAT on a malformed element type.
BOOL
EEClass::CheckVarianceInSig(
    DWORD numGenericArgs,
    BYTE * pVarianceInfo,
    Module * pModule,
    SigPointer psig,
    CorGenericParamAttr position)
{
    CONTRACTL
    {
        THROWS;
        GC_TRIGGERS;
        MODE_ANY;
    }
    CONTRACTL_END;

    // No variance annotations on any type parameter => nothing can be violated.
    if (pVarianceInfo == NULL)
        return TRUE;

    CorElementType typ;
    IfFailThrow(psig.GetElemType(&typ));
    switch (typ)
    {
        // Leaf types contain no class type parameters so are trivially consistent.
        // MVAR is a *method* type parameter, which class variance does not constrain;
        // bare CLASS/VALUETYPE tokens carry no type arguments to inspect.
        case ELEMENT_TYPE_STRING:
        case ELEMENT_TYPE_U:
        case ELEMENT_TYPE_I:
        case ELEMENT_TYPE_I1:
        case ELEMENT_TYPE_U1:
        case ELEMENT_TYPE_BOOLEAN:
        case ELEMENT_TYPE_I2:
        case ELEMENT_TYPE_U2:
        case ELEMENT_TYPE_CHAR:
        case ELEMENT_TYPE_I4:
        case ELEMENT_TYPE_U4:
        case ELEMENT_TYPE_I8:
        case ELEMENT_TYPE_U8:
        case ELEMENT_TYPE_R4:
        case ELEMENT_TYPE_R8:
        case ELEMENT_TYPE_VOID:
        case ELEMENT_TYPE_OBJECT:
        case ELEMENT_TYPE_TYPEDBYREF:
        case ELEMENT_TYPE_MVAR:
        case ELEMENT_TYPE_CLASS:
        case ELEMENT_TYPE_VALUETYPE:
            return TRUE;

        case ELEMENT_TYPE_VAR:
        {
            uint32_t index;
            IfFailThrow(psig.GetData(&index));

            // An out-of-range index is an arity error that will be diagnosed later
            // during signature checking; don't report it as a variance failure.
            // (index is unsigned, so only the upper bound can be violated; the
            // previous "index < 0" test was tautologically false.)
            if (index >= numGenericArgs)
                return TRUE;

            // Non-variant parameters are allowed to appear anywhere
            if (pVarianceInfo[index] == gpNonVariant)
                return TRUE;

            // Covariant and contravariant parameters can *only* appear in resp. covariant and contravariant positions
            return ((CorGenericParamAttr) (pVarianceInfo[index]) == position);
        }

        case ELEMENT_TYPE_GENERICINST:
        {
            IfFailThrow(psig.GetElemType(&typ));
            mdTypeRef typeref;
            IfFailThrow(psig.GetToken(&typeref));

            // The number of type parameters follows
            uint32_t ntypars;
            IfFailThrow(psig.GetData(&ntypars));

            // If this is a value type, or position == gpNonVariant, then
            // we're disallowing covariant and contravariant completely
            if (typ == ELEMENT_TYPE_VALUETYPE || position == gpNonVariant)
            {
                for (unsigned i = 0; i < ntypars; i++)
                {
                    if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant))
                        return FALSE;

                    IfFailThrow(psig.SkipExactlyOne());
                }
            }
            // Otherwise we need to take notice of the variance annotation on each type parameter to the generic type
            else
            {
                mdTypeDef typeDef;
                Module *  pDefModule;
                // This will also be resolved later; so, give up and don't indicate a variance failure
                if (!ClassLoader::ResolveTokenToTypeDefThrowing(pModule, typeref, &pDefModule, &typeDef))
                    return TRUE;

                HENUMInternal hEnumGenericPars;
                if (FAILED(pDefModule->GetMDImport()->EnumInit(mdtGenericParam, typeDef, &hEnumGenericPars)))
                {
                    pDefModule->GetAssembly()->ThrowTypeLoadException(pDefModule->GetMDImport(), typeDef, IDS_CLASSLOAD_BADFORMAT);
                }

                for (unsigned i = 0; i < ntypars; i++)
                {
                    mdGenericParam tkTyPar;
                    pDefModule->GetMDImport()->EnumNext(&hEnumGenericPars, &tkTyPar);
                    DWORD flags;
                    if (FAILED(pDefModule->GetMDImport()->GetGenericParamProps(tkTyPar, NULL, &flags, NULL, NULL, NULL)))
                    {
                        pDefModule->GetAssembly()->ThrowTypeLoadException(pDefModule->GetMDImport(), typeDef, IDS_CLASSLOAD_BADFORMAT);
                    }
                    CorGenericParamAttr genPosition = (CorGenericParamAttr) (flags & gpVarianceMask);

                    // If the surrounding context is contravariant then we need to flip the variance of this parameter
                    if (position == gpContravariant)
                    {
                        genPosition = genPosition == gpCovariant ? gpContravariant
                                    : genPosition == gpContravariant ? gpCovariant
                                    : gpNonVariant;
                    }
                    if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, genPosition))
                        return FALSE;

                    IfFailThrow(psig.SkipExactlyOne());
                }
                pDefModule->GetMDImport()->EnumClose(&hEnumGenericPars);
            }
            return TRUE;
        }

        // Arrays behave covariantly
        case ELEMENT_TYPE_ARRAY:
        case ELEMENT_TYPE_SZARRAY:
            return CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, position);

        // Byrefs and pointers behave non-variantly
        case ELEMENT_TYPE_BYREF:
        case ELEMENT_TYPE_PTR:
            return CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant);

        case ELEMENT_TYPE_FNPTR:
        {
            // Calling convention
            IfFailThrow(psig.GetData(NULL));

            // Get arg count;
            uint32_t cArgs;
            IfFailThrow(psig.GetData(&cArgs));

            // Conservatively, assume non-variance of function pointer types
            // (return type first, then each argument).
            if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant))
                return FALSE;

            IfFailThrow(psig.SkipExactlyOne());

            for (unsigned i = 0; i < cArgs; i++)
            {
                if (!CheckVarianceInSig(numGenericArgs, pVarianceInfo, pModule, psig, gpNonVariant))
                    return FALSE;

                IfFailThrow(psig.SkipExactlyOne());
            }
            return TRUE;
        }

        default:
            THROW_BAD_FORMAT(IDS_CLASSLOAD_BAD_VARIANCE_SIG, pModule);
    }

    return FALSE;
} // EEClass::CheckVarianceInSig
// Replace pMT's approximate instantiated parent with the exact one, propagate
// inherited generic dictionary pointers, and load the exact interface map.
// Part of the CLASS_LOAD_EXACTPARENTS phase.
void
ClassLoader::LoadExactParentAndInterfacesTransitively(MethodTable *pMT)
{
    CONTRACTL
    {
        STANDARD_VM_CHECK;
        PRECONDITION(CheckPointer(pMT));
    }
    CONTRACTL_END;

    TypeHandle thisTH(pMT);
    SigTypeContext typeContext(thisTH);
    IMDInternalImport* pInternalImport = pMT->GetMDImport();
    MethodTable *pParentMT = pMT->GetParentMethodTable();

    if (pParentMT != NULL && pParentMT->HasInstantiation())
    {
        // Fill in exact parent if it's instantiated
        mdToken crExtends;
        IfFailThrow(pInternalImport->GetTypeDefProps(
            pMT->GetCl(),
            NULL,
            &crExtends));
        _ASSERTE(!IsNilToken(crExtends));
        _ASSERTE(TypeFromToken(crExtends) == mdtTypeSpec);

        // Load the 'extends' typespec in this type's own instantiation context,
        // requiring a fully instantiated (exact) result.
        TypeHandle newParent = ClassLoader::LoadTypeDefOrRefOrSpecThrowing(pMT->GetModule(), crExtends, &typeContext,
                                                                          ClassLoader::ThrowIfNotFound,
                                                                          ClassLoader::FailIfUninstDefOrRef,
                                                                          ClassLoader::LoadTypes,
                                                                          CLASS_LOAD_EXACTPARENTS,
                                                                          TRUE);

        MethodTable* pNewParentMT = newParent.AsMethodTable();
        if (pNewParentMT != pParentMT)
        {
            LOG((LF_CLASSLOADER, LL_INFO1000, "GENERICS: Replaced approximate parent %s with exact parent %s from token %x\n", pParentMT->GetDebugClassName(), pNewParentMT->GetDebugClassName(), crExtends));

            // SetParentMethodTable is not used here since we want to update the indirection cell in the NGen case
            *pMT->GetParentMethodTableValuePtr() = pNewParentMT;

            pParentMT = pNewParentMT;
        }
    }

    if (pParentMT != NULL)
    {
        // Make sure the (possibly just-replaced) parent has reached this level too.
        EnsureLoaded(pParentMT, CLASS_LOAD_EXACTPARENTS);
    }

    if (pParentMT != NULL && pParentMT->HasPerInstInfo())
    {
        // Copy down all inherited dictionary pointers which we
        // could not embed.
        DWORD nDicts = pParentMT->GetNumDicts();
        for (DWORD iDict = 0; iDict < nDicts; iDict++)
        {
            if (pMT->GetPerInstInfo()[iDict] != pParentMT->GetPerInstInfo()[iDict])
            {
                pMT->GetPerInstInfo()[iDict] = pParentMT->GetPerInstInfo()[iDict];
            }
        }
    }

    MethodTableBuilder::LoadExactInterfaceMap(pMT);

#ifdef _DEBUG
    if (g_pConfig->ShouldDumpOnClassLoad(pMT->GetDebugClassName()))
    {
        pMT->Debug_DumpInterfaceMap("Exact");
    }
#endif //_DEBUG
} // ClassLoader::LoadExactParentAndInterfacesTransitively
// CLASS_LOAD_EXACTPARENTS phase of loading:
// * Load the base class at exact instantiation
// * Recurse LoadExactParents up parent hierarchy
// * Load explicitly declared interfaces on this class at exact instantiation
// * Fixup vtable
//
/*static*/
// Drive the CLASS_LOAD_EXACTPARENTS phase for pMT (see the comment block above
// for the phase's steps). On return pMT is marked as having exact parents.
void ClassLoader::LoadExactParents(MethodTable *pMT)
{
    CONTRACT_VOID
    {
        STANDARD_VM_CHECK;
        PRECONDITION(CheckPointer(pMT));
        POSTCONDITION(pMT->CheckLoadLevel(CLASS_LOAD_EXACTPARENTS));
    }
    CONTRACT_END;

    // Capture the approximate parent before LoadExactParentAndInterfacesTransitively
    // replaces it; CopyExactParentSlots needs the approximate one to fix up the vtable.
    MethodTable *pApproxParentMT = pMT->GetParentMethodTable();

    if (!pMT->IsCanonicalMethodTable())
    {
        // The shared canonical method table must reach this level first.
        EnsureLoaded(TypeHandle(pMT->GetCanonicalMethodTable()), CLASS_LOAD_EXACTPARENTS);
    }

    LoadExactParentAndInterfacesTransitively(pMT);

    MethodTableBuilder::CopyExactParentSlots(pMT, pApproxParentMT);

    PropagateCovariantReturnMethodImplSlots(pMT);

    // We can now mark this type as having exact parents
    pMT->SetHasExactParent();

    RETURN;
}
// Get CorElementType of the reduced type of a type.
// The reduced type concept is described in ECMA 335 chapter I.8.7
//
/*static*/
CorElementType ClassLoader::GetReducedTypeElementType(TypeHandle hType)
{
    // The reduced type (ECMA-335 I.8.7) folds each unsigned integral element
    // type onto its signed counterpart of the same width; every other element
    // type reduces to itself.
    const CorElementType et = hType.GetVerifierCorElementType();
    switch (et)
    {
    case ELEMENT_TYPE_U1: return ELEMENT_TYPE_I1;
    case ELEMENT_TYPE_U2: return ELEMENT_TYPE_I2;
    case ELEMENT_TYPE_U4: return ELEMENT_TYPE_I4;
    case ELEMENT_TYPE_U8: return ELEMENT_TYPE_I8;
    case ELEMENT_TYPE_U:  return ELEMENT_TYPE_I;
    default:              return et;
    }
}
// Get CorElementType of the verification type of a type.
// The verification type concepts is described in ECMA 335 chapter I.8.7