12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584 |
- /*
- * Copyright (c) 2005-2007 Apple Inc. All Rights Reserved.
- *
- * @APPLE_LICENSE_HEADER_START@
- *
- * This file contains Original Code and/or Modifications of Original Code
- * as defined in and that are subject to the Apple Public Source License
- * Version 2.0 (the 'License'). You may not use this file except in
- * compliance with the License. Please obtain a copy of the License at
- * http://www.opensource.apple.com/apsl/ and read it before using this
- * file.
- *
- * The Original Code and all software distributed under the License are
- * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
- * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
- * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
- * Please see the License for the specific language governing rights and
- * limitations under the License.
- *
- * @APPLE_LICENSE_HEADER_END@
- */
- #ifndef _OBJC_RUNTIME_NEW_H
- #define _OBJC_RUNTIME_NEW_H
- // class_data_bits_t is the class_t->data field (class_rw_t pointer plus flags)
- // The extra bits are optimized for the retain/release and alloc/dealloc paths.
- // Values for class_ro_t->flags
- // These are emitted by the compiler and are part of the ABI.
- // Note: See CGObjCNonFragileABIMac::BuildClassRoTInitializer in clang
- // class is a metaclass
- #define RO_META (1<<0)
- // class is a root class
- #define RO_ROOT (1<<1)
- // class has .cxx_construct/destruct implementations
- #define RO_HAS_CXX_STRUCTORS (1<<2)
- // class has +load implementation
- // #define RO_HAS_LOAD_METHOD (1<<3)
- // class has visibility=hidden set
- #define RO_HIDDEN (1<<4)
- // class has attribute(objc_exception): OBJC_EHTYPE_$_ThisClass is non-weak
- #define RO_EXCEPTION (1<<5)
- // class has ro field for Swift metadata initializer callback
- #define RO_HAS_SWIFT_INITIALIZER (1<<6)
- // class compiled with ARC
- #define RO_IS_ARC (1<<7)
- // class has .cxx_destruct but no .cxx_construct (with RO_HAS_CXX_STRUCTORS)
- #define RO_HAS_CXX_DTOR_ONLY (1<<8)
- // class is not ARC but has ARC-style weak ivar layout
- #define RO_HAS_WEAK_WITHOUT_ARC (1<<9)
- // class does not allow associated objects on instances
- #define RO_FORBIDS_ASSOCIATED_OBJECTS (1<<10)
- // class is in an unloadable bundle - must never be set by compiler
- #define RO_FROM_BUNDLE (1<<29)
- // class is unrealized future class - must never be set by compiler
- #define RO_FUTURE (1<<30)
- // class is realized - must never be set by compiler
- #define RO_REALIZED (1<<31)
- // Values for class_rw_t->flags
- // These are not emitted by the compiler and are never used in class_ro_t.
- // Their presence should be considered in future ABI versions.
- // class_t->data is class_rw_t, not class_ro_t
- #define RW_REALIZED (1<<31)
- // class is unresolved future class
- #define RW_FUTURE (1<<30)
- // class is initialized
- #define RW_INITIALIZED (1<<29)
- // class is initializing
- #define RW_INITIALIZING (1<<28)
- // class_rw_t->ro is heap copy of class_ro_t
- #define RW_COPIED_RO (1<<27)
- // class allocated but not yet registered
- #define RW_CONSTRUCTING (1<<26)
- // class allocated and registered
- #define RW_CONSTRUCTED (1<<25)
- // available for use; was RW_FINALIZE_ON_MAIN_THREAD
- // #define RW_24 (1<<24)
- // class +load has been called
- #define RW_LOADED (1<<23)
- #if !SUPPORT_NONPOINTER_ISA
- // class instances may have associative references
- #define RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS (1<<22)
- #endif
- // class has instance-specific GC layout
- #define RW_HAS_INSTANCE_SPECIFIC_LAYOUT (1 << 21)
- // class does not allow associated objects on its instances
- #define RW_FORBIDS_ASSOCIATED_OBJECTS (1<<20)
- // class has started realizing but not yet completed it
- #define RW_REALIZING (1<<19)
- // NOTE: MORE RW_ FLAGS DEFINED BELOW
- // Values for class_rw_t->flags (RW_*), cache_t->_flags (FAST_CACHE_*),
- // or class_t->bits (FAST_*).
- //
- // FAST_* and FAST_CACHE_* are stored on the class, reducing pointer indirection.
- #if __LP64__
- // class is a Swift class from the pre-stable Swift ABI
- #define FAST_IS_SWIFT_LEGACY (1UL<<0)
- // class is a Swift class from the stable Swift ABI
- #define FAST_IS_SWIFT_STABLE (1UL<<1)
- // class or superclass has default retain/release/autorelease/retainCount/
- // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
- #define FAST_HAS_DEFAULT_RR (1UL<<2)
- // data pointer
- #define FAST_DATA_MASK 0x00007ffffffffff8UL
- #if __arm64__
- // class or superclass has .cxx_construct/.cxx_destruct implementation
- // FAST_CACHE_HAS_CXX_DTOR is the first bit so that setting it in
- // isa_t::has_cxx_dtor is a single bfi
- #define FAST_CACHE_HAS_CXX_DTOR (1<<0)
- #define FAST_CACHE_HAS_CXX_CTOR (1<<1)
- // Denormalized RO_META to avoid an indirection
- #define FAST_CACHE_META (1<<2)
- #else
- // Denormalized RO_META to avoid an indirection
- #define FAST_CACHE_META (1<<0)
- // class or superclass has .cxx_construct/.cxx_destruct implementation
- // FAST_CACHE_HAS_CXX_DTOR is chosen to alias with isa_t::has_cxx_dtor
- #define FAST_CACHE_HAS_CXX_CTOR (1<<1)
- #define FAST_CACHE_HAS_CXX_DTOR (1<<2)
- #endif
- // Fast Alloc fields:
- // This stores the word-aligned size of instances + "ALLOC_DELTA16",
- // or 0 if the instance size doesn't fit.
- //
- // These bits occupy the same bit positions as the instance size, so that
- // the size can be extracted with a simple mask operation.
- //
- // FAST_CACHE_ALLOC_MASK16 allows extracting the instance size rounded
- // up to the next 16 byte boundary, which is a fastpath for
- // _objc_rootAllocWithZone()
- #define FAST_CACHE_ALLOC_MASK 0x1ff8
- #define FAST_CACHE_ALLOC_MASK16 0x1ff0
- #define FAST_CACHE_ALLOC_DELTA16 0x0008
- // class's instances requires raw isa
- #define FAST_CACHE_REQUIRES_RAW_ISA (1<<13)
- // class or superclass has default alloc/allocWithZone: implementation
- // Note this is stored in the metaclass.
- #define FAST_CACHE_HAS_DEFAULT_AWZ (1<<14)
- // class or superclass has default new/self/class/respondsToSelector/isKindOfClass
- #define FAST_CACHE_HAS_DEFAULT_CORE (1<<15)
- #else
- // class or superclass has .cxx_construct implementation
- #define RW_HAS_CXX_CTOR (1<<18)
- // class or superclass has .cxx_destruct implementation
- #define RW_HAS_CXX_DTOR (1<<17)
- // class or superclass has default alloc/allocWithZone: implementation
- // Note this is stored in the metaclass.
- #define RW_HAS_DEFAULT_AWZ (1<<16)
- // class's instances requires raw isa
- #if SUPPORT_NONPOINTER_ISA
- #define RW_REQUIRES_RAW_ISA (1<<15)
- #endif
- // class or superclass has default retain/release/autorelease/retainCount/
- // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
- #define RW_HAS_DEFAULT_RR (1<<14)
- // class or superclass has default new/self/class/respondsToSelector/isKindOfClass
- #define RW_HAS_DEFAULT_CORE (1<<13)
- // class is a Swift class from the pre-stable Swift ABI
- #define FAST_IS_SWIFT_LEGACY (1UL<<0)
- // class is a Swift class from the stable Swift ABI
- #define FAST_IS_SWIFT_STABLE (1UL<<1)
- // data pointer
- #define FAST_DATA_MASK 0xfffffffcUL
- #endif // __LP64__
- // The Swift ABI requires that these bits be defined like this on all platforms.
- static_assert(FAST_IS_SWIFT_LEGACY == 1, "resistance is futile");
- static_assert(FAST_IS_SWIFT_STABLE == 2, "resistance is futile");
- #if __LP64__
- typedef uint32_t mask_t; // x86_64 & arm64 asm are less efficient with 16-bits
- #else
- typedef uint16_t mask_t;
- #endif
- typedef uintptr_t SEL;
- struct swift_class_t;
- enum Atomicity { Atomic = true, NotAtomic = false };
- enum IMPEncoding { Encoded = true, Raw = false };
- struct bucket_t {
- private:
- // IMP-first is better for arm64e ptrauth and no worse for arm64.
- // SEL-first is better for armv7* and i386 and x86_64.
- #if __arm64__
- explicit_atomic<uintptr_t> _imp;
- explicit_atomic<SEL> _sel;
- #else
- explicit_atomic<SEL> _sel;
- explicit_atomic<uintptr_t> _imp;
- #endif
- // Compute the ptrauth signing modifier from &_imp, newSel, and cls.
- uintptr_t modifierForSEL(SEL newSel, Class cls) const {
- return (uintptr_t)&_imp ^ (uintptr_t)newSel ^ (uintptr_t)cls;
- }
- // Sign newImp, with &_imp, newSel, and cls as modifiers.
- uintptr_t encodeImp(IMP newImp, SEL newSel, Class cls) const {
- if (!newImp) return 0;
- #if CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_PTRAUTH
- return (uintptr_t)
- ptrauth_auth_and_resign(newImp,
- ptrauth_key_function_pointer, 0,
- ptrauth_key_process_dependent_code,
- modifierForSEL(newSel, cls));
- #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_ISA_XOR
- return (uintptr_t)newImp ^ (uintptr_t)cls;
- #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_NONE
- return (uintptr_t)newImp;
- #else
- #error Unknown method cache IMP encoding.
- #endif
- }
- public:
- inline SEL sel() const { return _sel.load(memory_order::memory_order_relaxed); }
- inline IMP imp(Class cls) const {
- uintptr_t imp = _imp.load(memory_order::memory_order_relaxed);
- if (!imp) return nil;
- #if CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_PTRAUTH
- SEL sel = _sel.load(memory_order::memory_order_relaxed);
- return (IMP)
- ptrauth_auth_and_resign((const void *)imp,
- ptrauth_key_process_dependent_code,
- modifierForSEL(sel, cls),
- ptrauth_key_function_pointer, 0);
- #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_ISA_XOR
- return (IMP)(imp ^ (uintptr_t)cls);
- #elif CACHE_IMP_ENCODING == CACHE_IMP_ENCODING_NONE
- return (IMP)imp;
- #else
- #error Unknown method cache IMP encoding.
- #endif
- }
- template <Atomicity, IMPEncoding>
- void set(SEL newSel, IMP newImp, Class cls);
- };
- struct cache_t {
- #if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_OUTLINED
- explicit_atomic<struct bucket_t *> _buckets;
- explicit_atomic<mask_t> _mask;
- #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
- explicit_atomic<uintptr_t> _maskAndBuckets;
- mask_t _mask_unused;
-
- // How much the mask is shifted by.
- static constexpr uintptr_t maskShift = 48;
-
- // Additional bits after the mask which must be zero. msgSend
- // takes advantage of these additional bits to construct the value
- // `mask << 4` from `_maskAndBuckets` in a single instruction.
- static constexpr uintptr_t maskZeroBits = 4;
-
- // The largest mask value we can store.
- static constexpr uintptr_t maxMask = ((uintptr_t)1 << (64 - maskShift)) - 1;
-
- // The mask applied to `_maskAndBuckets` to retrieve the buckets pointer.
- static constexpr uintptr_t bucketsMask = ((uintptr_t)1 << (maskShift - maskZeroBits)) - 1;
-
- // Ensure we have enough bits for the buckets pointer.
- static_assert(bucketsMask >= MACH_VM_MAX_ADDRESS, "Bucket field doesn't have enough bits for arbitrary pointers.");
- #elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
- // _maskAndBuckets stores the mask shift in the low 4 bits, and
- // the buckets pointer in the remainder of the value. The mask
- // shift is the value where (0xffff >> shift) produces the correct
- // mask. This is equal to 16 - log2(cache_size).
- explicit_atomic<uintptr_t> _maskAndBuckets;
- mask_t _mask_unused;
- static constexpr uintptr_t maskBits = 4;
- static constexpr uintptr_t maskMask = (1 << maskBits) - 1;
- static constexpr uintptr_t bucketsMask = ~maskMask;
- #else
- #error Unknown cache mask storage type.
- #endif
-
- #if __LP64__
- uint16_t _flags;
- #endif
- uint16_t _occupied;
- public:
- static bucket_t *emptyBuckets();
-
- struct bucket_t *buckets();
- mask_t mask();
- mask_t occupied();
- void incrementOccupied();
- void setBucketsAndMask(struct bucket_t *newBuckets, mask_t newMask);
- void initializeToEmpty();
- unsigned capacity();
- bool isConstantEmptyCache();
- bool canBeFreed();
- #if __LP64__
- bool getBit(uint16_t flags) const {
- return _flags & flags;
- }
- void setBit(uint16_t set) {
- __c11_atomic_fetch_or((_Atomic(uint16_t) *)&_flags, set, __ATOMIC_RELAXED);
- }
- void clearBit(uint16_t clear) {
- __c11_atomic_fetch_and((_Atomic(uint16_t) *)&_flags, ~clear, __ATOMIC_RELAXED);
- }
- #endif
- #if FAST_CACHE_ALLOC_MASK
- bool hasFastInstanceSize(size_t extra) const
- {
- if (__builtin_constant_p(extra) && extra == 0) {
- return _flags & FAST_CACHE_ALLOC_MASK16;
- }
- return _flags & FAST_CACHE_ALLOC_MASK;
- }
- size_t fastInstanceSize(size_t extra) const
- {
- ASSERT(hasFastInstanceSize(extra));
- if (__builtin_constant_p(extra) && extra == 0) {
- return _flags & FAST_CACHE_ALLOC_MASK16;
- } else {
- size_t size = _flags & FAST_CACHE_ALLOC_MASK;
- // remove the FAST_CACHE_ALLOC_DELTA16 that was added
- // by setFastInstanceSize
- return align16(size + extra - FAST_CACHE_ALLOC_DELTA16);
- }
- }
- void setFastInstanceSize(size_t newSize)
- {
- // Set during realization or construction only. No locking needed.
- uint16_t newBits = _flags & ~FAST_CACHE_ALLOC_MASK;
- uint16_t sizeBits;
- // Adding FAST_CACHE_ALLOC_DELTA16 allows for FAST_CACHE_ALLOC_MASK16
- // to yield the proper 16byte aligned allocation size with a single mask
- sizeBits = word_align(newSize) + FAST_CACHE_ALLOC_DELTA16;
- sizeBits &= FAST_CACHE_ALLOC_MASK;
- if (newSize <= sizeBits) {
- newBits |= sizeBits;
- }
- _flags = newBits;
- }
- #else
- bool hasFastInstanceSize(size_t extra) const {
- return false;
- }
- size_t fastInstanceSize(size_t extra) const {
- abort();
- }
- void setFastInstanceSize(size_t extra) {
- // nothing
- }
- #endif
- static size_t bytesForCapacity(uint32_t cap);
- static struct bucket_t * endMarker(struct bucket_t *b, uint32_t cap);
- void reallocate(mask_t oldCapacity, mask_t newCapacity, bool freeOld);
- void insert(Class cls, SEL sel, IMP imp, id receiver);
- static void bad_cache(id receiver, SEL sel, Class isa) __attribute__((noreturn, cold));
- };
- // classref_t is unremapped class_t*
- typedef struct classref * classref_t;
- #ifdef __PTRAUTH_INTRINSICS__
- # define StubClassInitializerPtrauth __ptrauth(ptrauth_key_function_pointer, 1, 0xc671)
- #else
- # define StubClassInitializerPtrauth
- #endif
- struct stub_class_t {
- uintptr_t isa;
- _objc_swiftMetadataInitializer StubClassInitializerPtrauth initializer;
- };
- /***********************************************************************
- * entsize_list_tt<Element, List, FlagMask>
- * Generic implementation of an array of non-fragile structs.
- *
- * Element is the struct type (e.g. method_t)
- * List is the specialization of entsize_list_tt (e.g. method_list_t)
- * FlagMask is used to stash extra bits in the entsize field
- * (e.g. method list fixup markers)
- **********************************************************************/
- template <typename Element, typename List, uint32_t FlagMask>
- struct entsize_list_tt {
- uint32_t entsizeAndFlags;
- uint32_t count;
- Element first;
- uint32_t entsize() const {
- return entsizeAndFlags & ~FlagMask;
- }
- uint32_t flags() const {
- return entsizeAndFlags & FlagMask;
- }
- Element& getOrEnd(uint32_t i) const {
- ASSERT(i <= count);
- return *(Element *)((uint8_t *)&first + i*entsize());
- }
- Element& get(uint32_t i) const {
- ASSERT(i < count);
- return getOrEnd(i);
- }
- size_t byteSize() const {
- return byteSize(entsize(), count);
- }
-
- static size_t byteSize(uint32_t entsize, uint32_t count) {
- return sizeof(entsize_list_tt) + (count-1)*entsize;
- }
- List *duplicate() const {
- auto *dup = (List *)calloc(this->byteSize(), 1);
- dup->entsizeAndFlags = this->entsizeAndFlags;
- dup->count = this->count;
- std::copy(begin(), end(), dup->begin());
- return dup;
- }
- struct iterator;
- const iterator begin() const {
- return iterator(*static_cast<const List*>(this), 0);
- }
- iterator begin() {
- return iterator(*static_cast<const List*>(this), 0);
- }
- const iterator end() const {
- return iterator(*static_cast<const List*>(this), count);
- }
- iterator end() {
- return iterator(*static_cast<const List*>(this), count);
- }
- struct iterator {
- uint32_t entsize;
- uint32_t index; // keeping track of this saves a divide in operator-
- Element* element;
- typedef std::random_access_iterator_tag iterator_category;
- typedef Element value_type;
- typedef ptrdiff_t difference_type;
- typedef Element* pointer;
- typedef Element& reference;
- iterator() { }
- iterator(const List& list, uint32_t start = 0)
- : entsize(list.entsize())
- , index(start)
- , element(&list.getOrEnd(start))
- { }
- const iterator& operator += (ptrdiff_t delta) {
- element = (Element*)((uint8_t *)element + delta*entsize);
- index += (int32_t)delta;
- return *this;
- }
- const iterator& operator -= (ptrdiff_t delta) {
- element = (Element*)((uint8_t *)element - delta*entsize);
- index -= (int32_t)delta;
- return *this;
- }
- const iterator operator + (ptrdiff_t delta) const {
- return iterator(*this) += delta;
- }
- const iterator operator - (ptrdiff_t delta) const {
- return iterator(*this) -= delta;
- }
- iterator& operator ++ () { *this += 1; return *this; }
- iterator& operator -- () { *this -= 1; return *this; }
- iterator operator ++ (int) {
- iterator result(*this); *this += 1; return result;
- }
- iterator operator -- (int) {
- iterator result(*this); *this -= 1; return result;
- }
- ptrdiff_t operator - (const iterator& rhs) const {
- return (ptrdiff_t)this->index - (ptrdiff_t)rhs.index;
- }
- Element& operator * () const { return *element; }
- Element* operator -> () const { return element; }
- operator Element& () const { return *element; }
- bool operator == (const iterator& rhs) const {
- return this->element == rhs.element;
- }
- bool operator != (const iterator& rhs) const {
- return this->element != rhs.element;
- }
- bool operator < (const iterator& rhs) const {
- return this->element < rhs.element;
- }
- bool operator > (const iterator& rhs) const {
- return this->element > rhs.element;
- }
- };
- };
- struct method_t {
- SEL name;
- const char *types;
- MethodListIMP imp;
- struct SortBySELAddress :
- public std::binary_function<const method_t&,
- const method_t&, bool>
- {
- bool operator() (const method_t& lhs,
- const method_t& rhs)
- { return lhs.name < rhs.name; }
- };
- };
- struct ivar_t {
- #if __x86_64__
- // *offset was originally 64-bit on some x86_64 platforms.
- // We read and write only 32 bits of it.
- // Some metadata provides all 64 bits. This is harmless for unsigned
- // little-endian values.
- // Some code uses all 64 bits. class_addIvar() over-allocates the
- // offset for their benefit.
- #endif
- int32_t *offset;
- const char *name;
- const char *type;
- // alignment is sometimes -1; use alignment() instead
- uint32_t alignment_raw;
- uint32_t size;
- uint32_t alignment() const {
- if (alignment_raw == ~(uint32_t)0) return 1U << WORD_SHIFT;
- return 1 << alignment_raw;
- }
- };
- struct property_t {
- const char *name;
- const char *attributes;
- };
- // Two bits of entsize are used for fixup markers.
- struct method_list_t : entsize_list_tt<method_t, method_list_t, 0x3> {
- bool isUniqued() const;
- bool isFixedUp() const;
- void setFixedUp();
- uint32_t indexOfMethod(const method_t *meth) const {
- uint32_t i =
- (uint32_t)(((uintptr_t)meth - (uintptr_t)this) / entsize());
- ASSERT(i < count);
- return i;
- }
- };
- struct ivar_list_t : entsize_list_tt<ivar_t, ivar_list_t, 0> {
- bool containsIvar(Ivar ivar) const {
- return (ivar >= (Ivar)&*begin() && ivar < (Ivar)&*end());
- }
- };
- struct property_list_t : entsize_list_tt<property_t, property_list_t, 0> {
- };
- typedef uintptr_t protocol_ref_t; // protocol_t *, but unremapped
- // Values for protocol_t->flags
- #define PROTOCOL_FIXED_UP_2 (1<<31) // must never be set by compiler
- #define PROTOCOL_FIXED_UP_1 (1<<30) // must never be set by compiler
- #define PROTOCOL_IS_CANONICAL (1<<29) // must never be set by compiler
- // Bits 0..15 are reserved for Swift's use.
- #define PROTOCOL_FIXED_UP_MASK (PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2)
- struct protocol_t : objc_object {
- const char *mangledName;
- struct protocol_list_t *protocols;
- method_list_t *instanceMethods;
- method_list_t *classMethods;
- method_list_t *optionalInstanceMethods;
- method_list_t *optionalClassMethods;
- property_list_t *instanceProperties;
- uint32_t size; // sizeof(protocol_t)
- uint32_t flags;
- // Fields below this point are not always present on disk.
- const char **_extendedMethodTypes;
- const char *_demangledName;
- property_list_t *_classProperties;
- const char *demangledName();
- const char *nameForLogging() {
- return demangledName();
- }
- bool isFixedUp() const;
- void setFixedUp();
- bool isCanonical() const;
- void clearIsCanonical();
- # define HAS_FIELD(f) (size >= offsetof(protocol_t, f) + sizeof(f))
- bool hasExtendedMethodTypesField() const {
- return HAS_FIELD(_extendedMethodTypes);
- }
- bool hasDemangledNameField() const {
- return HAS_FIELD(_demangledName);
- }
- bool hasClassPropertiesField() const {
- return HAS_FIELD(_classProperties);
- }
- # undef HAS_FIELD
- const char **extendedMethodTypes() const {
- return hasExtendedMethodTypesField() ? _extendedMethodTypes : nil;
- }
- property_list_t *classProperties() const {
- return hasClassPropertiesField() ? _classProperties : nil;
- }
- };
- struct protocol_list_t {
- // count is pointer-sized by accident.
- uintptr_t count;
- protocol_ref_t list[0]; // variable-size
- size_t byteSize() const {
- return sizeof(*this) + count*sizeof(list[0]);
- }
- protocol_list_t *duplicate() const {
- return (protocol_list_t *)memdup(this, this->byteSize());
- }
- typedef protocol_ref_t* iterator;
- typedef const protocol_ref_t* const_iterator;
- const_iterator begin() const {
- return list;
- }
- iterator begin() {
- return list;
- }
- const_iterator end() const {
- return list + count;
- }
- iterator end() {
- return list + count;
- }
- };
- struct class_ro_t {
- uint32_t flags;
- uint32_t instanceStart;
- uint32_t instanceSize;
- #ifdef __LP64__
- uint32_t reserved;
- #endif
- const uint8_t * ivarLayout;
-
- const char * name;
- method_list_t * baseMethodList;
- protocol_list_t * baseProtocols;
- const ivar_list_t * ivars;
- const uint8_t * weakIvarLayout;
- property_list_t *baseProperties;
- // This field exists only when RO_HAS_SWIFT_INITIALIZER is set.
- _objc_swiftMetadataInitializer __ptrauth_objc_method_list_imp _swiftMetadataInitializer_NEVER_USE[0];
- _objc_swiftMetadataInitializer swiftMetadataInitializer() const {
- if (flags & RO_HAS_SWIFT_INITIALIZER) {
- return _swiftMetadataInitializer_NEVER_USE[0];
- } else {
- return nil;
- }
- }
- method_list_t *baseMethods() const {
- return baseMethodList;
- }
- class_ro_t *duplicate() const {
- if (flags & RO_HAS_SWIFT_INITIALIZER) {
- size_t size = sizeof(*this) + sizeof(_swiftMetadataInitializer_NEVER_USE[0]);
- class_ro_t *ro = (class_ro_t *)memdup(this, size);
- ro->_swiftMetadataInitializer_NEVER_USE[0] = this->_swiftMetadataInitializer_NEVER_USE[0];
- return ro;
- } else {
- size_t size = sizeof(*this);
- class_ro_t *ro = (class_ro_t *)memdup(this, size);
- return ro;
- }
- }
- };
- /***********************************************************************
- * list_array_tt<Element, List>
- * Generic implementation for metadata that can be augmented by categories.
- *
- * Element is the underlying metadata type (e.g. method_t)
- * List is the metadata's list type (e.g. method_list_t)
- *
- * A list_array_tt has one of three values:
- * - empty
- * - a pointer to a single list
- * - an array of pointers to lists
- *
- * countLists/beginLists/endLists iterate the metadata lists
- * count/begin/end iterate the underlying metadata elements
- **********************************************************************/
- template <typename Element, typename List>
- class list_array_tt {
- struct array_t {
- uint32_t count;
- List* lists[0];
- static size_t byteSize(uint32_t count) {
- return sizeof(array_t) + count*sizeof(lists[0]);
- }
- size_t byteSize() {
- return byteSize(count);
- }
- };
- protected:
- class iterator {
- List **lists;
- List **listsEnd;
- typename List::iterator m, mEnd;
- public:
- iterator(List **begin, List **end)
- : lists(begin), listsEnd(end)
- {
- if (begin != end) {
- m = (*begin)->begin();
- mEnd = (*begin)->end();
- }
- }
- const Element& operator * () const {
- return *m;
- }
- Element& operator * () {
- return *m;
- }
- bool operator != (const iterator& rhs) const {
- if (lists != rhs.lists) return true;
- if (lists == listsEnd) return false; // m is undefined
- if (m != rhs.m) return true;
- return false;
- }
- const iterator& operator ++ () {
- ASSERT(m != mEnd);
- m++;
- if (m == mEnd) {
- ASSERT(lists != listsEnd);
- lists++;
- if (lists != listsEnd) {
- m = (*lists)->begin();
- mEnd = (*lists)->end();
- }
- }
- return *this;
- }
- };
- private:
- union {
- List* list;
- uintptr_t arrayAndFlag;
- };
- bool hasArray() const {
- return arrayAndFlag & 1;
- }
- array_t *array() {
- return (array_t *)(arrayAndFlag & ~1);
- }
- void setArray(array_t *array) {
- arrayAndFlag = (uintptr_t)array | 1;
- }
- public:
- uint32_t count() {
- uint32_t result = 0;
- for (auto lists = beginLists(), end = endLists();
- lists != end;
- ++lists)
- {
- result += (*lists)->count;
- }
- return result;
- }
- iterator begin() {
- return iterator(beginLists(), endLists());
- }
- iterator end() {
- List **e = endLists();
- return iterator(e, e);
- }
- uint32_t countLists() {
- if (hasArray()) {
- return array()->count;
- } else if (list) {
- return 1;
- } else {
- return 0;
- }
- }
- List** beginLists() {
- if (hasArray()) {
- return array()->lists;
- } else {
- return &list;
- }
- }
- List** endLists() {
- if (hasArray()) {
- return array()->lists + array()->count;
- } else if (list) {
- return &list + 1;
- } else {
- return &list;
- }
- }
- void attachLists(List* const * addedLists, uint32_t addedCount) {
- if (addedCount == 0) return;
- if (hasArray()) {
- // many lists -> many lists
- uint32_t oldCount = array()->count;
- uint32_t newCount = oldCount + addedCount;
- setArray((array_t *)realloc(array(), array_t::byteSize(newCount)));
- array()->count = newCount;
- memmove(array()->lists + addedCount, array()->lists,
- oldCount * sizeof(array()->lists[0]));
- memcpy(array()->lists, addedLists,
- addedCount * sizeof(array()->lists[0]));
- }
- else if (!list && addedCount == 1) {
- // 0 lists -> 1 list
- list = addedLists[0];
- }
- else {
- // 1 list -> many lists
- List* oldList = list;
- uint32_t oldCount = oldList ? 1 : 0;
- uint32_t newCount = oldCount + addedCount;
- setArray((array_t *)malloc(array_t::byteSize(newCount)));
- array()->count = newCount;
- if (oldList) array()->lists[addedCount] = oldList;
- memcpy(array()->lists, addedLists,
- addedCount * sizeof(array()->lists[0]));
- }
- }
    // Release this container's storage: every attached list plus the
    // out-of-line array spine itself, or just the single inline list.
    // NOTE(review): uses try_free rather than free — presumably to skip
    // pointers into read-only/shared memory; confirm against its definition.
    void tryFree() {
        if (hasArray()) {
            for (uint32_t i = 0; i < array()->count; i++) {
                try_free(array()->lists[i]);
            }
            try_free(array());
        }
        else if (list) {
            try_free(list);
        }
    }
    // Deep-copy into a fresh Result (a list_array_tt subclass with the
    // same layout): the array spine is memdup'd, then each list pointer
    // is replaced with that list's own duplicate().
    template<typename Result>
    Result duplicate() {
        Result result;

        if (hasArray()) {
            array_t *a = array();
            // Copy the spine first so count/layout are in place...
            result.setArray((array_t *)memdup(a, a->byteSize()));
            // ...then overwrite each slot with a duplicated list.
            for (uint32_t i = 0; i < a->count; i++) {
                result.array()->lists[i] = a->lists[i]->duplicate();
            }
        } else if (list) {
            result.list = list->duplicate();
        } else {
            result.list = nil;
        }

        return result;
    }
- };
// Method lists for a class: a list_array_tt of method_list_t.
// Category method lists are kept at the front (attachLists prepends),
// ahead of the class's own base method list.
class method_array_t :
    public list_array_tt<method_t, method_list_t>
{
    typedef list_array_tt<method_t, method_list_t> Super;

 public:
    // Category-added lists start at the front of the list sequence.
    method_list_t **beginCategoryMethodLists() {
        return beginLists();
    }

    // End of the category-added lists; needs the class to find where
    // the base method list begins (defined out of line).
    method_list_t **endCategoryMethodLists(Class cls);

    method_array_t duplicate() {
        return Super::duplicate<method_array_t>();
    }
};
// Property lists for a class: a list_array_tt of property_list_t.
class property_array_t :
    public list_array_tt<property_t, property_list_t>
{
    typedef list_array_tt<property_t, property_list_t> Super;

 public:
    property_array_t duplicate() {
        return Super::duplicate<property_array_t>();
    }
};
// Protocol lists for a class: a list_array_tt of protocol_list_t
// (elements are protocol_ref_t, i.e. possibly-remapped protocol refs).
class protocol_array_t :
    public list_array_tt<protocol_ref_t, protocol_list_t>
{
    typedef list_array_tt<protocol_ref_t, protocol_list_t> Super;

 public:
    protocol_array_t duplicate() {
        return Super::duplicate<protocol_array_t>();
    }
};
// Runtime-writable class data, allocated when a class is realized.
// Holds the merged method/property/protocol lists (class + categories)
// and a pointer back to the compiler-emitted read-only data.
struct class_rw_t {
    // Be warned that Symbolication knows the layout of this structure.
    uint32_t flags;
    uint16_t version;
    uint16_t witness;

    const class_ro_t *ro;          // compiler-emitted read-only data

    method_array_t methods;        // base lists plus category additions
    property_array_t properties;
    protocol_array_t protocols;

    // NOTE(review): presumably the head/sibling links of this class's
    // subclass list — confirm against the realization code.
    Class firstSubclass;
    Class nextSiblingClass;

    char *demangledName;

#if SUPPORT_INDEXED_ISA
    uint32_t index;
#endif

    // Atomically OR `set` into flags (relaxed ordering).
    void setFlags(uint32_t set)
    {
        __c11_atomic_fetch_or((_Atomic(uint32_t) *)&flags, set, __ATOMIC_RELAXED);
    }

    // Atomically clear the bits in `clear` (relaxed ordering).
    void clearFlags(uint32_t clear)
    {
        __c11_atomic_fetch_and((_Atomic(uint32_t) *)&flags, ~clear, __ATOMIC_RELAXED);
    }

    // set and clear must not overlap
    // CAS loop: retries until both the set and the clear land together.
    void changeFlags(uint32_t set, uint32_t clear)
    {
        ASSERT((set & clear) == 0);

        uint32_t oldf, newf;
        do {
            oldf = flags;
            newf = (oldf | set) & ~clear;
        } while (!OSAtomicCompareAndSwap32Barrier(oldf, newf, (volatile int32_t *)&flags));
    }
};
// A class's `bits` word: a class_rw_t* (or class_ro_t* before
// realization) packed into FAST_DATA_MASK, with FAST_ flag bits in the
// remaining bits.
struct class_data_bits_t {
    friend objc_class;

    // Values are the FAST_ flags above.
    uintptr_t bits;

private:
    bool getBit(uintptr_t bit) const
    {
        return bits & bit;
    }

    // Atomically set the bits in `set` and clear the bits in `clear`.
    // set and clear must not overlap.
    void setAndClearBits(uintptr_t set, uintptr_t clear)
    {
        ASSERT((set & clear) == 0);
        uintptr_t oldBits;
        uintptr_t newBits;
        // LL/SC-style loop: retry until the combined update commits.
        do {
            oldBits = LoadExclusive(&bits);
            newBits = (oldBits | set) & ~clear;
        } while (!StoreReleaseExclusive(&bits, oldBits, newBits));
    }

    // Atomically OR in flag bits (relaxed ordering).
    void setBits(uintptr_t set) {
        __c11_atomic_fetch_or((_Atomic(uintptr_t) *)&bits, set, __ATOMIC_RELAXED);
    }

    // Atomically clear flag bits (relaxed ordering).
    void clearBits(uintptr_t clear) {
        __c11_atomic_fetch_and((_Atomic(uintptr_t) *)&bits, ~clear, __ATOMIC_RELAXED);
    }

public:
    // Pointer payload. Before realization this actually points at a
    // class_ro_t — see safe_ro() below.
    class_rw_t* data() const {
        return (class_rw_t *)(bits & FAST_DATA_MASK);
    }

    void setData(class_rw_t *newData)
    {
        ASSERT(!data() || (newData->flags & (RW_REALIZING | RW_FUTURE)));
        // Set during realization or construction only. No locking needed.
        // Use a store-release fence because there may be concurrent
        // readers of data and data's contents.
        uintptr_t newBits = (bits & ~FAST_DATA_MASK) | (uintptr_t)newData;
        atomic_thread_fence(memory_order_release);
        bits = newBits;
    }

    // Get the class's ro data, even in the presence of concurrent realization.
    // fixme this isn't really safe without a compiler barrier at least
    // and probably a memory barrier when realizeClass changes the data field
    const class_ro_t *safe_ro() {
        class_rw_t *maybe_rw = data();
        if (maybe_rw->flags & RW_REALIZED) {
            // maybe_rw is rw
            return maybe_rw->ro;
        } else {
            // maybe_rw is actually ro
            return (class_ro_t *)maybe_rw;
        }
    }

    void setClassArrayIndex(unsigned Idx) {
#if SUPPORT_INDEXED_ISA
        // 0 is unused as then we can rely on zero-initialisation from calloc.
        ASSERT(Idx > 0);
        data()->index = Idx;
#endif
    }

    // Returns 0 when indexed isas are not supported.
    unsigned classArrayIndex() {
#if SUPPORT_INDEXED_ISA
        return data()->index;
#else
        return 0;
#endif
    }

    bool isAnySwift() {
        return isSwiftStable() || isSwiftLegacy();
    }

    bool isSwiftStable() {
        return getBit(FAST_IS_SWIFT_STABLE);
    }
    // The stable/legacy Swift bits are mutually exclusive: setting one
    // clears the other in a single atomic update.
    void setIsSwiftStable() {
        setAndClearBits(FAST_IS_SWIFT_STABLE, FAST_IS_SWIFT_LEGACY);
    }

    bool isSwiftLegacy() {
        return getBit(FAST_IS_SWIFT_LEGACY);
    }
    void setIsSwiftLegacy() {
        setAndClearBits(FAST_IS_SWIFT_LEGACY, FAST_IS_SWIFT_STABLE);
    }

    // fixme remove this once the Swift runtime uses the stable bits
    bool isSwiftStable_ButAllowLegacyForNow() {
        return isAnySwift();
    }

    _objc_swiftMetadataInitializer swiftMetadataInitializer() {
        // This function is called on un-realized classes without
        // holding any locks.
        // Beware of races with other realizers.
        return safe_ro()->swiftMetadataInitializer();
    }
};
// The runtime's class structure. Inherits `isa` from objc_object.
struct objc_class : objc_object {
    // Class ISA;
    Class superclass;
    cache_t cache;             // formerly cache pointer and vtable
    class_data_bits_t bits;    // class_rw_t * plus custom rr/alloc flags

    class_rw_t *data() const {
        return bits.data();
    }
    void setData(class_rw_t *newData) {
        bits.setData(newData);
    }

    // The setInfo/clearInfo/changeInfo family mutates rw flags, so the
    // class must already be realized (or a future class whose data is
    // a real class_rw_t).
    void setInfo(uint32_t set) {
        ASSERT(isFuture() || isRealized());
        data()->setFlags(set);
    }

    void clearInfo(uint32_t clear) {
        ASSERT(isFuture() || isRealized());
        data()->clearFlags(clear);
    }

    // set and clear must not overlap
    void changeInfo(uint32_t set, uint32_t clear) {
        ASSERT(isFuture() || isRealized());
        ASSERT((set & clear) == 0);
        data()->changeFlags(set, clear);
    }

    // Custom retain/release tracking. The flag is stored inverted
    // ("has DEFAULT rr") so that zero-initialized metadata reads as
    // custom, i.e. the conservative answer.
#if FAST_HAS_DEFAULT_RR
    bool hasCustomRR() const {
        return !bits.getBit(FAST_HAS_DEFAULT_RR);
    }
    void setHasDefaultRR() {
        bits.setBits(FAST_HAS_DEFAULT_RR);
    }
    void setHasCustomRR() {
        bits.clearBits(FAST_HAS_DEFAULT_RR);
    }
#else
    bool hasCustomRR() const {
        return !(bits.data()->flags & RW_HAS_DEFAULT_RR);
    }
    void setHasDefaultRR() {
        bits.data()->setFlags(RW_HAS_DEFAULT_RR);
    }
    void setHasCustomRR() {
        bits.data()->clearFlags(RW_HAS_DEFAULT_RR);
    }
#endif

    // Custom alloc/allocWithZone tracking, same inverted-default scheme;
    // stored in the cache when a fast cache bit is available.
#if FAST_CACHE_HAS_DEFAULT_AWZ
    bool hasCustomAWZ() const {
        return !cache.getBit(FAST_CACHE_HAS_DEFAULT_AWZ);
    }
    void setHasDefaultAWZ() {
        cache.setBit(FAST_CACHE_HAS_DEFAULT_AWZ);
    }
    void setHasCustomAWZ() {
        cache.clearBit(FAST_CACHE_HAS_DEFAULT_AWZ);
    }
#else
    bool hasCustomAWZ() const {
        return !(bits.data()->flags & RW_HAS_DEFAULT_AWZ);
    }
    void setHasDefaultAWZ() {
        bits.data()->setFlags(RW_HAS_DEFAULT_AWZ);
    }
    void setHasCustomAWZ() {
        bits.data()->clearFlags(RW_HAS_DEFAULT_AWZ);
    }
#endif

    // Custom "core" method tracking, same inverted-default scheme.
#if FAST_CACHE_HAS_DEFAULT_CORE
    bool hasCustomCore() const {
        return !cache.getBit(FAST_CACHE_HAS_DEFAULT_CORE);
    }
    void setHasDefaultCore() {
        return cache.setBit(FAST_CACHE_HAS_DEFAULT_CORE);
    }
    void setHasCustomCore() {
        return cache.clearBit(FAST_CACHE_HAS_DEFAULT_CORE);
    }
#else
    bool hasCustomCore() const {
        return !(bits.data()->flags & RW_HAS_DEFAULT_CORE);
    }
    void setHasDefaultCore() {
        bits.data()->setFlags(RW_HAS_DEFAULT_CORE);
    }
    void setHasCustomCore() {
        bits.data()->clearFlags(RW_HAS_DEFAULT_CORE);
    }
#endif

    // Whether instances need C++ construction on alloc.
#if FAST_CACHE_HAS_CXX_CTOR
    bool hasCxxCtor() {
        ASSERT(isRealized());
        return cache.getBit(FAST_CACHE_HAS_CXX_CTOR);
    }
    void setHasCxxCtor() {
        cache.setBit(FAST_CACHE_HAS_CXX_CTOR);
    }
#else
    bool hasCxxCtor() {
        ASSERT(isRealized());
        return bits.data()->flags & RW_HAS_CXX_CTOR;
    }
    void setHasCxxCtor() {
        bits.data()->setFlags(RW_HAS_CXX_CTOR);
    }
#endif

    // Whether instances need C++ destruction on dealloc.
#if FAST_CACHE_HAS_CXX_DTOR
    bool hasCxxDtor() {
        ASSERT(isRealized());
        return cache.getBit(FAST_CACHE_HAS_CXX_DTOR);
    }
    void setHasCxxDtor() {
        cache.setBit(FAST_CACHE_HAS_CXX_DTOR);
    }
#else
    bool hasCxxDtor() {
        ASSERT(isRealized());
        return bits.data()->flags & RW_HAS_CXX_DTOR;
    }
    void setHasCxxDtor() {
        bits.data()->setFlags(RW_HAS_CXX_DTOR);
    }
#endif

    // Whether instances must use raw (pointer) isa, never nonpointer isa.
#if FAST_CACHE_REQUIRES_RAW_ISA
    bool instancesRequireRawIsa() {
        return cache.getBit(FAST_CACHE_REQUIRES_RAW_ISA);
    }
    void setInstancesRequireRawIsa() {
        cache.setBit(FAST_CACHE_REQUIRES_RAW_ISA);
    }
#elif SUPPORT_NONPOINTER_ISA
    bool instancesRequireRawIsa() {
        return bits.data()->flags & RW_REQUIRES_RAW_ISA;
    }
    void setInstancesRequireRawIsa() {
        bits.data()->setFlags(RW_REQUIRES_RAW_ISA);
    }
#else
    bool instancesRequireRawIsa() {
        return true;
    }
    void setInstancesRequireRawIsa() {
        // nothing
    }
#endif

    void setInstancesRequireRawIsaRecursively(bool inherited = false);
    void printInstancesRequireRawIsa(bool inherited);

    bool canAllocNonpointer() {
        ASSERT(!isFuture());
        return !instancesRequireRawIsa();
    }

    bool isSwiftStable() {
        return bits.isSwiftStable();
    }

    bool isSwiftLegacy() {
        return bits.isSwiftLegacy();
    }

    bool isAnySwift() {
        return bits.isAnySwift();
    }

    bool isSwiftStable_ButAllowLegacyForNow() {
        return bits.isSwiftStable_ButAllowLegacyForNow();
    }

    // Stub classes are encoded as tiny raw isa values 1..15 instead of
    // real class pointers.
    bool isStubClass() const {
        uintptr_t isa = (uintptr_t)isaBits();
        return 1 <= isa && isa < 16;
    }

    // Swift stable ABI built for old deployment targets looks weird.
    // The is-legacy bit is set for compatibility with old libobjc.
    // We are on a "new" deployment target so we need to rewrite that bit.
    // These stable-with-legacy-bit classes are distinguished from real
    // legacy classes using another bit in the Swift data
    // (ClassFlags::IsSwiftPreStableABI)
    bool isUnfixedBackwardDeployingStableSwift() {
        // Only classes marked as Swift legacy need apply.
        if (!bits.isSwiftLegacy()) return false;

        // Check the true legacy vs stable distinguisher.
        // The low bit of Swift's ClassFlags is SET for true legacy
        // and UNSET for stable pretending to be legacy.
        // NOTE(review): assumes Swift's ClassFlags word sits immediately
        // after `bits` in memory — confirm against the Swift metadata layout.
        uint32_t swiftClassFlags = *(uint32_t *)(&bits + 1);
        bool isActuallySwiftLegacy = bool(swiftClassFlags & 1);
        return !isActuallySwiftLegacy;
    }

    void fixupBackwardDeployingStableSwift() {
        if (isUnfixedBackwardDeployingStableSwift()) {
            // Class really is stable Swift, pretending to be pre-stable.
            // Fix its lie.
            bits.setIsSwiftStable();
        }
    }

    _objc_swiftMetadataInitializer swiftMetadataInitializer() {
        return bits.swiftMetadataInitializer();
    }

    // Return YES if the class's ivars are managed by ARC,
    // or the class is MRC but has ARC-style weak ivars.
    bool hasAutomaticIvars() {
        return data()->ro->flags & (RO_IS_ARC | RO_HAS_WEAK_WITHOUT_ARC);
    }

    // Return YES if the class's ivars are managed by ARC.
    bool isARC() {
        return data()->ro->flags & RO_IS_ARC;
    }

    bool forbidsAssociatedObjects() {
        return (data()->flags & RW_FORBIDS_ASSOCIATED_OBJECTS);
    }

#if SUPPORT_NONPOINTER_ISA
    // Tracked in non-pointer isas; not tracked otherwise
#else
    bool instancesHaveAssociatedObjects() {
        // this may be an unrealized future class in the CF-bridged case
        ASSERT(isFuture() || isRealized());
        return data()->flags & RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS;
    }

    void setInstancesHaveAssociatedObjects() {
        // this may be an unrealized future class in the CF-bridged case
        ASSERT(isFuture() || isRealized());
        setInfo(RW_INSTANCES_HAVE_ASSOCIATED_OBJECTS);
    }
#endif

    bool shouldGrowCache() {
        return true;
    }

    void setShouldGrowCache(bool) {
        // fixme good or bad for memory use?
    }

    // +initialize state lives on the metaclass.
    bool isInitializing() {
        return getMeta()->data()->flags & RW_INITIALIZING;
    }

    void setInitializing() {
        ASSERT(!isMetaClass());
        ISA()->setInfo(RW_INITIALIZING);
    }

    bool isInitialized() {
        return getMeta()->data()->flags & RW_INITIALIZED;
    }

    void setInitialized();

    bool isLoadable() {
        ASSERT(isRealized());
        return true;  // any class registered for +load is definitely loadable
    }

    IMP getLoadMethod();

    // Locking: To prevent concurrent realization, hold runtimeLock.
    bool isRealized() const {
        return !isStubClass() && (data()->flags & RW_REALIZED);
    }

    // Returns true if this is an unrealized future class.
    // Locking: To prevent concurrent realization, hold runtimeLock.
    bool isFuture() const {
        return data()->flags & RW_FUTURE;
    }

    bool isMetaClass() {
        ASSERT(this);
        ASSERT(isRealized());
#if FAST_CACHE_META
        return cache.getBit(FAST_CACHE_META);
#else
        return data()->ro->flags & RO_META;
#endif
    }

    // Like isMetaClass, but also valid on un-realized classes
    bool isMetaClassMaybeUnrealized() {
        return bits.safe_ro()->flags & RO_META;
    }

    // NOT identical to this->ISA when this is a metaclass
    Class getMeta() {
        if (isMetaClass()) return (Class)this;
        else return this->ISA();
    }

    bool isRootClass() {
        return superclass == nil;
    }
    bool isRootMetaclass() {
        return ISA() == (Class)this;
    }

    const char *mangledName() {
        // fixme can't assert locks here
        ASSERT(this);

        if (isRealized() || isFuture()) {
            return data()->ro->name;
        } else {
            // Not yet realized: data() still points at the class_ro_t.
            return ((const class_ro_t *)data())->name;
        }
    }

    const char *demangledName();
    const char *nameForLogging();

    // May be unaligned depending on class's ivars.
    uint32_t unalignedInstanceStart() const {
        ASSERT(isRealized());
        return data()->ro->instanceStart;
    }

    // Class's instance start rounded up to a pointer-size boundary.
    // This is used for ARC layout bitmaps.
    uint32_t alignedInstanceStart() const {
        return word_align(unalignedInstanceStart());
    }

    // May be unaligned depending on class's ivars.
    uint32_t unalignedInstanceSize() const {
        ASSERT(isRealized());
        return data()->ro->instanceSize;
    }

    // Class's ivar size rounded up to a pointer-size boundary.
    uint32_t alignedInstanceSize() const {
        return word_align(unalignedInstanceSize());
    }

    size_t instanceSize(size_t extraBytes) const {
        if (fastpath(cache.hasFastInstanceSize(extraBytes))) {
            return cache.fastInstanceSize(extraBytes);
        }

        size_t size = alignedInstanceSize() + extraBytes;
        // CF requires all objects be at least 16 bytes.
        if (size < 16) size = 16;
        return size;
    }

    void setInstanceSize(uint32_t newSize) {
        ASSERT(isRealized());
        ASSERT(data()->flags & RW_REALIZING);
        if (newSize != data()->ro->instanceSize) {
            // Writing through the const ro pointer is only legal because
            // RW_COPIED_RO guarantees ro is a heap copy, not shared metadata.
            ASSERT(data()->flags & RW_COPIED_RO);
            *const_cast<uint32_t *>(&data()->ro->instanceSize) = newSize;
        }
        cache.setFastInstanceSize(newSize);
    }

    void chooseClassArrayIndex();

    void setClassArrayIndex(unsigned Idx) {
        bits.setClassArrayIndex(Idx);
    }

    unsigned classArrayIndex() {
        return bits.classArrayIndex();
    }
};
// Swift class metadata as seen by the ObjC runtime: the objc_class
// header followed by Swift-specific fields.
struct swift_class_t : objc_class {
    uint32_t flags;
    uint32_t instanceAddressOffset;
    uint32_t instanceSize;
    uint16_t instanceAlignMask;
    uint16_t reserved;

    uint32_t classSize;
    uint32_t classAddressOffset;
    void *description;
    // ...

    // Start of the full Swift metadata allocation: this object sits
    // classAddressOffset bytes past it.
    void *baseAddress() {
        return (void *)((uint8_t *)this - classAddressOffset);
    }
};
// On-disk representation of an Objective-C category.
struct category_t {
    const char *name;
    classref_t cls;
    struct method_list_t *instanceMethods;
    struct method_list_t *classMethods;
    struct protocol_list_t *protocols;
    struct property_list_t *instanceProperties;
    // Fields below this point are not always present on disk.
    struct property_list_t *_classProperties;

    // Method list to attach: class methods for the metaclass,
    // instance methods otherwise.
    method_list_t *methodsForMeta(bool isMeta) {
        if (isMeta) return classMethods;
        else return instanceMethods;
    }

    property_list_t *propertiesForMeta(bool isMeta, struct header_info *hi);

    // Protocols are attached to the class only, never the metaclass.
    protocol_list_t *protocolsForMeta(bool isMeta) {
        if (isMeta) return nullptr;
        else return protocols;
    }
};
// Variant of objc_super carrying the current class rather than the
// superclass directly.
struct objc_super2 {
    id receiver;
    Class current_class;
};
// A (IMP, SEL) pair used by vtable-style message dispatch references.
struct message_ref_t {
    IMP imp;
    SEL sel;
};
- extern Method protocol_getMethod(protocol_t *p, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive);
- #endif
|