/*
 * Copyright (c) 2010-2012 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */


/***********************************************************************
* Inlineable parts of NSObject / objc_object implementation
**********************************************************************/

#ifndef _OBJC_OBJCOBJECT_H_
#define _OBJC_OBJCOBJECT_H_

#include "objc-private.h"


enum ReturnDisposition : bool {
    ReturnAtPlus0 = false, ReturnAtPlus1 = true
};

static ALWAYS_INLINE
bool prepareOptimizedReturn(ReturnDisposition disposition);


#if SUPPORT_TAGGED_POINTERS

extern "C" {
    extern Class objc_debug_taggedpointer_classes[_OBJC_TAG_SLOT_COUNT*2];
    extern Class objc_debug_taggedpointer_ext_classes[_OBJC_TAG_EXT_SLOT_COUNT];
}
#define objc_tag_classes objc_debug_taggedpointer_classes
#define objc_tag_ext_classes objc_debug_taggedpointer_ext_classes

#endif

#if SUPPORT_INDEXED_ISA

ALWAYS_INLINE Class &
classForIndex(uintptr_t index) {
    assert(index > 0);
    assert(index < (uintptr_t)objc_indexed_classes_count);
    return objc_indexed_classes[index];
}

#endif


inline bool
objc_object::isClass()
{
    if (isTaggedPointer()) return false;
    return ISA()->isMetaClass();
}


#if SUPPORT_TAGGED_POINTERS

inline Class
objc_object::getIsa()
{
    if (!isTaggedPointer()) return ISA();

    uintptr_t ptr = (uintptr_t)this;
    if (isExtTaggedPointer()) {
        uintptr_t slot =
            (ptr >> _OBJC_TAG_EXT_SLOT_SHIFT) & _OBJC_TAG_EXT_SLOT_MASK;
        return objc_tag_ext_classes[slot];
    } else {
        uintptr_t slot =
            (ptr >> _OBJC_TAG_SLOT_SHIFT) & _OBJC_TAG_SLOT_MASK;
        return objc_tag_classes[slot];
    }
}


inline bool
objc_object::isTaggedPointer()
{
    return _objc_isTaggedPointer(this);
}

inline bool
objc_object::isBasicTaggedPointer()
{
    return isTaggedPointer() && !isExtTaggedPointer();
}

inline bool
objc_object::isExtTaggedPointer()
{
    uintptr_t ptr = _objc_decodeTaggedPointer(this);
    return (ptr & _OBJC_TAG_EXT_MASK) == _OBJC_TAG_EXT_MASK;
}
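
// For orientation (not part of this header): the predicate used above is
// defined in objc-internal.h and simply tests the platform's tag bit.
// A sketch, assuming the objc4-era encoding:
//
//     static inline bool
//     _objc_isTaggedPointer(const void *ptr)
//     {
//         // _OBJC_TAG_MASK is the most significant bit on arm64
//         // and the least significant bit on x86_64.
//         return ((uintptr_t)ptr & _OBJC_TAG_MASK) == _OBJC_TAG_MASK;
//     }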
// SUPPORT_TAGGED_POINTERS
#else
// not SUPPORT_TAGGED_POINTERS

inline Class
objc_object::getIsa()
{
    return ISA();
}

inline bool
objc_object::isTaggedPointer()
{
    return false;
}

inline bool
objc_object::isBasicTaggedPointer()
{
    return false;
}

inline bool
objc_object::isExtTaggedPointer()
{
    return false;
}

// not SUPPORT_TAGGED_POINTERS
#endif


#if SUPPORT_NONPOINTER_ISA

inline Class
objc_object::ISA()
{
    assert(!isTaggedPointer());
#if SUPPORT_INDEXED_ISA
    if (isa.nonpointer) {
        uintptr_t slot = isa.indexcls;
        return classForIndex((unsigned)slot);
    }
    return (Class)isa.bits;
#else
    return (Class)(isa.bits & ISA_MASK);
#endif
}
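
// For orientation (not part of this header): a nonpointer isa packs the
// class pointer and reference-count bookkeeping into one word. A sketch of
// the arm64 bitfield layout from objc-private.h of this era; exact field
// widths vary by platform and release, so treat this as illustrative:
//
//     union isa_t {
//         Class cls;
//         uintptr_t bits;
//         struct {
//             uintptr_t nonpointer        : 1;   // 1 = fields below are valid
//             uintptr_t has_assoc         : 1;
//             uintptr_t has_cxx_dtor      : 1;
//             uintptr_t shiftcls          : 33;  // class pointer >> 3
//             uintptr_t magic             : 6;
//             uintptr_t weakly_referenced : 1;
//             uintptr_t deallocating      : 1;
//             uintptr_t has_sidetable_rc  : 1;
//             uintptr_t extra_rc          : 19;  // retain count minus one
//         };
//     };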
inline bool
objc_object::hasNonpointerIsa()
{
    return isa.nonpointer;
}


inline void
objc_object::initIsa(Class cls)
{
    initIsa(cls, false, false);
}

inline void
objc_object::initClassIsa(Class cls)
{
    if (DisableNonpointerIsa || cls->instancesRequireRawIsa()) {
        initIsa(cls, false/*not nonpointer*/, false);
    } else {
        initIsa(cls, true/*nonpointer*/, false);
    }
}

inline void
objc_object::initProtocolIsa(Class cls)
{
    return initClassIsa(cls);
}

inline void
objc_object::initInstanceIsa(Class cls, bool hasCxxDtor)
{
    assert(!cls->instancesRequireRawIsa());
    assert(hasCxxDtor == cls->hasCxxDtor());

    initIsa(cls, true, hasCxxDtor);
}

inline void
objc_object::initIsa(Class cls, bool nonpointer, bool hasCxxDtor)
{
    assert(!isTaggedPointer());

    if (!nonpointer) {
        isa.cls = cls;
    } else {
        assert(!DisableNonpointerIsa);
        assert(!cls->instancesRequireRawIsa());

        isa_t newisa(0);

#if SUPPORT_INDEXED_ISA
        assert(cls->classArrayIndex() > 0);
        newisa.bits = ISA_INDEX_MAGIC_VALUE;
        // isa.magic is part of ISA_MAGIC_VALUE
        // isa.nonpointer is part of ISA_MAGIC_VALUE
        newisa.has_cxx_dtor = hasCxxDtor;
        newisa.indexcls = (uintptr_t)cls->classArrayIndex();
#else
        newisa.bits = ISA_MAGIC_VALUE;
        // isa.magic is part of ISA_MAGIC_VALUE
        // isa.nonpointer is part of ISA_MAGIC_VALUE
        newisa.has_cxx_dtor = hasCxxDtor;
        newisa.shiftcls = (uintptr_t)cls >> 3;
#endif

        // This write must be performed in a single store in some cases
        // (for example when realizing a class because other threads
        // may simultaneously try to use the class).
        // fixme use atomics here to guarantee single-store and to
        // guarantee memory order w.r.t. the class index table
        // ...but not too atomic because we don't want to hurt instantiation
        isa = newisa;
    }
}


inline Class
objc_object::changeIsa(Class newCls)
{
    // This is almost always true but there are
    // enough edge cases that we can't assert it.
    // assert(newCls->isFuture() ||
    //        newCls->isInitializing() || newCls->isInitialized());

    assert(!isTaggedPointer());

    isa_t oldisa;
    isa_t newisa;

    bool sideTableLocked = false;
    bool transcribeToSideTable = false;

    do {
        transcribeToSideTable = false;
        oldisa = LoadExclusive(&isa.bits);
        if ((oldisa.bits == 0 || oldisa.nonpointer) &&
            !newCls->isFuture() && newCls->canAllocNonpointer())
        {
            // 0 -> nonpointer
            // nonpointer -> nonpointer
#if SUPPORT_INDEXED_ISA
            if (oldisa.bits == 0) newisa.bits = ISA_INDEX_MAGIC_VALUE;
            else newisa = oldisa;
            // isa.magic is part of ISA_MAGIC_VALUE
            // isa.nonpointer is part of ISA_MAGIC_VALUE
            newisa.has_cxx_dtor = newCls->hasCxxDtor();
            assert(newCls->classArrayIndex() > 0);
            newisa.indexcls = (uintptr_t)newCls->classArrayIndex();
#else
            if (oldisa.bits == 0) newisa.bits = ISA_MAGIC_VALUE;
            else newisa = oldisa;
            // isa.magic is part of ISA_MAGIC_VALUE
            // isa.nonpointer is part of ISA_MAGIC_VALUE
            newisa.has_cxx_dtor = newCls->hasCxxDtor();
            newisa.shiftcls = (uintptr_t)newCls >> 3;
#endif
        }
        else if (oldisa.nonpointer) {
            // nonpointer -> raw pointer
            // Need to copy retain count et al to side table.
            // Acquire side table lock before setting isa to
            // prevent races such as concurrent -release.
            if (!sideTableLocked) sidetable_lock();
            sideTableLocked = true;
            transcribeToSideTable = true;
            newisa.cls = newCls;
        }
        else {
            // raw pointer -> raw pointer
            newisa.cls = newCls;
        }
    } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));

    if (transcribeToSideTable) {
        // Copy oldisa's retain count et al to side table.
        // oldisa.has_assoc: nothing to do
        // oldisa.has_cxx_dtor: nothing to do
        sidetable_moveExtraRC_nolock(oldisa.extra_rc,
                                     oldisa.deallocating,
                                     oldisa.weakly_referenced);
    }

    if (sideTableLocked) sidetable_unlock();

    if (oldisa.nonpointer) {
#if SUPPORT_INDEXED_ISA
        return classForIndex(oldisa.indexcls);
#else
        return (Class)((uintptr_t)oldisa.shiftcls << 3);
#endif
    }
    else {
        return oldisa.cls;
    }
}
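
// For reference (not part of this header): user code reaches changeIsa()
// through object_setClass() in objc-class.mm (KVO's isa swizzling uses the
// same entry point). A simplified sketch; the real version also takes care
// to +initialize cls first to avoid deadlocks with the weak-reference
// machinery:
//
//     Class object_setClass(id obj, Class cls)
//     {
//         if (!obj) return nil;
//         return obj->changeIsa(cls);
//     }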
inline bool
objc_object::hasAssociatedObjects()
{
    if (isTaggedPointer()) return true;
    if (isa.nonpointer) return isa.has_assoc;
    return true;
}


inline void
objc_object::setHasAssociatedObjects()
{
    if (isTaggedPointer()) return;

 retry:
    isa_t oldisa = LoadExclusive(&isa.bits);
    isa_t newisa = oldisa;
    if (!newisa.nonpointer || newisa.has_assoc) {
        ClearExclusive(&isa.bits);
        return;
    }
    newisa.has_assoc = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
}
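
// For orientation (not part of this header): the LoadExclusive /
// StoreExclusive / ClearExclusive trio used throughout this file abstracts
// over LL/SC versus compare-and-swap. On arm64 they wrap ldxr/stxr; on
// other architectures they degenerate to a plain load plus CAS. A sketch
// of the non-LL/SC fallback, roughly as objc-os.h defines it:
//
//     static ALWAYS_INLINE uintptr_t
//     LoadExclusive(uintptr_t *src)
//     {
//         return *src;
//     }
//
//     static ALWAYS_INLINE bool
//     StoreExclusive(uintptr_t *dst, uintptr_t oldvalue, uintptr_t value)
//     {
//         return __sync_bool_compare_and_swap((void **)dst,
//                                             (void *)oldvalue,
//                                             (void *)value);
//     }
//
//     static ALWAYS_INLINE void
//     ClearExclusive(uintptr_t *dst)
//     {
//         // nothing to release without an LL/SC reservation
//     }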
inline bool
objc_object::isWeaklyReferenced()
{
    assert(!isTaggedPointer());
    if (isa.nonpointer) return isa.weakly_referenced;
    else return sidetable_isWeaklyReferenced();
}


inline void
objc_object::setWeaklyReferenced_nolock()
{
 retry:
    isa_t oldisa = LoadExclusive(&isa.bits);
    isa_t newisa = oldisa;
    if (slowpath(!newisa.nonpointer)) {
        ClearExclusive(&isa.bits);
        sidetable_setWeaklyReferenced_nolock();
        return;
    }
    if (newisa.weakly_referenced) {
        ClearExclusive(&isa.bits);
        return;
    }
    newisa.weakly_referenced = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
}


inline bool
objc_object::hasCxxDtor()
{
    assert(!isTaggedPointer());
    if (isa.nonpointer) return isa.has_cxx_dtor;
    else return isa.cls->hasCxxDtor();
}


inline bool
objc_object::rootIsDeallocating()
{
    if (isTaggedPointer()) return false;
    if (isa.nonpointer) return isa.deallocating;
    return sidetable_isDeallocating();
}


inline void
objc_object::clearDeallocating()
{
    if (slowpath(!isa.nonpointer)) {
        // Slow path for raw pointer isa.
        sidetable_clearDeallocating();
    }
    else if (slowpath(isa.weakly_referenced || isa.has_sidetable_rc)) {
        // Slow path for non-pointer isa with weak refs and/or side table data.
        clearDeallocating_slow();
    }

    assert(!sidetable_present());
}


inline void
objc_object::rootDealloc()
{
    if (isTaggedPointer()) return;  // fixme necessary?

    if (fastpath(isa.nonpointer &&
                 !isa.weakly_referenced &&
                 !isa.has_assoc &&
                 !isa.has_cxx_dtor &&
                 !isa.has_sidetable_rc))
    {
        assert(!sidetable_present());
        free(this);
    }
    else {
        object_dispose((id)this);
    }
}
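
// For reference (not part of this header): -[NSObject dealloc] reaches
// rootDealloc() through a thin out-of-line wrapper in NSObject.mm, roughly:
//
//     void _objc_rootDealloc(id obj)
//     {
//         assert(obj);
//         obj->rootDealloc();
//     }
//
// The fastpath above frees the memory directly; object_dispose() is the
// slow path that first runs C++ destructors and clears associated
// references, weak references, and side table state.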
// Equivalent to calling [this retain], with shortcuts if there is no override
inline id
objc_object::retain()
{
    assert(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        return rootRetain();
    }

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_retain);
}


// Base retain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super retain].
//
// tryRetain=true is the -_tryRetain path.
// handleOverflow=false is the frameless fast path.
// handleOverflow=true is the framed slow path including overflow to side table
// The code is structured this way to prevent duplication.

ALWAYS_INLINE id
objc_object::rootRetain()
{
    return rootRetain(false, false);
}

ALWAYS_INLINE bool
objc_object::rootTryRetain()
{
    return rootRetain(true, false) ? true : false;
}

ALWAYS_INLINE id
objc_object::rootRetain(bool tryRetain, bool handleOverflow)
{
    if (isTaggedPointer()) return (id)this;

    bool sideTableLocked = false;
    bool transcribeToSideTable = false;

    isa_t oldisa;
    isa_t newisa;

    do {
        transcribeToSideTable = false;
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (slowpath(!newisa.nonpointer)) {
            ClearExclusive(&isa.bits);
            if (!tryRetain && sideTableLocked) sidetable_unlock();
            if (tryRetain) return sidetable_tryRetain() ? (id)this : nil;
            else return sidetable_retain();
        }
        // don't check newisa.fast_rr; we already called any RR overrides
        if (slowpath(tryRetain && newisa.deallocating)) {
            ClearExclusive(&isa.bits);
            if (!tryRetain && sideTableLocked) sidetable_unlock();
            return nil;
        }
        uintptr_t carry;
        newisa.bits = addc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc++

        if (slowpath(carry)) {
            // newisa.extra_rc++ overflowed
            if (!handleOverflow) {
                ClearExclusive(&isa.bits);
                return rootRetain_overflow(tryRetain);
            }
            // Leave half of the retain counts inline and
            // prepare to copy the other half to the side table.
            if (!tryRetain && !sideTableLocked) sidetable_lock();
            sideTableLocked = true;
            transcribeToSideTable = true;
            newisa.extra_rc = RC_HALF;
            newisa.has_sidetable_rc = true;
        }
    } while (slowpath(!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)));

    if (slowpath(transcribeToSideTable)) {
        // Copy the other half of the retain counts to the side table.
        sidetable_addExtraRC_nolock(RC_HALF);
    }

    if (slowpath(!tryRetain && sideTableLocked)) sidetable_unlock();
    return (id)this;
}
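
// For reference (not part of this header): the handleOverflow=false fast
// path above tail-calls an out-of-line wrapper that simply re-enters with
// handleOverflow=true, so the inlined copy stays frameless. Roughly, from
// NSObject.mm:
//
//     NEVER_INLINE id
//     objc_object::rootRetain_overflow(bool tryRetain)
//     {
//         return rootRetain(tryRetain, true);
//     }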
// Equivalent to calling [this release], with shortcuts if there is no override
inline void
objc_object::release()
{
    assert(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        rootRelease();
        return;
    }

    ((void(*)(objc_object *, SEL))objc_msgSend)(this, SEL_release);
}


// Base release implementation, ignoring overrides.
// Does not call -dealloc.
// Returns true if the object should now be deallocated.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super release].
//
// handleUnderflow=false is the frameless fast path.
// handleUnderflow=true is the framed slow path including side table borrow
// The code is structured this way to prevent duplication.

ALWAYS_INLINE bool
objc_object::rootRelease()
{
    return rootRelease(true, false);
}

ALWAYS_INLINE bool
objc_object::rootReleaseShouldDealloc()
{
    return rootRelease(false, false);
}

ALWAYS_INLINE bool
objc_object::rootRelease(bool performDealloc, bool handleUnderflow)
{
    if (isTaggedPointer()) return false;

    bool sideTableLocked = false;

    isa_t oldisa;
    isa_t newisa;

 retry:
    do {
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (slowpath(!newisa.nonpointer)) {
            ClearExclusive(&isa.bits);
            if (sideTableLocked) sidetable_unlock();
            return sidetable_release(performDealloc);
        }
        // don't check newisa.fast_rr; we already called any RR overrides
        uintptr_t carry;
        newisa.bits = subc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc--
        if (slowpath(carry)) {
            // don't ClearExclusive()
            goto underflow;
        }
    } while (slowpath(!StoreReleaseExclusive(&isa.bits,
                                             oldisa.bits, newisa.bits)));

    if (slowpath(sideTableLocked)) sidetable_unlock();
    return false;

 underflow:
    // newisa.extra_rc-- underflowed: borrow from side table or deallocate

    // abandon newisa to undo the decrement
    newisa = oldisa;

    if (slowpath(newisa.has_sidetable_rc)) {
        if (!handleUnderflow) {
            ClearExclusive(&isa.bits);
            return rootRelease_underflow(performDealloc);
        }

        // Transfer retain count from side table to inline storage.

        if (!sideTableLocked) {
            ClearExclusive(&isa.bits);
            sidetable_lock();
            sideTableLocked = true;
            // Need to start over to avoid a race against
            // the nonpointer -> raw pointer transition.
            goto retry;
        }

        // Try to remove some retain counts from the side table.
        size_t borrowed = sidetable_subExtraRC_nolock(RC_HALF);

        // To avoid races, has_sidetable_rc must remain set
        // even if the side table count is now zero.

        if (borrowed > 0) {
            // Side table retain count decreased.
            // Try to add them to the inline count.
            newisa.extra_rc = borrowed - 1;  // redo the original decrement too
            bool stored = StoreReleaseExclusive(&isa.bits,
                                                oldisa.bits, newisa.bits);
            if (!stored) {
                // Inline update failed.
                // Try it again right now. This prevents livelock on LL/SC
                // architectures where the side table access itself may have
                // dropped the reservation.
                isa_t oldisa2 = LoadExclusive(&isa.bits);
                isa_t newisa2 = oldisa2;
                if (newisa2.nonpointer) {
                    uintptr_t overflow;
                    newisa2.bits =
                        addc(newisa2.bits, RC_ONE * (borrowed-1), 0, &overflow);
                    if (!overflow) {
                        stored = StoreReleaseExclusive(&isa.bits, oldisa2.bits,
                                                       newisa2.bits);
                    }
                }
            }

            if (!stored) {
                // Inline update failed.
                // Put the retains back in the side table.
                sidetable_addExtraRC_nolock(borrowed);
                goto retry;
            }

            // Decrement successful after borrowing from side table.
            // This decrement cannot be the deallocating decrement - the side
            // table lock and has_sidetable_rc bit ensure that if everyone
            // else tried to -release while we worked, the last one would block.
            sidetable_unlock();
            return false;
        }
        else {
            // Side table is empty after all. Fall-through to the dealloc path.
        }
    }

    // Really deallocate.

    if (slowpath(newisa.deallocating)) {
        ClearExclusive(&isa.bits);
        if (sideTableLocked) sidetable_unlock();
        return overrelease_error();
        // does not actually return
    }
    newisa.deallocating = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;

    if (slowpath(sideTableLocked)) sidetable_unlock();

    __sync_synchronize();
    if (performDealloc) {
        ((void(*)(objc_object *, SEL))objc_msgSend)(this, SEL_dealloc);
    }
    return true;
}
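
// For reference (not part of this header): like the retain overflow case,
// the handleUnderflow=false fast path defers to an out-of-line wrapper
// that re-enters with handleUnderflow=true. Roughly, from NSObject.mm:
//
//     NEVER_INLINE bool
//     objc_object::rootRelease_underflow(bool performDealloc)
//     {
//         return rootRelease(performDealloc, true);
//     }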
// Equivalent to [this autorelease], with shortcuts if there is no override
inline id
objc_object::autorelease()
{
    if (isTaggedPointer()) return (id)this;
    if (fastpath(!ISA()->hasCustomRR())) return rootAutorelease();

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_autorelease);
}


// Base autorelease implementation, ignoring overrides.
inline id
objc_object::rootAutorelease()
{
    if (isTaggedPointer()) return (id)this;
    if (prepareOptimizedReturn(ReturnAtPlus1)) return (id)this;

    return rootAutorelease2();
}


inline uintptr_t
objc_object::rootRetainCount()
{
    if (isTaggedPointer()) return (uintptr_t)this;

    sidetable_lock();
    isa_t bits = LoadExclusive(&isa.bits);
    ClearExclusive(&isa.bits);
    if (bits.nonpointer) {
        uintptr_t rc = 1 + bits.extra_rc;
        if (bits.has_sidetable_rc) {
            rc += sidetable_getExtraRC_nolock();
        }
        sidetable_unlock();
        return rc;
    }

    sidetable_unlock();
    return sidetable_retainCount();
}


// SUPPORT_NONPOINTER_ISA
#else
// not SUPPORT_NONPOINTER_ISA


inline Class
objc_object::ISA()
{
    assert(!isTaggedPointer());
    return isa.cls;
}

inline bool
objc_object::hasNonpointerIsa()
{
    return false;
}

inline void
objc_object::initIsa(Class cls)
{
    assert(!isTaggedPointer());
    isa = (uintptr_t)cls;
}

inline void
objc_object::initClassIsa(Class cls)
{
    initIsa(cls);
}

inline void
objc_object::initProtocolIsa(Class cls)
{
    initIsa(cls);
}

inline void
objc_object::initInstanceIsa(Class cls, bool)
{
    initIsa(cls);
}

inline void
objc_object::initIsa(Class cls, bool, bool)
{
    initIsa(cls);
}
inline Class
objc_object::changeIsa(Class cls)
{
    // This is almost always true but there are
    // enough edge cases that we can't assert it.
    // assert(cls->isFuture() ||
    //        cls->isInitializing() || cls->isInitialized());

    assert(!isTaggedPointer());

    isa_t oldisa, newisa;
    newisa.cls = cls;
    do {
        oldisa = LoadExclusive(&isa.bits);
    } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));

    if (oldisa.cls && oldisa.cls->instancesHaveAssociatedObjects()) {
        cls->setInstancesHaveAssociatedObjects();
    }

    return oldisa.cls;
}
inline bool
objc_object::hasAssociatedObjects()
{
    return getIsa()->instancesHaveAssociatedObjects();
}


inline void
objc_object::setHasAssociatedObjects()
{
    getIsa()->setInstancesHaveAssociatedObjects();
}


inline bool
objc_object::isWeaklyReferenced()
{
    assert(!isTaggedPointer());

    return sidetable_isWeaklyReferenced();
}


inline void
objc_object::setWeaklyReferenced_nolock()
{
    assert(!isTaggedPointer());

    sidetable_setWeaklyReferenced_nolock();
}


inline bool
objc_object::hasCxxDtor()
{
    assert(!isTaggedPointer());
    return isa.cls->hasCxxDtor();
}


inline bool
objc_object::rootIsDeallocating()
{
    if (isTaggedPointer()) return false;
    return sidetable_isDeallocating();
}


inline void
objc_object::clearDeallocating()
{
    sidetable_clearDeallocating();
}


inline void
objc_object::rootDealloc()
{
    if (isTaggedPointer()) return;
    object_dispose((id)this);
}


// Equivalent to calling [this retain], with shortcuts if there is no override
inline id
objc_object::retain()
{
    assert(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        return sidetable_retain();
    }

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_retain);
}


// Base retain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super retain].
inline id
objc_object::rootRetain()
{
    if (isTaggedPointer()) return (id)this;
    return sidetable_retain();
}


// Equivalent to calling [this release], with shortcuts if there is no override
inline void
objc_object::release()
{
    assert(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        sidetable_release();
        return;
    }

    ((void(*)(objc_object *, SEL))objc_msgSend)(this, SEL_release);
}


// Base release implementation, ignoring overrides.
// Does not call -dealloc.
// Returns true if the object should now be deallocated.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super release].
inline bool
objc_object::rootRelease()
{
    if (isTaggedPointer()) return false;
    return sidetable_release(true);
}

inline bool
objc_object::rootReleaseShouldDealloc()
{
    if (isTaggedPointer()) return false;
    return sidetable_release(false);
}


// Equivalent to [this autorelease], with shortcuts if there is no override
inline id
objc_object::autorelease()
{
    if (isTaggedPointer()) return (id)this;
    if (fastpath(!ISA()->hasCustomRR())) return rootAutorelease();

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, SEL_autorelease);
}


// Base autorelease implementation, ignoring overrides.
inline id
objc_object::rootAutorelease()
{
    if (isTaggedPointer()) return (id)this;
    if (prepareOptimizedReturn(ReturnAtPlus1)) return (id)this;

    return rootAutorelease2();
}


// Base tryRetain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super _tryRetain].
inline bool
objc_object::rootTryRetain()
{
    if (isTaggedPointer()) return true;
    return sidetable_tryRetain();
}


inline uintptr_t
objc_object::rootRetainCount()
{
    if (isTaggedPointer()) return (uintptr_t)this;
    return sidetable_retainCount();
}


// not SUPPORT_NONPOINTER_ISA
#endif


#if SUPPORT_RETURN_AUTORELEASE

/***********************************************************************
  Fast handling of return through Cocoa's +0 autoreleasing convention.
  The caller and callee cooperate to keep the returned object
  out of the autorelease pool and eliminate redundant retain/release pairs.

  An optimized callee looks at the caller's instructions following the
  return. If the caller's instructions are also optimized then the callee
  skips all retain count operations: no autorelease, no retain/autorelease.
  Instead it saves the result's current retain count (+0 or +1) in
  thread-local storage. If the caller does not look optimized then
  the callee performs autorelease or retain/autorelease as usual.

  An optimized caller looks at the thread-local storage. If the result
  is set then it performs any retain or release needed to change the
  result from the retain count left by the callee to the retain count
  desired by the caller. Otherwise the caller assumes the result is
  currently at +0 from an unoptimized callee and performs any retain
  needed for that case.

  There are two optimized callees:
    objc_autoreleaseReturnValue
      result is currently +1. The unoptimized path autoreleases it.
    objc_retainAutoreleaseReturnValue
      result is currently +0. The unoptimized path retains and autoreleases it.

  There are two optimized callers:
    objc_retainAutoreleasedReturnValue
      caller wants the value at +1. The unoptimized path retains it.
    objc_unsafeClaimAutoreleasedReturnValue
      caller wants the value at +0 unsafely. The unoptimized path does nothing.

  Example:

    Callee:
      // compute ret at +1
      return objc_autoreleaseReturnValue(ret);

    Caller:
      ret = callee();
      ret = objc_retainAutoreleasedReturnValue(ret);
      // use ret at +1 here

    Callee sees the optimized caller, sets TLS, and leaves the result at +1.
    Caller sees the TLS, clears it, and accepts the result at +1 as-is.

  The callee's recognition of the optimized caller is architecture-dependent.
  x86_64: Callee looks for `mov rax, rdi` followed by a call or
    jump instruction to objc_retainAutoreleasedReturnValue or
    objc_unsafeClaimAutoreleasedReturnValue.
  i386:  Callee looks for a magic nop `movl %ebp, %ebp` (frame pointer register).
  armv7: Callee looks for a magic nop `mov r7, r7` (frame pointer register).
  arm64: Callee looks for a magic nop `mov x29, x29` (frame pointer register).

  Tagged pointer objects do participate in the optimized return scheme,
  because it saves message sends. They are not entered in the autorelease
  pool in the unoptimized case.
**********************************************************************/
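
// For orientation (not part of this header): on arm64, ARC emits the magic
// marker between the call and the claim, schematically:
//
//     bl      _callee                              ; result in x0
//     mov     x29, x29                             ; magic no-op the callee looks for
//     bl      _objc_retainAutoreleasedReturnValue  ; claim the result at +1
//
// If the callee does not see this pattern at its return address, it falls
// back to a real autorelease, and the caller to a real retain.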
# if __x86_64__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void * const ra0)
{
    const uint8_t *ra1 = (const uint8_t *)ra0;
    const unaligned_uint16_t *ra2;
    const unaligned_uint32_t *ra4 = (const unaligned_uint32_t *)ra1;
    const void **sym;

#define PREFER_GOTPCREL 0
#if PREFER_GOTPCREL
    // 48 89 c7    movq  %rax,%rdi
    // ff 15       callq *symbol@GOTPCREL(%rip)
    if (*ra4 != 0xffc78948) {
        return false;
    }
    if (ra1[4] != 0x15) {
        return false;
    }
    ra1 += 3;
#else
    // 48 89 c7    movq  %rax,%rdi
    // e8          callq symbol
    if (*ra4 != 0xe8c78948) {
        return false;
    }
    ra1 += (long)*(const unaligned_int32_t *)(ra1 + 4) + 8l;
    ra2 = (const unaligned_uint16_t *)ra1;
    // ff 25       jmpq *symbol@DYLDMAGIC(%rip)
    if (*ra2 != 0x25ff) {
        return false;
    }
#endif
    ra1 += 6l + (long)*(const unaligned_int32_t *)(ra1 + 2);
    sym = (const void **)ra1;
    if (*sym != objc_retainAutoreleasedReturnValue &&
        *sym != objc_unsafeClaimAutoreleasedReturnValue)
    {
        return false;
    }

    return true;
}

// __x86_64__
# elif __arm__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    // if the low bit is set, we're returning to thumb mode
    if ((uintptr_t)ra & 1) {
        // 3f 46          mov r7, r7
        // we mask off the low bit via subtraction
        // 16-bit instructions are well-aligned
        if (*(uint16_t *)((uint8_t *)ra - 1) == 0x463f) {
            return true;
        }
    } else {
        // 07 70 a0 e1    mov r7, r7
        // 32-bit instructions may be only 16-bit aligned
        if (*(unaligned_uint32_t *)ra == 0xe1a07007) {
            return true;
        }
    }
    return false;
}

// __arm__
# elif __arm64__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    // fd 03 1d aa    mov fp, fp
    // arm64 instructions are well-aligned
    if (*(uint32_t *)ra == 0xaa1d03fd) {
        return true;
    }
    return false;
}

// __arm64__
# elif __i386__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    // 89 ed    movl %ebp, %ebp
    if (*(unaligned_uint16_t *)ra == 0xed89) {
        return true;
    }
    return false;
}

// __i386__
# else

#warning unknown architecture

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    return false;
}

// unknown architecture
# endif


static ALWAYS_INLINE ReturnDisposition
getReturnDisposition()
{
    return (ReturnDisposition)(uintptr_t)tls_get_direct(RETURN_DISPOSITION_KEY);
}


static ALWAYS_INLINE void
setReturnDisposition(ReturnDisposition disposition)
{
    tls_set_direct(RETURN_DISPOSITION_KEY, (void*)(uintptr_t)disposition);
}


// Try to prepare for optimized return with the given disposition (+0 or +1).
// Returns true if the optimized path is successful.
// Otherwise the return value must be retained and/or autoreleased as usual.
static ALWAYS_INLINE bool
prepareOptimizedReturn(ReturnDisposition disposition)
{
    assert(getReturnDisposition() == ReturnAtPlus0);

    if (callerAcceptsOptimizedReturn(__builtin_return_address(0))) {
        if (disposition) setReturnDisposition(disposition);
        return true;
    }

    return false;
}
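
// For reference (not part of this header): the optimized callee entry
// points in NSObject.mm use prepareOptimizedReturn() roughly like this:
//
//     id objc_autoreleaseReturnValue(id obj)
//     {
//         if (prepareOptimizedReturn(ReturnAtPlus1)) return obj;
//         return objc_autorelease(obj);  // unoptimized: autorelease as usual
//     }
//
//     id objc_retainAutoreleaseReturnValue(id obj)
//     {
//         if (prepareOptimizedReturn(ReturnAtPlus0)) return obj;
//         // unoptimized: retain and autorelease as usual
//         return objc_retainAutoreleaseAndReturn(obj);
//     }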
// Try to accept an optimized return.
// Returns the disposition of the returned object (+0 or +1).
// An un-optimized return is +0.
static ALWAYS_INLINE ReturnDisposition
acceptOptimizedReturn()
{
    ReturnDisposition disposition = getReturnDisposition();
    setReturnDisposition(ReturnAtPlus0);  // reset to the unoptimized state
    return disposition;
}
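
// For reference (not part of this header): the optimized caller entry
// points in NSObject.mm consume the saved disposition roughly like this:
//
//     id objc_retainAutoreleasedReturnValue(id obj)
//     {
//         if (acceptOptimizedReturn() == ReturnAtPlus1) return obj;
//         return objc_retain(obj);  // callee left it at +0; retain now
//     }
//
//     id objc_unsafeClaimAutoreleasedReturnValue(id obj)
//     {
//         if (acceptOptimizedReturn() == ReturnAtPlus0) return obj;
//         return objc_releaseAndReturn(obj);  // callee left +1; drop to +0
//     }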
// SUPPORT_RETURN_AUTORELEASE
#else
// not SUPPORT_RETURN_AUTORELEASE


static ALWAYS_INLINE bool
prepareOptimizedReturn(ReturnDisposition disposition __unused)
{
    return false;
}


static ALWAYS_INLINE ReturnDisposition
acceptOptimizedReturn()
{
    return ReturnAtPlus0;
}

// not SUPPORT_RETURN_AUTORELEASE
#endif


// _OBJC_OBJECT_H_
#endif