- /*
- * Copyright (c) 2010-2012 Apple Inc. All rights reserved.
- *
- * @APPLE_LICENSE_HEADER_START@
- *
- * This file contains Original Code and/or Modifications of Original Code
- * as defined in and that are subject to the Apple Public Source License
- * Version 2.0 (the 'License'). You may not use this file except in
- * compliance with the License. Please obtain a copy of the License at
- * http://www.opensource.apple.com/apsl/ and read it before using this
- * file.
- *
- * The Original Code and all software distributed under the License are
- * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
- * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
- * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
- * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
- * Please see the License for the specific language governing rights and
- * limitations under the License.
- *
- * @APPLE_LICENSE_HEADER_END@
- */
- /***********************************************************************
- * Inlineable parts of NSObject / objc_object implementation
- **********************************************************************/
- #ifndef _OBJC_OBJCOBJECT_H_
- #define _OBJC_OBJCOBJECT_H_
- #include "objc-private.h"
- enum ReturnDisposition : bool {
- ReturnAtPlus0 = false, ReturnAtPlus1 = true
- };
- static ALWAYS_INLINE
- bool prepareOptimizedReturn(ReturnDisposition disposition);
- #if SUPPORT_TAGGED_POINTERS
- extern "C" {
- extern Class objc_debug_taggedpointer_classes[_OBJC_TAG_SLOT_COUNT];
- extern Class objc_debug_taggedpointer_ext_classes[_OBJC_TAG_EXT_SLOT_COUNT];
- }
- #define objc_tag_classes objc_debug_taggedpointer_classes
- #define objc_tag_ext_classes objc_debug_taggedpointer_ext_classes
- #endif
- #if SUPPORT_INDEXED_ISA
- ALWAYS_INLINE Class &
- classForIndex(uintptr_t index) {
- ASSERT(index > 0);
- ASSERT(index < (uintptr_t)objc_indexed_classes_count);
- return objc_indexed_classes[index];
- }
- #endif
- inline bool
- objc_object::isClass()
- {
- if (isTaggedPointer()) return false;
- return ISA()->isMetaClass();
- }
- #if SUPPORT_TAGGED_POINTERS
- inline Class
- objc_object::getIsa()
- {
- if (fastpath(!isTaggedPointer())) return ISA();
- extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
- uintptr_t slot, ptr = (uintptr_t)this;
- Class cls;
- slot = (ptr >> _OBJC_TAG_SLOT_SHIFT) & _OBJC_TAG_SLOT_MASK;
- cls = objc_tag_classes[slot];
- if (slowpath(cls == (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer)) {
- slot = (ptr >> _OBJC_TAG_EXT_SLOT_SHIFT) & _OBJC_TAG_EXT_SLOT_MASK;
- cls = objc_tag_ext_classes[slot];
- }
- return cls;
- }
- inline uintptr_t
- objc_object::isaBits() const
- {
- return isa.bits;
- }
- inline bool
- objc_object::isTaggedPointer()
- {
- return _objc_isTaggedPointer(this);
- }
- inline bool
- objc_object::isBasicTaggedPointer()
- {
- return isTaggedPointer() && !isExtTaggedPointer();
- }
- inline bool
- objc_object::isExtTaggedPointer()
- {
- uintptr_t ptr = _objc_decodeTaggedPointer(this);
- return (ptr & _OBJC_TAG_EXT_MASK) == _OBJC_TAG_EXT_MASK;
- }
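- // Illustrative sketch (compiled out) of the two-level lookup performed by
- // getIsa() above. The shift/mask values below are hypothetical stand-ins
- // for the private _OBJC_TAG_* constants, and the real path also involves
- // pointer obfuscation; only the lookup structure is meant to carry over.
- #if 0
- static void *demoTagClasses[8];        // primary table: 3-bit slot
- static void *demoTagExtClasses[256];   // extended table: 8-bit slot
- static void *demoUnrecognized;         // stand-in for __NSUnrecognizedTaggedPointer
- static void *
- demoClassForTag(uintptr_t ptr)
- {
-     void *cls = demoTagClasses[(ptr >> 1) & 0x7];
-     if (cls == demoUnrecognized) {
-         // The primary slot says "extended": re-read a wider slot field.
-         cls = demoTagExtClasses[(ptr >> 4) & 0xFF];
-     }
-     return cls;
- }
- #endif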
- // SUPPORT_TAGGED_POINTERS
- #else
- // not SUPPORT_TAGGED_POINTERS
- inline Class
- objc_object::getIsa()
- {
- return ISA();
- }
- inline uintptr_t
- objc_object::isaBits() const
- {
- return isa.bits;
- }
- inline bool
- objc_object::isTaggedPointer()
- {
- return false;
- }
- inline bool
- objc_object::isBasicTaggedPointer()
- {
- return false;
- }
- inline bool
- objc_object::isExtTaggedPointer()
- {
- return false;
- }
- // not SUPPORT_TAGGED_POINTERS
- #endif
- #if SUPPORT_NONPOINTER_ISA
- inline Class
- objc_object::ISA()
- {
- ASSERT(!isTaggedPointer());
- #if SUPPORT_INDEXED_ISA
- if (isa.nonpointer) {
- uintptr_t slot = isa.indexcls;
- return classForIndex((unsigned)slot);
- }
- return (Class)isa.bits;
- #else
- return (Class)(isa.bits & ISA_MASK);
- #endif
- }
- inline Class
- objc_object::rawISA()
- {
- ASSERT(!isTaggedPointer() && !isa.nonpointer);
- return (Class)isa.bits;
- }
- inline bool
- objc_object::hasNonpointerIsa()
- {
- return isa.nonpointer;
- }
- inline void
- objc_object::initIsa(Class cls)
- {
- initIsa(cls, false, false);
- }
- inline void
- objc_object::initClassIsa(Class cls)
- {
- if (DisableNonpointerIsa || cls->instancesRequireRawIsa()) {
- initIsa(cls, false/*not nonpointer*/, false);
- } else {
- initIsa(cls, true/*nonpointer*/, false);
- }
- }
- inline void
- objc_object::initProtocolIsa(Class cls)
- {
- return initClassIsa(cls);
- }
- inline void
- objc_object::initInstanceIsa(Class cls, bool hasCxxDtor)
- {
- ASSERT(!cls->instancesRequireRawIsa());
- ASSERT(hasCxxDtor == cls->hasCxxDtor());
- initIsa(cls, true, hasCxxDtor);
- }
- inline void
- objc_object::initIsa(Class cls, bool nonpointer, bool hasCxxDtor)
- {
- ASSERT(!isTaggedPointer());
-
- if (!nonpointer) {
- isa = isa_t((uintptr_t)cls);
- } else {
- ASSERT(!DisableNonpointerIsa);
- ASSERT(!cls->instancesRequireRawIsa());
- isa_t newisa(0);
- #if SUPPORT_INDEXED_ISA
- ASSERT(cls->classArrayIndex() > 0);
- newisa.bits = ISA_INDEX_MAGIC_VALUE;
- // isa.magic is part of ISA_MAGIC_VALUE
- // isa.nonpointer is part of ISA_MAGIC_VALUE
- newisa.has_cxx_dtor = hasCxxDtor;
- newisa.indexcls = (uintptr_t)cls->classArrayIndex();
- #else
- newisa.bits = ISA_MAGIC_VALUE;
- // isa.magic is part of ISA_MAGIC_VALUE
- // isa.nonpointer is part of ISA_MAGIC_VALUE
- newisa.has_cxx_dtor = hasCxxDtor;
- newisa.shiftcls = (uintptr_t)cls >> 3;
- #endif
- // This write must be performed in a single store in some cases
- // (for example when realizing a class because other threads
- // may simultaneously try to use the class).
- // fixme use atomics here to guarantee single-store and to
- // guarantee memory order w.r.t. the class index table
- // ...but not too atomic because we don't want to hurt instantiation
- isa = newisa;
- }
- }
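- // A sketch of what the nonpointer layout packs into the single isa word.
- // The field widths below mirror the x86_64 layout from memory and are an
- // illustration only; the authoritative per-arch definitions live in isa.h.
- #if 0
- union demo_isa {
-     uintptr_t bits;
-     struct {
-         uintptr_t nonpointer        : 1;   // 1 = the fields below are valid
-         uintptr_t has_assoc         : 1;
-         uintptr_t has_cxx_dtor      : 1;
-         uintptr_t shiftcls          : 44;  // class pointer >> 3
-         uintptr_t magic             : 6;   // distinguishes isa from other data
-         uintptr_t weakly_referenced : 1;
-         uintptr_t deallocating      : 1;
-         uintptr_t has_sidetable_rc  : 1;   // extra counts spilled to side table
-         uintptr_t extra_rc          : 8;   // retain count minus one
-     };
- };
- #endif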
- inline Class
- objc_object::changeIsa(Class newCls)
- {
- // This is almost always true but there are
- // enough edge cases that we can't assert it.
- // assert(newCls->isFuture() ||
- // newCls->isInitializing() || newCls->isInitialized());
- ASSERT(!isTaggedPointer());
- isa_t oldisa;
- isa_t newisa;
- bool sideTableLocked = false;
- bool transcribeToSideTable = false;
- do {
- transcribeToSideTable = false;
- oldisa = LoadExclusive(&isa.bits);
- if ((oldisa.bits == 0 || oldisa.nonpointer) &&
- !newCls->isFuture() && newCls->canAllocNonpointer())
- {
- // 0 -> nonpointer
- // nonpointer -> nonpointer
- #if SUPPORT_INDEXED_ISA
- if (oldisa.bits == 0) newisa.bits = ISA_INDEX_MAGIC_VALUE;
- else newisa = oldisa;
- // isa.magic is part of ISA_MAGIC_VALUE
- // isa.nonpointer is part of ISA_MAGIC_VALUE
- newisa.has_cxx_dtor = newCls->hasCxxDtor();
- ASSERT(newCls->classArrayIndex() > 0);
- newisa.indexcls = (uintptr_t)newCls->classArrayIndex();
- #else
- if (oldisa.bits == 0) newisa.bits = ISA_MAGIC_VALUE;
- else newisa = oldisa;
- // isa.magic is part of ISA_MAGIC_VALUE
- // isa.nonpointer is part of ISA_MAGIC_VALUE
- newisa.has_cxx_dtor = newCls->hasCxxDtor();
- newisa.shiftcls = (uintptr_t)newCls >> 3;
- #endif
- }
- else if (oldisa.nonpointer) {
- // nonpointer -> raw pointer
- // Need to copy retain count et al to side table.
- // Acquire side table lock before setting isa to
- // prevent races such as concurrent -release.
- if (!sideTableLocked) sidetable_lock();
- sideTableLocked = true;
- transcribeToSideTable = true;
- newisa.cls = newCls;
- }
- else {
- // raw pointer -> raw pointer
- newisa.cls = newCls;
- }
- } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));
- if (transcribeToSideTable) {
- // Copy oldisa's retain count et al to side table.
- // oldisa.has_assoc: nothing to do
- // oldisa.has_cxx_dtor: nothing to do
- sidetable_moveExtraRC_nolock(oldisa.extra_rc,
- oldisa.deallocating,
- oldisa.weakly_referenced);
- }
- if (sideTableLocked) sidetable_unlock();
- if (oldisa.nonpointer) {
- #if SUPPORT_INDEXED_ISA
- return classForIndex(oldisa.indexcls);
- #else
- return (Class)((uintptr_t)oldisa.shiftcls << 3);
- #endif
- }
- else {
- return oldisa.cls;
- }
- }
- inline bool
- objc_object::hasAssociatedObjects()
- {
- if (isTaggedPointer()) return true;
- if (isa.nonpointer) return isa.has_assoc;
- return true;
- }
- inline void
- objc_object::setHasAssociatedObjects()
- {
- if (isTaggedPointer()) return;
- retry:
- isa_t oldisa = LoadExclusive(&isa.bits);
- isa_t newisa = oldisa;
- if (!newisa.nonpointer || newisa.has_assoc) {
- ClearExclusive(&isa.bits);
- return;
- }
- newisa.has_assoc = true;
- if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
- }
- inline bool
- objc_object::isWeaklyReferenced()
- {
- ASSERT(!isTaggedPointer());
- if (isa.nonpointer) return isa.weakly_referenced;
- else return sidetable_isWeaklyReferenced();
- }
- inline void
- objc_object::setWeaklyReferenced_nolock()
- {
- retry:
- isa_t oldisa = LoadExclusive(&isa.bits);
- isa_t newisa = oldisa;
- if (slowpath(!newisa.nonpointer)) {
- ClearExclusive(&isa.bits);
- sidetable_setWeaklyReferenced_nolock();
- return;
- }
- if (newisa.weakly_referenced) {
- ClearExclusive(&isa.bits);
- return;
- }
- newisa.weakly_referenced = true;
- if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
- }
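- // setHasAssociatedObjects() and setWeaklyReferenced_nolock() share the same
- // LL/SC retry shape. A portable sketch of that shape with std::atomic
- // (illustration only; the runtime deliberately uses the raw primitives):
- #if 0
- #include <atomic>
- static void
- demoSetFlag(std::atomic<uintptr_t> &bits, uintptr_t flag)
- {
-     uintptr_t oldv = bits.load(std::memory_order_relaxed);
-     do {
-         if (oldv & flag) return;   // already set: nothing to store
-     } while (!bits.compare_exchange_weak(oldv, oldv | flag,
-                                          std::memory_order_relaxed));
- }
- #endif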
- inline bool
- objc_object::hasCxxDtor()
- {
- ASSERT(!isTaggedPointer());
- if (isa.nonpointer) return isa.has_cxx_dtor;
- else return isa.cls->hasCxxDtor();
- }
- inline bool
- objc_object::rootIsDeallocating()
- {
- if (isTaggedPointer()) return false;
- if (isa.nonpointer) return isa.deallocating;
- return sidetable_isDeallocating();
- }
- inline void
- objc_object::clearDeallocating()
- {
- if (slowpath(!isa.nonpointer)) {
- // Slow path for raw pointer isa.
- sidetable_clearDeallocating();
- }
- else if (slowpath(isa.weakly_referenced || isa.has_sidetable_rc)) {
- // Slow path for non-pointer isa with weak refs and/or side table data.
- clearDeallocating_slow();
- }
- assert(!sidetable_present());
- }
- inline void
- objc_object::rootDealloc()
- {
- if (isTaggedPointer()) return; // fixme necessary?
- if (fastpath(isa.nonpointer &&
- !isa.weakly_referenced &&
- !isa.has_assoc &&
- !isa.has_cxx_dtor &&
- !isa.has_sidetable_rc))
- {
- assert(!sidetable_present());
- free(this);
- }
- else {
- object_dispose((id)this);
- }
- }
- // Equivalent to calling [this retain], with shortcuts if there is no override
- inline id
- objc_object::retain()
- {
- ASSERT(!isTaggedPointer());
- if (fastpath(!ISA()->hasCustomRR())) {
- return rootRetain();
- }
- return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(retain));
- }
- // Base retain implementation, ignoring overrides.
- // This does not check isa.fast_rr; if there is an RR override then
- // it was already called and it chose to call [super retain].
- //
- // tryRetain=true is the -_tryRetain path.
- // handleOverflow=false is the frameless fast path.
- // handleOverflow=true is the framed slow path including overflow to side table
- // The code is structured this way to prevent duplication.
- ALWAYS_INLINE id
- objc_object::rootRetain()
- {
- return rootRetain(false, false);
- }
- ALWAYS_INLINE bool
- objc_object::rootTryRetain()
- {
- return rootRetain(true, false) ? true : false;
- }
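- // For reference, the out-of-line overflow handler rootRetain_overflow(),
- // called from the fast path below, simply re-enters rootRetain() with
- // handleOverflow=true. A sketch of its shape (the definition lives
- // elsewhere in the runtime, e.g. NSObject.mm):
- //
- //   NEVER_INLINE id
- //   objc_object::rootRetain_overflow(bool tryRetain)
- //   {
- //       return rootRetain(tryRetain, true);
- //   }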
- ALWAYS_INLINE id
- objc_object::rootRetain(bool tryRetain, bool handleOverflow)
- {
- if (isTaggedPointer()) return (id)this;
- bool sideTableLocked = false;
- bool transcribeToSideTable = false;
- isa_t oldisa;
- isa_t newisa;
- do {
- transcribeToSideTable = false;
- oldisa = LoadExclusive(&isa.bits);
- newisa = oldisa;
- if (slowpath(!newisa.nonpointer)) {
- ClearExclusive(&isa.bits);
- if (rawISA()->isMetaClass()) return (id)this;
- if (!tryRetain && sideTableLocked) sidetable_unlock();
- if (tryRetain) return sidetable_tryRetain() ? (id)this : nil;
- else return sidetable_retain();
- }
- // don't check newisa.fast_rr; we already called any RR overrides
- if (slowpath(tryRetain && newisa.deallocating)) {
- ClearExclusive(&isa.bits);
- if (!tryRetain && sideTableLocked) sidetable_unlock();
- return nil;
- }
- uintptr_t carry;
- newisa.bits = addc(newisa.bits, RC_ONE, 0, &carry); // extra_rc++
- if (slowpath(carry)) {
- // newisa.extra_rc++ overflowed
- if (!handleOverflow) {
- ClearExclusive(&isa.bits);
- return rootRetain_overflow(tryRetain);
- }
- // Leave half of the retain counts inline and
- // prepare to copy the other half to the side table.
- if (!tryRetain && !sideTableLocked) sidetable_lock();
- sideTableLocked = true;
- transcribeToSideTable = true;
- newisa.extra_rc = RC_HALF;
- newisa.has_sidetable_rc = true;
- }
- } while (slowpath(!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)));
- if (slowpath(transcribeToSideTable)) {
- // Copy the other half of the retain counts to the side table.
- sidetable_addExtraRC_nolock(RC_HALF);
- }
- if (slowpath(!tryRetain && sideTableLocked)) sidetable_unlock();
- return (id)this;
- }
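- // Why a plain add-with-carry suffices as the overflow check above: on the
- // nonpointer layouts, extra_rc occupies the most significant bits of the
- // isa word, so a carry out of the whole word is exactly an overflow of the
- // inline count. A minimal sketch under that assumption:
- #if 0
- static const uintptr_t DEMO_RC_ONE = 1ULL << 56;  // hypothetical: extra_rc in bits 56..63
- static bool
- demoRetainInline(uintptr_t &bits)
- {
-     uintptr_t sum = bits + DEMO_RC_ONE;
-     if (sum < bits) return false;   // carry out == extra_rc overflow: go slow
-     bits = sum;
-     return true;
- }
- #endif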
- // Equivalent to calling [this release], with shortcuts if there is no override
- inline void
- objc_object::release()
- {
- ASSERT(!isTaggedPointer());
- if (fastpath(!ISA()->hasCustomRR())) {
- rootRelease();
- return;
- }
- ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(release));
- }
- // Base release implementation, ignoring overrides.
- // Does not call -dealloc.
- // Returns true if the object should now be deallocated.
- // This does not check isa.fast_rr; if there is an RR override then
- // it was already called and it chose to call [super release].
- //
- // handleUnderflow=false is the frameless fast path.
- // handleUnderflow=true is the framed slow path including side table borrow
- // The code is structured this way to prevent duplication.
- ALWAYS_INLINE bool
- objc_object::rootRelease()
- {
- return rootRelease(true, false);
- }
- ALWAYS_INLINE bool
- objc_object::rootReleaseShouldDealloc()
- {
- return rootRelease(false, false);
- }
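- // As with retain, the out-of-line underflow handler rootRelease_underflow(),
- // called from the fast path below, simply re-enters rootRelease() with
- // handleUnderflow=true; a sketch of its shape:
- //
- //   NEVER_INLINE bool
- //   objc_object::rootRelease_underflow(bool performDealloc)
- //   {
- //       return rootRelease(performDealloc, true);
- //   }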
- ALWAYS_INLINE bool
- objc_object::rootRelease(bool performDealloc, bool handleUnderflow)
- {
- if (isTaggedPointer()) return false;
- bool sideTableLocked = false;
- isa_t oldisa;
- isa_t newisa;
- retry:
- do {
- oldisa = LoadExclusive(&isa.bits);
- newisa = oldisa;
- if (slowpath(!newisa.nonpointer)) {
- ClearExclusive(&isa.bits);
- if (rawISA()->isMetaClass()) return false;
- if (sideTableLocked) sidetable_unlock();
- return sidetable_release(performDealloc);
- }
- // don't check newisa.fast_rr; we already called any RR overrides
- uintptr_t carry;
- newisa.bits = subc(newisa.bits, RC_ONE, 0, &carry); // extra_rc--
- if (slowpath(carry)) {
- // don't ClearExclusive()
- goto underflow;
- }
- } while (slowpath(!StoreReleaseExclusive(&isa.bits,
- oldisa.bits, newisa.bits)));
- if (slowpath(sideTableLocked)) sidetable_unlock();
- return false;
- underflow:
- // newisa.extra_rc-- underflowed: borrow from side table or deallocate
- // abandon newisa to undo the decrement
- newisa = oldisa;
- if (slowpath(newisa.has_sidetable_rc)) {
- if (!handleUnderflow) {
- ClearExclusive(&isa.bits);
- return rootRelease_underflow(performDealloc);
- }
- // Transfer retain count from side table to inline storage.
- if (!sideTableLocked) {
- ClearExclusive(&isa.bits);
- sidetable_lock();
- sideTableLocked = true;
- // Need to start over to avoid a race against
- // the nonpointer -> raw pointer transition.
- goto retry;
- }
- // Try to remove some retain counts from the side table.
- size_t borrowed = sidetable_subExtraRC_nolock(RC_HALF);
- // To avoid races, has_sidetable_rc must remain set
- // even if the side table count is now zero.
- if (borrowed > 0) {
- // Side table retain count decreased.
- // Try to add them to the inline count.
- newisa.extra_rc = borrowed - 1; // redo the original decrement too
- bool stored = StoreReleaseExclusive(&isa.bits,
- oldisa.bits, newisa.bits);
- if (!stored) {
- // Inline update failed.
- // Try it again right now. This prevents livelock on LL/SC
- // architectures where the side table access itself may have
- // dropped the reservation.
- isa_t oldisa2 = LoadExclusive(&isa.bits);
- isa_t newisa2 = oldisa2;
- if (newisa2.nonpointer) {
- uintptr_t overflow;
- newisa2.bits =
- addc(newisa2.bits, RC_ONE * (borrowed-1), 0, &overflow);
- if (!overflow) {
- stored = StoreReleaseExclusive(&isa.bits, oldisa2.bits,
- newisa2.bits);
- }
- }
- }
- if (!stored) {
- // Inline update failed.
- // Put the retains back in the side table.
- sidetable_addExtraRC_nolock(borrowed);
- goto retry;
- }
- // Decrement successful after borrowing from side table.
- // This decrement cannot be the deallocating decrement - the side
- // table lock and has_sidetable_rc bit ensure that if everyone
- // else tried to -release while we worked, the last one would block.
- sidetable_unlock();
- return false;
- }
- else {
- // Side table is empty after all. Fall-through to the dealloc path.
- }
- }
- // Really deallocate.
- if (slowpath(newisa.deallocating)) {
- ClearExclusive(&isa.bits);
- if (sideTableLocked) sidetable_unlock();
- return overrelease_error();
- // does not actually return
- }
- newisa.deallocating = true;
- if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
- if (slowpath(sideTableLocked)) sidetable_unlock();
- __c11_atomic_thread_fence(__ATOMIC_ACQUIRE);
- if (performDealloc) {
- ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(dealloc));
- }
- return true;
- }
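- // Numeric sketch of the borrow path above, assuming a hypothetical RC_HALF
- // of 128: the inline count underflows, up to 128 counts move over from the
- // side table, and borrowed-1 (at most 127) is stored inline; the extra -1
- // folds in the decrement that triggered the underflow in the first place.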
- // Equivalent to [this autorelease], with shortcuts if there is no override
- inline id
- objc_object::autorelease()
- {
- ASSERT(!isTaggedPointer());
- if (fastpath(!ISA()->hasCustomRR())) {
- return rootAutorelease();
- }
- return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(autorelease));
- }
- // Base autorelease implementation, ignoring overrides.
- inline id
- objc_object::rootAutorelease()
- {
- if (isTaggedPointer()) return (id)this;
- if (prepareOptimizedReturn(ReturnAtPlus1)) return (id)this;
- return rootAutorelease2();
- }
- inline uintptr_t
- objc_object::rootRetainCount()
- {
- if (isTaggedPointer()) return (uintptr_t)this;
- sidetable_lock();
- isa_t bits = LoadExclusive(&isa.bits);
- ClearExclusive(&isa.bits);
- if (bits.nonpointer) {
- uintptr_t rc = 1 + bits.extra_rc;
- if (bits.has_sidetable_rc) {
- rc += sidetable_getExtraRC_nolock();
- }
- sidetable_unlock();
- return rc;
- }
- sidetable_unlock();
- return sidetable_retainCount();
- }
- // SUPPORT_NONPOINTER_ISA
- #else
- // not SUPPORT_NONPOINTER_ISA
- inline Class
- objc_object::ISA()
- {
- ASSERT(!isTaggedPointer());
- return isa.cls;
- }
- inline Class
- objc_object::rawISA()
- {
- return ISA();
- }
- inline bool
- objc_object::hasNonpointerIsa()
- {
- return false;
- }
- inline void
- objc_object::initIsa(Class cls)
- {
- ASSERT(!isTaggedPointer());
- isa = (uintptr_t)cls;
- }
- inline void
- objc_object::initClassIsa(Class cls)
- {
- initIsa(cls);
- }
- inline void
- objc_object::initProtocolIsa(Class cls)
- {
- initIsa(cls);
- }
- inline void
- objc_object::initInstanceIsa(Class cls, bool)
- {
- initIsa(cls);
- }
- inline void
- objc_object::initIsa(Class cls, bool, bool)
- {
- initIsa(cls);
- }
- inline Class
- objc_object::changeIsa(Class cls)
- {
- // This is almost always true but there are
- // enough edge cases that we can't assert it.
- // assert(cls->isFuture() ||
- // cls->isInitializing() || cls->isInitialized());
- ASSERT(!isTaggedPointer());
-
- isa_t oldisa, newisa;
- newisa.cls = cls;
- do {
- oldisa = LoadExclusive(&isa.bits);
- } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));
-
- if (oldisa.cls && oldisa.cls->instancesHaveAssociatedObjects()) {
- cls->setInstancesHaveAssociatedObjects();
- }
-
- return oldisa.cls;
- }
- inline bool
- objc_object::hasAssociatedObjects()
- {
- return getIsa()->instancesHaveAssociatedObjects();
- }
- inline void
- objc_object::setHasAssociatedObjects()
- {
- getIsa()->setInstancesHaveAssociatedObjects();
- }
- inline bool
- objc_object::isWeaklyReferenced()
- {
- ASSERT(!isTaggedPointer());
- return sidetable_isWeaklyReferenced();
- }
- inline void
- objc_object::setWeaklyReferenced_nolock()
- {
- ASSERT(!isTaggedPointer());
- sidetable_setWeaklyReferenced_nolock();
- }
- inline bool
- objc_object::hasCxxDtor()
- {
- ASSERT(!isTaggedPointer());
- return isa.cls->hasCxxDtor();
- }
- inline bool
- objc_object::rootIsDeallocating()
- {
- if (isTaggedPointer()) return false;
- return sidetable_isDeallocating();
- }
- inline void
- objc_object::clearDeallocating()
- {
- sidetable_clearDeallocating();
- }
- inline void
- objc_object::rootDealloc()
- {
- if (isTaggedPointer()) return;
- object_dispose((id)this);
- }
- // Equivalent to calling [this retain], with shortcuts if there is no override
- inline id
- objc_object::retain()
- {
- ASSERT(!isTaggedPointer());
- if (fastpath(!ISA()->hasCustomRR())) {
- return sidetable_retain();
- }
- return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(retain));
- }
- // Base retain implementation, ignoring overrides.
- // This does not check isa.fast_rr; if there is an RR override then
- // it was already called and it chose to call [super retain].
- inline id
- objc_object::rootRetain()
- {
- if (isTaggedPointer()) return (id)this;
- return sidetable_retain();
- }
- // Equivalent to calling [this release], with shortcuts if there is no override
- inline void
- objc_object::release()
- {
- ASSERT(!isTaggedPointer());
- if (fastpath(!ISA()->hasCustomRR())) {
- sidetable_release();
- return;
- }
- ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(release));
- }
- // Base release implementation, ignoring overrides.
- // Does not call -dealloc.
- // Returns true if the object should now be deallocated.
- // This does not check isa.fast_rr; if there is an RR override then
- // it was already called and it chose to call [super release].
- inline bool
- objc_object::rootRelease()
- {
- if (isTaggedPointer()) return false;
- return sidetable_release(true);
- }
- inline bool
- objc_object::rootReleaseShouldDealloc()
- {
- if (isTaggedPointer()) return false;
- return sidetable_release(false);
- }
- // Equivalent to [this autorelease], with shortcuts if there is no override
- inline id
- objc_object::autorelease()
- {
- if (isTaggedPointer()) return (id)this;
- if (fastpath(!ISA()->hasCustomRR())) return rootAutorelease();
- return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(autorelease));
- }
- // Base autorelease implementation, ignoring overrides.
- inline id
- objc_object::rootAutorelease()
- {
- if (isTaggedPointer()) return (id)this;
- if (prepareOptimizedReturn(ReturnAtPlus1)) return (id)this;
- return rootAutorelease2();
- }
- // Base tryRetain implementation, ignoring overrides.
- // This does not check isa.fast_rr; if there is an RR override then
- // it was already called and it chose to call [super _tryRetain].
- inline bool
- objc_object::rootTryRetain()
- {
- if (isTaggedPointer()) return true;
- return sidetable_tryRetain();
- }
- inline uintptr_t
- objc_object::rootRetainCount()
- {
- if (isTaggedPointer()) return (uintptr_t)this;
- return sidetable_retainCount();
- }
- // not SUPPORT_NONPOINTER_ISA
- #endif
- #if SUPPORT_RETURN_AUTORELEASE
- /***********************************************************************
- Fast handling of return through Cocoa's +0 autoreleasing convention.
- The caller and callee cooperate to keep the returned object
- out of the autorelease pool and eliminate redundant retain/release pairs.
- An optimized callee looks at the caller's instructions following the
- return. If the caller's instructions are also optimized then the callee
- skips all retain count operations: no autorelease, no retain/autorelease.
- Instead it saves the result's current retain count (+0 or +1) in
- thread-local storage. If the caller does not look optimized then
- the callee performs autorelease or retain/autorelease as usual.
- An optimized caller looks at the thread-local storage. If the result
- is set then it performs any retain or release needed to change the
- result from the retain count left by the callee to the retain count
- desired by the caller. Otherwise the caller assumes the result is
- currently at +0 from an unoptimized callee and performs any retain
- needed for that case.
- There are two optimized callees:
- objc_autoreleaseReturnValue
- result is currently +1. The unoptimized path autoreleases it.
- objc_retainAutoreleaseReturnValue
- result is currently +0. The unoptimized path retains and autoreleases it.
- There are two optimized callers:
- objc_retainAutoreleasedReturnValue
- caller wants the value at +1. The unoptimized path retains it.
- objc_unsafeClaimAutoreleasedReturnValue
- caller wants the value at +0 unsafely. The unoptimized path does nothing.
- Example:
- Callee:
- // compute ret at +1
- return objc_autoreleaseReturnValue(ret);
-
- Caller:
- ret = callee();
- ret = objc_retainAutoreleasedReturnValue(ret);
- // use ret at +1 here
- Callee sees the optimized caller, sets TLS, and leaves the result at +1.
- Caller sees the TLS, clears it, and accepts the result at +1 as-is.
- The callee's recognition of the optimized caller is architecture-dependent.
- x86_64: Callee looks for `mov rax, rdi` followed by a call or
- jump instruction to objc_retainAutoreleasedReturnValue or
- objc_unsafeClaimAutoreleasedReturnValue.
- i386: Callee looks for a magic nop `movl %ebp, %ebp` (frame pointer register).
- armv7: Callee looks for a magic nop `mov r7, r7` (frame pointer register).
- arm64: Callee looks for a magic nop `mov x29, x29` (frame pointer register).
- Tagged pointer objects do participate in the optimized return scheme,
- because it saves message sends. They are not entered in the autorelease
- pool in the unoptimized case.
- **********************************************************************/
- # if __x86_64__
- static ALWAYS_INLINE bool
- callerAcceptsOptimizedReturn(const void * const ra0)
- {
- const uint8_t *ra1 = (const uint8_t *)ra0;
- const unaligned_uint16_t *ra2;
- const unaligned_uint32_t *ra4 = (const unaligned_uint32_t *)ra1;
- const void **sym;
- #define PREFER_GOTPCREL 0
- #if PREFER_GOTPCREL
- // 48 89 c7 movq %rax,%rdi
- // ff 15 callq *symbol@GOTPCREL(%rip)
- if (*ra4 != 0xffc78948) {
- return false;
- }
- if (ra1[4] != 0x15) {
- return false;
- }
- ra1 += 3;
- #else
- // 48 89 c7 movq %rax,%rdi
- // e8 callq symbol
- if (*ra4 != 0xe8c78948) {
- return false;
- }
- ra1 += (long)*(const unaligned_int32_t *)(ra1 + 4) + 8l;
- ra2 = (const unaligned_uint16_t *)ra1;
- // ff 25 jmpq *symbol@DYLDMAGIC(%rip)
- if (*ra2 != 0x25ff) {
- return false;
- }
- #endif
- ra1 += 6l + (long)*(const unaligned_int32_t *)(ra1 + 2);
- sym = (const void **)ra1;
- if (*sym != objc_retainAutoreleasedReturnValue &&
- *sym != objc_unsafeClaimAutoreleasedReturnValue)
- {
- return false;
- }
- return true;
- }
- // __x86_64__
- # elif __arm__
- static ALWAYS_INLINE bool
- callerAcceptsOptimizedReturn(const void *ra)
- {
- // if the low bit is set, we're returning to thumb mode
- if ((uintptr_t)ra & 1) {
- // 3f 46 mov r7, r7
- // we mask off the low bit via subtraction
- // 16-bit instructions are well-aligned
- if (*(uint16_t *)((uint8_t *)ra - 1) == 0x463f) {
- return true;
- }
- } else {
- // 07 70 a0 e1 mov r7, r7
- // 32-bit instructions may be only 16-bit aligned
- if (*(unaligned_uint32_t *)ra == 0xe1a07007) {
- return true;
- }
- }
- return false;
- }
- // __arm__
- # elif __arm64__
- static ALWAYS_INLINE bool
- callerAcceptsOptimizedReturn(const void *ra)
- {
- // fd 03 1d aa mov fp, fp
- // arm64 instructions are well-aligned
- if (*(uint32_t *)ra == 0xaa1d03fd) {
- return true;
- }
- return false;
- }
- // __arm64__
- # elif __i386__
- static ALWAYS_INLINE bool
- callerAcceptsOptimizedReturn(const void *ra)
- {
- // 89 ed movl %ebp, %ebp
- if (*(unaligned_uint16_t *)ra == 0xed89) {
- return true;
- }
- return false;
- }
- // __i386__
- # else
- #warning unknown architecture
- static ALWAYS_INLINE bool
- callerAcceptsOptimizedReturn(const void *ra)
- {
- return false;
- }
- // unknown architecture
- # endif
- static ALWAYS_INLINE ReturnDisposition
- getReturnDisposition()
- {
- return (ReturnDisposition)(uintptr_t)tls_get_direct(RETURN_DISPOSITION_KEY);
- }
- static ALWAYS_INLINE void
- setReturnDisposition(ReturnDisposition disposition)
- {
- tls_set_direct(RETURN_DISPOSITION_KEY, (void*)(uintptr_t)disposition);
- }
- // Try to prepare for optimized return with the given disposition (+0 or +1).
- // Returns true if the optimized path is successful.
- // Otherwise the return value must be retained and/or autoreleased as usual.
- static ALWAYS_INLINE bool
- prepareOptimizedReturn(ReturnDisposition disposition)
- {
- ASSERT(getReturnDisposition() == ReturnAtPlus0);
- if (callerAcceptsOptimizedReturn(__builtin_return_address(0))) {
- if (disposition) setReturnDisposition(disposition);
- return true;
- }
- return false;
- }
- // Try to accept an optimized return.
- // Returns the disposition of the returned object (+0 or +1).
- // An un-optimized return is +0.
- static ALWAYS_INLINE ReturnDisposition
- acceptOptimizedReturn()
- {
- ReturnDisposition disposition = getReturnDisposition();
- setReturnDisposition(ReturnAtPlus0); // reset to the unoptimized state
- return disposition;
- }
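- // For context, the runtime's public entry points pair with these helpers
- // roughly as follows (a sketch of their shape, not the normative
- // definitions, which live in NSObject.mm):
- #if 0
- id objc_autoreleaseReturnValue(id obj)
- {
-     if (prepareOptimizedReturn(ReturnAtPlus1)) return obj;
-     return objc_autorelease(obj);
- }
- id objc_retainAutoreleasedReturnValue(id obj)
- {
-     if (acceptOptimizedReturn() == ReturnAtPlus1) return obj;
-     return objc_retain(obj);
- }
- #endif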
- // SUPPORT_RETURN_AUTORELEASE
- #else
- // not SUPPORT_RETURN_AUTORELEASE
- static ALWAYS_INLINE bool
- prepareOptimizedReturn(ReturnDisposition disposition __unused)
- {
- return false;
- }
- static ALWAYS_INLINE ReturnDisposition
- acceptOptimizedReturn()
- {
- return ReturnAtPlus0;
- }
- // not SUPPORT_RETURN_AUTORELEASE
- #endif
- // _OBJC_OBJCOBJECT_H_
- #endif