objc-object.h

/*
 * Copyright (c) 2010-2012 Apple Inc. All rights reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */


/***********************************************************************
* Inlineable parts of NSObject / objc_object implementation
**********************************************************************/

#ifndef _OBJC_OBJCOBJECT_H_
#define _OBJC_OBJCOBJECT_H_

#include "objc-private.h"


enum ReturnDisposition : bool {
    ReturnAtPlus0 = false, ReturnAtPlus1 = true
};

static ALWAYS_INLINE
bool prepareOptimizedReturn(ReturnDisposition disposition);


#if SUPPORT_TAGGED_POINTERS

extern "C" {
    extern Class objc_debug_taggedpointer_classes[_OBJC_TAG_SLOT_COUNT];
    extern Class objc_debug_taggedpointer_ext_classes[_OBJC_TAG_EXT_SLOT_COUNT];
}
#define objc_tag_classes objc_debug_taggedpointer_classes
#define objc_tag_ext_classes objc_debug_taggedpointer_ext_classes

#endif

#if SUPPORT_INDEXED_ISA

ALWAYS_INLINE Class &
classForIndex(uintptr_t index) {
    ASSERT(index > 0);
    ASSERT(index < (uintptr_t)objc_indexed_classes_count);
    return objc_indexed_classes[index];
}

#endif


inline bool
objc_object::isClass()
{
    if (isTaggedPointer()) return false;
    return ISA()->isMetaClass();
}


#if SUPPORT_TAGGED_POINTERS

inline Class
objc_object::getIsa()
{
    if (fastpath(!isTaggedPointer())) return ISA();

    extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
    uintptr_t slot, ptr = (uintptr_t)this;
    Class cls;

    slot = (ptr >> _OBJC_TAG_SLOT_SHIFT) & _OBJC_TAG_SLOT_MASK;
    cls = objc_tag_classes[slot];
    if (slowpath(cls == (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer)) {
        slot = (ptr >> _OBJC_TAG_EXT_SLOT_SHIFT) & _OBJC_TAG_EXT_SLOT_MASK;
        cls = objc_tag_ext_classes[slot];
    }
    return cls;
}

inline uintptr_t
objc_object::isaBits() const
{
    return isa.bits;
}

inline bool
objc_object::isTaggedPointer()
{
    return _objc_isTaggedPointer(this);
}

inline bool
objc_object::isBasicTaggedPointer()
{
    return isTaggedPointer() && !isExtTaggedPointer();
}

inline bool
objc_object::isExtTaggedPointer()
{
    uintptr_t ptr = _objc_decodeTaggedPointer(this);
    return (ptr & _OBJC_TAG_EXT_MASK) == _OBJC_TAG_EXT_MASK;
}
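
// Illustrative sketch of the slot lookup above (not part of the runtime;
// the real shift/mask values are platform-dependent and the pointer is
// additionally obfuscated by _objc_decodeTaggedPointer, so the constants
// here are hypothetical):
//
//   // Suppose _OBJC_TAG_SLOT_SHIFT == 60 and _OBJC_TAG_SLOT_MASK == 0x7.
//   // For a tagged pointer 0xb000000000000012:
//   //   slot = (0xb000000000000012 >> 60) & 0x7  ==  0x3
//   //   cls  = objc_tag_classes[0x3]    // e.g. a tagged NSNumber class
//   // The remaining bits carry the payload (the boxed value itself),
//   // so no heap object exists and no isa field is ever read.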

// SUPPORT_TAGGED_POINTERS
#else
// not SUPPORT_TAGGED_POINTERS

inline Class
objc_object::getIsa()
{
    return ISA();
}

inline uintptr_t
objc_object::isaBits() const
{
    return isa.bits;
}

inline bool
objc_object::isTaggedPointer()
{
    return false;
}

inline bool
objc_object::isBasicTaggedPointer()
{
    return false;
}

inline bool
objc_object::isExtTaggedPointer()
{
    return false;
}

// not SUPPORT_TAGGED_POINTERS
#endif


#if SUPPORT_NONPOINTER_ISA

inline Class
objc_object::ISA()
{
    ASSERT(!isTaggedPointer());
#if SUPPORT_INDEXED_ISA
    if (isa.nonpointer) {
        uintptr_t slot = isa.indexcls;
        return classForIndex((unsigned)slot);
    }
    return (Class)isa.bits;
#else
    return (Class)(isa.bits & ISA_MASK);
#endif
}

inline Class
objc_object::rawISA()
{
    ASSERT(!isTaggedPointer() && !isa.nonpointer);
    return (Class)isa.bits;
}

inline bool
objc_object::hasNonpointerIsa()
{
    return isa.nonpointer;
}

inline void
objc_object::initIsa(Class cls)
{
    initIsa(cls, false, false);
}

inline void
objc_object::initClassIsa(Class cls)
{
    if (DisableNonpointerIsa || cls->instancesRequireRawIsa()) {
        initIsa(cls, false/*not nonpointer*/, false);
    } else {
        initIsa(cls, true/*nonpointer*/, false);
    }
}

inline void
objc_object::initProtocolIsa(Class cls)
{
    return initClassIsa(cls);
}

inline void
objc_object::initInstanceIsa(Class cls, bool hasCxxDtor)
{
    ASSERT(!cls->instancesRequireRawIsa());
    ASSERT(hasCxxDtor == cls->hasCxxDtor());

    initIsa(cls, true, hasCxxDtor);
}

inline void
objc_object::initIsa(Class cls, bool nonpointer, bool hasCxxDtor)
{
    ASSERT(!isTaggedPointer());

    if (!nonpointer) {
        isa = isa_t((uintptr_t)cls);
    } else {
        ASSERT(!DisableNonpointerIsa);
        ASSERT(!cls->instancesRequireRawIsa());

        isa_t newisa(0);

#if SUPPORT_INDEXED_ISA
        ASSERT(cls->classArrayIndex() > 0);
        newisa.bits = ISA_INDEX_MAGIC_VALUE;
        // isa.magic is part of ISA_MAGIC_VALUE
        // isa.nonpointer is part of ISA_MAGIC_VALUE
        newisa.has_cxx_dtor = hasCxxDtor;
        newisa.indexcls = (uintptr_t)cls->classArrayIndex();
#else
        newisa.bits = ISA_MAGIC_VALUE;
        // isa.magic is part of ISA_MAGIC_VALUE
        // isa.nonpointer is part of ISA_MAGIC_VALUE
        newisa.has_cxx_dtor = hasCxxDtor;
        newisa.shiftcls = (uintptr_t)cls >> 3;
#endif

        // This write must be performed in a single store in some cases
        // (for example when realizing a class because other threads
        // may simultaneously try to use the class).
        // fixme use atomics here to guarantee single-store and to
        // guarantee memory order w.r.t. the class index table
        // ...but not too atomic because we don't want to hurt instantiation
        isa = newisa;
    }
}
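
// Worked example of the shiftcls packing above (illustrative, with a
// made-up address): classes are allocated with at least 8-byte alignment,
// so the low 3 bits of a class pointer are always zero and shiftcls can
// store the pointer >> 3, freeing those bits for flags.
//
//   // cls             == 0x00000001000080f0   (8-byte aligned)
//   // newisa.shiftcls == 0x00000001000080f0 >> 3 == 0x000000002000101e
//   // ISA() recovers the pointer with (isa.bits & ISA_MASK): the mask
//   // covers exactly the bit positions shiftcls occupies, so the stored
//   // bits read back in place as the original class pointer.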

inline Class
objc_object::changeIsa(Class newCls)
{
    // This is almost always true but there are
    // enough edge cases that we can't assert it.
    // assert(newCls->isFuture()  ||
    //        newCls->isInitializing()  ||  newCls->isInitialized());

    ASSERT(!isTaggedPointer());

    isa_t oldisa;
    isa_t newisa;

    bool sideTableLocked = false;
    bool transcribeToSideTable = false;

    do {
        transcribeToSideTable = false;
        oldisa = LoadExclusive(&isa.bits);
        if ((oldisa.bits == 0 || oldisa.nonpointer) &&
            !newCls->isFuture() && newCls->canAllocNonpointer())
        {
            // 0 -> nonpointer
            // nonpointer -> nonpointer
#if SUPPORT_INDEXED_ISA
            if (oldisa.bits == 0) newisa.bits = ISA_INDEX_MAGIC_VALUE;
            else newisa = oldisa;
            // isa.magic is part of ISA_MAGIC_VALUE
            // isa.nonpointer is part of ISA_MAGIC_VALUE
            newisa.has_cxx_dtor = newCls->hasCxxDtor();
            ASSERT(newCls->classArrayIndex() > 0);
            newisa.indexcls = (uintptr_t)newCls->classArrayIndex();
#else
            if (oldisa.bits == 0) newisa.bits = ISA_MAGIC_VALUE;
            else newisa = oldisa;
            // isa.magic is part of ISA_MAGIC_VALUE
            // isa.nonpointer is part of ISA_MAGIC_VALUE
            newisa.has_cxx_dtor = newCls->hasCxxDtor();
            newisa.shiftcls = (uintptr_t)newCls >> 3;
#endif
        }
        else if (oldisa.nonpointer) {
            // nonpointer -> raw pointer
            // Need to copy retain count et al to side table.
            // Acquire side table lock before setting isa to
            // prevent races such as concurrent -release.
            if (!sideTableLocked) sidetable_lock();
            sideTableLocked = true;
            transcribeToSideTable = true;
            newisa.cls = newCls;
        }
        else {
            // raw pointer -> raw pointer
            newisa.cls = newCls;
        }
    } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));

    if (transcribeToSideTable) {
        // Copy oldisa's retain count et al to side table.
        // oldisa.has_assoc: nothing to do
        // oldisa.has_cxx_dtor: nothing to do
        sidetable_moveExtraRC_nolock(oldisa.extra_rc,
                                     oldisa.deallocating,
                                     oldisa.weakly_referenced);
    }

    if (sideTableLocked) sidetable_unlock();

    if (oldisa.nonpointer) {
#if SUPPORT_INDEXED_ISA
        return classForIndex(oldisa.indexcls);
#else
        return (Class)((uintptr_t)oldisa.shiftcls << 3);
#endif
    }
    else {
        return oldisa.cls;
    }
}
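
// Usage sketch (hedged; simplified from the runtime's .mm sources, which
// add further checks): object_setClass is essentially a nil-checked
// wrapper over changeIsa, returning the previous class.
//
//   // Class object_setClass(id obj, Class cls)
//   // {
//   //     if (!obj) return nil;
//   //     return ((objc_object *)obj)->changeIsa(cls);
//   // }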

inline bool
objc_object::hasAssociatedObjects()
{
    if (isTaggedPointer()) return true;
    if (isa.nonpointer) return isa.has_assoc;
    return true;
}

inline void
objc_object::setHasAssociatedObjects()
{
    if (isTaggedPointer()) return;

 retry:
    isa_t oldisa = LoadExclusive(&isa.bits);
    isa_t newisa = oldisa;
    if (!newisa.nonpointer || newisa.has_assoc) {
        ClearExclusive(&isa.bits);
        return;
    }
    newisa.has_assoc = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
}

inline bool
objc_object::isWeaklyReferenced()
{
    ASSERT(!isTaggedPointer());
    if (isa.nonpointer) return isa.weakly_referenced;
    else return sidetable_isWeaklyReferenced();
}

inline void
objc_object::setWeaklyReferenced_nolock()
{
 retry:
    isa_t oldisa = LoadExclusive(&isa.bits);
    isa_t newisa = oldisa;
    if (slowpath(!newisa.nonpointer)) {
        ClearExclusive(&isa.bits);
        sidetable_setWeaklyReferenced_nolock();
        return;
    }
    if (newisa.weakly_referenced) {
        ClearExclusive(&isa.bits);
        return;
    }
    newisa.weakly_referenced = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;
}

inline bool
objc_object::hasCxxDtor()
{
    ASSERT(!isTaggedPointer());
    if (isa.nonpointer) return isa.has_cxx_dtor;
    else return isa.cls->hasCxxDtor();
}

inline bool
objc_object::rootIsDeallocating()
{
    if (isTaggedPointer()) return false;
    if (isa.nonpointer) return isa.deallocating;
    return sidetable_isDeallocating();
}

inline void
objc_object::clearDeallocating()
{
    if (slowpath(!isa.nonpointer)) {
        // Slow path for raw pointer isa.
        sidetable_clearDeallocating();
    }
    else if (slowpath(isa.weakly_referenced || isa.has_sidetable_rc)) {
        // Slow path for non-pointer isa with weak refs and/or side table data.
        clearDeallocating_slow();
    }

    assert(!sidetable_present());
}

inline void
objc_object::rootDealloc()
{
    if (isTaggedPointer()) return;  // fixme necessary?

    if (fastpath(isa.nonpointer &&
                 !isa.weakly_referenced &&
                 !isa.has_assoc &&
                 !isa.has_cxx_dtor &&
                 !isa.has_sidetable_rc))
    {
        assert(!sidetable_present());
        free(this);
    }
    else {
        object_dispose((id)this);
    }
}

// Equivalent to calling [this retain], with shortcuts if there is no override
inline id
objc_object::retain()
{
    ASSERT(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        return rootRetain();
    }

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(retain));
}

// Base retain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super retain].
//
// tryRetain=true is the -_tryRetain path.
// handleOverflow=false is the frameless fast path.
// handleOverflow=true is the framed slow path including overflow to side table
// The code is structured this way to prevent duplication.

ALWAYS_INLINE id
objc_object::rootRetain()
{
    return rootRetain(false, false);
}

ALWAYS_INLINE bool
objc_object::rootTryRetain()
{
    return rootRetain(true, false) ? true : false;
}

ALWAYS_INLINE id
objc_object::rootRetain(bool tryRetain, bool handleOverflow)
{
    if (isTaggedPointer()) return (id)this;

    bool sideTableLocked = false;
    bool transcribeToSideTable = false;

    isa_t oldisa;
    isa_t newisa;

    do {
        transcribeToSideTable = false;
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (slowpath(!newisa.nonpointer)) {
            ClearExclusive(&isa.bits);
            if (rawISA()->isMetaClass()) return (id)this;
            if (!tryRetain && sideTableLocked) sidetable_unlock();
            if (tryRetain) return sidetable_tryRetain() ? (id)this : nil;
            else return sidetable_retain();
        }
        // don't check newisa.fast_rr; we already called any RR overrides
        if (slowpath(tryRetain && newisa.deallocating)) {
            ClearExclusive(&isa.bits);
            if (!tryRetain && sideTableLocked) sidetable_unlock();
            return nil;
        }
        uintptr_t carry;
        newisa.bits = addc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc++

        if (slowpath(carry)) {
            // newisa.extra_rc++ overflowed
            if (!handleOverflow) {
                ClearExclusive(&isa.bits);
                return rootRetain_overflow(tryRetain);
            }
            // Leave half of the retain counts inline and
            // prepare to copy the other half to the side table.
            if (!tryRetain && !sideTableLocked) sidetable_lock();
            sideTableLocked = true;
            transcribeToSideTable = true;
            newisa.extra_rc = RC_HALF;
            newisa.has_sidetable_rc = true;
        }
    } while (slowpath(!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)));

    if (slowpath(transcribeToSideTable)) {
        // Copy the other half of the retain counts to the side table.
        sidetable_addExtraRC_nolock(RC_HALF);
    }

    if (slowpath(!tryRetain && sideTableLocked)) sidetable_unlock();
    return (id)this;
}
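
// Worked example of the overflow split above (illustrative; the width of
// extra_rc is platform-dependent, so an 8-bit field is assumed here):
//
//   // Suppose extra_rc is 8 bits wide, making RC_HALF == 128 (0x80).
//   // extra_rc == 255 and a retain arrives -> addc() carries out.
//   // The slow path keeps extra_rc = RC_HALF = 128 inline and adds
//   // RC_HALF to the side table, so the total is preserved:
//   //   inline 128 + side table 128 == 256 == old 255 + 1.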

// Equivalent to calling [this release], with shortcuts if there is no override
inline void
objc_object::release()
{
    ASSERT(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        rootRelease();
        return;
    }

    ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(release));
}

// Base release implementation, ignoring overrides.
// Does not call -dealloc.
// Returns true if the object should now be deallocated.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super release].
//
// handleUnderflow=false is the frameless fast path.
// handleUnderflow=true is the framed slow path including side table borrow
// The code is structured this way to prevent duplication.

ALWAYS_INLINE bool
objc_object::rootRelease()
{
    return rootRelease(true, false);
}

ALWAYS_INLINE bool
objc_object::rootReleaseShouldDealloc()
{
    return rootRelease(false, false);
}

ALWAYS_INLINE bool
objc_object::rootRelease(bool performDealloc, bool handleUnderflow)
{
    if (isTaggedPointer()) return false;

    bool sideTableLocked = false;

    isa_t oldisa;
    isa_t newisa;

 retry:
    do {
        oldisa = LoadExclusive(&isa.bits);
        newisa = oldisa;
        if (slowpath(!newisa.nonpointer)) {
            ClearExclusive(&isa.bits);
            if (rawISA()->isMetaClass()) return false;
            if (sideTableLocked) sidetable_unlock();
            return sidetable_release(performDealloc);
        }
        // don't check newisa.fast_rr; we already called any RR overrides
        uintptr_t carry;
        newisa.bits = subc(newisa.bits, RC_ONE, 0, &carry);  // extra_rc--
        if (slowpath(carry)) {
            // don't ClearExclusive()
            goto underflow;
        }
    } while (slowpath(!StoreReleaseExclusive(&isa.bits,
                                             oldisa.bits, newisa.bits)));

    if (slowpath(sideTableLocked)) sidetable_unlock();
    return false;

 underflow:
    // newisa.extra_rc-- underflowed: borrow from side table or deallocate

    // abandon newisa to undo the decrement
    newisa = oldisa;

    if (slowpath(newisa.has_sidetable_rc)) {
        if (!handleUnderflow) {
            ClearExclusive(&isa.bits);
            return rootRelease_underflow(performDealloc);
        }

        // Transfer retain count from side table to inline storage.

        if (!sideTableLocked) {
            ClearExclusive(&isa.bits);
            sidetable_lock();
            sideTableLocked = true;
            // Need to start over to avoid a race against
            // the nonpointer -> raw pointer transition.
            goto retry;
        }

        // Try to remove some retain counts from the side table.
        size_t borrowed = sidetable_subExtraRC_nolock(RC_HALF);

        // To avoid races, has_sidetable_rc must remain set
        // even if the side table count is now zero.

        if (borrowed > 0) {
            // Side table retain count decreased.
            // Try to add them to the inline count.
            newisa.extra_rc = borrowed - 1;  // redo the original decrement too
            bool stored = StoreReleaseExclusive(&isa.bits,
                                                oldisa.bits, newisa.bits);
            if (!stored) {
                // Inline update failed.
                // Try it again right now. This prevents livelock on LL/SC
                // architectures where the side table access itself may have
                // dropped the reservation.
                isa_t oldisa2 = LoadExclusive(&isa.bits);
                isa_t newisa2 = oldisa2;
                if (newisa2.nonpointer) {
                    uintptr_t overflow;
                    newisa2.bits =
                        addc(newisa2.bits, RC_ONE * (borrowed-1), 0, &overflow);
                    if (!overflow) {
                        stored = StoreReleaseExclusive(&isa.bits, oldisa2.bits,
                                                       newisa2.bits);
                    }
                }
            }

            if (!stored) {
                // Inline update failed.
                // Put the retains back in the side table.
                sidetable_addExtraRC_nolock(borrowed);
                goto retry;
            }

            // Decrement successful after borrowing from side table.
            // This decrement cannot be the deallocating decrement - the side
            // table lock and has_sidetable_rc bit ensure that if everyone
            // else tried to -release while we worked, the last one would block.
            sidetable_unlock();
            return false;
        }
        else {
            // Side table is empty after all. Fall-through to the dealloc path.
        }
    }

    // Really deallocate.

    if (slowpath(newisa.deallocating)) {
        ClearExclusive(&isa.bits);
        if (sideTableLocked) sidetable_unlock();
        return overrelease_error();
        // does not actually return
    }
    newisa.deallocating = true;
    if (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits)) goto retry;

    if (slowpath(sideTableLocked)) sidetable_unlock();

    __c11_atomic_thread_fence(__ATOMIC_ACQUIRE);

    if (performDealloc) {
        ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(dealloc));
    }
    return true;
}
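
// Worked example of the underflow borrow above (illustrative, again
// assuming RC_HALF == 128):
//
//   // extra_rc == 0 and a release arrives -> subc() borrows.
//   // If the side table holds 200 extra retains, up to RC_HALF (128)
//   // are withdrawn; extra_rc becomes 128 - 1 = 127 (the -1 applies
//   // the release that triggered the borrow), leaving 72 in the table.
//   // Totals match: old 0 + 200 == new 127 inline + 72 table + 1 spent.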

// Equivalent to [this autorelease], with shortcuts if there is no override
inline id
objc_object::autorelease()
{
    ASSERT(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        return rootAutorelease();
    }

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(autorelease));
}

// Base autorelease implementation, ignoring overrides.
inline id
objc_object::rootAutorelease()
{
    if (isTaggedPointer()) return (id)this;
    if (prepareOptimizedReturn(ReturnAtPlus1)) return (id)this;

    return rootAutorelease2();
}

inline uintptr_t
objc_object::rootRetainCount()
{
    if (isTaggedPointer()) return (uintptr_t)this;

    sidetable_lock();
    isa_t bits = LoadExclusive(&isa.bits);
    ClearExclusive(&isa.bits);
    if (bits.nonpointer) {
        uintptr_t rc = 1 + bits.extra_rc;
        if (bits.has_sidetable_rc) {
            rc += sidetable_getExtraRC_nolock();
        }
        sidetable_unlock();
        return rc;
    }

    sidetable_unlock();
    return sidetable_retainCount();
}
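
// In other words, for a nonpointer isa the visible retain count is
//
//   retainCount = 1 (implicit) + isa.extra_rc + side-table extra count
//
// e.g. extra_rc == 5 with 10 extra counts in the side table reports 16.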

// SUPPORT_NONPOINTER_ISA
#else
// not SUPPORT_NONPOINTER_ISA

inline Class
objc_object::ISA()
{
    ASSERT(!isTaggedPointer());
    return isa.cls;
}

inline Class
objc_object::rawISA()
{
    return ISA();
}

inline bool
objc_object::hasNonpointerIsa()
{
    return false;
}

inline void
objc_object::initIsa(Class cls)
{
    ASSERT(!isTaggedPointer());
    isa = (uintptr_t)cls;
}

inline void
objc_object::initClassIsa(Class cls)
{
    initIsa(cls);
}

inline void
objc_object::initProtocolIsa(Class cls)
{
    initIsa(cls);
}

inline void
objc_object::initInstanceIsa(Class cls, bool)
{
    initIsa(cls);
}

inline void
objc_object::initIsa(Class cls, bool, bool)
{
    initIsa(cls);
}

inline Class
objc_object::changeIsa(Class cls)
{
    // This is almost always true but there are
    // enough edge cases that we can't assert it.
    // assert(cls->isFuture()  ||
    //        cls->isInitializing()  ||  cls->isInitialized());

    ASSERT(!isTaggedPointer());

    isa_t oldisa, newisa;
    newisa.cls = cls;
    do {
        oldisa = LoadExclusive(&isa.bits);
    } while (!StoreExclusive(&isa.bits, oldisa.bits, newisa.bits));

    if (oldisa.cls && oldisa.cls->instancesHaveAssociatedObjects()) {
        cls->setInstancesHaveAssociatedObjects();
    }

    return oldisa.cls;
}

inline bool
objc_object::hasAssociatedObjects()
{
    return getIsa()->instancesHaveAssociatedObjects();
}

inline void
objc_object::setHasAssociatedObjects()
{
    getIsa()->setInstancesHaveAssociatedObjects();
}

inline bool
objc_object::isWeaklyReferenced()
{
    ASSERT(!isTaggedPointer());
    return sidetable_isWeaklyReferenced();
}

inline void
objc_object::setWeaklyReferenced_nolock()
{
    ASSERT(!isTaggedPointer());
    sidetable_setWeaklyReferenced_nolock();
}

inline bool
objc_object::hasCxxDtor()
{
    ASSERT(!isTaggedPointer());
    return isa.cls->hasCxxDtor();
}

inline bool
objc_object::rootIsDeallocating()
{
    if (isTaggedPointer()) return false;
    return sidetable_isDeallocating();
}

inline void
objc_object::clearDeallocating()
{
    sidetable_clearDeallocating();
}

inline void
objc_object::rootDealloc()
{
    if (isTaggedPointer()) return;
    object_dispose((id)this);
}

// Equivalent to calling [this retain], with shortcuts if there is no override
inline id
objc_object::retain()
{
    ASSERT(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        return sidetable_retain();
    }

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(retain));
}

// Base retain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super retain].
inline id
objc_object::rootRetain()
{
    if (isTaggedPointer()) return (id)this;
    return sidetable_retain();
}

// Equivalent to calling [this release], with shortcuts if there is no override
inline void
objc_object::release()
{
    ASSERT(!isTaggedPointer());

    if (fastpath(!ISA()->hasCustomRR())) {
        sidetable_release();
        return;
    }

    ((void(*)(objc_object *, SEL))objc_msgSend)(this, @selector(release));
}

// Base release implementation, ignoring overrides.
// Does not call -dealloc.
// Returns true if the object should now be deallocated.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super release].
inline bool
objc_object::rootRelease()
{
    if (isTaggedPointer()) return false;
    return sidetable_release(true);
}

inline bool
objc_object::rootReleaseShouldDealloc()
{
    if (isTaggedPointer()) return false;
    return sidetable_release(false);
}

// Equivalent to [this autorelease], with shortcuts if there is no override
inline id
objc_object::autorelease()
{
    if (isTaggedPointer()) return (id)this;
    if (fastpath(!ISA()->hasCustomRR())) return rootAutorelease();

    return ((id(*)(objc_object *, SEL))objc_msgSend)(this, @selector(autorelease));
}

// Base autorelease implementation, ignoring overrides.
inline id
objc_object::rootAutorelease()
{
    if (isTaggedPointer()) return (id)this;
    if (prepareOptimizedReturn(ReturnAtPlus1)) return (id)this;

    return rootAutorelease2();
}

// Base tryRetain implementation, ignoring overrides.
// This does not check isa.fast_rr; if there is an RR override then
// it was already called and it chose to call [super _tryRetain].
inline bool
objc_object::rootTryRetain()
{
    if (isTaggedPointer()) return true;
    return sidetable_tryRetain();
}

inline uintptr_t
objc_object::rootRetainCount()
{
    if (isTaggedPointer()) return (uintptr_t)this;
    return sidetable_retainCount();
}

// not SUPPORT_NONPOINTER_ISA
#endif

#if SUPPORT_RETURN_AUTORELEASE

/***********************************************************************
  Fast handling of return through Cocoa's +0 autoreleasing convention.
  The caller and callee cooperate to keep the returned object
  out of the autorelease pool and eliminate redundant retain/release pairs.

  An optimized callee looks at the caller's instructions following the
  return. If the caller's instructions are also optimized then the callee
  skips all retain count operations: no autorelease, no retain/autorelease.
  Instead it saves the result's current retain count (+0 or +1) in
  thread-local storage. If the caller does not look optimized then
  the callee performs autorelease or retain/autorelease as usual.

  An optimized caller looks at the thread-local storage. If the result
  is set then it performs any retain or release needed to change the
  result from the retain count left by the callee to the retain count
  desired by the caller. Otherwise the caller assumes the result is
  currently at +0 from an unoptimized callee and performs any retain
  needed for that case.

  There are two optimized callees:
    objc_autoreleaseReturnValue
      result is currently +1. The unoptimized path autoreleases it.
    objc_retainAutoreleaseReturnValue
      result is currently +0. The unoptimized path retains and autoreleases it.

  There are two optimized callers:
    objc_retainAutoreleasedReturnValue
      caller wants the value at +1. The unoptimized path retains it.
    objc_unsafeClaimAutoreleasedReturnValue
      caller wants the value at +0 unsafely. The unoptimized path does nothing.

  Example:

    Callee:
      // compute ret at +1
      return objc_autoreleaseReturnValue(ret);

    Caller:
      ret = callee();
      ret = objc_retainAutoreleasedReturnValue(ret);
      // use ret at +1 here

  Callee sees the optimized caller, sets TLS, and leaves the result at +1.
  Caller sees the TLS, clears it, and accepts the result at +1 as-is.

  The callee's recognition of the optimized caller is architecture-dependent.
  x86_64: Callee looks for `mov rax, rdi` followed by a call or
    jump instruction to objc_retainAutoreleasedReturnValue or
    objc_unsafeClaimAutoreleasedReturnValue.
  i386:  Callee looks for a magic nop `movl %ebp, %ebp` (frame pointer register)
  armv7: Callee looks for a magic nop `mov r7, r7` (frame pointer register).
  arm64: Callee looks for a magic nop `mov x29, x29` (frame pointer register).

  Tagged pointer objects do participate in the optimized return scheme,
  because it saves message sends. They are not entered in the autorelease
  pool in the unoptimized case.
**********************************************************************/

# if __x86_64__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void * const ra0)
{
    const uint8_t *ra1 = (const uint8_t *)ra0;
    const unaligned_uint16_t *ra2;
    const unaligned_uint32_t *ra4 = (const unaligned_uint32_t *)ra1;
    const void **sym;

#define PREFER_GOTPCREL 0
#if PREFER_GOTPCREL
    // 48 89 c7    movq  %rax,%rdi
    // ff 15       callq *symbol@GOTPCREL(%rip)
    if (*ra4 != 0xffc78948) {
        return false;
    }
    if (ra1[4] != 0x15) {
        return false;
    }
    ra1 += 3;
#else
    // 48 89 c7    movq  %rax,%rdi
    // e8          callq symbol
    if (*ra4 != 0xe8c78948) {
        return false;
    }
    ra1 += (long)*(const unaligned_int32_t *)(ra1 + 4) + 8l;
    ra2 = (const unaligned_uint16_t *)ra1;
    // ff 25       jmpq *symbol@DYLDMAGIC(%rip)
    if (*ra2 != 0x25ff) {
        return false;
    }
#endif
    ra1 += 6l + (long)*(const unaligned_int32_t *)(ra1 + 2);
    sym = (const void **)ra1;
    if (*sym != objc_retainAutoreleasedReturnValue &&
        *sym != objc_unsafeClaimAutoreleasedReturnValue)
    {
        return false;
    }

    return true;
}

// __x86_64__
# elif __arm__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    // if the low bit is set, we're returning to thumb mode
    if ((uintptr_t)ra & 1) {
        // 3f 46          mov r7, r7
        // we mask off the low bit via subtraction
        // 16-bit instructions are well-aligned
        if (*(uint16_t *)((uint8_t *)ra - 1) == 0x463f) {
            return true;
        }
    } else {
        // 07 70 a0 e1    mov r7, r7
        // 32-bit instructions may be only 16-bit aligned
        if (*(unaligned_uint32_t *)ra == 0xe1a07007) {
            return true;
        }
    }
    return false;
}

// __arm__
# elif __arm64__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    // fd 03 1d aa    mov fp, fp
    // arm64 instructions are well-aligned
    if (*(uint32_t *)ra == 0xaa1d03fd) {
        return true;
    }
    return false;
}

// __arm64__
# elif __i386__

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    // 89 ed    movl %ebp, %ebp
    if (*(unaligned_uint16_t *)ra == 0xed89) {
        return true;
    }
    return false;
}

// __i386__
# else

#warning unknown architecture

static ALWAYS_INLINE bool
callerAcceptsOptimizedReturn(const void *ra)
{
    return false;
}

// unknown architecture
# endif

static ALWAYS_INLINE ReturnDisposition
getReturnDisposition()
{
    return (ReturnDisposition)(uintptr_t)tls_get_direct(RETURN_DISPOSITION_KEY);
}

static ALWAYS_INLINE void
setReturnDisposition(ReturnDisposition disposition)
{
    tls_set_direct(RETURN_DISPOSITION_KEY, (void*)(uintptr_t)disposition);
}

// Try to prepare for optimized return with the given disposition (+0 or +1).
// Returns true if the optimized path is successful.
// Otherwise the return value must be retained and/or autoreleased as usual.
static ALWAYS_INLINE bool
prepareOptimizedReturn(ReturnDisposition disposition)
{
    ASSERT(getReturnDisposition() == ReturnAtPlus0);

    if (callerAcceptsOptimizedReturn(__builtin_return_address(0))) {
        if (disposition) setReturnDisposition(disposition);
        return true;
    }

    return false;
}

// Try to accept an optimized return.
// Returns the disposition of the returned object (+0 or +1).
// An un-optimized return is +0.
static ALWAYS_INLINE ReturnDisposition
acceptOptimizedReturn()
{
    ReturnDisposition disposition = getReturnDisposition();
    setReturnDisposition(ReturnAtPlus0);  // reset to the unoptimized state
    return disposition;
}
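
// Usage sketch (hedged; simplified from the runtime's NSObject.mm): the
// public entry points pair up with these helpers roughly as follows.
//
//   // Callee side: hand the result off at +1 if the caller cooperates.
//   // id objc_autoreleaseReturnValue(id obj)
//   // {
//   //     if (prepareOptimizedReturn(ReturnAtPlus1)) return obj;
//   //     return objc_autorelease(obj);
//   // }
//
//   // Caller side: accept the handoff, or retain an unoptimized +0 result.
//   // id objc_retainAutoreleasedReturnValue(id obj)
//   // {
//   //     if (acceptOptimizedReturn() == ReturnAtPlus1) return obj;
//   //     return objc_retain(obj);
//   // }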

// SUPPORT_RETURN_AUTORELEASE
#else
// not SUPPORT_RETURN_AUTORELEASE

static ALWAYS_INLINE bool
prepareOptimizedReturn(ReturnDisposition disposition __unused)
{
    return false;
}

static ALWAYS_INLINE ReturnDisposition
acceptOptimizedReturn()
{
    return ReturnAtPlus0;
}

// not SUPPORT_RETURN_AUTORELEASE
#endif

// _OBJC_OBJECT_H_
#endif