objc-msg-arm64.s

/*
 * @APPLE_LICENSE_HEADER_START@
 *
 * Copyright (c) 2011 Apple Inc. All Rights Reserved.
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/********************************************************************
 *
 *  objc-msg-arm64.s - ARM64 code to support objc messaging
 *
 ********************************************************************/

#ifdef __arm64__

#include <arm/arch.h>
#include "isa.h"
#include "arm64-asm.h"
#include "objc-config.h"

.data

// _objc_restartableRanges is used by the method dispatch
// caching code to figure out whether any threads are actively
// in the cache for dispatching. The labels surround the asm code
// that does cache lookups. The tables are zero-terminated.

.macro RestartableEntry
#if __LP64__
    .quad   LLookupStart$0
#else
    .long   LLookupStart$0
    .long   0
#endif
    .short  LLookupEnd$0 - LLookupStart$0
    .short  LLookupRecover$0 - LLookupStart$0
    .long   0
.endmacro

    .align 4
    .private_extern _objc_restartableRanges
_objc_restartableRanges:
    RestartableEntry _cache_getImp
    RestartableEntry _objc_msgSend
    RestartableEntry _objc_msgSendSuper
    RestartableEntry _objc_msgSendSuper2
    RestartableEntry _objc_msgLookup
    RestartableEntry _objc_msgLookupSuper2
    .fill   16, 1, 0
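
// Each RestartableEntry above is laid out to match the record shape the
// kernel's restartable-ranges interface consumes; roughly, as a C sketch
// (field names here are illustrative, not the kernel's declaration):
//
//     struct restartable_range {
//         uint64_t location;       // address of LLookupStart<name>
//         uint16_t length;         // LLookupEnd<name>     - LLookupStart<name>
//         uint16_t recovery_offs;  // LLookupRecover<name> - LLookupStart<name>
//         uint32_t flags;          // currently always 0
//     };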

/* objc_super parameter to sendSuper */
#define RECEIVER         0
#define CLASS            __SIZEOF_POINTER__
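
// These offsets index into the objc_super argument handed to the sendSuper
// entry points; in C it is the familiar two-pointer struct, roughly:
//
//     struct objc_super {
//         id receiver;          // offset RECEIVER (0)
//         Class super_class;    // offset CLASS (one pointer in)
//     };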

/* Selected field offsets in class structure */
#define SUPERCLASS       __SIZEOF_POINTER__
#define CACHE            (2 * __SIZEOF_POINTER__)

/* Selected field offsets in method structure */
#define METHOD_NAME      0
#define METHOD_TYPES     __SIZEOF_POINTER__
#define METHOD_IMP       (2 * __SIZEOF_POINTER__)

#define BUCKET_SIZE      (2 * __SIZEOF_POINTER__)
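
// BUCKET_SIZE matches the arm64 cache bucket layout implied by the
// "{imp, sel} = *bucket" loads in CacheLookup below; roughly, as a sketch
// (not the runtime's exact declaration):
//
//     struct bucket { uintptr_t imp; SEL sel; };   // 2 * sizeof(void *)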

/********************************************************************
 * GetClassFromIsa_p16 src
 * src is a raw isa field. Sets p16 to the corresponding class pointer.
 * The raw isa might be an indexed isa to be decoded, or a
 * packed isa that needs to be masked.
 *
 * On exit:
 *   $0 is unchanged
 *   p16 is a class pointer
 *   x10 is clobbered
 ********************************************************************/

#if SUPPORT_INDEXED_ISA
    .align 3
    .globl _objc_indexed_classes
_objc_indexed_classes:
    .fill ISA_INDEX_COUNT, PTRSIZE, 0
#endif

.macro GetClassFromIsa_p16 /* src */

#if SUPPORT_INDEXED_ISA
    // Indexed isa
    mov     p16, $0                         // optimistically set dst = src
    tbz     p16, #ISA_INDEX_IS_NPI_BIT, 1f  // done if not non-pointer isa
    // isa in p16 is indexed
    adrp    x10, _objc_indexed_classes@PAGE
    add     x10, x10, _objc_indexed_classes@PAGEOFF
    ubfx    p16, p16, #ISA_INDEX_SHIFT, #ISA_INDEX_BITS  // extract index
    ldr     p16, [x10, p16, UXTP #PTRSHIFT] // load class from array
1:

#elif __LP64__
    // 64-bit packed isa
    and     p16, $0, #ISA_MASK

#else
    // 32-bit raw isa
    mov     p16, $0

#endif

.endmacro
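
// In C terms the macro computes roughly the following (a sketch; the real
// bit positions and masks come from isa.h):
//
//     Class GetClassFromIsa(uintptr_t isa) {
//     #if SUPPORT_INDEXED_ISA
//         if (isa & (1UL << ISA_INDEX_IS_NPI_BIT)) {          // non-pointer isa
//             uintptr_t idx = (isa >> ISA_INDEX_SHIFT) & ((1UL << ISA_INDEX_BITS) - 1);
//             return _objc_indexed_classes[idx];
//         }
//         return (Class)isa;                                   // plain pointer isa
//     #elif __LP64__
//         return (Class)(isa & ISA_MASK);                      // packed isa
//     #else
//         return (Class)isa;                                   // raw isa
//     #endif
//     }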

/********************************************************************
 * ENTRY functionName
 * STATIC_ENTRY functionName
 * END_ENTRY functionName
 ********************************************************************/

.macro ENTRY /* name */
    .text
    .align 5
    .globl    $0
$0:
.endmacro

.macro STATIC_ENTRY /* name */
    .text
    .align 5
    .private_extern $0
$0:
.endmacro

.macro END_ENTRY /* name */
LExit$0:
.endmacro


/********************************************************************
 * UNWIND name, flags
 * Unwind info generation
 ********************************************************************/
.macro UNWIND
    .section __LD,__compact_unwind,regular,debug
    PTR $0
    .set  LUnwind$0, LExit$0 - $0
    .long LUnwind$0
    .long $1
    PTR 0     /* no personality */
    PTR 0     /* no LSDA */
    .text
.endmacro

#define NoFrame 0x02000000          // no frame, no SP adjustment
#define FrameWithNoSaves 0x04000000 // frame, no non-volatile saves


/********************************************************************
 *
 * CacheLookup NORMAL|GETIMP|LOOKUP <function>
 *
 * Locate the implementation for a selector in a class's method cache.
 *
 * When this is used in a function that doesn't hold the runtime lock,
 * this represents the critical section that may access dead memory.
 * If the kernel causes one of these functions to go down the recovery
 * path, we pretend the lookup failed by jumping to the JumpMiss branch.
 *
 * Takes:
 *     x1 = selector
 *     x16 = class to be searched
 *
 * Kills:
 *     x9, x10, x11, x12, x17
 *
 * On exit: (found)     calls or returns IMP
 *                      with x16 = class, x17 = IMP
 *          (not found) jumps to LCacheMiss
 *
 ********************************************************************/

#define NORMAL 0
#define GETIMP 1
#define LOOKUP 2

// CacheHit: x17 = cached IMP, x12 = address of cached IMP, x1 = SEL, x16 = isa
.macro CacheHit
.if $0 == NORMAL
    TailCallCachedImp x17, x12, x1, x16     // authenticate and call imp
.elseif $0 == GETIMP
    mov p0, p17
    cbz p0, 9f                              // don't ptrauth a nil imp
    AuthAndResignAsIMP x0, x12, x1, x16     // authenticate imp and re-sign as IMP
9:  ret                                     // return IMP
.elseif $0 == LOOKUP
    // No nil check for ptrauth: the caller would crash anyway when they
    // jump to a nil IMP. We don't care if that jump also fails ptrauth.
    AuthAndResignAsIMP x17, x12, x1, x16    // authenticate imp and re-sign as IMP
    ret                                     // return imp via x17
.else
.abort oops
.endif
.endmacro

.macro CheckMiss
    // miss if bucket->sel == 0
.if $0 == GETIMP
    cbz p9, LGetImpMiss
.elseif $0 == NORMAL
    cbz p9, __objc_msgSend_uncached
.elseif $0 == LOOKUP
    cbz p9, __objc_msgLookup_uncached
.else
.abort oops
.endif
.endmacro

.macro JumpMiss
.if $0 == GETIMP
    b   LGetImpMiss
.elseif $0 == NORMAL
    b   __objc_msgSend_uncached
.elseif $0 == LOOKUP
    b   __objc_msgLookup_uncached
.else
.abort oops
.endif
.endmacro

.macro CacheLookup
    //
    // Restart protocol:
    //
    //   As soon as we're past the LLookupStart$1 label we may have loaded
    //   an invalid cache pointer or mask.
    //
    //   When task_restartable_ranges_synchronize() is called
    //   (or when a signal hits us) before we're past LLookupEnd$1,
    //   then our PC will be reset to LLookupRecover$1, which forcefully
    //   jumps to the cache-miss codepath, which has the following
    //   requirements:
    //
    //   GETIMP:
    //     The cache-miss is just returning NULL (setting x0 to 0)
    //
    //   NORMAL and LOOKUP:
    //   - x0 contains the receiver
    //   - x1 contains the selector
    //   - x16 contains the isa
    //   - other registers are set as per calling conventions
    //
LLookupStart$1:

    // p1 = SEL, p16 = isa
    ldr p11, [x16, #CACHE]                  // p11 = mask|buckets

#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
    and p10, p11, #0x0000ffffffffffff       // p10 = buckets
    and p12, p1, p11, LSR #48               // x12 = _cmd & mask
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
    and p10, p11, #~0xf                     // p10 = buckets
    and p11, p11, #0xf                      // p11 = maskShift
    mov p12, #0xffff
    lsr p11, p12, p11                       // p11 = mask = 0xffff >> p11
    and p12, p1, p11                        // x12 = _cmd & mask
#else
#error Unsupported cache mask storage for ARM64.
#endif

    add p12, p10, p12, LSL #(1+PTRSHIFT)
                                            // p12 = buckets + ((_cmd & mask) << (1+PTRSHIFT))

    ldp p17, p9, [x12]                      // {imp, sel} = *bucket
1:  cmp p9, p1                              // if (bucket->sel != _cmd)
    b.ne    2f                              //     scan more
    CacheHit $0                             // call or return imp

2:  // not hit: p12 = not-hit bucket
    CheckMiss $0                            // miss if bucket->sel == 0
    cmp p12, p10                            // wrap if bucket == buckets
    b.eq    3f
    ldp p17, p9, [x12, #-BUCKET_SIZE]!      // {imp, sel} = *--bucket
    b   1b                                  // loop

3:  // wrap: p12 = first bucket, w11 = mask
#if CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_HIGH_16
    add p12, p12, p11, LSR #(48 - (1+PTRSHIFT))
                                            // p12 = buckets + (mask << 1+PTRSHIFT)
#elif CACHE_MASK_STORAGE == CACHE_MASK_STORAGE_LOW_4
    add p12, p12, p11, LSL #(1+PTRSHIFT)
                                            // p12 = buckets + (mask << 1+PTRSHIFT)
#else
#error Unsupported cache mask storage for ARM64.
#endif

    // Clone scanning loop to miss instead of hang when cache is corrupt.
    // The slow path may detect any corruption and halt later.

    ldp p17, p9, [x12]                      // {imp, sel} = *bucket
1:  cmp p9, p1                              // if (bucket->sel != _cmd)
    b.ne    2f                              //     scan more
    CacheHit $0                             // call or return imp

2:  // not hit: p12 = not-hit bucket
    CheckMiss $0                            // miss if bucket->sel == 0
    cmp p12, p10                            // wrap if bucket == buckets
    b.eq    3f
    ldp p17, p9, [x12, #-BUCKET_SIZE]!      // {imp, sel} = *--bucket
    b   1b                                  // loop

LLookupEnd$1:
LLookupRecover$1:
3:  // double wrap
    JumpMiss $0

.endmacro
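
// In C pseudocode the scan above is roughly the following (a sketch; the
// mask extraction differs per CACHE_MASK_STORAGE configuration, and "bucket"
// is the {imp, sel} pair sketched near BUCKET_SIZE):
//
//     struct bucket *b = buckets + (sel & mask);   // start probe
//     for (int tries = 0; tries < 2; tries++) {
//         for (;;) {
//             if (b->sel == sel) return cache_hit(b->imp);  // CacheHit
//             if (b->sel == 0)   return cache_miss();       // CheckMiss
//             if (b == buckets)  break;                     // reached first bucket
//             b--;                                          // *--bucket
//         }
//         b = buckets + mask;      // wrap: resume scan from the last bucket
//     }
//     return cache_miss();         // double wrap: JumpMiss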

/********************************************************************
 *
 * id objc_msgSend(id self, SEL _cmd, ...);
 * IMP objc_msgLookup(id self, SEL _cmd, ...);
 *
 * objc_msgLookup ABI:
 * IMP returned in x17
 * x16 reserved for our use but not used
 *
 ********************************************************************/

#if SUPPORT_TAGGED_POINTERS
    .data
    .align 3
    .globl _objc_debug_taggedpointer_classes
_objc_debug_taggedpointer_classes:
    .fill 16, 8, 0
    .globl _objc_debug_taggedpointer_ext_classes
_objc_debug_taggedpointer_ext_classes:
    .fill 256, 8, 0
#endif

    ENTRY _objc_msgSend
    UNWIND _objc_msgSend, NoFrame

    cmp p0, #0                  // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
    b.le    LNilOrTagged        //  (MSB tagged pointer looks negative)
#else
    b.eq    LReturnZero
#endif
    ldr p13, [x0]               // p13 = isa
    GetClassFromIsa_p16 p13     // p16 = class
LGetIsaDone:
    // calls imp or objc_msgSend_uncached
    CacheLookup NORMAL, _objc_msgSend

#if SUPPORT_TAGGED_POINTERS
LNilOrTagged:
    b.eq    LReturnZero         // nil check

    // tagged
    adrp    x10, _objc_debug_taggedpointer_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
    ubfx    x11, x0, #60, #4
    ldr     x16, [x10, x11, LSL #3]
    adrp    x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE
    add     x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF
    cmp     x10, x16
    b.ne    LGetIsaDone

    // ext tagged
    adrp    x10, _objc_debug_taggedpointer_ext_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF
    ubfx    x11, x0, #52, #8
    ldr     x16, [x10, x11, LSL #3]
    b       LGetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LReturnZero:
    // x0 is already zero
    mov     x1, #0
    movi    d0, #0
    movi    d1, #0
    movi    d2, #0
    movi    d3, #0
    ret

    END_ENTRY _objc_msgSend
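
// For tagged-pointer receivers, the LNilOrTagged path above picks a class
// roughly like this (a pseudocode sketch of the assembly, using the tables
// defined above; "unrecognizedCls" stands for __NSUnrecognizedTaggedPointer):
//
//     uintptr_t bits = (uintptr_t)receiver;
//     Class cls = _objc_debug_taggedpointer_classes[(bits >> 60) & 0xf];      // primary tag
//     if (cls == unrecognizedCls) {
//         cls = _objc_debug_taggedpointer_ext_classes[(bits >> 52) & 0xff];   // extended tag
//     }
//     // continue at LGetIsaDone with cls in x16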

    ENTRY _objc_msgLookup
    UNWIND _objc_msgLookup, NoFrame

    cmp p0, #0                  // nil check and tagged pointer check
#if SUPPORT_TAGGED_POINTERS
    b.le    LLookup_NilOrTagged //  (MSB tagged pointer looks negative)
#else
    b.eq    LLookup_Nil
#endif
    ldr p13, [x0]               // p13 = isa
    GetClassFromIsa_p16 p13     // p16 = class
LLookup_GetIsaDone:
    // returns imp
    CacheLookup LOOKUP, _objc_msgLookup

#if SUPPORT_TAGGED_POINTERS
LLookup_NilOrTagged:
    b.eq    LLookup_Nil         // nil check

    // tagged
    adrp    x10, _objc_debug_taggedpointer_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_classes@PAGEOFF
    ubfx    x11, x0, #60, #4
    ldr     x16, [x10, x11, LSL #3]
    adrp    x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGE
    add     x10, x10, _OBJC_CLASS_$___NSUnrecognizedTaggedPointer@PAGEOFF
    cmp     x10, x16
    b.ne    LLookup_GetIsaDone

LLookup_ExtTag:
    adrp    x10, _objc_debug_taggedpointer_ext_classes@PAGE
    add     x10, x10, _objc_debug_taggedpointer_ext_classes@PAGEOFF
    ubfx    x11, x0, #52, #8
    ldr     x16, [x10, x11, LSL #3]
    b       LLookup_GetIsaDone
// SUPPORT_TAGGED_POINTERS
#endif

LLookup_Nil:
    adrp    x17, __objc_msgNil@PAGE
    add     x17, x17, __objc_msgNil@PAGEOFF
    ret

    END_ENTRY _objc_msgLookup


    STATIC_ENTRY __objc_msgNil

    // x0 is already zero
    mov     x1, #0
    movi    d0, #0
    movi    d1, #0
    movi    d2, #0
    movi    d3, #0
    ret

    END_ENTRY __objc_msgNil


    ENTRY _objc_msgSendSuper
    UNWIND _objc_msgSendSuper, NoFrame

    ldp p0, p16, [x0]           // p0 = real receiver, p16 = class
    // calls imp or objc_msgSend_uncached
    CacheLookup NORMAL, _objc_msgSendSuper

    END_ENTRY _objc_msgSendSuper

    // no _objc_msgLookupSuper

    ENTRY _objc_msgSendSuper2
    UNWIND _objc_msgSendSuper2, NoFrame

    ldp p0, p16, [x0]           // p0 = real receiver, p16 = class
    ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass
    CacheLookup NORMAL, _objc_msgSendSuper2

    END_ENTRY _objc_msgSendSuper2


    ENTRY _objc_msgLookupSuper2
    UNWIND _objc_msgLookupSuper2, NoFrame

    ldp p0, p16, [x0]           // p0 = real receiver, p16 = class
    ldr p16, [x16, #SUPERCLASS] // p16 = class->superclass
    CacheLookup LOOKUP, _objc_msgLookupSuper2

    END_ENTRY _objc_msgLookupSuper2


.macro MethodTableLookup

    // push frame
    SignLR
    stp fp, lr, [sp, #-16]!
    mov fp, sp

    // save parameter registers: x0..x8, q0..q7
    sub sp, sp, #(10*8 + 8*16)
    stp q0, q1, [sp, #(0*16)]
    stp q2, q3, [sp, #(2*16)]
    stp q4, q5, [sp, #(4*16)]
    stp q6, q7, [sp, #(6*16)]
    stp x0, x1, [sp, #(8*16+0*8)]
    stp x2, x3, [sp, #(8*16+2*8)]
    stp x4, x5, [sp, #(8*16+4*8)]
    stp x6, x7, [sp, #(8*16+6*8)]
    str x8,     [sp, #(8*16+8*8)]

    // lookUpImpOrForward(obj, sel, cls, LOOKUP_INITIALIZE | LOOKUP_RESOLVER)
    // receiver and selector already in x0 and x1
    mov x2, x16
    mov x3, #3
    bl  _lookUpImpOrForward

    // IMP in x0
    mov x17, x0

    // restore registers and return
    ldp q0, q1, [sp, #(0*16)]
    ldp q2, q3, [sp, #(2*16)]
    ldp q4, q5, [sp, #(4*16)]
    ldp q6, q7, [sp, #(6*16)]
    ldp x0, x1, [sp, #(8*16+0*8)]
    ldp x2, x3, [sp, #(8*16+2*8)]
    ldp x4, x5, [sp, #(8*16+4*8)]
    ldp x6, x7, [sp, #(8*16+6*8)]
    ldr x8,     [sp, #(8*16+8*8)]

    mov sp, fp
    ldp fp, lr, [sp], #16
    AuthenticateLR

.endmacro
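
// Stack math for the spill area above: 8 q-registers * 16 bytes = 128, plus
// x0..x8 (9 * 8 = 72 bytes) rounded up to 10 * 8 = 80 so sp stays 16-byte
// aligned, i.e. 128 + 80 = 208 bytes. In C terms the macro amounts to roughly
// (a sketch; the flag value 3 is LOOKUP_INITIALIZE | LOOKUP_RESOLVER as noted
// in the comment above):
//
//     IMP imp = lookUpImpOrForward(receiver /* x0 */, sel /* x1 */,
//                                  cls /* x16 */, 3 /* behavior flags */);
//     // result handed back in x17, with the argument registers restored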

    STATIC_ENTRY __objc_msgSend_uncached
    UNWIND __objc_msgSend_uncached, FrameWithNoSaves

    // THIS IS NOT A CALLABLE C FUNCTION
    // Out-of-band p16 is the class to search

    MethodTableLookup
    TailCallFunctionPointer x17

    END_ENTRY __objc_msgSend_uncached


    STATIC_ENTRY __objc_msgLookup_uncached
    UNWIND __objc_msgLookup_uncached, FrameWithNoSaves

    // THIS IS NOT A CALLABLE C FUNCTION
    // Out-of-band p16 is the class to search

    MethodTableLookup
    ret

    END_ENTRY __objc_msgLookup_uncached


    STATIC_ENTRY _cache_getImp

    GetClassFromIsa_p16 p0
    CacheLookup GETIMP, _cache_getImp

LGetImpMiss:
    mov p0, #0
    ret

    END_ENTRY _cache_getImp


/********************************************************************
 *
 * id _objc_msgForward(id self, SEL _cmd,...);
 *
 * _objc_msgForward is the externally-callable
 *   function returned by things like method_getImplementation().
 * _objc_msgForward_impcache is the function pointer actually stored in
 *   method caches.
 *
 ********************************************************************/

    STATIC_ENTRY __objc_msgForward_impcache

    // No stret specialization.
    b   __objc_msgForward

    END_ENTRY __objc_msgForward_impcache


    ENTRY __objc_msgForward

    adrp    x17, __objc_forward_handler@PAGE
    ldr     p17, [x17, __objc_forward_handler@PAGEOFF]
    TailCallFunctionPointer x17

    END_ENTRY __objc_msgForward


    ENTRY _objc_msgSend_noarg
    b   _objc_msgSend
    END_ENTRY _objc_msgSend_noarg

    ENTRY _objc_msgSend_debug
    b   _objc_msgSend
    END_ENTRY _objc_msgSend_debug

    ENTRY _objc_msgSendSuper2_debug
    b   _objc_msgSendSuper2
    END_ENTRY _objc_msgSendSuper2_debug


    ENTRY _method_invoke
    // x1 is method triplet instead of SEL
    add p16, p1, #METHOD_IMP
    ldr p17, [x16]
    ldr p1, [x1, #METHOD_NAME]
    TailCallMethodListImp x17, x16
    END_ENTRY _method_invoke

#endif