objc-runtime-new.mm

/*
 * Copyright (c) 2005-2009 Apple Inc. All Rights Reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/***********************************************************************
* objc-runtime-new.mm
* Support for new-ABI classes and images.
**********************************************************************/

#if __OBJC2__

#include "DenseMapExtras.h"
#include "objc-private.h"
#include "objc-runtime-new.h"
#include "objc-file.h"
#include "objc-cache.h"
#include <Block.h>
#include <objc/message.h>
#include <mach/shared_region.h>

#define newprotocol(p) ((protocol_t *)p)

static void disableTaggedPointers();
static void detach_class(Class cls, bool isMeta);
static void free_class(Class cls);
static IMP addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace);
static void adjustCustomFlagsForMethodChange(Class cls, method_t *meth);
static method_t *search_method_list(const method_list_t *mlist, SEL sel);
static bool method_lists_contains_any(method_list_t **mlists, method_list_t **end,
                                      SEL sels[], size_t selcount);
static void flushCaches(Class cls);
static void initializeTaggedPointerObfuscator(void);
#if SUPPORT_FIXUP
static void fixupMessageRef(message_ref_t *msg);
#endif
static Class realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock);
static Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized);

struct locstamped_category_t {
    category_t *cat;
    struct header_info *hi;
};
enum {
    ATTACH_CLASS               = 1 << 0,
    ATTACH_METACLASS           = 1 << 1,
    ATTACH_CLASS_AND_METACLASS = 1 << 2,
    ATTACH_EXISTING            = 1 << 3,
};
static void attachCategories(Class cls, const struct locstamped_category_t *cats_list, uint32_t cats_count, int flags);

/***********************************************************************
* Lock management
**********************************************************************/
mutex_t runtimeLock;
mutex_t selLock;
#if CONFIG_USE_CACHE_LOCK
mutex_t cacheUpdateLock;
#endif
recursive_mutex_t loadMethodLock;

/***********************************************************************
* Class structure decoding
**********************************************************************/
const uintptr_t objc_debug_class_rw_data_mask = FAST_DATA_MASK;

/***********************************************************************
* Non-pointer isa decoding
**********************************************************************/
#if SUPPORT_INDEXED_ISA

// Indexed non-pointer isa.

// These are used to mask the ISA and see whether it has an index or not.
const uintptr_t objc_debug_indexed_isa_magic_mask  = ISA_INDEX_MAGIC_MASK;
const uintptr_t objc_debug_indexed_isa_magic_value = ISA_INDEX_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_INDEX_MASK & ISA_INDEX_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_INDEX_MAGIC_MASK & ISA_INDEX_MAGIC_VALUE) == 0);

// Then these are used to extract the index from the ISA.
const uintptr_t objc_debug_indexed_isa_index_mask  = ISA_INDEX_MASK;
const uintptr_t objc_debug_indexed_isa_index_shift = ISA_INDEX_SHIFT;

asm("\n .globl _objc_absolute_indexed_isa_magic_mask" \
    "\n _objc_absolute_indexed_isa_magic_mask = " STRINGIFY2(ISA_INDEX_MAGIC_MASK));
asm("\n .globl _objc_absolute_indexed_isa_magic_value" \
    "\n _objc_absolute_indexed_isa_magic_value = " STRINGIFY2(ISA_INDEX_MAGIC_VALUE));
asm("\n .globl _objc_absolute_indexed_isa_index_mask" \
    "\n _objc_absolute_indexed_isa_index_mask = " STRINGIFY2(ISA_INDEX_MASK));
asm("\n .globl _objc_absolute_indexed_isa_index_shift" \
    "\n _objc_absolute_indexed_isa_index_shift = " STRINGIFY2(ISA_INDEX_SHIFT));

// And then we can use that index to get the class from this array. Note
// the size is provided so that clients can ensure the index they get is in
// bounds and not read off the end of the array.
// Defined in the objc-msg-*.s files
// const Class objc_indexed_classes[]

// When we don't have enough bits to store a class*, we can instead store an
// index into this array. Classes are added here when they are realized.
// Note, an index of 0 is illegal.
uintptr_t objc_indexed_classes_count = 0;
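
// Illustrative sketch (not part of the original source): given a raw indexed
// isa value `isaBits` read from an object, the class can be recovered with
// the exported constants above, roughly like this:
//
//     if ((isaBits & objc_debug_indexed_isa_magic_mask) ==
//         objc_debug_indexed_isa_magic_value)
//     {
//         uintptr_t idx = (isaBits & objc_debug_indexed_isa_index_mask)
//                         >> objc_debug_indexed_isa_index_shift;
//         // idx must be nonzero and less than objc_indexed_classes_count
//         Class cls = objc_indexed_classes[idx];
//     }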

// SUPPORT_INDEXED_ISA
#else
// not SUPPORT_INDEXED_ISA

// These variables exist but are all set to 0 so that they are ignored.
const uintptr_t objc_debug_indexed_isa_magic_mask  = 0;
const uintptr_t objc_debug_indexed_isa_magic_value = 0;
const uintptr_t objc_debug_indexed_isa_index_mask  = 0;
const uintptr_t objc_debug_indexed_isa_index_shift = 0;
Class objc_indexed_classes[1] = { nil };
uintptr_t objc_indexed_classes_count = 0;

// not SUPPORT_INDEXED_ISA
#endif

#if SUPPORT_PACKED_ISA

// Packed non-pointer isa.

asm("\n .globl _objc_absolute_packed_isa_class_mask" \
    "\n _objc_absolute_packed_isa_class_mask = " STRINGIFY2(ISA_MASK));

const uintptr_t objc_debug_isa_class_mask  = ISA_MASK;
const uintptr_t objc_debug_isa_magic_mask  = ISA_MAGIC_MASK;
const uintptr_t objc_debug_isa_magic_value = ISA_MAGIC_VALUE;

// die if masks overlap
STATIC_ASSERT((ISA_MASK & ISA_MAGIC_MASK) == 0);

// die if magic is wrong
STATIC_ASSERT((~ISA_MAGIC_MASK & ISA_MAGIC_VALUE) == 0);

// die if virtual address space bound goes up
STATIC_ASSERT((~ISA_MASK & MACH_VM_MAX_ADDRESS) == 0  ||
              ISA_MASK + sizeof(void*) == MACH_VM_MAX_ADDRESS);
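
// Illustrative sketch (not part of the original source): debuggers and tools
// use objc_debug_isa_class_mask to turn a packed isa into a class pointer,
// roughly:
//
//     uintptr_t isaBits = *(uintptr_t *)obj;   // first word of the object
//     Class cls;
//     if ((isaBits & objc_debug_isa_magic_mask) == objc_debug_isa_magic_value) {
//         cls = (Class)(isaBits & objc_debug_isa_class_mask);  // packed isa
//     } else {
//         cls = (Class)isaBits;                                // raw isa pointer
//     }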

// SUPPORT_PACKED_ISA
#else
// not SUPPORT_PACKED_ISA

// These variables exist but enforce pointer alignment only.
const uintptr_t objc_debug_isa_class_mask  = (~WORD_MASK);
const uintptr_t objc_debug_isa_magic_mask  = WORD_MASK;
const uintptr_t objc_debug_isa_magic_value = 0;

// not SUPPORT_PACKED_ISA
#endif

/***********************************************************************
* Swift marker bits
**********************************************************************/
const uintptr_t objc_debug_swift_stable_abi_bit = FAST_IS_SWIFT_STABLE;

/***********************************************************************
* allocatedClasses
* A table of all classes (and metaclasses) which have been allocated
* with objc_allocateClassPair.
**********************************************************************/
namespace objc {
static ExplicitInitDenseSet<Class> allocatedClasses;
}

/***********************************************************************
* _firstRealizedClass
* The root of all realized classes
**********************************************************************/
static Class _firstRealizedClass = nil;

/*
  The low two bits of mlist->entsize are used as the fixed-up marker.
    PREOPTIMIZED VERSION:
      Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted).
      (Protocol method lists are not sorted because of their extra parallel data)
      Runtime fixed-up method lists get 3.
    UN-PREOPTIMIZED VERSION:
      Method lists from shared cache are 1 (uniqued) or 3 (uniqued and sorted)
      Shared cache's sorting and uniquing are not trusted, but do affect the
      location of the selector name string.
      Runtime fixed-up method lists get 2.

  The high two bits of protocol->flags are used as the fixed-up marker.
    PREOPTIMIZED VERSION:
      Protocols from shared cache are 1<<30.
      Runtime fixed-up protocols get 1<<30.
    UN-PREOPTIMIZED VERSION:
      Protocols from shared cache are 1<<30.
      Shared cache's fixups are not trusted.
      Runtime fixed-up protocols get 3<<30.
*/

static uint32_t fixed_up_method_list = 3;
static uint32_t uniqued_method_list = 1;
static uint32_t fixed_up_protocol = PROTOCOL_FIXED_UP_1;
static uint32_t canonical_protocol = PROTOCOL_IS_CANONICAL;

void
disableSharedCacheOptimizations(void)
{
    fixed_up_method_list = 2;
    // It is safe to set uniqued_method_list to 0 because we'll never
    // consult it unless the method list was already in need of being
    // fixed up.
    uniqued_method_list = 0;
    fixed_up_protocol = PROTOCOL_FIXED_UP_1 | PROTOCOL_FIXED_UP_2;
    // It's safe to just set canonical_protocol to 0 because we'll never
    // call clearIsCanonical() unless isCanonical() returned true, which
    // can't happen with a 0 mask.
    canonical_protocol = 0;
}

bool method_list_t::isUniqued() const {
    return (flags() & uniqued_method_list) != 0;
}

bool method_list_t::isFixedUp() const {
    return flags() == fixed_up_method_list;
}

void method_list_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    entsizeAndFlags = entsize() | fixed_up_method_list;
}

bool protocol_t::isFixedUp() const {
    return (flags & PROTOCOL_FIXED_UP_MASK) == fixed_up_protocol;
}

void protocol_t::setFixedUp() {
    runtimeLock.assertLocked();
    ASSERT(!isFixedUp());
    flags = (flags & ~PROTOCOL_FIXED_UP_MASK) | fixed_up_protocol;
}

bool protocol_t::isCanonical() const {
    return (flags & canonical_protocol) != 0;
}

void protocol_t::clearIsCanonical() {
    runtimeLock.assertLocked();
    ASSERT(isCanonical());
    flags = flags & ~canonical_protocol;
}

method_list_t **method_array_t::endCategoryMethodLists(Class cls)
{
    method_list_t **mlists = beginLists();
    method_list_t **mlistsEnd = endLists();

    if (mlists == mlistsEnd  ||  !cls->data()->ro->baseMethods())
    {
        // No methods, or no base methods.
        // Everything here is a category method.
        return mlistsEnd;
    }

    // Have base methods. Category methods are
    // everything except the last method list.
    return mlistsEnd - 1;
}

static const char *sel_cname(SEL sel)
{
    return (const char *)(void *)sel;
}

static size_t protocol_list_size(const protocol_list_t *plist)
{
    return sizeof(protocol_list_t) + plist->count * sizeof(protocol_t *);
}

static void try_free(const void *p)
{
    if (p && malloc_size(p)) free((void *)p);
}

using ClassCopyFixupHandler = void (*)(Class _Nonnull oldClass,
                                       Class _Nonnull newClass);
// Normally there's only one handler registered.
static GlobalSmallVector<ClassCopyFixupHandler, 1> classCopyFixupHandlers;

void _objc_setClassCopyFixupHandler(void (* _Nonnull newFixupHandler)
    (Class _Nonnull oldClass, Class _Nonnull newClass)) {
    mutex_locker_t lock(runtimeLock);
    classCopyFixupHandlers.append(newFixupHandler);
}
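
// Illustrative usage sketch (not part of the original source): a client of
// this SPI registers a plain function that is then called for each Swift
// subclass created by alloc_class_for_subclass() below. The handler name
// here is hypothetical.
//
//     static void fixUpCopiedClass(Class _Nonnull oldClass, Class _Nonnull newClass) {
//         // migrate any per-class side data keyed by oldClass to newClass
//     }
//     ...
//     _objc_setClassCopyFixupHandler(fixUpCopiedClass);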

static Class
alloc_class_for_subclass(Class supercls, size_t extraBytes)
{
    if (!supercls  ||  !supercls->isAnySwift()) {
        return _calloc_class(sizeof(objc_class) + extraBytes);
    }

    // Superclass is a Swift class. New subclass must duplicate its extra bits.

    // Allocate the new class, with space for super's prefix and suffix
    // and self's extraBytes.
    swift_class_t *swiftSupercls = (swift_class_t *)supercls;
    size_t superSize = swiftSupercls->classSize;
    void *superBits = swiftSupercls->baseAddress();
    void *bits = malloc(superSize + extraBytes);

    // Copy all of the superclass's data to the new class.
    memcpy(bits, superBits, superSize);

    // Erase the objc data and the Swift description in the new class.
    swift_class_t *swcls = (swift_class_t *)
        ((uint8_t *)bits + swiftSupercls->classAddressOffset);
    bzero(swcls, sizeof(objc_class));
    swcls->description = nil;

    for (auto handler : classCopyFixupHandlers) {
        handler(supercls, (Class)swcls);
    }

    // Mark this class as Swift-enhanced.
    if (supercls->isSwiftStable()) {
        swcls->bits.setIsSwiftStable();
    }
    if (supercls->isSwiftLegacy()) {
        swcls->bits.setIsSwiftLegacy();
    }

    return (Class)swcls;
}
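
// Illustrative layout sketch (not part of the original source) of the block
// returned above when the superclass is a Swift class:
//
//     | Swift prefix (copied) | objc_class (zeroed) | Swift suffix (copied) | extraBytes |
//     ^ baseAddress           ^ classAddressOffset                          ^ classSize
//
// The returned Class points at the embedded objc_class, not at the start of
// the allocation.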

/***********************************************************************
* object_getIndexedIvars.
**********************************************************************/
void *object_getIndexedIvars(id obj)
{
    uint8_t *base = (uint8_t *)obj;

    if (!obj) return nil;
    if (obj->isTaggedPointer()) return nil;

    if (!obj->isClass()) return base + obj->ISA()->alignedInstanceSize();

    Class cls = (Class)obj;
    if (!cls->isAnySwift()) return base + sizeof(objc_class);

    swift_class_t *swcls = (swift_class_t *)cls;
    return base - swcls->classAddressOffset + word_align(swcls->classSize);
}
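
// Illustrative usage sketch (not part of the original source): the bytes
// returned above are the extra bytes requested when the object or class
// pair was created, e.g.:
//
//     // instance with 16 spare bytes after its ivars
//     id obj = class_createInstance([NSObject class], 16);
//     void *extra = object_getIndexedIvars(obj);
//
//     // class pair with 32 spare bytes after the class structure
//     Class cls = objc_allocateClassPair([NSObject class], "MyClass", 32);
//     void *clsExtra = object_getIndexedIvars((id)cls);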

/***********************************************************************
* make_ro_writeable
* Reallocates rw->ro if necessary to make it writeable.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static class_ro_t *make_ro_writeable(class_rw_t *rw)
{
    runtimeLock.assertLocked();

    if (rw->flags & RW_COPIED_RO) {
        // already writeable, do nothing
    } else {
        rw->ro = rw->ro->duplicate();
        rw->flags |= RW_COPIED_RO;
    }
    return (class_ro_t *)rw->ro;
}

/***********************************************************************
* dataSegmentsContain
* Returns true if the given address lies within a data segment in any
* loaded image.
**********************************************************************/
NEVER_INLINE
static bool
dataSegmentsContain(Class cls)
{
    uint32_t index;
    if (objc::dataSegmentsRanges.find((uintptr_t)cls, index)) {
        // if the class is realized (hence has a class_rw_t),
        // memorize where we found the range
        if (cls->isRealized()) {
            cls->data()->witness = (uint16_t)index;
        }
        return true;
    }
    return false;
}

/***********************************************************************
* isKnownClass
* Return true if the class is known to the runtime (located within the
* shared cache, within the data segment of a loaded image, or has been
* allocated with objc_allocateClassPair).
*
* The result of this operation is cached on the class in a "witness"
* value that is cheaply checked in the fastpath.
**********************************************************************/
ALWAYS_INLINE
static bool
isKnownClass(Class cls)
{
    if (fastpath(objc::dataSegmentsRanges.contains(cls->data()->witness, (uintptr_t)cls))) {
        return true;
    }
    auto &set = objc::allocatedClasses.get();
    return set.find(cls) != set.end() || dataSegmentsContain(cls);
}

/***********************************************************************
* addClassTableEntry
* Add a class to the table of all classes. If addMeta is true,
* automatically adds the metaclass of the class as well.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static void
addClassTableEntry(Class cls, bool addMeta = true)
{
    runtimeLock.assertLocked();

    // This class is allowed to be a known class via the shared cache or via
    // data segments, but it is not allowed to be in the dynamic table already.
    auto &set = objc::allocatedClasses.get();

    ASSERT(set.find(cls) == set.end());

    if (!isKnownClass(cls))
        set.insert(cls);
    if (addMeta)
        addClassTableEntry(cls->ISA(), false);
}

/***********************************************************************
* checkIsKnownClass
* Checks the given class against the list of all known classes. Dies
* with a fatal error if the class is not known.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
ALWAYS_INLINE
static void
checkIsKnownClass(Class cls)
{
    if (slowpath(!isKnownClass(cls))) {
        _objc_fatal("Attempt to use unknown class %p.", cls);
    }
}

/***********************************************************************
* classNSObject
* Returns class NSObject.
* Locking: none
**********************************************************************/
static Class classNSObject(void)
{
    extern objc_class OBJC_CLASS_$_NSObject;
    return (Class)&OBJC_CLASS_$_NSObject;
}

static Class metaclassNSObject(void)
{
    extern objc_class OBJC_METACLASS_$_NSObject;
    return (Class)&OBJC_METACLASS_$_NSObject;
}

/***********************************************************************
* printReplacements
* Implementation of PrintReplacedMethods / OBJC_PRINT_REPLACED_METHODS.
* Warn about methods from cats that override other methods in cats or cls.
* Assumes no methods from cats have been added to cls yet.
**********************************************************************/
__attribute__((cold, noinline))
static void
printReplacements(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count)
{
    uint32_t c;
    bool isMeta = cls->isMetaClass();

    // Newest categories are LAST in cats
    // Later categories override earlier ones.
    for (c = 0; c < cats_count; c++) {
        category_t *cat = cats_list[c].cat;

        method_list_t *mlist = cat->methodsForMeta(isMeta);
        if (!mlist) continue;

        for (const auto& meth : *mlist) {
            SEL s = sel_registerName(sel_cname(meth.name));

            // Search for replaced methods in method lookup order.
            // Complain about the first duplicate only.

            // Look for method in earlier categories
            for (uint32_t c2 = 0; c2 < c; c2++) {
                category_t *cat2 = cats_list[c2].cat;

                const method_list_t *mlist2 = cat2->methodsForMeta(isMeta);
                if (!mlist2) continue;

                for (const auto& meth2 : *mlist2) {
                    SEL s2 = sel_registerName(sel_cname(meth2.name));
                    if (s == s2) {
                        logReplacedMethod(cls->nameForLogging(), s,
                                          cls->isMetaClass(), cat->name,
                                          meth2.imp, meth.imp);
                        goto complained;
                    }
                }
            }

            // Look for method in cls
            for (const auto& meth2 : cls->data()->methods) {
                SEL s2 = sel_registerName(sel_cname(meth2.name));
                if (s == s2) {
                    logReplacedMethod(cls->nameForLogging(), s,
                                      cls->isMetaClass(), cat->name,
                                      meth2.imp, meth.imp);
                    goto complained;
                }
            }

        complained:
            ;
        }
    }
}
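
// Illustrative sketch (not part of the original source): the warning above is
// only emitted when the corresponding option is set in the environment, e.g.
// launching a process with
//
//     OBJC_PRINT_REPLACED_METHODS=YES
//
// and loading two categories that both implement the same selector, for
// example (hypothetical selector):
//
//     @implementation NSObject (CatA)
//     - (NSString *)myDescription { return @"A"; }
//     @end
//
//     @implementation NSObject (CatB)
//     - (NSString *)myDescription { return @"B"; }   // replaces CatA's copy
//     @end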

/***********************************************************************
* unreasonableClassCount
* Provides an upper bound for any iteration of classes,
* to prevent spins when runtime metadata is corrupted.
**********************************************************************/
static unsigned unreasonableClassCount()
{
    runtimeLock.assertLocked();

    int base = NXCountMapTable(gdb_objc_realized_classes) +
        getPreoptimizedClassUnreasonableCount();

    // Provide lots of slack here. Some iterations touch metaclasses too.
    // Some iterations backtrack (like realized class iteration).
    // We don't need an efficient bound, merely one that prevents spins.
    return (base + 1) * 16;
}
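
// Illustrative worked example (not part of the original source): with, say,
// 1,000 realized classes and no preoptimized classes, the bound is
// (1000 + 1) * 16 = 16,016 iterations before the walk is declared corrupt.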

/***********************************************************************
* Class enumerators
* The passed in block returns `false` if subclasses can be skipped
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static inline void
foreach_realized_class_and_subclass_2(Class top, unsigned &count,
                                      bool skip_metaclass,
                                      bool (^code)(Class) __attribute((noescape)))
{
    Class cls = top;

    runtimeLock.assertLocked();
    ASSERT(top);

    while (1) {
        if (--count == 0) {
            _objc_fatal("Memory corruption in class list.");
        }

        bool skip_subclasses;

        if (skip_metaclass && cls->isMetaClass()) {
            skip_subclasses = true;
        } else {
            skip_subclasses = !code(cls);
        }

        if (!skip_subclasses && cls->data()->firstSubclass) {
            cls = cls->data()->firstSubclass;
        } else {
            while (!cls->data()->nextSiblingClass  &&  cls != top) {
                cls = cls->superclass;
                if (--count == 0) {
                    _objc_fatal("Memory corruption in class list.");
                }
            }
            if (cls == top) break;
            cls = cls->data()->nextSiblingClass;
        }
    }
}

// Enumerates a class and all of its realized subclasses.
static void
foreach_realized_class_and_subclass(Class top, bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    foreach_realized_class_and_subclass_2(top, count, false, code);
}

// Enumerates all realized classes and metaclasses.
static void
foreach_realized_class_and_metaclass(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, false, code);
    }
}

// Enumerates all realized classes (ignoring metaclasses).
static void
foreach_realized_class(bool (^code)(Class) __attribute((noescape)))
{
    unsigned int count = unreasonableClassCount();

    for (Class top = _firstRealizedClass;
         top != nil;
         top = top->data()->nextSiblingClass)
    {
        foreach_realized_class_and_subclass_2(top, count, true, code);
    }
}
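
// Illustrative usage sketch (not part of the original source): callers pass
// a noescape block and return false to prune a subtree, e.g. visiting every
// realized class and metaclass while holding runtimeLock:
//
//     foreach_realized_class_and_metaclass(^(Class c) {
//         doSomethingWithClass(c);   // hypothetical per-class action
//         return true;               // keep walking into subclasses
//     });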

/***********************************************************************
* Method Scanners / Optimization tracking
* Implementation of scanning for various implementations of methods.
**********************************************************************/

namespace objc {

enum SelectorBundle {
    AWZ,
    RR,
    Core,
};

namespace scanner {

// The current state of NSObject swizzling for every scanner
//
// It allows for cheap checks of global swizzles, and also remembers
// IMP swizzling that happens before NSObject has been initialized,
// which setInitialized() would otherwise miss.
//
// Every pair of bits describes a SelectorBundle.
// even bits: is NSObject class swizzled for this bundle
// odd bits:  is NSObject meta class swizzled for this bundle
static uintptr_t NSObjectSwizzledMask;

static ALWAYS_INLINE uintptr_t
swizzlingBit(SelectorBundle bundle, bool isMeta)
{
    return 1UL << (2 * bundle + isMeta);
}
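
// Illustrative sketch (not part of the original source): with the enum above,
// AWZ occupies bits 0-1, RR bits 2-3, and Core bits 4-5. For example,
// swizzlingBit(RR, /*isMeta*/true) is 1UL << (2*1 + 1) == 0x8, meaning
// "the NSObject metaclass has had an RR selector swizzled".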

static void __attribute__((cold, noinline))
printCustom(Class cls, SelectorBundle bundle, bool inherited)
{
    static char const * const SelectorBundleName[] = {
        [AWZ]  = "CUSTOM AWZ",
        [RR]   = "CUSTOM RR",
        [Core] = "CUSTOM Core",
    };

    _objc_inform("%s: %s%s%s", SelectorBundleName[bundle],
                 cls->nameForLogging(),
                 cls->isMetaClass() ? " (meta)" : "",
                 inherited ? " (inherited)" : "");
}

enum class Scope { Instances, Classes, Both };

template <typename Traits, SelectorBundle Bundle, bool &ShouldPrint, Scope Domain = Scope::Both>
class Mixin {

    // work around compiler being broken with templates using Class/objc_class,
    // probably some weird confusion with Class being builtin
    ALWAYS_INLINE static objc_class *as_objc_class(Class cls) {
        return (objc_class *)cls;
    }

    static void
    setCustomRecursively(Class cls, bool inherited = false)
    {
        foreach_realized_class_and_subclass(cls, [=](Class c){
            if (c != cls && !as_objc_class(c)->isInitialized()) {
                // Subclass not yet initialized. Wait for setInitialized() to do it
                return false;
            }
            if (Traits::isCustom(c)) {
                return false;
            }
            Traits::setCustom(c);
            if (ShouldPrint) {
                printCustom(cls, Bundle, inherited || c != cls);
            }
            return true;
        });
    }

    static bool
    isNSObjectSwizzled(bool isMeta)
    {
        return NSObjectSwizzledMask & swizzlingBit(Bundle, isMeta);
    }

    static void
    setNSObjectSwizzled(Class NSOClass, bool isMeta)
    {
        NSObjectSwizzledMask |= swizzlingBit(Bundle, isMeta);
        if (as_objc_class(NSOClass)->isInitialized()) {
            setCustomRecursively(NSOClass);
        }
    }

    static void
    scanChangedMethodForUnknownClass(const method_t *meth)
    {
        Class cls;

        cls = classNSObject();
        if (Domain != Scope::Classes && !isNSObjectSwizzled(NO)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, NO);
                    break;
                }
            }
        }

        cls = metaclassNSObject();
        if (Domain != Scope::Instances && !isNSObjectSwizzled(YES)) {
            for (const auto &meth2: as_objc_class(cls)->data()->methods) {
                if (meth == &meth2) {
                    setNSObjectSwizzled(cls, YES);
                    break;
                }
            }
        }
    }
  644. static void
  645. scanAddedClassImpl(Class cls, bool isMeta)
  646. {
  647. Class NSOClass = (isMeta ? metaclassNSObject() : classNSObject());
  648. bool setCustom = NO, inherited = NO;
  649. if (isNSObjectSwizzled(isMeta)) {
  650. setCustom = YES;
  651. } else if (cls == NSOClass) {
  652. // NSObject is default but we need to check categories
  653. auto &methods = as_objc_class(cls)->data()->methods;
  654. setCustom = Traits::scanMethodLists(methods.beginCategoryMethodLists(),
  655. methods.endCategoryMethodLists(cls));
  656. } else if (!isMeta && !as_objc_class(cls)->superclass) {
  657. // Custom Root class
  658. setCustom = YES;
  659. } else if (Traits::isCustom(as_objc_class(cls)->superclass)) {
  660. // Superclass is custom, therefore we are too.
  661. setCustom = YES;
  662. inherited = YES;
  663. } else {
  664. // Not NSObject.
  665. auto &methods = as_objc_class(cls)->data()->methods;
  666. setCustom = Traits::scanMethodLists(methods.beginLists(),
  667. methods.endLists());
  668. }
  669. if (slowpath(setCustom)) {
  670. if (ShouldPrint) printCustom(cls, Bundle, inherited);
  671. } else {
  672. Traits::setDefault(cls);
  673. }
  674. }
  675. public:
  676. // Scan a class that is about to be marked Initialized for particular
  677. // bundles of selectors, and mark the class and its children
  678. // accordingly.
  679. //
  680. // This also handles inheriting properties from its superclass.
  681. //
  682. // Caller: objc_class::setInitialized()
  683. static void
  684. scanInitializedClass(Class cls, Class metacls)
  685. {
  686. if (Domain != Scope::Classes) {
  687. scanAddedClassImpl(cls, false);
  688. }
  689. if (Domain != Scope::Instances) {
  690. scanAddedClassImpl(metacls, true);
  691. }
  692. }
  693. // Inherit various properties from the superclass when a class
  694. // is being added to the graph.
  695. //
  696. // Caller: addSubclass()
  697. static void
  698. scanAddedSubClass(Class subcls, Class supercls)
  699. {
  700. if (slowpath(Traits::isCustom(supercls) && !Traits::isCustom(subcls))) {
  701. setCustomRecursively(subcls, true);
  702. }
  703. }
  704. // Scan Method lists for selectors that would override things
  705. // in a Bundle.
  706. //
707. // This is used to detect when categories that override problematic selectors
708. // are injected into a class after it has been initialized.
  709. //
  710. // Caller: prepareMethodLists()
  711. static void
  712. scanAddedMethodLists(Class cls, method_list_t **mlists, int count)
  713. {
  714. if (slowpath(Traits::isCustom(cls))) {
  715. return;
  716. }
  717. if (slowpath(Traits::scanMethodLists(mlists, mlists + count))) {
  718. setCustomRecursively(cls);
  719. }
  720. }
721. // Handle IMP Swizzling (the IMP for an existing method being changed).
  722. //
  723. // In almost all cases, IMP swizzling does not affect custom bits.
  724. // Custom search will already find the method whether or not
  725. // it is swizzled, so it does not transition from non-custom to custom.
  726. //
  727. // The only cases where IMP swizzling can affect the custom bits is
  728. // if the swizzled method is one of the methods that is assumed to be
  729. // non-custom. These special cases are listed in setInitialized().
  730. // We look for such cases here.
  731. //
  732. // Caller: Swizzling methods via adjustCustomFlagsForMethodChange()
  733. static void
  734. scanChangedMethod(Class cls, const method_t *meth)
  735. {
  736. if (fastpath(!Traits::isInterestingSelector(meth->name))) {
  737. return;
  738. }
  739. if (cls) {
  740. bool isMeta = as_objc_class(cls)->isMetaClass();
  741. if (isMeta && Domain != Scope::Instances) {
  742. if (cls == metaclassNSObject() && !isNSObjectSwizzled(isMeta)) {
  743. setNSObjectSwizzled(cls, isMeta);
  744. }
  745. }
  746. if (!isMeta && Domain != Scope::Classes) {
  747. if (cls == classNSObject() && !isNSObjectSwizzled(isMeta)) {
  748. setNSObjectSwizzled(cls, isMeta);
  749. }
  750. }
  751. } else {
752. // We're called from method_exchangeImplementations; only the NSObject
753. // class and metaclass may be problematic (exchanging the default
754. // builtin IMP of an interesting selector is a swizzling that may
755. // flip our scanned property; for other classes, the previous
756. // value had already flipped the property).
  757. //
  758. // However, as we don't know the class, we need to scan all of
  759. // NSObject class and metaclass methods (this is SLOW).
  760. scanChangedMethodForUnknownClass(meth);
  761. }
  762. }
  763. };
  764. } // namespace scanner
  765. // AWZ methods: +alloc / +allocWithZone:
  766. struct AWZScanner : scanner::Mixin<AWZScanner, AWZ, PrintCustomAWZ, scanner::Scope::Classes> {
  767. static bool isCustom(Class cls) {
  768. return cls->hasCustomAWZ();
  769. }
  770. static void setCustom(Class cls) {
  771. cls->setHasCustomAWZ();
  772. }
  773. static void setDefault(Class cls) {
  774. cls->setHasDefaultAWZ();
  775. }
  776. static bool isInterestingSelector(SEL sel) {
  777. return sel == @selector(alloc) || sel == @selector(allocWithZone:);
  778. }
  779. static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
  780. SEL sels[2] = { @selector(alloc), @selector(allocWithZone:), };
  781. return method_lists_contains_any(mlists, end, sels, 2);
  782. }
  783. };
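// Illustrative sketch (hypothetical class, not part of this file): a class
// like the one below is what AWZScanner flags. +allocWithZone: shows up in
// its (meta)class method lists, so the class is marked custom-AWZ and the
// runtime's inlined alloc fast path falls back to a real message send for it.
#if 0
@interface MyTrackedObject : NSObject
@end

@implementation MyTrackedObject
// Overriding either +alloc or +allocWithZone: is enough to count as custom.
+ (id)allocWithZone:(struct _NSZone *)zone {
    return [super allocWithZone:zone];
}
@end
#endif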
  784. // Retain/Release methods that are extremely rarely overridden
  785. //
  786. // retain/release/autorelease/retainCount/
  787. // _tryRetain/_isDeallocating/retainWeakReference/allowsWeakReference
  788. struct RRScanner : scanner::Mixin<RRScanner, RR, PrintCustomRR
  789. #if !SUPPORT_NONPOINTER_ISA
  790. , scanner::Scope::Instances
  791. #endif
  792. > {
  793. static bool isCustom(Class cls) {
  794. return cls->hasCustomRR();
  795. }
  796. static void setCustom(Class cls) {
  797. cls->setHasCustomRR();
  798. }
  799. static void setDefault(Class cls) {
  800. cls->setHasDefaultRR();
  801. }
  802. static bool isInterestingSelector(SEL sel) {
  803. return sel == @selector(retain) ||
  804. sel == @selector(release) ||
  805. sel == @selector(autorelease) ||
  806. sel == @selector(_tryRetain) ||
  807. sel == @selector(_isDeallocating) ||
  808. sel == @selector(retainCount) ||
  809. sel == @selector(allowsWeakReference) ||
  810. sel == @selector(retainWeakReference);
  811. }
  812. static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
  813. SEL sels[8] = {
  814. @selector(retain),
  815. @selector(release),
  816. @selector(autorelease),
  817. @selector(_tryRetain),
  818. @selector(_isDeallocating),
  819. @selector(retainCount),
  820. @selector(allowsWeakReference),
  821. @selector(retainWeakReference),
  822. };
  823. return method_lists_contains_any(mlists, end, sels, 8);
  824. }
  825. };
  826. // Core NSObject methods that are extremely rarely overridden
  827. //
828. // +new, ±class, ±self, ±isKindOfClass:, ±respondsToSelector:
  829. struct CoreScanner : scanner::Mixin<CoreScanner, Core, PrintCustomCore> {
  830. static bool isCustom(Class cls) {
  831. return cls->hasCustomCore();
  832. }
  833. static void setCustom(Class cls) {
  834. cls->setHasCustomCore();
  835. }
  836. static void setDefault(Class cls) {
  837. cls->setHasDefaultCore();
  838. }
  839. static bool isInterestingSelector(SEL sel) {
  840. return sel == @selector(new) ||
  841. sel == @selector(self) ||
  842. sel == @selector(class) ||
  843. sel == @selector(isKindOfClass:) ||
  844. sel == @selector(respondsToSelector:);
  845. }
  846. static bool scanMethodLists(method_list_t **mlists, method_list_t **end) {
  847. SEL sels[5] = {
  848. @selector(new),
  849. @selector(self),
  850. @selector(class),
  851. @selector(isKindOfClass:),
  852. @selector(respondsToSelector:)
  853. };
  854. return method_lists_contains_any(mlists, end, sels, 5);
  855. }
  856. };
  857. class category_list : nocopy_t {
  858. union {
  859. locstamped_category_t lc;
  860. struct {
  861. locstamped_category_t *array;
  862. // this aliases with locstamped_category_t::hi
  863. // which is an aliased pointer
  864. uint32_t is_array : 1;
  865. uint32_t count : 31;
  866. uint32_t size : 32;
  867. };
  868. } _u;
  869. public:
  870. category_list() : _u{{nullptr, nullptr}} { }
  871. category_list(locstamped_category_t lc) : _u{{lc}} { }
  872. category_list(category_list &&other) : category_list() {
  873. std::swap(_u, other._u);
  874. }
  875. ~category_list()
  876. {
  877. if (_u.is_array) {
  878. free(_u.array);
  879. }
  880. }
  881. uint32_t count() const
  882. {
  883. if (_u.is_array) return _u.count;
  884. return _u.lc.cat ? 1 : 0;
  885. }
  886. uint32_t arrayByteSize(uint32_t size) const
  887. {
  888. return sizeof(locstamped_category_t) * size;
  889. }
  890. const locstamped_category_t *array() const
  891. {
  892. return _u.is_array ? _u.array : &_u.lc;
  893. }
  894. void append(locstamped_category_t lc)
  895. {
  896. if (_u.is_array) {
  897. if (_u.count == _u.size) {
  898. // Have a typical malloc growth:
  899. // - size <= 8: grow by 2
  900. // - size <= 16: grow by 4
  901. // - size <= 32: grow by 8
  902. // ... etc
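// e.g. starting from 2 the sizes go 2 -> 4 -> 6 -> 8 -> 12 -> 16 -> 24
// -> 32 -> 48: fls(8) == 4 so the step at 8 is 1 << 2 == 4, the step
// at 16 and 24 is 8, and the step at 32 is 16.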
  903. _u.size += _u.size < 8 ? 2 : 1 << (fls(_u.size) - 2);
  904. _u.array = (locstamped_category_t *)reallocf(_u.array, arrayByteSize(_u.size));
  905. }
  906. _u.array[_u.count++] = lc;
  907. } else if (_u.lc.cat == NULL) {
  908. _u.lc = lc;
  909. } else {
  910. locstamped_category_t *arr = (locstamped_category_t *)malloc(arrayByteSize(2));
  911. arr[0] = _u.lc;
  912. arr[1] = lc;
  913. _u.array = arr;
  914. _u.is_array = true;
  915. _u.count = 2;
  916. _u.size = 2;
  917. }
  918. }
  919. void erase(category_t *cat)
  920. {
  921. if (_u.is_array) {
  922. for (int i = 0; i < _u.count; i++) {
  923. if (_u.array[i].cat == cat) {
  924. // shift entries to preserve list order
  925. memmove(&_u.array[i], &_u.array[i+1], arrayByteSize(_u.count - i - 1));
  926. return;
  927. }
  928. }
  929. } else if (_u.lc.cat == cat) {
  930. _u.lc.cat = NULL;
  931. _u.lc.hi = NULL;
  932. }
  933. }
  934. };
  935. class UnattachedCategories : public ExplicitInitDenseMap<Class, category_list>
  936. {
  937. public:
  938. void addForClass(locstamped_category_t lc, Class cls)
  939. {
  940. runtimeLock.assertLocked();
  941. if (slowpath(PrintConnecting)) {
  942. _objc_inform("CLASS: found category %c%s(%s)",
  943. cls->isMetaClass() ? '+' : '-',
  944. cls->nameForLogging(), lc.cat->name);
  945. }
  946. auto result = get().try_emplace(cls, lc);
  947. if (!result.second) {
  948. result.first->second.append(lc);
  949. }
  950. }
  951. void attachToClass(Class cls, Class previously, int flags)
  952. {
  953. runtimeLock.assertLocked();
  954. ASSERT((flags & ATTACH_CLASS) ||
  955. (flags & ATTACH_METACLASS) ||
  956. (flags & ATTACH_CLASS_AND_METACLASS));
  957. auto &map = get();
  958. auto it = map.find(previously);
  959. if (it != map.end()) {
  960. category_list &list = it->second;
  961. if (flags & ATTACH_CLASS_AND_METACLASS) {
  962. int otherFlags = flags & ~ATTACH_CLASS_AND_METACLASS;
  963. attachCategories(cls, list.array(), list.count(), otherFlags | ATTACH_CLASS);
  964. attachCategories(cls->ISA(), list.array(), list.count(), otherFlags | ATTACH_METACLASS);
  965. } else {
  966. attachCategories(cls, list.array(), list.count(), flags);
  967. }
  968. map.erase(it);
  969. }
  970. }
  971. void eraseCategoryForClass(category_t *cat, Class cls)
  972. {
  973. runtimeLock.assertLocked();
  974. auto &map = get();
  975. auto it = map.find(cls);
  976. if (it != map.end()) {
  977. category_list &list = it->second;
  978. list.erase(cat);
  979. if (list.count() == 0) {
  980. map.erase(it);
  981. }
  982. }
  983. }
  984. void eraseClass(Class cls)
  985. {
  986. runtimeLock.assertLocked();
  987. get().erase(cls);
  988. }
  989. };
  990. static UnattachedCategories unattachedCategories;
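// Rough lifecycle of the map above (a sketch of how it is used elsewhere in
// this file): when a category is loaded before its class has been realized,
// addForClass() queues the (category, header) pair under that class; when
// the class is methodized, attachToClass() flushes the queued categories
// into the class and/or metaclass and erases the entry.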
  991. } // namespace objc
  992. static bool isBundleClass(Class cls)
  993. {
  994. return cls->data()->ro->flags & RO_FROM_BUNDLE;
  995. }
  996. static void
  997. fixupMethodList(method_list_t *mlist, bool bundleCopy, bool sort)
  998. {
  999. runtimeLock.assertLocked();
  1000. ASSERT(!mlist->isFixedUp());
  1001. // fixme lock less in attachMethodLists ?
  1002. // dyld3 may have already uniqued, but not sorted, the list
  1003. if (!mlist->isUniqued()) {
  1004. mutex_locker_t lock(selLock);
  1005. // Unique selectors in list.
  1006. for (auto& meth : *mlist) {
  1007. const char *name = sel_cname(meth.name);
  1008. meth.name = sel_registerNameNoLock(name, bundleCopy);
  1009. }
  1010. }
  1011. // Sort by selector address.
  1012. if (sort) {
  1013. method_t::SortBySELAddress sorter;
  1014. std::stable_sort(mlist->begin(), mlist->end(), sorter);
  1015. }
  1016. // Mark method list as uniqued and sorted
  1017. mlist->setFixedUp();
  1018. }
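// Note: uniquing makes selector comparison a simple pointer compare, and
// sorting by selector address is what lets method lookup binary-search a
// fixed-up list instead of scanning it linearly.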
  1019. static void
  1020. prepareMethodLists(Class cls, method_list_t **addedLists, int addedCount,
  1021. bool baseMethods, bool methodsFromBundle)
  1022. {
  1023. runtimeLock.assertLocked();
  1024. if (addedCount == 0) return;
  1025. // There exist RR/AWZ/Core special cases for some class's base methods.
  1026. // But this code should never need to scan base methods for RR/AWZ/Core:
  1027. // default RR/AWZ/Core cannot be set before setInitialized().
  1028. // Therefore we need not handle any special cases here.
  1029. if (baseMethods) {
  1030. ASSERT(cls->hasCustomAWZ() && cls->hasCustomRR() && cls->hasCustomCore());
  1031. }
  1032. // Add method lists to array.
  1033. // Reallocate un-fixed method lists.
  1034. // The new methods are PREPENDED to the method list array.
  1035. for (int i = 0; i < addedCount; i++) {
  1036. method_list_t *mlist = addedLists[i];
  1037. ASSERT(mlist);
  1038. // Fixup selectors if necessary
  1039. if (!mlist->isFixedUp()) {
  1040. fixupMethodList(mlist, methodsFromBundle, true/*sort*/);
  1041. }
  1042. }
  1043. // If the class is initialized, then scan for method implementations
  1044. // tracked by the class's flags. If it's not initialized yet,
  1045. // then objc_class::setInitialized() will take care of it.
  1046. if (cls->isInitialized()) {
  1047. objc::AWZScanner::scanAddedMethodLists(cls, addedLists, addedCount);
  1048. objc::RRScanner::scanAddedMethodLists(cls, addedLists, addedCount);
  1049. objc::CoreScanner::scanAddedMethodLists(cls, addedLists, addedCount);
  1050. }
  1051. }
  1052. // Attach method lists and properties and protocols from categories to a class.
  1053. // Assumes the categories in cats are all loaded and sorted by load order,
  1054. // oldest categories first.
  1055. static void
  1056. attachCategories(Class cls, const locstamped_category_t *cats_list, uint32_t cats_count,
  1057. int flags)
  1058. {
  1059. if (slowpath(PrintReplacedMethods)) {
  1060. printReplacements(cls, cats_list, cats_count);
  1061. }
  1062. if (slowpath(PrintConnecting)) {
  1063. _objc_inform("CLASS: attaching %d categories to%s class '%s'%s",
  1064. cats_count, (flags & ATTACH_EXISTING) ? " existing" : "",
  1065. cls->nameForLogging(), (flags & ATTACH_METACLASS) ? " (meta)" : "");
  1066. }
  1067. /*
  1068. * Only a few classes have more than 64 categories during launch.
  1069. * This uses a little stack, and avoids malloc.
  1070. *
  1071. * Categories must be added in the proper order, which is back
  1072. * to front. To do that with the chunking, we iterate cats_list
  1073. * from front to back, build up the local buffers backwards,
  1074. * and call attachLists on the chunks. attachLists prepends the
  1075. * lists, so the final result is in the expected order.
  1076. */
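// Worked example (sketch): with ATTACH_BUFSIZ == 4 and categories C1, C2, C3
// in load order (oldest first), the buffer is filled from the back as
// [ -, C3, C2, C1 ] and that chunk is prepended by attachLists(), so the
// newest category's lists end up first and win method lookup.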
  1077. constexpr uint32_t ATTACH_BUFSIZ = 64;
  1078. method_list_t *mlists[ATTACH_BUFSIZ];
  1079. property_list_t *proplists[ATTACH_BUFSIZ];
  1080. protocol_list_t *protolists[ATTACH_BUFSIZ];
  1081. uint32_t mcount = 0;
  1082. uint32_t propcount = 0;
  1083. uint32_t protocount = 0;
  1084. bool fromBundle = NO;
  1085. bool isMeta = (flags & ATTACH_METACLASS);
  1086. auto rw = cls->data();
  1087. for (uint32_t i = 0; i < cats_count; i++) {
  1088. auto& entry = cats_list[i];
  1089. method_list_t *mlist = entry.cat->methodsForMeta(isMeta);
  1090. if (mlist) {
  1091. if (mcount == ATTACH_BUFSIZ) {
  1092. prepareMethodLists(cls, mlists, mcount, NO, fromBundle);
  1093. rw->methods.attachLists(mlists, mcount);
  1094. mcount = 0;
  1095. }
  1096. mlists[ATTACH_BUFSIZ - ++mcount] = mlist;
  1097. fromBundle |= entry.hi->isBundle();
  1098. }
  1099. property_list_t *proplist =
  1100. entry.cat->propertiesForMeta(isMeta, entry.hi);
  1101. if (proplist) {
  1102. if (propcount == ATTACH_BUFSIZ) {
  1103. rw->properties.attachLists(proplists, propcount);
  1104. propcount = 0;
  1105. }
  1106. proplists[ATTACH_BUFSIZ - ++propcount] = proplist;
  1107. }
  1108. protocol_list_t *protolist = entry.cat->protocolsForMeta(isMeta);
  1109. if (protolist) {
  1110. if (protocount == ATTACH_BUFSIZ) {
  1111. rw->protocols.attachLists(protolists, protocount);
  1112. protocount = 0;
  1113. }
  1114. protolists[ATTACH_BUFSIZ - ++protocount] = protolist;
  1115. }
  1116. }
  1117. if (mcount > 0) {
  1118. prepareMethodLists(cls, mlists + ATTACH_BUFSIZ - mcount, mcount, NO, fromBundle);
  1119. rw->methods.attachLists(mlists + ATTACH_BUFSIZ - mcount, mcount);
  1120. if (flags & ATTACH_EXISTING) flushCaches(cls);
  1121. }
  1122. rw->properties.attachLists(proplists + ATTACH_BUFSIZ - propcount, propcount);
  1123. rw->protocols.attachLists(protolists + ATTACH_BUFSIZ - protocount, protocount);
  1124. }
  1125. /***********************************************************************
  1126. * methodizeClass
  1127. * Fixes up cls's method list, protocol list, and property list.
  1128. * Attaches any outstanding categories.
  1129. * Locking: runtimeLock must be held by the caller
  1130. **********************************************************************/
  1131. static void methodizeClass(Class cls, Class previously)
  1132. {
  1133. runtimeLock.assertLocked();
  1134. bool isMeta = cls->isMetaClass();
  1135. auto rw = cls->data();
  1136. auto ro = rw->ro;
  1137. // Methodizing for the first time
  1138. if (PrintConnecting) {
  1139. _objc_inform("CLASS: methodizing class '%s' %s",
  1140. cls->nameForLogging(), isMeta ? "(meta)" : "");
  1141. }
  1142. // Install methods and properties that the class implements itself.
  1143. method_list_t *list = ro->baseMethods();
  1144. if (list) {
  1145. prepareMethodLists(cls, &list, 1, YES, isBundleClass(cls));
  1146. rw->methods.attachLists(&list, 1);
  1147. }
  1148. property_list_t *proplist = ro->baseProperties;
  1149. if (proplist) {
  1150. rw->properties.attachLists(&proplist, 1);
  1151. }
  1152. protocol_list_t *protolist = ro->baseProtocols;
  1153. if (protolist) {
  1154. rw->protocols.attachLists(&protolist, 1);
  1155. }
  1156. // Root classes get bonus method implementations if they don't have
  1157. // them already. These apply before category replacements.
  1158. if (cls->isRootMetaclass()) {
  1159. // root metaclass
  1160. addMethod(cls, @selector(initialize), (IMP)&objc_noop_imp, "", NO);
  1161. }
  1162. // Attach categories.
  1163. if (previously) {
  1164. if (isMeta) {
  1165. objc::unattachedCategories.attachToClass(cls, previously,
  1166. ATTACH_METACLASS);
  1167. } else {
  1168. // When a class relocates, categories with class methods
  1169. // may be registered on the class itself rather than on
  1170. // the metaclass. Tell attachToClass to look for those.
  1171. objc::unattachedCategories.attachToClass(cls, previously,
  1172. ATTACH_CLASS_AND_METACLASS);
  1173. }
  1174. }
  1175. objc::unattachedCategories.attachToClass(cls, cls,
  1176. isMeta ? ATTACH_METACLASS : ATTACH_CLASS);
  1177. #if DEBUG
  1178. // Debug: sanity-check all SELs; log method list contents
  1179. for (const auto& meth : rw->methods) {
  1180. if (PrintConnecting) {
  1181. _objc_inform("METHOD %c[%s %s]", isMeta ? '+' : '-',
  1182. cls->nameForLogging(), sel_getName(meth.name));
  1183. }
  1184. ASSERT(sel_registerName(sel_getName(meth.name)) == meth.name);
  1185. }
  1186. #endif
  1187. }
  1188. /***********************************************************************
  1189. * nonMetaClasses
  1190. * Returns the secondary metaclass => class map
  1191. * Used for some cases of +initialize and +resolveClassMethod:.
  1192. * This map does not contain all class and metaclass pairs. It only
  1193. * contains metaclasses whose classes would be in the runtime-allocated
  1194. * named-class table, but are not because some other class with the same name
  1195. * is in that table.
  1196. * Classes with no duplicates are not included.
  1197. * Classes in the preoptimized named-class table are not included.
  1198. * Classes whose duplicates are in the preoptimized table are not included.
  1199. * Most code should use getMaybeUnrealizedNonMetaClass()
  1200. * instead of reading this table.
  1201. * Locking: runtimeLock must be read- or write-locked by the caller
  1202. **********************************************************************/
  1203. static NXMapTable *nonmeta_class_map = nil;
  1204. static NXMapTable *nonMetaClasses(void)
  1205. {
  1206. runtimeLock.assertLocked();
  1207. if (nonmeta_class_map) return nonmeta_class_map;
  1208. // nonmeta_class_map is typically small
  1209. INIT_ONCE_PTR(nonmeta_class_map,
  1210. NXCreateMapTable(NXPtrValueMapPrototype, 32),
  1211. NXFreeMapTable(v));
  1212. return nonmeta_class_map;
  1213. }
  1214. /***********************************************************************
  1215. * addNonMetaClass
  1216. * Adds metacls => cls to the secondary metaclass map
  1217. * Locking: runtimeLock must be held by the caller
  1218. **********************************************************************/
  1219. static void addNonMetaClass(Class cls)
  1220. {
  1221. runtimeLock.assertLocked();
  1222. void *old;
  1223. old = NXMapInsert(nonMetaClasses(), cls->ISA(), cls);
  1224. ASSERT(!cls->isMetaClassMaybeUnrealized());
  1225. ASSERT(cls->ISA()->isMetaClassMaybeUnrealized());
  1226. ASSERT(!old);
  1227. }
  1228. static void removeNonMetaClass(Class cls)
  1229. {
  1230. runtimeLock.assertLocked();
  1231. NXMapRemove(nonMetaClasses(), cls->ISA());
  1232. }
  1233. static bool scanMangledField(const char *&string, const char *end,
  1234. const char *&field, int& length)
  1235. {
  1236. // Leading zero not allowed.
  1237. if (*string == '0') return false;
  1238. length = 0;
  1239. field = string;
  1240. while (field < end) {
  1241. char c = *field;
  1242. if (!isdigit(c)) break;
  1243. field++;
  1244. if (__builtin_smul_overflow(length, 10, &length)) return false;
  1245. if (__builtin_sadd_overflow(length, c - '0', &length)) return false;
  1246. }
  1247. string = field + length;
  1248. return length > 0 && string <= end;
  1249. }
  1250. /***********************************************************************
  1251. * copySwiftV1DemangledName
  1252. * Returns the pretty form of the given Swift-v1-mangled class or protocol name.
  1253. * Returns nil if the string doesn't look like a mangled Swift v1 name.
  1254. * The result must be freed with free().
  1255. **********************************************************************/
  1256. static char *copySwiftV1DemangledName(const char *string, bool isProtocol = false)
  1257. {
  1258. if (!string) return nil;
  1259. // Swift mangling prefix.
  1260. if (strncmp(string, isProtocol ? "_TtP" : "_TtC", 4) != 0) return nil;
  1261. string += 4;
  1262. const char *end = string + strlen(string);
  1263. // Module name.
  1264. const char *prefix;
  1265. int prefixLength;
  1266. if (string[0] == 's') {
  1267. // "s" is the Swift module.
  1268. prefix = "Swift";
  1269. prefixLength = 5;
  1270. string += 1;
  1271. } else {
  1272. if (! scanMangledField(string, end, prefix, prefixLength)) return nil;
  1273. }
  1274. // Class or protocol name.
  1275. const char *suffix;
  1276. int suffixLength;
  1277. if (! scanMangledField(string, end, suffix, suffixLength)) return nil;
  1278. if (isProtocol) {
  1279. // Remainder must be "_".
  1280. if (strcmp(string, "_") != 0) return nil;
  1281. } else {
  1282. // Remainder must be empty.
  1283. if (string != end) return nil;
  1284. }
  1285. char *result;
  1286. asprintf(&result, "%.*s.%.*s", prefixLength,prefix, suffixLength,suffix);
  1287. return result;
  1288. }
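// Worked example (sketch): "_TtC5MyApp7MyClass" parses as module "MyApp"
// (length 5) and type "MyClass" (length 7), giving "MyApp.MyClass".
// "_TtCs6String" uses the "s" shorthand for the Swift module and gives
// "Swift.String". A protocol such as "_TtP5MyApp5Proto_" must end in "_"
// and gives "MyApp.Proto".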
  1289. /***********************************************************************
  1290. * copySwiftV1MangledName
  1291. * Returns the Swift 1.0 mangled form of the given class or protocol name.
  1292. * Returns nil if the string doesn't look like an unmangled Swift name.
  1293. * The result must be freed with free().
  1294. **********************************************************************/
  1295. static char *copySwiftV1MangledName(const char *string, bool isProtocol = false)
  1296. {
  1297. if (!string) return nil;
  1298. size_t dotCount = 0;
  1299. size_t dotIndex;
  1300. const char *s;
  1301. for (s = string; *s; s++) {
  1302. if (*s == '.') {
  1303. dotCount++;
  1304. dotIndex = s - string;
  1305. }
  1306. }
  1307. size_t stringLength = s - string;
  1308. if (dotCount != 1 || dotIndex == 0 || dotIndex >= stringLength-1) {
  1309. return nil;
  1310. }
  1311. const char *prefix = string;
  1312. size_t prefixLength = dotIndex;
  1313. const char *suffix = string + dotIndex + 1;
  1314. size_t suffixLength = stringLength - (dotIndex + 1);
  1315. char *name;
  1316. if (prefixLength == 5 && memcmp(prefix, "Swift", 5) == 0) {
  1317. asprintf(&name, "_Tt%cs%zu%.*s%s",
  1318. isProtocol ? 'P' : 'C',
  1319. suffixLength, (int)suffixLength, suffix,
  1320. isProtocol ? "_" : "");
  1321. } else {
  1322. asprintf(&name, "_Tt%c%zu%.*s%zu%.*s%s",
  1323. isProtocol ? 'P' : 'C',
  1324. prefixLength, (int)prefixLength, prefix,
  1325. suffixLength, (int)suffixLength, suffix,
  1326. isProtocol ? "_" : "");
  1327. }
  1328. return name;
  1329. }
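// Worked example (sketch), the inverse of the above: "MyApp.MyClass" becomes
// "_TtC5MyApp7MyClass", "Swift.String" becomes "_TtCs6String", and with
// isProtocol set "MyApp.Proto" becomes "_TtP5MyApp5Proto_".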
  1330. /***********************************************************************
  1331. * getClassExceptSomeSwift
  1332. * Looks up a class by name. The class MIGHT NOT be realized.
  1333. * Demangled Swift names are recognized.
  1334. * Classes known to the Swift runtime but not yet used are NOT recognized.
  1335. * (such as subclasses of un-instantiated generics)
  1336. * Use look_up_class() to find them as well.
  1337. * Locking: runtimeLock must be read- or write-locked by the caller.
  1338. **********************************************************************/
  1339. // This is a misnomer: gdb_objc_realized_classes is actually a list of
  1340. // named classes not in the dyld shared cache, whether realized or not.
  1341. NXMapTable *gdb_objc_realized_classes; // exported for debuggers in objc-gdb.h
  1342. uintptr_t objc_debug_realized_class_generation_count;
  1343. static Class getClass_impl(const char *name)
  1344. {
  1345. runtimeLock.assertLocked();
  1346. // allocated in _read_images
  1347. ASSERT(gdb_objc_realized_classes);
  1348. // Try runtime-allocated table
  1349. Class result = (Class)NXMapGet(gdb_objc_realized_classes, name);
  1350. if (result) return result;
  1351. // Try table from dyld shared cache.
  1352. // Note we do this last to handle the case where we dlopen'ed a shared cache
  1353. // dylib with duplicates of classes already present in the main executable.
  1354. // In that case, we put the class from the main executable in
  1355. // gdb_objc_realized_classes and want to check that before considering any
  1356. // newly loaded shared cache binaries.
  1357. return getPreoptimizedClass(name);
  1358. }
  1359. static Class getClassExceptSomeSwift(const char *name)
  1360. {
  1361. runtimeLock.assertLocked();
  1362. // Try name as-is
  1363. Class result = getClass_impl(name);
  1364. if (result) return result;
  1365. // Try Swift-mangled equivalent of the given name.
  1366. if (char *swName = copySwiftV1MangledName(name)) {
  1367. result = getClass_impl(swName);
  1368. free(swName);
  1369. return result;
  1370. }
  1371. return nil;
  1372. }
  1373. /***********************************************************************
  1374. * addNamedClass
  1375. * Adds name => cls to the named non-meta class map.
  1376. * Warns about duplicate class names and keeps the old mapping.
  1377. * Locking: runtimeLock must be held by the caller
  1378. **********************************************************************/
  1379. static void addNamedClass(Class cls, const char *name, Class replacing = nil)
  1380. {
  1381. runtimeLock.assertLocked();
  1382. Class old;
  1383. if ((old = getClassExceptSomeSwift(name)) && old != replacing) {
  1384. inform_duplicate(name, old, cls);
  1385. // getMaybeUnrealizedNonMetaClass uses name lookups.
  1386. // Classes not found by name lookup must be in the
  1387. // secondary meta->nonmeta table.
  1388. addNonMetaClass(cls);
  1389. } else {
  1390. NXMapInsert(gdb_objc_realized_classes, name, cls);
  1391. }
  1392. ASSERT(!(cls->data()->flags & RO_META));
  1393. // wrong: constructed classes are already realized when they get here
  1394. // ASSERT(!cls->isRealized());
  1395. }
  1396. /***********************************************************************
  1397. * removeNamedClass
  1398. * Removes cls from the name => cls map.
  1399. * Locking: runtimeLock must be held by the caller
  1400. **********************************************************************/
  1401. static void removeNamedClass(Class cls, const char *name)
  1402. {
  1403. runtimeLock.assertLocked();
  1404. ASSERT(!(cls->data()->flags & RO_META));
  1405. if (cls == NXMapGet(gdb_objc_realized_classes, name)) {
  1406. NXMapRemove(gdb_objc_realized_classes, name);
  1407. } else {
  1408. // cls has a name collision with another class - don't remove the other
  1409. // but do remove cls from the secondary metaclass->class map.
  1410. removeNonMetaClass(cls);
  1411. }
  1412. }
  1413. /***********************************************************************
  1414. * futureNamedClasses
  1415. * Returns the classname => future class map for unrealized future classes.
  1416. * Locking: runtimeLock must be held by the caller
  1417. **********************************************************************/
  1418. static NXMapTable *future_named_class_map = nil;
  1419. static NXMapTable *futureNamedClasses()
  1420. {
  1421. runtimeLock.assertLocked();
  1422. if (future_named_class_map) return future_named_class_map;
  1423. // future_named_class_map is big enough for CF's classes and a few others
  1424. future_named_class_map =
  1425. NXCreateMapTable(NXStrValueMapPrototype, 32);
  1426. return future_named_class_map;
  1427. }
  1428. static bool haveFutureNamedClasses() {
  1429. return future_named_class_map && NXCountMapTable(future_named_class_map);
  1430. }
  1431. /***********************************************************************
  1432. * addFutureNamedClass
  1433. * Installs cls as the class structure to use for the named class if it appears.
  1434. * Locking: runtimeLock must be held by the caller
  1435. **********************************************************************/
  1436. static void addFutureNamedClass(const char *name, Class cls)
  1437. {
  1438. void *old;
  1439. runtimeLock.assertLocked();
  1440. if (PrintFuture) {
  1441. _objc_inform("FUTURE: reserving %p for %s", (void*)cls, name);
  1442. }
  1443. class_rw_t *rw = (class_rw_t *)calloc(sizeof(class_rw_t), 1);
  1444. class_ro_t *ro = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
  1445. ro->name = strdupIfMutable(name);
  1446. rw->ro = ro;
  1447. cls->setData(rw);
  1448. cls->data()->flags = RO_FUTURE;
  1449. old = NXMapKeyCopyingInsert(futureNamedClasses(), name, cls);
  1450. ASSERT(!old);
  1451. }
  1452. /***********************************************************************
  1453. * popFutureNamedClass
  1454. * Removes the named class from the unrealized future class list,
  1455. * because it has been realized.
  1456. * Returns nil if the name is not used by a future class.
  1457. * Locking: runtimeLock must be held by the caller
  1458. **********************************************************************/
  1459. static Class popFutureNamedClass(const char *name)
  1460. {
  1461. runtimeLock.assertLocked();
  1462. Class cls = nil;
  1463. if (future_named_class_map) {
  1464. cls = (Class)NXMapKeyFreeingRemove(future_named_class_map, name);
  1465. if (cls && NXCountMapTable(future_named_class_map) == 0) {
  1466. NXFreeMapTable(future_named_class_map);
  1467. future_named_class_map = nil;
  1468. }
  1469. }
  1470. return cls;
  1471. }
  1472. /***********************************************************************
  1473. * remappedClasses
  1474. * Returns the oldClass => newClass map for realized future classes.
  1475. * Returns the oldClass => nil map for ignored weak-linked classes.
  1476. * Locking: runtimeLock must be read- or write-locked by the caller
  1477. **********************************************************************/
  1478. static objc::DenseMap<Class, Class> *remappedClasses(bool create)
  1479. {
  1480. static objc::LazyInitDenseMap<Class, Class> remapped_class_map;
  1481. runtimeLock.assertLocked();
  1482. // start big enough to hold CF's classes and a few others
  1483. return remapped_class_map.get(create, 32);
  1484. }
  1485. /***********************************************************************
  1486. * noClassesRemapped
  1487. * Returns YES if no classes have been remapped
  1488. * Locking: runtimeLock must be read- or write-locked by the caller
  1489. **********************************************************************/
  1490. static bool noClassesRemapped(void)
  1491. {
  1492. runtimeLock.assertLocked();
  1493. bool result = (remappedClasses(NO) == nil);
  1494. #if DEBUG
  1495. // Catch construction of an empty table, which defeats optimization.
  1496. auto *map = remappedClasses(NO);
  1497. if (map) ASSERT(map->size() > 0);
  1498. #endif
  1499. return result;
  1500. }
  1501. /***********************************************************************
  1502. * addRemappedClass
  1503. * newcls is a realized future class, replacing oldcls.
  1504. * OR newcls is nil, replacing ignored weak-linked class oldcls.
  1505. * Locking: runtimeLock must be write-locked by the caller
  1506. **********************************************************************/
  1507. static void addRemappedClass(Class oldcls, Class newcls)
  1508. {
  1509. runtimeLock.assertLocked();
  1510. if (PrintFuture) {
  1511. _objc_inform("FUTURE: using %p instead of %p for %s",
  1512. (void*)newcls, (void*)oldcls, oldcls->nameForLogging());
  1513. }
  1514. auto result = remappedClasses(YES)->insert({ oldcls, newcls });
  1515. #if DEBUG
  1516. if (!std::get<1>(result)) {
  1517. // An existing mapping was overwritten. This is not allowed
  1518. // unless it was to nil.
  1519. auto iterator = std::get<0>(result);
  1520. auto value = std::get<1>(*iterator);
  1521. ASSERT(value == nil);
  1522. }
  1523. #else
  1524. (void)result;
  1525. #endif
  1526. }
  1527. /***********************************************************************
  1528. * remapClass
  1529. * Returns the live class pointer for cls, which may be pointing to
  1530. * a class struct that has been reallocated.
  1531. * Returns nil if cls is ignored because of weak linking.
  1532. * Locking: runtimeLock must be read- or write-locked by the caller
  1533. **********************************************************************/
  1534. static Class remapClass(Class cls)
  1535. {
  1536. runtimeLock.assertLocked();
  1537. if (!cls) return nil;
  1538. auto *map = remappedClasses(NO);
  1539. if (!map)
  1540. return cls;
  1541. auto iterator = map->find(cls);
  1542. if (iterator == map->end())
  1543. return cls;
  1544. return std::get<1>(*iterator);
  1545. }
  1546. static Class remapClass(classref_t cls)
  1547. {
  1548. return remapClass((Class)cls);
  1549. }
  1550. Class _class_remap(Class cls)
  1551. {
  1552. mutex_locker_t lock(runtimeLock);
  1553. return remapClass(cls);
  1554. }
  1555. /***********************************************************************
  1556. * remapClassRef
  1557. * Fix up a class ref, in case the class referenced has been reallocated
  1558. * or is an ignored weak-linked class.
  1559. * Locking: runtimeLock must be read- or write-locked by the caller
  1560. **********************************************************************/
  1561. static void remapClassRef(Class *clsref)
  1562. {
  1563. runtimeLock.assertLocked();
  1564. Class newcls = remapClass(*clsref);
  1565. if (*clsref != newcls) *clsref = newcls;
  1566. }
  1567. _Nullable Class
  1568. objc_loadClassref(_Nullable Class * _Nonnull clsref)
  1569. {
  1570. auto *atomicClsref = explicit_atomic<uintptr_t>::from_pointer((uintptr_t *)clsref);
  1571. uintptr_t cls = atomicClsref->load(std::memory_order_relaxed);
  1572. if (fastpath((cls & 1) == 0))
  1573. return (Class)cls;
  1574. auto stub = (stub_class_t *)(cls & ~1ULL);
  1575. Class initialized = stub->initializer((Class)stub, nil);
  1576. atomicClsref->store((uintptr_t)initialized, std::memory_order_relaxed);
  1577. return initialized;
  1578. }
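// Note (sketch of the convention used above): a class stub reference has its
// low bit set. If *clsref already holds an ordinary (even-aligned) Class it
// is returned unchanged; otherwise the low bit is stripped, the stub's
// initializer is run to produce the real class, and the result is stored
// back into *clsref so subsequent loads take the fast path.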
  1579. /***********************************************************************
  1580. * getMaybeUnrealizedNonMetaClass
  1581. * Return the ordinary class for this class or metaclass.
  1582. * `inst` is an instance of `cls` or a subclass thereof, or nil.
  1583. * Non-nil inst is faster.
  1584. * The result may be unrealized.
  1585. * Used by +initialize.
  1586. * Locking: runtimeLock must be read- or write-locked by the caller
  1587. **********************************************************************/
  1588. static Class getMaybeUnrealizedNonMetaClass(Class metacls, id inst)
  1589. {
  1590. static int total, named, secondary, sharedcache, dyld3;
  1591. runtimeLock.assertLocked();
  1592. ASSERT(metacls->isRealized());
  1593. total++;
  1594. // return cls itself if it's already a non-meta class
  1595. if (!metacls->isMetaClass()) return metacls;
  1596. // metacls really is a metaclass
  1597. // which means inst (if any) is a class
  1598. // special case for root metaclass
  1599. // where inst == inst->ISA() == metacls is possible
  1600. if (metacls->ISA() == metacls) {
  1601. Class cls = metacls->superclass;
  1602. ASSERT(cls->isRealized());
  1603. ASSERT(!cls->isMetaClass());
  1604. ASSERT(cls->ISA() == metacls);
  1605. if (cls->ISA() == metacls) return cls;
  1606. }
  1607. // use inst if available
  1608. if (inst) {
  1609. Class cls = remapClass((Class)inst);
  1610. // cls may be a subclass - find the real class for metacls
  1611. // fixme this probably stops working once Swift starts
  1612. // reallocating classes if cls is unrealized.
  1613. while (cls) {
  1614. if (cls->ISA() == metacls) {
  1615. ASSERT(!cls->isMetaClassMaybeUnrealized());
  1616. return cls;
  1617. }
  1618. cls = cls->superclass;
  1619. }
  1620. #if DEBUG
  1621. _objc_fatal("cls is not an instance of metacls");
  1622. #else
  1623. // release build: be forgiving and fall through to slow lookups
  1624. #endif
  1625. }
  1626. // try name lookup
  1627. {
  1628. Class cls = getClassExceptSomeSwift(metacls->mangledName());
  1629. if (cls && cls->ISA() == metacls) {
  1630. named++;
  1631. if (PrintInitializing) {
  1632. _objc_inform("INITIALIZE: %d/%d (%g%%) "
  1633. "successful by-name metaclass lookups",
  1634. named, total, named*100.0/total);
  1635. }
  1636. return cls;
  1637. }
  1638. }
  1639. // try secondary table
  1640. {
  1641. Class cls = (Class)NXMapGet(nonMetaClasses(), metacls);
  1642. if (cls) {
  1643. secondary++;
  1644. if (PrintInitializing) {
  1645. _objc_inform("INITIALIZE: %d/%d (%g%%) "
  1646. "successful secondary metaclass lookups",
  1647. secondary, total, secondary*100.0/total);
  1648. }
  1649. ASSERT(cls->ISA() == metacls);
  1650. return cls;
  1651. }
  1652. }
  1653. // try the dyld closure table
  1654. if (isPreoptimized())
  1655. {
  1656. // Try table from dyld closure first. It was built to ignore the dupes it
  1657. // knows will come from the cache, so anything left in here was there when
  1658. // we launched
  1659. Class cls = nil;
  1660. // Note, we have to pass the lambda directly here as otherwise we would try
  1661. // message copy and autorelease.
  1662. _dyld_for_each_objc_class(metacls->mangledName(),
  1663. [&cls, metacls](void* classPtr, bool isLoaded, bool* stop) {
  1664. // Skip images which aren't loaded. This supports the case where dyld
1665. // might soft link an image from the main binary, so it's possibly not
1666. // loaded yet.
  1667. if (!isLoaded)
  1668. return;
1669. // Found a loaded image with this class name, so check if it's the right one
  1670. Class result = (Class)classPtr;
  1671. if (result->ISA() == metacls) {
  1672. cls = result;
  1673. *stop = true;
  1674. }
  1675. });
  1676. if (cls) {
  1677. dyld3++;
  1678. if (PrintInitializing) {
  1679. _objc_inform("INITIALIZE: %d/%d (%g%%) "
  1680. "successful dyld closure metaclass lookups",
  1681. dyld3, total, dyld3*100.0/total);
  1682. }
  1683. return cls;
  1684. }
  1685. }
  1686. // try any duplicates in the dyld shared cache
  1687. {
  1688. Class cls = nil;
  1689. int count;
  1690. Class *classes = copyPreoptimizedClasses(metacls->mangledName(),&count);
  1691. if (classes) {
  1692. for (int i = 0; i < count; i++) {
  1693. if (classes[i]->ISA() == metacls) {
  1694. cls = classes[i];
  1695. break;
  1696. }
  1697. }
  1698. free(classes);
  1699. }
  1700. if (cls) {
  1701. sharedcache++;
  1702. if (PrintInitializing) {
  1703. _objc_inform("INITIALIZE: %d/%d (%g%%) "
  1704. "successful shared cache metaclass lookups",
  1705. sharedcache, total, sharedcache*100.0/total);
  1706. }
  1707. return cls;
  1708. }
  1709. }
  1710. _objc_fatal("no class for metaclass %p", (void*)metacls);
  1711. }
  1712. /***********************************************************************
  1713. * class_initialize. Send the '+initialize' message on demand to any
  1714. * uninitialized class. Force initialization of superclasses first.
  1715. * inst is an instance of cls, or nil. Non-nil is better for performance.
  1716. * Returns the class pointer. If the class was unrealized then
  1717. * it may be reallocated.
  1718. * Locking:
  1719. * runtimeLock must be held by the caller
  1720. * This function may drop the lock.
  1721. * On exit the lock is re-acquired or dropped as requested by leaveLocked.
  1722. **********************************************************************/
  1723. static Class initializeAndMaybeRelock(Class cls, id inst,
  1724. mutex_t& lock, bool leaveLocked)
  1725. {
  1726. lock.assertLocked();
  1727. ASSERT(cls->isRealized());
  1728. if (cls->isInitialized()) {
  1729. if (!leaveLocked) lock.unlock();
  1730. return cls;
  1731. }
  1732. // Find the non-meta class for cls, if it is not already one.
  1733. // The +initialize message is sent to the non-meta class object.
  1734. Class nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
  1735. // Realize the non-meta class if necessary.
  1736. if (nonmeta->isRealized()) {
  1737. // nonmeta is cls, which was already realized
  1738. // OR nonmeta is distinct, but is already realized
  1739. // - nothing else to do
  1740. lock.unlock();
  1741. } else {
  1742. nonmeta = realizeClassMaybeSwiftAndUnlock(nonmeta, lock);
  1743. // runtimeLock is now unlocked
  1744. // fixme Swift can't relocate the class today,
  1745. // but someday it will:
  1746. cls = object_getClass(nonmeta);
  1747. }
  1748. // runtimeLock is now unlocked, for +initialize dispatch
  1749. ASSERT(nonmeta->isRealized());
  1750. initializeNonMetaClass(nonmeta);
  1751. if (leaveLocked) runtimeLock.lock();
  1752. return cls;
  1753. }
  1754. // Locking: acquires runtimeLock
  1755. Class class_initialize(Class cls, id obj)
  1756. {
  1757. runtimeLock.lock();
  1758. return initializeAndMaybeRelock(cls, obj, runtimeLock, false);
  1759. }
  1760. // Locking: caller must hold runtimeLock; this may drop and re-acquire it
  1761. static Class initializeAndLeaveLocked(Class cls, id obj, mutex_t& lock)
  1762. {
  1763. return initializeAndMaybeRelock(cls, obj, lock, true);
  1764. }
  1765. /***********************************************************************
  1766. * addRootClass
  1767. * Adds cls as a new realized root class.
  1768. * Locking: runtimeLock must be held by the caller.
  1769. **********************************************************************/
  1770. static void addRootClass(Class cls)
  1771. {
  1772. runtimeLock.assertLocked();
  1773. ASSERT(cls->isRealized());
  1774. objc_debug_realized_class_generation_count++;
  1775. cls->data()->nextSiblingClass = _firstRealizedClass;
  1776. _firstRealizedClass = cls;
  1777. }
  1778. static void removeRootClass(Class cls)
  1779. {
  1780. runtimeLock.assertLocked();
  1781. objc_debug_realized_class_generation_count++;
  1782. Class *classp;
  1783. for (classp = &_firstRealizedClass;
  1784. *classp != cls;
  1785. classp = &(*classp)->data()->nextSiblingClass)
  1786. { }
  1787. *classp = (*classp)->data()->nextSiblingClass;
  1788. }
  1789. /***********************************************************************
  1790. * addSubclass
  1791. * Adds subcls as a subclass of supercls.
  1792. * Locking: runtimeLock must be held by the caller.
  1793. **********************************************************************/
  1794. static void addSubclass(Class supercls, Class subcls)
  1795. {
  1796. runtimeLock.assertLocked();
  1797. if (supercls && subcls) {
  1798. ASSERT(supercls->isRealized());
  1799. ASSERT(subcls->isRealized());
  1800. objc_debug_realized_class_generation_count++;
  1801. subcls->data()->nextSiblingClass = supercls->data()->firstSubclass;
  1802. supercls->data()->firstSubclass = subcls;
  1803. if (supercls->hasCxxCtor()) {
  1804. subcls->setHasCxxCtor();
  1805. }
  1806. if (supercls->hasCxxDtor()) {
  1807. subcls->setHasCxxDtor();
  1808. }
  1809. objc::AWZScanner::scanAddedSubClass(subcls, supercls);
  1810. objc::RRScanner::scanAddedSubClass(subcls, supercls);
  1811. objc::CoreScanner::scanAddedSubClass(subcls, supercls);
  1812. // Special case: instancesRequireRawIsa does not propagate
  1813. // from root class to root metaclass
  1814. if (supercls->instancesRequireRawIsa() && supercls->superclass) {
  1815. subcls->setInstancesRequireRawIsaRecursively(true);
  1816. }
  1817. }
  1818. }
  1819. /***********************************************************************
  1820. * removeSubclass
  1821. * Removes subcls as a subclass of supercls.
  1822. * Locking: runtimeLock must be held by the caller.
  1823. **********************************************************************/
  1824. static void removeSubclass(Class supercls, Class subcls)
  1825. {
  1826. runtimeLock.assertLocked();
  1827. ASSERT(supercls->isRealized());
  1828. ASSERT(subcls->isRealized());
  1829. ASSERT(subcls->superclass == supercls);
  1830. objc_debug_realized_class_generation_count++;
  1831. Class *cp;
  1832. for (cp = &supercls->data()->firstSubclass;
  1833. *cp && *cp != subcls;
  1834. cp = &(*cp)->data()->nextSiblingClass)
  1835. ;
  1836. ASSERT(*cp == subcls);
  1837. *cp = subcls->data()->nextSiblingClass;
  1838. }
  1839. /***********************************************************************
  1840. * protocols
  1841. * Returns the protocol name => protocol map for protocols.
1842. * Locking: runtimeLock must be read- or write-locked by the caller
  1843. **********************************************************************/
  1844. static NXMapTable *protocols(void)
  1845. {
  1846. static NXMapTable *protocol_map = nil;
  1847. runtimeLock.assertLocked();
  1848. INIT_ONCE_PTR(protocol_map,
  1849. NXCreateMapTable(NXStrValueMapPrototype, 16),
  1850. NXFreeMapTable(v) );
  1851. return protocol_map;
  1852. }
  1853. /***********************************************************************
  1854. * getProtocol
  1855. * Looks up a protocol by name. Demangled Swift names are recognized.
  1856. * Locking: runtimeLock must be read- or write-locked by the caller.
  1857. **********************************************************************/
  1858. static NEVER_INLINE Protocol *getProtocol(const char *name)
  1859. {
  1860. runtimeLock.assertLocked();
  1861. // Try name as-is.
  1862. Protocol *result = (Protocol *)NXMapGet(protocols(), name);
  1863. if (result) return result;
  1864. // Try Swift-mangled equivalent of the given name.
  1865. if (char *swName = copySwiftV1MangledName(name, true/*isProtocol*/)) {
  1866. result = (Protocol *)NXMapGet(protocols(), swName);
  1867. free(swName);
  1868. if (result) return result;
  1869. }
  1870. // Try table from dyld shared cache
  1871. // Temporarily check that we are using the new table. Eventually this check
  1872. // will always be true.
  1873. // FIXME: Remove this check when we can
  1874. if (sharedCacheSupportsProtocolRoots()) {
  1875. result = getPreoptimizedProtocol(name);
  1876. if (result) return result;
  1877. }
  1878. return nil;
  1879. }
  1880. /***********************************************************************
  1881. * remapProtocol
  1882. * Returns the live protocol pointer for proto, which may be pointing to
  1883. * a protocol struct that has been reallocated.
  1884. * Locking: runtimeLock must be read- or write-locked by the caller
  1885. **********************************************************************/
  1886. static ALWAYS_INLINE protocol_t *remapProtocol(protocol_ref_t proto)
  1887. {
  1888. runtimeLock.assertLocked();
  1889. // Protocols in shared cache images have a canonical bit to mark that they
  1890. // are the definition we should use
  1891. if (((protocol_t *)proto)->isCanonical())
  1892. return (protocol_t *)proto;
  1893. protocol_t *newproto = (protocol_t *)
  1894. getProtocol(((protocol_t *)proto)->mangledName);
  1895. return newproto ? newproto : (protocol_t *)proto;
  1896. }
  1897. /***********************************************************************
  1898. * remapProtocolRef
  1899. * Fix up a protocol ref, in case the protocol referenced has been reallocated.
  1900. * Locking: runtimeLock must be read- or write-locked by the caller
  1901. **********************************************************************/
  1902. static size_t UnfixedProtocolReferences;
  1903. static void remapProtocolRef(protocol_t **protoref)
  1904. {
  1905. runtimeLock.assertLocked();
  1906. protocol_t *newproto = remapProtocol((protocol_ref_t)*protoref);
  1907. if (*protoref != newproto) {
  1908. *protoref = newproto;
  1909. UnfixedProtocolReferences++;
  1910. }
  1911. }
  1912. /***********************************************************************
  1913. * moveIvars
  1914. * Slides a class's ivars to accommodate the given superclass size.
  1915. * Ivars are NOT compacted to compensate for a superclass that shrunk.
  1916. * Locking: runtimeLock must be held by the caller.
  1917. **********************************************************************/
  1918. static void moveIvars(class_ro_t *ro, uint32_t superSize)
  1919. {
  1920. runtimeLock.assertLocked();
  1921. uint32_t diff;
  1922. ASSERT(superSize > ro->instanceStart);
  1923. diff = superSize - ro->instanceStart;
  1924. if (ro->ivars) {
  1925. // Find maximum alignment in this class's ivars
  1926. uint32_t maxAlignment = 1;
  1927. for (const auto& ivar : *ro->ivars) {
  1928. if (!ivar.offset) continue; // anonymous bitfield
  1929. uint32_t alignment = ivar.alignment();
  1930. if (alignment > maxAlignment) maxAlignment = alignment;
  1931. }
  1932. // Compute a slide value that preserves that alignment
  1933. uint32_t alignMask = maxAlignment - 1;
  1934. diff = (diff + alignMask) & ~alignMask;
  1935. // Slide all of this class's ivars en masse
  1936. for (const auto& ivar : *ro->ivars) {
  1937. if (!ivar.offset) continue; // anonymous bitfield
  1938. uint32_t oldOffset = (uint32_t)*ivar.offset;
  1939. uint32_t newOffset = oldOffset + diff;
  1940. *ivar.offset = newOffset;
  1941. if (PrintIvars) {
  1942. _objc_inform("IVARS: offset %u -> %u for %s "
  1943. "(size %u, align %u)",
  1944. oldOffset, newOffset, ivar.name,
  1945. ivar.size, ivar.alignment());
  1946. }
  1947. }
  1948. }
  1949. *(uint32_t *)&ro->instanceStart += diff;
  1950. *(uint32_t *)&ro->instanceSize += diff;
  1951. }
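// Worked example (sketch): if the superclass grew from 8 to 24 bytes and this
// class's ivars start at instanceStart == 8 with a maximum alignment of 8,
// then diff == 16, the round-up leaves it at 16, every ivar offset slides by
// 16 (8 -> 24, 16 -> 32, ...), and instanceStart / instanceSize both grow by
// the same 16 bytes.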
  1952. static void reconcileInstanceVariables(Class cls, Class supercls, const class_ro_t*& ro)
  1953. {
  1954. class_rw_t *rw = cls->data();
  1955. ASSERT(supercls);
  1956. ASSERT(!cls->isMetaClass());
  1957. /* debug: print them all before sliding
  1958. if (ro->ivars) {
  1959. for (const auto& ivar : *ro->ivars) {
  1960. if (!ivar.offset) continue; // anonymous bitfield
  1961. _objc_inform("IVARS: %s.%s (offset %u, size %u, align %u)",
  1962. ro->name, ivar.name,
  1963. *ivar.offset, ivar.size, ivar.alignment());
  1964. }
  1965. }
  1966. */
  1967. // Non-fragile ivars - reconcile this class with its superclass
  1968. const class_ro_t *super_ro = supercls->data()->ro;
  1969. if (DebugNonFragileIvars) {
  1970. // Debugging: Force non-fragile ivars to slide.
  1971. // Intended to find compiler, runtime, and program bugs.
  1972. // If it fails with this and works without, you have a problem.
  1973. // Operation: Reset everything to 0 + misalignment.
  1974. // Then force the normal sliding logic to push everything back.
  1975. // Exceptions: root classes, metaclasses, *NSCF* classes,
  1976. // __CF* classes, NSConstantString, NSSimpleCString
  1977. // (already know it's not root because supercls != nil)
  1978. const char *clsname = cls->mangledName();
  1979. if (!strstr(clsname, "NSCF") &&
  1980. 0 != strncmp(clsname, "__CF", 4) &&
  1981. 0 != strcmp(clsname, "NSConstantString") &&
  1982. 0 != strcmp(clsname, "NSSimpleCString"))
  1983. {
  1984. uint32_t oldStart = ro->instanceStart;
  1985. class_ro_t *ro_w = make_ro_writeable(rw);
  1986. ro = rw->ro;
  1987. // Find max ivar alignment in class.
  1988. // default to word size to simplify ivar update
  1989. uint32_t alignment = 1<<WORD_SHIFT;
  1990. if (ro->ivars) {
  1991. for (const auto& ivar : *ro->ivars) {
  1992. if (ivar.alignment() > alignment) {
  1993. alignment = ivar.alignment();
  1994. }
  1995. }
  1996. }
  1997. uint32_t misalignment = ro->instanceStart % alignment;
  1998. uint32_t delta = ro->instanceStart - misalignment;
  1999. ro_w->instanceStart = misalignment;
  2000. ro_w->instanceSize -= delta;
  2001. if (PrintIvars) {
  2002. _objc_inform("IVARS: DEBUG: forcing ivars for class '%s' "
  2003. "to slide (instanceStart %zu -> %zu)",
  2004. cls->nameForLogging(), (size_t)oldStart,
  2005. (size_t)ro->instanceStart);
  2006. }
  2007. if (ro->ivars) {
  2008. for (const auto& ivar : *ro->ivars) {
  2009. if (!ivar.offset) continue; // anonymous bitfield
  2010. *ivar.offset -= delta;
  2011. }
  2012. }
  2013. }
  2014. }
  2015. if (ro->instanceStart >= super_ro->instanceSize) {
  2016. // Superclass has not overgrown its space. We're done here.
  2017. return;
  2018. }
  2019. // fixme can optimize for "class has no new ivars", etc
  2020. if (ro->instanceStart < super_ro->instanceSize) {
  2021. // Superclass has changed size. This class's ivars must move.
  2022. // Also slide layout bits in parallel.
  2023. // This code is incapable of compacting the subclass to
  2024. // compensate for a superclass that shrunk, so don't do that.
  2025. if (PrintIvars) {
  2026. _objc_inform("IVARS: sliding ivars for class %s "
  2027. "(superclass was %u bytes, now %u)",
  2028. cls->nameForLogging(), ro->instanceStart,
  2029. super_ro->instanceSize);
  2030. }
  2031. class_ro_t *ro_w = make_ro_writeable(rw);
  2032. ro = rw->ro;
  2033. moveIvars(ro_w, super_ro->instanceSize);
  2034. gdb_objc_class_changed(cls, OBJC_CLASS_IVARS_CHANGED, ro->name);
  2035. }
  2036. }
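// A minimal sketch of what the sliding above means for client code, assuming
// hypothetical classes MyBase : NSObject and MyChild : MyBase. If MyBase gains
// ivars in a later framework build, MyChild's offsets are recomputed here at
// realize time, so offsets must be read at runtime rather than hard-coded.
#if 0   // illustrative only; not compiled with the runtime
#import <Foundation/Foundation.h>
#import <objc/runtime.h>

@interface MyBase : NSObject { @public int baseValue; }
@end
@implementation MyBase @end

@interface MyChild : MyBase { @public int childValue; }
@end
@implementation MyChild @end

static void dumpChildLayout(void)
{
    Class cls = objc_getClass("MyChild");
    Ivar iv = class_getInstanceVariable(cls, "childValue");
    // ivar_getOffset() returns the slid offset produced by moveIvars(),
    // not whatever offset the compiler baked in at build time.
    printf("childValue offset = %td, instance size = %zu\n",
           ivar_getOffset(iv), class_getInstanceSize(cls));
}
#endif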
  2037. /***********************************************************************
  2038. * realizeClassWithoutSwift
  2039. * Performs first-time initialization on class cls,
  2040. * including allocating its read-write data.
  2041. * Does not perform any Swift-side initialization.
  2042. * Returns the real class structure for the class.
  2043. * Locking: runtimeLock must be write-locked by the caller
  2044. **********************************************************************/
  2045. static Class realizeClassWithoutSwift(Class cls, Class previously)
  2046. {
  2047. runtimeLock.assertLocked();
  2048. const class_ro_t *ro;
  2049. class_rw_t *rw;
  2050. Class supercls;
  2051. Class metacls;
  2052. bool isMeta;
2053. // If the class is already realized, return it immediately.
  2054. if (!cls) return nil;
  2055. if (cls->isRealized()) return cls;
  2056. ASSERT(cls == remapClass(cls));
  2057. // fixme verify class is not in an un-dlopened part of the shared cache?
2058. // At compile time, cls->data() points to a class_ro_t structure,
2059. // so it can be cast directly to class_ro_t.
  2060. ro = (const class_ro_t *)cls->data();
  2061. if (ro->flags & RO_FUTURE) {
  2062. // This was a future class. rw data is already allocated.
  2063. rw = cls->data();
  2064. ro = cls->data()->ro;
  2065. cls->changeInfo(RW_REALIZED|RW_REALIZING, RW_FUTURE);
  2066. } else {
  2067. // Normal class. Allocate writeable class data.
2068. // Allocate and initialize the class_rw_t structure.
  2069. rw = (class_rw_t *)calloc(sizeof(class_rw_t), 1);
  2070. rw->ro = ro;
  2071. rw->flags = RW_REALIZED|RW_REALIZING;
  2072. cls->setData(rw);
  2073. }
  2074. isMeta = ro->flags & RO_META;
  2075. #if FAST_CACHE_META
  2076. if (isMeta) cls->cache.setBit(FAST_CACHE_META);
  2077. #endif
  2078. rw->version = isMeta ? 7 : 0; // old runtime went up to 6
  2079. // Choose an index for this class.
2080. // Sets cls->instancesRequireRawIsa if no more indexes are available.
  2081. cls->chooseClassArrayIndex();
  2082. if (PrintConnecting) {
  2083. _objc_inform("CLASS: realizing class '%s'%s %p %p #%u %s%s",
  2084. cls->nameForLogging(), isMeta ? " (meta)" : "",
  2085. (void*)cls, ro, cls->classArrayIndex(),
  2086. cls->isSwiftStable() ? "(swift)" : "",
  2087. cls->isSwiftLegacy() ? "(pre-stable swift)" : "");
  2088. }
  2089. // Realize superclass and metaclass, if they aren't already.
  2090. // This needs to be done after RW_REALIZED is set above, for root classes.
  2091. // This needs to be done after class index is chosen, for root metaclasses.
  2092. // This assumes that none of those classes have Swift contents,
  2093. // or that Swift's initializers have already been called.
  2094. // fixme that assumption will be wrong if we add support
  2095. // for ObjC subclasses of Swift classes.
  2096. supercls = realizeClassWithoutSwift(remapClass(cls->superclass), nil);
  2097. metacls = realizeClassWithoutSwift(remapClass(cls->ISA()), nil);
  2098. #if SUPPORT_NONPOINTER_ISA
2099. if (isMeta) {
2100. // Metaclasses do not need any features from non-pointer ISA.
2101. // This allows for a fast path for classes in objc_retain/objc_release.
  2102. cls->setInstancesRequireRawIsa();
  2103. } else {
  2104. // Disable non-pointer isa for some classes and/or platforms.
  2105. // Set instancesRequireRawIsa.
  2106. bool instancesRequireRawIsa = cls->instancesRequireRawIsa();
  2107. bool rawIsaIsInherited = false;
  2108. static bool hackedDispatch = false;
  2109. if (DisableNonpointerIsa) {
  2110. // Non-pointer isa disabled by environment or app SDK version
  2111. instancesRequireRawIsa = true;
  2112. }
  2113. else if (!hackedDispatch && 0 == strcmp(ro->name, "OS_object"))
  2114. {
  2115. // hack for libdispatch et al - isa also acts as vtable pointer
  2116. hackedDispatch = true;
  2117. instancesRequireRawIsa = true;
  2118. }
  2119. else if (supercls && supercls->superclass &&
  2120. supercls->instancesRequireRawIsa())
  2121. {
  2122. // This is also propagated by addSubclass()
  2123. // but nonpointer isa setup needs it earlier.
  2124. // Special case: instancesRequireRawIsa does not propagate
  2125. // from root class to root metaclass
  2126. instancesRequireRawIsa = true;
  2127. rawIsaIsInherited = true;
  2128. }
  2129. if (instancesRequireRawIsa) {
  2130. cls->setInstancesRequireRawIsaRecursively(rawIsaIsInherited);
  2131. }
  2132. }
  2133. // SUPPORT_NONPOINTER_ISA
  2134. #endif
  2135. // Update superclass and metaclass in case of remapping
  2136. cls->superclass = supercls;
  2137. cls->initClassIsa(metacls);
  2138. // Reconcile instance variable offsets / layout.
  2139. // This may reallocate class_ro_t, updating our ro variable.
  2140. if (supercls && !isMeta) reconcileInstanceVariables(cls, supercls, ro);
  2141. // Set fastInstanceSize if it wasn't set already.
  2142. cls->setInstanceSize(ro->instanceSize);
  2143. // Copy some flags from ro to rw
  2144. if (ro->flags & RO_HAS_CXX_STRUCTORS) {
  2145. cls->setHasCxxDtor();
  2146. if (! (ro->flags & RO_HAS_CXX_DTOR_ONLY)) {
  2147. cls->setHasCxxCtor();
  2148. }
  2149. }
  2150. // Propagate the associated objects forbidden flag from ro or from
  2151. // the superclass.
  2152. if ((ro->flags & RO_FORBIDS_ASSOCIATED_OBJECTS) ||
  2153. (supercls && supercls->forbidsAssociatedObjects()))
  2154. {
  2155. rw->flags |= RW_FORBIDS_ASSOCIATED_OBJECTS;
  2156. }
  2157. // Connect this class to its superclass's subclass lists
  2158. if (supercls) {
  2159. addSubclass(supercls, cls);
  2160. } else {
  2161. addRootClass(cls);
  2162. }
2163. // Add the methods the class implements (including those from categories), its properties, and the protocols it conforms to into the methods, properties, and protocols lists of class_rw_t.
  2164. // Attach categories
  2165. methodizeClass(cls, previously);
  2166. return cls;
  2167. }
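// A minimal sketch of when realizeClassWithoutSwift() runs for ordinary ObjC
// classes, assuming hypothetical classes EagerThing and LazyThing. A class that
// implements +load lands in __objc_nlclslist and is realized eagerly while its
// image is read; a class without +load stays unrealized until first use.
#if 0   // illustrative only; not compiled with the runtime
#import <Foundation/Foundation.h>
#import <objc/runtime.h>

@interface EagerThing : NSObject @end
@implementation EagerThing
+ (void)load { /* the presence of +load makes this class non-lazy */ }
@end

@interface LazyThing : NSObject @end
@implementation LazyThing @end

int main(void)
{
    // EagerThing was already realized during image reading.
    // LazyThing is realized on demand by this lookup (or by its first message).
    Class cls = objc_getClass("LazyThing");
    printf("realized on demand: %s\n", class_getName(cls));
    return 0;
}
#endif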
  2168. /***********************************************************************
  2169. * _objc_realizeClassFromSwift
  2170. * Called by Swift when it needs the ObjC part of a class to be realized.
  2171. * There are four cases:
  2172. * 1. cls != nil; previously == cls
  2173. * Class cls is being realized in place
  2174. * 2. cls != nil; previously == nil
  2175. * Class cls is being constructed at runtime
  2176. * 3. cls != nil; previously != cls
  2177. * The class that was at previously has been reallocated to cls
  2178. * 4. cls == nil, previously != nil
  2179. * The class at previously is hereby disavowed
  2180. *
  2181. * Only variants #1 and #2 are supported today.
  2182. *
  2183. * Locking: acquires runtimeLock
  2184. **********************************************************************/
  2185. Class _objc_realizeClassFromSwift(Class cls, void *previously)
  2186. {
  2187. if (cls) {
  2188. if (previously && previously != (void*)cls) {
  2189. // #3: relocation
  2190. mutex_locker_t lock(runtimeLock);
  2191. addRemappedClass((Class)previously, cls);
  2192. addClassTableEntry(cls);
  2193. addNamedClass(cls, cls->mangledName(), /*replacing*/nil);
  2194. return realizeClassWithoutSwift(cls, (Class)previously);
  2195. } else {
  2196. // #1 and #2: realization in place, or new class
  2197. mutex_locker_t lock(runtimeLock);
  2198. if (!previously) {
  2199. // #2: new class
  2200. cls = readClass(cls, false/*bundle*/, false/*shared cache*/);
  2201. }
  2202. // #1 and #2: realization in place, or new class
  2203. // We ignore the Swift metadata initializer callback.
  2204. // We assume that's all handled since we're being called from Swift.
  2205. return realizeClassWithoutSwift(cls, nil);
  2206. }
  2207. }
  2208. else {
  2209. // #4: disavowal
  2210. // In the future this will mean remapping the old address to nil
  2211. // and if necessary removing the old address from any other tables.
  2212. _objc_fatal("Swift requested that class %p be ignored, "
  2213. "but libobjc does not support that.", previously);
  2214. }
  2215. }
  2216. /***********************************************************************
  2217. * realizeSwiftClass
  2218. * Performs first-time initialization on class cls,
  2219. * including allocating its read-write data,
  2220. * and any Swift-side initialization.
  2221. * Returns the real class structure for the class.
  2222. * Locking: acquires runtimeLock indirectly
  2223. **********************************************************************/
  2224. static Class realizeSwiftClass(Class cls)
  2225. {
  2226. runtimeLock.assertUnlocked();
  2227. // Some assumptions:
  2228. // * Metaclasses never have a Swift initializer.
  2229. // * Root classes never have a Swift initializer.
  2230. // (These two together avoid initialization order problems at the root.)
  2231. // * Unrealized non-Swift classes have no Swift ancestry.
  2232. // * Unrealized Swift classes with no initializer have no ancestry that
  2233. // does have the initializer.
  2234. // (These two together mean we don't need to scan superclasses here
  2235. // and we don't need to worry about Swift superclasses inside
  2236. // realizeClassWithoutSwift()).
  2237. // fixme some of these assumptions will be wrong
2238. // if we add support for ObjC subclasses of Swift classes.
  2239. #if DEBUG
  2240. runtimeLock.lock();
  2241. ASSERT(remapClass(cls) == cls);
  2242. ASSERT(cls->isSwiftStable_ButAllowLegacyForNow());
  2243. ASSERT(!cls->isMetaClassMaybeUnrealized());
  2244. ASSERT(cls->superclass);
  2245. runtimeLock.unlock();
  2246. #endif
  2247. // Look for a Swift metadata initialization function
  2248. // installed on the class. If it is present we call it.
  2249. // That function in turn initializes the Swift metadata,
  2250. // prepares the "compiler-generated" ObjC metadata if not
  2251. // already present, and calls _objc_realizeSwiftClass() to finish
  2252. // our own initialization.
  2253. if (auto init = cls->swiftMetadataInitializer()) {
  2254. if (PrintConnecting) {
  2255. _objc_inform("CLASS: calling Swift metadata initializer "
  2256. "for class '%s' (%p)", cls->nameForLogging(), cls);
  2257. }
  2258. Class newcls = init(cls, nil);
  2259. // fixme someday Swift will need to relocate classes at this point,
  2260. // but we don't accept that yet.
  2261. if (cls != newcls) {
  2262. mutex_locker_t lock(runtimeLock);
  2263. addRemappedClass(cls, newcls);
  2264. }
  2265. return newcls;
  2266. }
  2267. else {
  2268. // No Swift-side initialization callback.
  2269. // Perform our own realization directly.
  2270. mutex_locker_t lock(runtimeLock);
  2271. return realizeClassWithoutSwift(cls, nil);
  2272. }
  2273. }
  2274. /***********************************************************************
  2275. * realizeClassMaybeSwift (MaybeRelock / AndUnlock / AndLeaveLocked)
  2276. * Realize a class that might be a Swift class.
  2277. * Returns the real class structure for the class.
  2278. * Locking:
  2279. * runtimeLock must be held on entry
  2280. * runtimeLock may be dropped during execution
  2281. * ...AndUnlock function leaves runtimeLock unlocked on exit
  2282. * ...AndLeaveLocked re-acquires runtimeLock if it was dropped
  2283. * This complication avoids repeated lock transitions in some cases.
  2284. **********************************************************************/
  2285. static Class
  2286. realizeClassMaybeSwiftMaybeRelock(Class cls, mutex_t& lock, bool leaveLocked)
  2287. {
  2288. lock.assertLocked();
  2289. if (!cls->isSwiftStable_ButAllowLegacyForNow()) {
  2290. // Non-Swift class. Realize it now with the lock still held.
  2291. // fixme wrong in the future for objc subclasses of swift classes
  2292. realizeClassWithoutSwift(cls, nil);
  2293. if (!leaveLocked) lock.unlock();
  2294. } else {
  2295. // Swift class. We need to drop locks and call the Swift
  2296. // runtime to initialize it.
  2297. lock.unlock();
  2298. cls = realizeSwiftClass(cls);
  2299. ASSERT(cls->isRealized()); // callback must have provoked realization
  2300. if (leaveLocked) lock.lock();
  2301. }
  2302. return cls;
  2303. }
  2304. static Class
  2305. realizeClassMaybeSwiftAndUnlock(Class cls, mutex_t& lock)
  2306. {
  2307. return realizeClassMaybeSwiftMaybeRelock(cls, lock, false);
  2308. }
  2309. static Class
  2310. realizeClassMaybeSwiftAndLeaveLocked(Class cls, mutex_t& lock)
  2311. {
  2312. return realizeClassMaybeSwiftMaybeRelock(cls, lock, true);
  2313. }
  2314. /***********************************************************************
  2315. * missingWeakSuperclass
  2316. * Return YES if some superclass of cls was weak-linked and is missing.
  2317. **********************************************************************/
  2318. static bool
  2319. missingWeakSuperclass(Class cls)
  2320. {
  2321. ASSERT(!cls->isRealized());
  2322. if (!cls->superclass) {
  2323. // superclass nil. This is normal for root classes only.
  2324. return (!(cls->data()->flags & RO_ROOT));
  2325. } else {
  2326. // superclass not nil. Check if a higher superclass is missing.
  2327. Class supercls = remapClass(cls->superclass);
  2328. ASSERT(cls != cls->superclass);
  2329. ASSERT(cls != supercls);
  2330. if (!supercls) return YES;
  2331. if (supercls->isRealized()) return NO;
  2332. return missingWeakSuperclass(supercls);
  2333. }
  2334. }
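// A minimal sketch of the behavior this check enables, assuming a hypothetical
// framework ShinyKit linked with -weak_framework ShinyKit and a local subclass
// of one of its classes. When ShinyKit (and therefore the superclass) is
// missing at runtime, readClass() disavows the subclass and lookups return nil.
#if 0   // illustrative only; not compiled with the runtime
#import <Foundation/Foundation.h>
#import <objc/runtime.h>

// @interface MyShinyView : SHKShinyView   // SHKShinyView lives in ShinyKit
// @end

static void checkAvailability(void)
{
    if (objc_getClass("MyShinyView") == Nil) {
        // The superclass was weak-linked and missing; the subclass is ignored.
        printf("MyShinyView unavailable on this OS\n");
    }
}
#endif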
  2335. /***********************************************************************
  2336. * realizeAllClassesInImage
  2337. * Non-lazily realizes all unrealized classes in the given image.
  2338. * Locking: runtimeLock must be held by the caller.
  2339. * Locking: this function may drop and re-acquire the lock.
  2340. **********************************************************************/
  2341. static void realizeAllClassesInImage(header_info *hi)
  2342. {
  2343. runtimeLock.assertLocked();
  2344. size_t count, i;
  2345. classref_t const *classlist;
  2346. if (hi->areAllClassesRealized()) return;
  2347. classlist = _getObjc2ClassList(hi, &count);
  2348. for (i = 0; i < count; i++) {
  2349. Class cls = remapClass(classlist[i]);
  2350. if (cls) {
  2351. realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
  2352. }
  2353. }
  2354. hi->setAllClassesRealized(YES);
  2355. }
  2356. /***********************************************************************
  2357. * realizeAllClasses
  2358. * Non-lazily realizes all unrealized classes in all known images.
  2359. * Locking: runtimeLock must be held by the caller.
  2360. * Locking: this function may drop and re-acquire the lock.
  2361. * Dropping the lock makes this function thread-unsafe with respect
  2362. * to concurrent image unload, but the callers of this function
  2363. * already ultimately do something that is also thread-unsafe with
  2364. * respect to image unload (such as using the list of all classes).
  2365. **********************************************************************/
  2366. static void realizeAllClasses(void)
  2367. {
  2368. runtimeLock.assertLocked();
  2369. header_info *hi;
  2370. for (hi = FirstHeader; hi; hi = hi->getNext()) {
  2371. realizeAllClassesInImage(hi); // may drop and re-acquire runtimeLock
  2372. }
  2373. }
  2374. /***********************************************************************
  2375. * _objc_allocateFutureClass
  2376. * Allocate an unresolved future class for the given class name.
  2377. * Returns any existing allocation if one was already made.
  2378. * Assumes the named class doesn't exist yet.
  2379. * Locking: acquires runtimeLock
  2380. **********************************************************************/
  2381. Class _objc_allocateFutureClass(const char *name)
  2382. {
  2383. mutex_locker_t lock(runtimeLock);
  2384. Class cls;
  2385. NXMapTable *map = futureNamedClasses();
  2386. if ((cls = (Class)NXMapGet(map, name))) {
  2387. // Already have a future class for this name.
  2388. return cls;
  2389. }
  2390. cls = _calloc_class(sizeof(objc_class));
  2391. addFutureNamedClass(name, cls);
  2392. return cls;
  2393. }
  2394. /***********************************************************************
  2395. * objc_getFutureClass. Return the id of the named class.
  2396. * If the class does not exist, return an uninitialized class
  2397. * structure that will be used for the class when and if it
  2398. * does get loaded.
  2399. * Not thread safe.
  2400. **********************************************************************/
  2401. Class objc_getFutureClass(const char *name)
  2402. {
  2403. Class cls;
  2404. // YES unconnected, NO class handler
  2405. // (unconnected is OK because it will someday be the real class)
  2406. cls = look_up_class(name, YES, NO);
  2407. if (cls) {
  2408. if (PrintFuture) {
  2409. _objc_inform("FUTURE: found %p already in use for %s",
  2410. (void*)cls, name);
  2411. }
  2412. return cls;
  2413. }
  2414. // No class or future class with that name yet. Make one.
  2415. // fixme not thread-safe with respect to
  2416. // simultaneous library load or getFutureClass.
  2417. return _objc_allocateFutureClass(name);
  2418. }
  2419. BOOL _class_isFutureClass(Class cls)
  2420. {
  2421. return cls && cls->isFuture();
  2422. }
  2423. /***********************************************************************
  2424. * _objc_flush_caches
  2425. * Flushes all caches.
  2426. * (Historical behavior: flush caches for cls, its metaclass,
  2427. * and subclasses thereof. Nil flushes all classes.)
  2428. * Locking: acquires runtimeLock
  2429. **********************************************************************/
  2430. static void flushCaches(Class cls)
  2431. {
  2432. runtimeLock.assertLocked();
  2433. #if CONFIG_USE_CACHE_LOCK
  2434. mutex_locker_t lock(cacheUpdateLock);
  2435. #endif
  2436. if (cls) {
  2437. foreach_realized_class_and_subclass(cls, [](Class c){
  2438. cache_erase_nolock(c);
  2439. return true;
  2440. });
  2441. }
  2442. else {
  2443. foreach_realized_class_and_metaclass([](Class c){
  2444. cache_erase_nolock(c);
  2445. return true;
  2446. });
  2447. }
  2448. }
  2449. void _objc_flush_caches(Class cls)
  2450. {
  2451. {
  2452. mutex_locker_t lock(runtimeLock);
  2453. flushCaches(cls);
  2454. if (cls && cls->superclass && cls != cls->getIsa()) {
  2455. flushCaches(cls->getIsa());
  2456. } else {
  2457. // cls is a root class or root metaclass. Its metaclass is itself
  2458. // or a subclass so the metaclass caches were already flushed.
  2459. }
  2460. }
  2461. if (!cls) {
  2462. // collectALot if cls==nil
  2463. #if CONFIG_USE_CACHE_LOCK
  2464. mutex_locker_t lock(cacheUpdateLock);
  2465. #else
  2466. mutex_locker_t lock(runtimeLock);
  2467. #endif
  2468. cache_collect(true);
  2469. }
  2470. }
  2471. /***********************************************************************
  2472. * map_images
  2473. * Process the given images which are being mapped in by dyld.
  2474. * Calls ABI-agnostic code after taking ABI-specific locks.
  2475. *
  2476. * Locking: write-locks runtimeLock
  2477. **********************************************************************/
  2478. void
  2479. map_images(unsigned count, const char * const paths[],
  2480. const struct mach_header * const mhdrs[])
  2481. {
  2482. mutex_locker_t lock(runtimeLock);
  2483. return map_images_nolock(count, paths, mhdrs);
  2484. }
  2485. /***********************************************************************
  2486. * load_images
  2487. * Process +load in the given images which are being mapped in by dyld.
  2488. *
  2489. * Locking: write-locks runtimeLock and loadMethodLock
  2490. **********************************************************************/
  2491. extern bool hasLoadMethods(const headerType *mhdr);
  2492. extern void prepare_load_methods(const headerType *mhdr);
  2493. void
  2494. load_images(const char *path __unused, const struct mach_header *mh)
  2495. {
  2496. // Return without taking locks if there are no +load methods here.
  2497. if (!hasLoadMethods((const headerType *)mh)) return;
  2498. recursive_mutex_locker_t lock(loadMethodLock);
  2499. // Discover load methods
  2500. {
  2501. mutex_locker_t lock2(runtimeLock);
  2502. prepare_load_methods((const headerType *)mh);
  2503. }
  2504. // Call +load methods (without runtimeLock - re-entrant)
  2505. call_load_methods();
  2506. }
  2507. /***********************************************************************
  2508. * unmap_image
  2509. * Process the given image which is about to be unmapped by dyld.
  2510. *
  2511. * Locking: write-locks runtimeLock and loadMethodLock
  2512. **********************************************************************/
  2513. void
  2514. unmap_image(const char *path __unused, const struct mach_header *mh)
  2515. {
  2516. recursive_mutex_locker_t lock(loadMethodLock);
  2517. mutex_locker_t lock2(runtimeLock);
  2518. unmap_image_nolock(mh);
  2519. }
  2520. /***********************************************************************
  2521. * mustReadClasses
  2522. * Preflight check in advance of readClass() from an image.
  2523. **********************************************************************/
  2524. bool mustReadClasses(header_info *hi, bool hasDyldRoots)
  2525. {
  2526. const char *reason;
  2527. // If the image is not preoptimized then we must read classes.
  2528. if (!hi->hasPreoptimizedClasses()) {
  2529. reason = nil; // Don't log this one because it is noisy.
  2530. goto readthem;
  2531. }
  2532. // If iOS simulator then we must read classes.
  2533. #if TARGET_OS_SIMULATOR
  2534. reason = "the image is for iOS simulator";
  2535. goto readthem;
  2536. #endif
  2537. ASSERT(!hi->isBundle()); // no MH_BUNDLE in shared cache
  2538. // If the image may have missing weak superclasses then we must read classes
  2539. if (!noMissingWeakSuperclasses() || hasDyldRoots) {
  2540. reason = "the image may contain classes with missing weak superclasses";
  2541. goto readthem;
  2542. }
  2543. // If there are unresolved future classes then we must read classes.
  2544. if (haveFutureNamedClasses()) {
  2545. reason = "there are unresolved future classes pending";
  2546. goto readthem;
  2547. }
  2548. // readClass() rewrites bits in backward-deploying Swift stable ABI code.
2549. // The assumption here is that there are no such classes
  2550. // in the dyld shared cache.
  2551. #if DEBUG
  2552. {
  2553. size_t count;
  2554. classref_t const *classlist = _getObjc2ClassList(hi, &count);
  2555. for (size_t i = 0; i < count; i++) {
  2556. Class cls = remapClass(classlist[i]);
  2557. ASSERT(!cls->isUnfixedBackwardDeployingStableSwift());
  2558. }
  2559. }
  2560. #endif
  2561. // readClass() does not need to do anything.
  2562. return NO;
  2563. readthem:
  2564. if (PrintPreopt && reason) {
  2565. _objc_inform("PREOPTIMIZATION: reading classes manually from %s "
  2566. "because %s", hi->fname(), reason);
  2567. }
  2568. return YES;
  2569. }
  2570. /***********************************************************************
  2571. * readClass
  2572. * Read a class and metaclass as written by a compiler.
  2573. * Returns the new class pointer. This could be:
  2574. * - cls
  2575. * - nil (cls has a missing weak-linked superclass)
  2576. * - something else (space for this class was reserved by a future class)
  2577. *
  2578. * Note that all work performed by this function is preflighted by
  2579. * mustReadClasses(). Do not change this function without updating that one.
  2580. *
  2581. * Locking: runtimeLock acquired by map_images or objc_readClassPair
  2582. **********************************************************************/
  2583. Class readClass(Class cls, bool headerIsBundle, bool headerIsPreoptimized)
  2584. {
  2585. const char *mangledName = cls->mangledName();
  2586. if (missingWeakSuperclass(cls)) {
  2587. // No superclass (probably weak-linked).
  2588. // Disavow any knowledge of this subclass.
  2589. if (PrintConnecting) {
  2590. _objc_inform("CLASS: IGNORING class '%s' with "
  2591. "missing weak-linked superclass",
  2592. cls->nameForLogging());
  2593. }
  2594. addRemappedClass(cls, nil);
  2595. cls->superclass = nil;
  2596. return nil;
  2597. }
  2598. cls->fixupBackwardDeployingStableSwift();
  2599. Class replacing = nil;
  2600. if (Class newCls = popFutureNamedClass(mangledName)) {
  2601. // This name was previously allocated as a future class.
  2602. // Copy objc_class to future class's struct.
  2603. // Preserve future's rw data block.
  2604. if (newCls->isAnySwift()) {
  2605. _objc_fatal("Can't complete future class request for '%s' "
  2606. "because the real class is too big.",
  2607. cls->nameForLogging());
  2608. }
  2609. class_rw_t *rw = newCls->data();
  2610. const class_ro_t *old_ro = rw->ro;
  2611. memcpy(newCls, cls, sizeof(objc_class));
  2612. rw->ro = (class_ro_t *)newCls->data();
  2613. newCls->setData(rw);
  2614. freeIfMutable((char *)old_ro->name);
  2615. free((void *)old_ro);
  2616. addRemappedClass(cls, newCls);
  2617. replacing = cls;
  2618. cls = newCls;
  2619. }
  2620. if (headerIsPreoptimized && !replacing) {
  2621. // class list built in shared cache
  2622. // fixme strict assert doesn't work because of duplicates
  2623. // ASSERT(cls == getClass(name));
  2624. ASSERT(getClassExceptSomeSwift(mangledName));
  2625. } else {
  2626. addNamedClass(cls, mangledName, replacing);
  2627. addClassTableEntry(cls);
  2628. }
  2629. // for future reference: shared cache never contains MH_BUNDLEs
  2630. if (headerIsBundle) {
  2631. cls->data()->flags |= RO_FROM_BUNDLE;
  2632. cls->ISA()->data()->flags |= RO_FROM_BUNDLE;
  2633. }
  2634. return cls;
  2635. }
  2636. /***********************************************************************
  2637. * readProtocol
  2638. * Read a protocol as written by a compiler.
  2639. **********************************************************************/
  2640. static void
  2641. readProtocol(protocol_t *newproto, Class protocol_class,
  2642. NXMapTable *protocol_map,
  2643. bool headerIsPreoptimized, bool headerIsBundle)
  2644. {
  2645. // This is not enough to make protocols in unloaded bundles safe,
  2646. // but it does prevent crashes when looking up unrelated protocols.
  2647. auto insertFn = headerIsBundle ? NXMapKeyCopyingInsert : NXMapInsert;
  2648. protocol_t *oldproto = (protocol_t *)getProtocol(newproto->mangledName);
  2649. if (oldproto) {
  2650. if (oldproto != newproto) {
  2651. // Some other definition already won.
  2652. if (PrintProtocols) {
  2653. _objc_inform("PROTOCOLS: protocol at %p is %s "
  2654. "(duplicate of %p)",
  2655. newproto, oldproto->nameForLogging(), oldproto);
  2656. }
  2657. // If we are a shared cache binary then we have a definition of this
  2658. // protocol, but if another one was chosen then we need to clear our
  2659. // isCanonical bit so that no-one trusts it.
  2660. // Note, if getProtocol returned a shared cache protocol then the
  2661. // canonical definition is already in the shared cache and we don't
  2662. // need to do anything.
  2663. if (headerIsPreoptimized && !oldproto->isCanonical()) {
  2664. // Note newproto is an entry in our __objc_protolist section which
  2665. // for shared cache binaries points to the original protocol in
  2666. // that binary, not the shared cache uniqued one.
  2667. auto cacheproto = (protocol_t *)
  2668. getSharedCachePreoptimizedProtocol(newproto->mangledName);
  2669. if (cacheproto && cacheproto->isCanonical())
  2670. cacheproto->clearIsCanonical();
  2671. }
  2672. }
  2673. }
  2674. else if (headerIsPreoptimized) {
  2675. // Shared cache initialized the protocol object itself,
  2676. // but in order to allow out-of-cache replacement we need
  2677. // to add it to the protocol table now.
  2678. protocol_t *cacheproto = (protocol_t *)
  2679. getPreoptimizedProtocol(newproto->mangledName);
  2680. protocol_t *installedproto;
  2681. if (cacheproto && cacheproto != newproto) {
  2682. // Another definition in the shared cache wins (because
  2683. // everything in the cache was fixed up to point to it).
  2684. installedproto = cacheproto;
  2685. }
  2686. else {
  2687. // This definition wins.
  2688. installedproto = newproto;
  2689. }
  2690. ASSERT(installedproto->getIsa() == protocol_class);
  2691. ASSERT(installedproto->size >= sizeof(protocol_t));
  2692. insertFn(protocol_map, installedproto->mangledName,
  2693. installedproto);
  2694. if (PrintProtocols) {
  2695. _objc_inform("PROTOCOLS: protocol at %p is %s",
  2696. installedproto, installedproto->nameForLogging());
  2697. if (newproto != installedproto) {
  2698. _objc_inform("PROTOCOLS: protocol at %p is %s "
  2699. "(duplicate of %p)",
  2700. newproto, installedproto->nameForLogging(),
  2701. installedproto);
  2702. }
  2703. }
  2704. }
  2705. else if (newproto->size >= sizeof(protocol_t)) {
  2706. // New protocol from an un-preoptimized image
  2707. // with sufficient storage. Fix it up in place.
  2708. // fixme duplicate protocols from unloadable bundle
  2709. newproto->initIsa(protocol_class); // fixme pinned
  2710. insertFn(protocol_map, newproto->mangledName, newproto);
  2711. if (PrintProtocols) {
  2712. _objc_inform("PROTOCOLS: protocol at %p is %s",
  2713. newproto, newproto->nameForLogging());
  2714. }
  2715. }
  2716. else {
  2717. // New protocol from an un-preoptimized image
  2718. // with insufficient storage. Reallocate it.
  2719. // fixme duplicate protocols from unloadable bundle
  2720. size_t size = max(sizeof(protocol_t), (size_t)newproto->size);
  2721. protocol_t *installedproto = (protocol_t *)calloc(size, 1);
  2722. memcpy(installedproto, newproto, newproto->size);
  2723. installedproto->size = (typeof(installedproto->size))size;
  2724. installedproto->initIsa(protocol_class); // fixme pinned
  2725. insertFn(protocol_map, installedproto->mangledName, installedproto);
  2726. if (PrintProtocols) {
  2727. _objc_inform("PROTOCOLS: protocol at %p is %s ",
  2728. installedproto, installedproto->nameForLogging());
  2729. _objc_inform("PROTOCOLS: protocol at %p is %s "
  2730. "(reallocated to %p)",
  2731. newproto, installedproto->nameForLogging(),
  2732. installedproto);
  2733. }
  2734. }
  2735. }
  2736. /***********************************************************************
  2737. * _read_images
  2738. * Perform initial processing of the headers in the linked
  2739. * list beginning with headerList.
  2740. *
  2741. * Called by: map_images_nolock
  2742. *
  2743. * Locking: runtimeLock acquired by map_images
  2744. **********************************************************************/
  2745. void _read_images(header_info **hList, uint32_t hCount, int totalClasses, int unoptimizedTotalClasses)
  2746. {
  2747. header_info *hi;
  2748. uint32_t hIndex;
  2749. size_t count;
  2750. size_t i;
  2751. Class *resolvedFutureClasses = nil;
  2752. size_t resolvedFutureClassCount = 0;
  2753. static bool doneOnce;
  2754. bool launchTime = NO;
  2755. TimeLogger ts(PrintImageTimes);
  2756. runtimeLock.assertLocked();
  2757. #define EACH_HEADER \
  2758. hIndex = 0; \
  2759. hIndex < hCount && (hi = hList[hIndex]); \
  2760. hIndex++
  2761. if (!doneOnce) {
  2762. doneOnce = YES;
  2763. launchTime = YES;
  2764. #if SUPPORT_NONPOINTER_ISA
  2765. // Disable non-pointer isa under some conditions.
  2766. # if SUPPORT_INDEXED_ISA
  2767. // Disable nonpointer isa if any image contains old Swift code
  2768. for (EACH_HEADER) {
  2769. if (hi->info()->containsSwift() &&
  2770. hi->info()->swiftUnstableVersion() < objc_image_info::SwiftVersion3)
  2771. {
  2772. DisableNonpointerIsa = true;
  2773. if (PrintRawIsa) {
  2774. _objc_inform("RAW ISA: disabling non-pointer isa because "
  2775. "the app or a framework contains Swift code "
  2776. "older than Swift 3.0");
  2777. }
  2778. break;
  2779. }
  2780. }
  2781. # endif
  2782. # if TARGET_OS_OSX
  2783. // Disable non-pointer isa if the app is too old
  2784. // (linked before OS X 10.11)
  2785. if (dyld_get_program_sdk_version() < DYLD_MACOSX_VERSION_10_11) {
  2786. DisableNonpointerIsa = true;
  2787. if (PrintRawIsa) {
  2788. _objc_inform("RAW ISA: disabling non-pointer isa because "
  2789. "the app is too old (SDK version " SDK_FORMAT ")",
  2790. FORMAT_SDK(dyld_get_program_sdk_version()));
  2791. }
  2792. }
  2793. // Disable non-pointer isa if the app has a __DATA,__objc_rawisa section
  2794. // New apps that load old extensions may need this.
  2795. for (EACH_HEADER) {
  2796. if (hi->mhdr()->filetype != MH_EXECUTE) continue;
  2797. unsigned long size;
  2798. if (getsectiondata(hi->mhdr(), "__DATA", "__objc_rawisa", &size)) {
  2799. DisableNonpointerIsa = true;
  2800. if (PrintRawIsa) {
  2801. _objc_inform("RAW ISA: disabling non-pointer isa because "
  2802. "the app has a __DATA,__objc_rawisa section");
  2803. }
  2804. }
  2805. break; // assume only one MH_EXECUTE image
  2806. }
  2807. # endif
  2808. #endif
  2809. if (DisableTaggedPointers) {
  2810. disableTaggedPointers();
  2811. }
  2812. initializeTaggedPointerObfuscator();
  2813. if (PrintConnecting) {
  2814. _objc_inform("CLASS: found %d classes during launch", totalClasses);
  2815. }
  2816. // namedClasses
  2817. // Preoptimized classes don't go in this table.
  2818. // 4/3 is NXMapTable's load factor
  2819. int namedClassesSize =
  2820. (isPreoptimized() ? unoptimizedTotalClasses : totalClasses) * 4 / 3;
  2821. gdb_objc_realized_classes =
  2822. NXCreateMapTable(NXStrValueMapPrototype, namedClassesSize);
  2823. ts.log("IMAGE TIMES: first time tasks");
  2824. }
  2825. // Fix up @selector references
  2826. static size_t UnfixedSelectors;
  2827. {
  2828. mutex_locker_t lock(selLock);
  2829. for (EACH_HEADER) {
  2830. if (hi->hasPreoptimizedSelectors()) continue;
  2831. bool isBundle = hi->isBundle();
  2832. SEL *sels = _getObjc2SelectorRefs(hi, &count);
  2833. UnfixedSelectors += count;
  2834. for (i = 0; i < count; i++) {
  2835. const char *name = sel_cname(sels[i]);
  2836. SEL sel = sel_registerNameNoLock(name, isBundle);
  2837. if (sels[i] != sel) {
  2838. sels[i] = sel;
  2839. }
  2840. }
  2841. }
  2842. }
  2843. ts.log("IMAGE TIMES: fix up selector references");
  2844. // Discover classes. Fix up unresolved future classes. Mark bundle classes.
  2845. bool hasDyldRoots = dyld_shared_cache_some_image_overridden();
  2846. for (EACH_HEADER) {
  2847. if (! mustReadClasses(hi, hasDyldRoots)) {
  2848. // Image is sufficiently optimized that we need not call readClass()
  2849. continue;
  2850. }
  2851. classref_t const *classlist = _getObjc2ClassList(hi, &count);
  2852. bool headerIsBundle = hi->isBundle();
  2853. bool headerIsPreoptimized = hi->hasPreoptimizedClasses();
  2854. for (i = 0; i < count; i++) {
  2855. Class cls = (Class)classlist[i];
  2856. Class newCls = readClass(cls, headerIsBundle, headerIsPreoptimized);
  2857. if (newCls != cls && newCls) {
  2858. // Class was moved but not deleted. Currently this occurs
  2859. // only when the new class resolved a future class.
  2860. // Non-lazily realize the class below.
  2861. resolvedFutureClasses = (Class *)
  2862. realloc(resolvedFutureClasses,
  2863. (resolvedFutureClassCount+1) * sizeof(Class));
  2864. resolvedFutureClasses[resolvedFutureClassCount++] = newCls;
  2865. }
  2866. }
  2867. }
  2868. ts.log("IMAGE TIMES: discover classes");
  2869. // Fix up remapped classes
  2870. // Class list and nonlazy class list remain unremapped.
  2871. // Class refs and super refs are remapped for message dispatching.
  2872. if (!noClassesRemapped()) {
  2873. for (EACH_HEADER) {
  2874. Class *classrefs = _getObjc2ClassRefs(hi, &count);
  2875. for (i = 0; i < count; i++) {
  2876. remapClassRef(&classrefs[i]);
  2877. }
  2878. // fixme why doesn't test future1 catch the absence of this?
  2879. classrefs = _getObjc2SuperRefs(hi, &count);
  2880. for (i = 0; i < count; i++) {
  2881. remapClassRef(&classrefs[i]);
  2882. }
  2883. }
  2884. }
  2885. ts.log("IMAGE TIMES: remap classes");
  2886. #if SUPPORT_FIXUP
  2887. // Fix up old objc_msgSend_fixup call sites
  2888. for (EACH_HEADER) {
  2889. message_ref_t *refs = _getObjc2MessageRefs(hi, &count);
  2890. if (count == 0) continue;
  2891. if (PrintVtables) {
  2892. _objc_inform("VTABLES: repairing %zu unsupported vtable dispatch "
  2893. "call sites in %s", count, hi->fname());
  2894. }
  2895. for (i = 0; i < count; i++) {
  2896. fixupMessageRef(refs+i);
  2897. }
  2898. }
  2899. ts.log("IMAGE TIMES: fix up objc_msgSend_fixup");
  2900. #endif
  2901. bool cacheSupportsProtocolRoots = sharedCacheSupportsProtocolRoots();
  2902. // Discover protocols. Fix up protocol refs.
  2903. for (EACH_HEADER) {
  2904. extern objc_class OBJC_CLASS_$_Protocol;
  2905. Class cls = (Class)&OBJC_CLASS_$_Protocol;
  2906. ASSERT(cls);
  2907. NXMapTable *protocol_map = protocols();
  2908. bool isPreoptimized = hi->hasPreoptimizedProtocols();
  2909. // Skip reading protocols if this is an image from the shared cache
  2910. // and we support roots
  2911. // Note, after launch we do need to walk the protocol as the protocol
  2912. // in the shared cache is marked with isCanonical() and that may not
  2913. // be true if some non-shared cache binary was chosen as the canonical
  2914. // definition
  2915. if (launchTime && isPreoptimized && cacheSupportsProtocolRoots) {
  2916. if (PrintProtocols) {
  2917. _objc_inform("PROTOCOLS: Skipping reading protocols in image: %s",
  2918. hi->fname());
  2919. }
  2920. continue;
  2921. }
  2922. bool isBundle = hi->isBundle();
  2923. protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
  2924. for (i = 0; i < count; i++) {
  2925. readProtocol(protolist[i], cls, protocol_map,
  2926. isPreoptimized, isBundle);
  2927. }
  2928. }
  2929. ts.log("IMAGE TIMES: discover protocols");
  2930. // Fix up @protocol references
  2931. // Preoptimized images may have the right
  2932. // answer already but we don't know for sure.
  2933. for (EACH_HEADER) {
  2934. // At launch time, we know preoptimized image refs are pointing at the
  2935. // shared cache definition of a protocol. We can skip the check on
  2936. // launch, but have to visit @protocol refs for shared cache images
  2937. // loaded later.
  2938. if (launchTime && cacheSupportsProtocolRoots && hi->isPreoptimized())
  2939. continue;
  2940. protocol_t **protolist = _getObjc2ProtocolRefs(hi, &count);
  2941. for (i = 0; i < count; i++) {
  2942. remapProtocolRef(&protolist[i]);
  2943. }
  2944. }
  2945. ts.log("IMAGE TIMES: fix up @protocol references");
  2946. // Discover categories.
  2947. for (EACH_HEADER) {
  2948. bool hasClassProperties = hi->info()->hasCategoryClassProperties();
  2949. auto processCatlist = [&](category_t * const *catlist) {
  2950. for (i = 0; i < count; i++) {
  2951. category_t *cat = catlist[i];
  2952. Class cls = remapClass(cat->cls);
  2953. locstamped_category_t lc{cat, hi};
  2954. if (!cls) {
  2955. // Category's target class is missing (probably weak-linked).
  2956. // Ignore the category.
  2957. if (PrintConnecting) {
  2958. _objc_inform("CLASS: IGNORING category \?\?\?(%s) %p with "
  2959. "missing weak-linked target class",
  2960. cat->name, cat);
  2961. }
  2962. continue;
  2963. }
  2964. // Process this category.
  2965. if (cls->isStubClass()) {
  2966. // Stub classes are never realized. Stub classes
  2967. // don't know their metaclass until they're
  2968. // initialized, so we have to add categories with
  2969. // class methods or properties to the stub itself.
  2970. // methodizeClass() will find them and add them to
  2971. // the metaclass as appropriate.
  2972. if (cat->instanceMethods ||
  2973. cat->protocols ||
  2974. cat->instanceProperties ||
  2975. cat->classMethods ||
  2976. cat->protocols ||
  2977. (hasClassProperties && cat->_classProperties))
  2978. {
  2979. objc::unattachedCategories.addForClass(lc, cls);
  2980. }
  2981. } else {
  2982. // First, register the category with its target class.
  2983. // Then, rebuild the class's method lists (etc) if
  2984. // the class is realized.
  2985. if (cat->instanceMethods || cat->protocols
  2986. || cat->instanceProperties)
  2987. {
  2988. if (cls->isRealized()) {
  2989. attachCategories(cls, &lc, 1, ATTACH_EXISTING);
  2990. } else {
  2991. objc::unattachedCategories.addForClass(lc, cls);
  2992. }
  2993. }
  2994. if (cat->classMethods || cat->protocols
  2995. || (hasClassProperties && cat->_classProperties))
  2996. {
  2997. if (cls->ISA()->isRealized()) {
  2998. attachCategories(cls->ISA(), &lc, 1, ATTACH_EXISTING | ATTACH_METACLASS);
  2999. } else {
  3000. objc::unattachedCategories.addForClass(lc, cls->ISA());
  3001. }
  3002. }
  3003. }
  3004. }
  3005. };
  3006. processCatlist(_getObjc2CategoryList(hi, &count));
  3007. processCatlist(_getObjc2CategoryList2(hi, &count));
  3008. }
  3009. ts.log("IMAGE TIMES: discover categories");
3010. // Category discovery MUST BE LAST to avoid potential races
  3011. // when other threads call the new category code before
  3012. // this thread finishes its fixups.
  3013. // +load handled by prepare_load_methods()
  3014. // Realize non-lazy classes (for +load methods and static instances)
  3015. for (EACH_HEADER) {
  3016. classref_t const *classlist =
  3017. _getObjc2NonlazyClassList(hi, &count);
  3018. for (i = 0; i < count; i++) {
  3019. Class cls = remapClass(classlist[i]);
  3020. if (!cls) continue;
  3021. addClassTableEntry(cls);
  3022. if (cls->isSwiftStable()) {
  3023. if (cls->swiftMetadataInitializer()) {
  3024. _objc_fatal("Swift class %s with a metadata initializer "
  3025. "is not allowed to be non-lazy",
  3026. cls->nameForLogging());
  3027. }
  3028. // fixme also disallow relocatable classes
  3029. // We can't disallow all Swift classes because of
  3030. // classes like Swift.__EmptyArrayStorage
  3031. }
  3032. realizeClassWithoutSwift(cls, nil);
  3033. }
  3034. }
  3035. ts.log("IMAGE TIMES: realize non-lazy classes");
  3036. // Realize newly-resolved future classes, in case CF manipulates them
  3037. if (resolvedFutureClasses) {
  3038. for (i = 0; i < resolvedFutureClassCount; i++) {
  3039. Class cls = resolvedFutureClasses[i];
  3040. if (cls->isSwiftStable()) {
  3041. _objc_fatal("Swift class is not allowed to be future");
  3042. }
  3043. realizeClassWithoutSwift(cls, nil);
  3044. cls->setInstancesRequireRawIsaRecursively(false/*inherited*/);
  3045. }
  3046. free(resolvedFutureClasses);
  3047. }
  3048. ts.log("IMAGE TIMES: realize future classes");
  3049. if (DebugNonFragileIvars) {
  3050. realizeAllClasses();
  3051. }
  3052. // Print preoptimization statistics
  3053. if (PrintPreopt) {
  3054. static unsigned int PreoptTotalMethodLists;
  3055. static unsigned int PreoptOptimizedMethodLists;
  3056. static unsigned int PreoptTotalClasses;
  3057. static unsigned int PreoptOptimizedClasses;
  3058. for (EACH_HEADER) {
  3059. if (hi->hasPreoptimizedSelectors()) {
  3060. _objc_inform("PREOPTIMIZATION: honoring preoptimized selectors "
  3061. "in %s", hi->fname());
  3062. }
  3063. else if (hi->info()->optimizedByDyld()) {
  3064. _objc_inform("PREOPTIMIZATION: IGNORING preoptimized selectors "
  3065. "in %s", hi->fname());
  3066. }
  3067. classref_t const *classlist = _getObjc2ClassList(hi, &count);
  3068. for (i = 0; i < count; i++) {
  3069. Class cls = remapClass(classlist[i]);
  3070. if (!cls) continue;
  3071. PreoptTotalClasses++;
  3072. if (hi->hasPreoptimizedClasses()) {
  3073. PreoptOptimizedClasses++;
  3074. }
  3075. const method_list_t *mlist;
  3076. if ((mlist = ((class_ro_t *)cls->data())->baseMethods())) {
  3077. PreoptTotalMethodLists++;
  3078. if (mlist->isFixedUp()) {
  3079. PreoptOptimizedMethodLists++;
  3080. }
  3081. }
  3082. if ((mlist=((class_ro_t *)cls->ISA()->data())->baseMethods())) {
  3083. PreoptTotalMethodLists++;
  3084. if (mlist->isFixedUp()) {
  3085. PreoptOptimizedMethodLists++;
  3086. }
  3087. }
  3088. }
  3089. }
  3090. _objc_inform("PREOPTIMIZATION: %zu selector references not "
  3091. "pre-optimized", UnfixedSelectors);
  3092. _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) method lists pre-sorted",
  3093. PreoptOptimizedMethodLists, PreoptTotalMethodLists,
  3094. PreoptTotalMethodLists
  3095. ? 100.0*PreoptOptimizedMethodLists/PreoptTotalMethodLists
  3096. : 0.0);
  3097. _objc_inform("PREOPTIMIZATION: %u/%u (%.3g%%) classes pre-registered",
  3098. PreoptOptimizedClasses, PreoptTotalClasses,
  3099. PreoptTotalClasses
  3100. ? 100.0*PreoptOptimizedClasses/PreoptTotalClasses
  3101. : 0.0);
  3102. _objc_inform("PREOPTIMIZATION: %zu protocol references not "
  3103. "pre-optimized", UnfixedProtocolReferences);
  3104. }
  3105. #undef EACH_HEADER
  3106. }
  3107. /***********************************************************************
  3108. * prepare_load_methods
  3109. * Schedule +load for classes in this image, any un-+load-ed
  3110. * superclasses in other images, and any categories in this image.
  3111. **********************************************************************/
  3112. // Recursively schedule +load for cls and any un-+load-ed superclasses.
  3113. // cls must already be connected.
  3114. static void schedule_class_load(Class cls)
  3115. {
  3116. if (!cls) return;
  3117. ASSERT(cls->isRealized()); // _read_images should realize
  3118. if (cls->data()->flags & RW_LOADED) return;
  3119. // Ensure superclass-first ordering
  3120. schedule_class_load(cls->superclass);
  3121. add_class_to_loadable_list(cls);
  3122. cls->setInfo(RW_LOADED);
  3123. }
  3124. // Quick scan for +load methods that doesn't take a lock.
  3125. bool hasLoadMethods(const headerType *mhdr)
  3126. {
  3127. size_t count;
  3128. if (_getObjc2NonlazyClassList(mhdr, &count) && count > 0) return true;
  3129. if (_getObjc2NonlazyCategoryList(mhdr, &count) && count > 0) return true;
  3130. return false;
  3131. }
  3132. void prepare_load_methods(const headerType *mhdr)
  3133. {
  3134. size_t count, i;
  3135. runtimeLock.assertLocked();
  3136. classref_t const *classlist =
  3137. _getObjc2NonlazyClassList(mhdr, &count);
  3138. for (i = 0; i < count; i++) {
  3139. schedule_class_load(remapClass(classlist[i]));
  3140. }
  3141. category_t * const *categorylist = _getObjc2NonlazyCategoryList(mhdr, &count);
  3142. for (i = 0; i < count; i++) {
  3143. category_t *cat = categorylist[i];
  3144. Class cls = remapClass(cat->cls);
  3145. if (!cls) continue; // category for ignored weak-linked class
  3146. if (cls->isSwiftStable()) {
  3147. _objc_fatal("Swift class extensions and categories on Swift "
  3148. "classes are not allowed to have +load methods");
  3149. }
  3150. realizeClassWithoutSwift(cls, nil);
  3151. ASSERT(cls->ISA()->isRealized());
  3152. add_category_to_loadable_list(cat);
  3153. }
  3154. }
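// A minimal sketch of the ordering schedule_class_load() guarantees, assuming
// hypothetical classes Animal and Dog : Animal defined in the same image.
// Superclasses are scheduled before subclasses, and class +load runs before
// category +load (categories are queued separately above).
#if 0   // illustrative only; not compiled with the runtime
#import <Foundation/Foundation.h>

@interface Animal : NSObject @end
@implementation Animal
+ (void)load { printf("Animal +load\n"); }       // runs first
@end

@interface Dog : Animal @end
@implementation Dog
+ (void)load { printf("Dog +load\n"); }          // runs after Animal +load
@end

@interface Dog (Tricks) @end
@implementation Dog (Tricks)
+ (void)load { printf("Dog(Tricks) +load\n"); }  // category +load runs last
@end
#endif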
  3155. /***********************************************************************
  3156. * _unload_image
  3157. * Only handles MH_BUNDLE for now.
  3158. * Locking: write-lock and loadMethodLock acquired by unmap_image
  3159. **********************************************************************/
  3160. void _unload_image(header_info *hi)
  3161. {
  3162. size_t count, i;
  3163. loadMethodLock.assertLocked();
  3164. runtimeLock.assertLocked();
  3165. // Unload unattached categories and categories waiting for +load.
  3166. // Ignore __objc_catlist2. We don't support unloading Swift
  3167. // and we never will.
  3168. category_t * const *catlist = _getObjc2CategoryList(hi, &count);
  3169. for (i = 0; i < count; i++) {
  3170. category_t *cat = catlist[i];
  3171. Class cls = remapClass(cat->cls);
  3172. if (!cls) continue; // category for ignored weak-linked class
  3173. // fixme for MH_DYLIB cat's class may have been unloaded already
  3174. // unattached list
  3175. objc::unattachedCategories.eraseCategoryForClass(cat, cls);
  3176. // +load queue
  3177. remove_category_from_loadable_list(cat);
  3178. }
  3179. // Unload classes.
  3180. // Gather classes from both __DATA,__objc_clslist
  3181. // and __DATA,__objc_nlclslist. arclite's hack puts a class in the latter
  3182. // only, and we need to unload that class if we unload an arclite image.
  3183. objc::DenseSet<Class> classes{};
  3184. classref_t const *classlist;
  3185. classlist = _getObjc2ClassList(hi, &count);
  3186. for (i = 0; i < count; i++) {
  3187. Class cls = remapClass(classlist[i]);
  3188. if (cls) classes.insert(cls);
  3189. }
  3190. classlist = _getObjc2NonlazyClassList(hi, &count);
  3191. for (i = 0; i < count; i++) {
  3192. Class cls = remapClass(classlist[i]);
  3193. if (cls) classes.insert(cls);
  3194. }
  3195. // First detach classes from each other. Then free each class.
3196. // This avoids bugs where this loop unloads a subclass before its superclass.
  3197. for (Class cls: classes) {
  3198. remove_class_from_loadable_list(cls);
  3199. detach_class(cls->ISA(), YES);
  3200. detach_class(cls, NO);
  3201. }
  3202. for (Class cls: classes) {
  3203. free_class(cls->ISA());
  3204. free_class(cls);
  3205. }
  3206. // XXX FIXME -- Clean up protocols:
  3207. // <rdar://problem/9033191> Support unloading protocols at dylib/image unload time
  3208. // fixme DebugUnload
  3209. }
  3210. /***********************************************************************
  3211. * method_getDescription
  3212. * Returns a pointer to this method's objc_method_description.
  3213. * Locking: none
  3214. **********************************************************************/
  3215. struct objc_method_description *
  3216. method_getDescription(Method m)
  3217. {
  3218. if (!m) return nil;
  3219. return (struct objc_method_description *)m;
  3220. }
  3221. IMP
  3222. method_getImplementation(Method m)
  3223. {
  3224. return m ? m->imp : nil;
  3225. }
  3226. /***********************************************************************
  3227. * method_getName
  3228. * Returns this method's selector.
  3229. * The method must not be nil.
  3230. * The method must already have been fixed-up.
  3231. * Locking: none
  3232. **********************************************************************/
  3233. SEL
  3234. method_getName(Method m)
  3235. {
  3236. if (!m) return nil;
  3237. ASSERT(m->name == sel_registerName(sel_getName(m->name)));
  3238. return m->name;
  3239. }
  3240. /***********************************************************************
  3241. * method_getTypeEncoding
  3242. * Returns this method's old-style type encoding string.
  3243. * The method must not be nil.
  3244. * Locking: none
  3245. **********************************************************************/
  3246. const char *
  3247. method_getTypeEncoding(Method m)
  3248. {
  3249. if (!m) return nil;
  3250. return m->types;
  3251. }
  3252. /***********************************************************************
  3253. * method_setImplementation
  3254. * Sets this method's implementation to imp.
  3255. * The previous implementation is returned.
  3256. **********************************************************************/
  3257. static IMP
  3258. _method_setImplementation(Class cls, method_t *m, IMP imp)
  3259. {
  3260. runtimeLock.assertLocked();
  3261. if (!m) return nil;
  3262. if (!imp) return nil;
  3263. IMP old = m->imp;
  3264. m->imp = imp;
  3265. // Cache updates are slow if cls is nil (i.e. unknown)
  3266. // RR/AWZ updates are slow if cls is nil (i.e. unknown)
  3267. // fixme build list of classes whose Methods are known externally?
  3268. flushCaches(cls);
  3269. adjustCustomFlagsForMethodChange(cls, m);
  3270. return old;
  3271. }
  3272. IMP
  3273. method_setImplementation(Method m, IMP imp)
  3274. {
  3275. // Don't know the class - will be slow if RR/AWZ are affected
  3276. // fixme build list of classes whose Methods are known externally?
  3277. mutex_locker_t lock(runtimeLock);
  3278. return _method_setImplementation(Nil, m, imp);
  3279. }
  3280. void method_exchangeImplementations(Method m1, Method m2)
  3281. {
  3282. if (!m1 || !m2) return;
  3283. mutex_locker_t lock(runtimeLock);
  3284. IMP m1_imp = m1->imp;
  3285. m1->imp = m2->imp;
  3286. m2->imp = m1_imp;
  3287. // RR/AWZ updates are slow because class is unknown
  3288. // Cache updates are slow because class is unknown
  3289. // fixme build list of classes whose Methods are known externally?
  3290. flushCaches(nil);
  3291. adjustCustomFlagsForMethodChange(nil, m1);
  3292. adjustCustomFlagsForMethodChange(nil, m2);
  3293. }
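// A minimal sketch of the usual client of method_exchangeImplementations(),
// assuming a hypothetical NSObject category that swaps -description with a
// custom -swizzled_description. The flushCaches(nil) above is what makes the
// swap take effect even for call sites whose caches were already filled.
#if 0   // illustrative only; not compiled with the runtime
#import <Foundation/Foundation.h>
#import <objc/runtime.h>

@interface NSObject (Swizzle)
- (NSString *)swizzled_description;
@end

@implementation NSObject (Swizzle)
- (NSString *)swizzled_description
{
    // Calls through to the original -description, because the IMPs are swapped.
    return [[self swizzled_description] stringByAppendingString:@" (swizzled)"];
}

+ (void)load
{
    Method orig = class_getInstanceMethod(self, @selector(description));
    Method repl = class_getInstanceMethod(self, @selector(swizzled_description));
    method_exchangeImplementations(orig, repl);
}
@end
#endif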
  3294. /***********************************************************************
  3295. * ivar_getOffset
  3296. * fixme
  3297. * Locking: none
  3298. **********************************************************************/
  3299. ptrdiff_t
  3300. ivar_getOffset(Ivar ivar)
  3301. {
  3302. if (!ivar) return 0;
  3303. return *ivar->offset;
  3304. }
  3305. /***********************************************************************
  3306. * ivar_getName
  3307. * fixme
  3308. * Locking: none
  3309. **********************************************************************/
  3310. const char *
  3311. ivar_getName(Ivar ivar)
  3312. {
  3313. if (!ivar) return nil;
  3314. return ivar->name;
  3315. }
  3316. /***********************************************************************
  3317. * ivar_getTypeEncoding
  3318. * fixme
  3319. * Locking: none
  3320. **********************************************************************/
  3321. const char *
  3322. ivar_getTypeEncoding(Ivar ivar)
  3323. {
  3324. if (!ivar) return nil;
  3325. return ivar->type;
  3326. }
  3327. const char *property_getName(objc_property_t prop)
  3328. {
  3329. return prop->name;
  3330. }
  3331. const char *property_getAttributes(objc_property_t prop)
  3332. {
  3333. return prop->attributes;
  3334. }
  3335. objc_property_attribute_t *property_copyAttributeList(objc_property_t prop,
  3336. unsigned int *outCount)
  3337. {
  3338. if (!prop) {
  3339. if (outCount) *outCount = 0;
  3340. return nil;
  3341. }
  3342. mutex_locker_t lock(runtimeLock);
  3343. return copyPropertyAttributeList(prop->attributes,outCount);
  3344. }
  3345. char * property_copyAttributeValue(objc_property_t prop, const char *name)
  3346. {
  3347. if (!prop || !name || *name == '\0') return nil;
  3348. mutex_locker_t lock(runtimeLock);
  3349. return copyPropertyAttributeValue(prop->attributes, name);
  3350. }
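// A minimal sketch of the property introspection calls above, assuming a
// hypothetical class Widget with one declared property. Attribute strings use
// the standard encoding, e.g. T@"NSString",C,N,V_title for a copied,
// nonatomic NSString property backed by the ivar _title.
#if 0   // illustrative only; not compiled with the runtime
#import <Foundation/Foundation.h>
#import <objc/runtime.h>

@interface Widget : NSObject
@property (nonatomic, copy) NSString *title;
@end
@implementation Widget @end

static void dumpProperties(void)
{
    unsigned int count = 0;
    objc_property_t *props = class_copyPropertyList(objc_getClass("Widget"), &count);
    for (unsigned int i = 0; i < count; i++) {
        char *type = property_copyAttributeValue(props[i], "T");
        printf("%s: %s (type %s)\n", property_getName(props[i]),
               property_getAttributes(props[i]), type);
        free(type);
    }
    free(props);
}
#endif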
/***********************************************************************
* getExtendedTypesIndexesForMethod
* Returns:
* a is the count of methods in all method lists before m's method list
* b is the index of m in m's method list
* a+b is the index of m's extended types in the extended types array
**********************************************************************/
static void getExtendedTypesIndexesForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod, uint32_t& a, uint32_t &b)
{
    a = 0;

    if (proto->instanceMethods) {
        if (isRequiredMethod && isInstanceMethod) {
            b = proto->instanceMethods->indexOfMethod(m);
            return;
        }
        a += proto->instanceMethods->count;
    }

    if (proto->classMethods) {
        if (isRequiredMethod && !isInstanceMethod) {
            b = proto->classMethods->indexOfMethod(m);
            return;
        }
        a += proto->classMethods->count;
    }

    if (proto->optionalInstanceMethods) {
        if (!isRequiredMethod && isInstanceMethod) {
            b = proto->optionalInstanceMethods->indexOfMethod(m);
            return;
        }
        a += proto->optionalInstanceMethods->count;
    }

    if (proto->optionalClassMethods) {
        if (!isRequiredMethod && !isInstanceMethod) {
            b = proto->optionalClassMethods->indexOfMethod(m);
            return;
        }
        a += proto->optionalClassMethods->count;
    }
}


/***********************************************************************
* getExtendedTypesIndexForMethod
* Returns the index of m's extended types in proto's extended types array.
**********************************************************************/
static uint32_t getExtendedTypesIndexForMethod(protocol_t *proto, const method_t *m, bool isRequiredMethod, bool isInstanceMethod)
{
    uint32_t a;
    uint32_t b;
    getExtendedTypesIndexesForMethod(proto, m, isRequiredMethod,
                                     isInstanceMethod, a, b);
    return a + b;
}
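/***********************************************************************
* Worked example (added for clarity, not part of the original source):
* the extended types array follows the fixed method-list order
*   [required instance][required class][optional instance][optional class].
* For a protocol with 3 required instance methods and 2 required class
* methods, an optional instance method at position 1 of its own list has
*   a = 3 + 2 = 5,  b = 1,
* so its extended @encode string lives at index a + b = 6.
**********************************************************************/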
/***********************************************************************
* fixupProtocolMethodList
* Fixes up a single method list in a protocol.
**********************************************************************/
static void
fixupProtocolMethodList(protocol_t *proto, method_list_t *mlist,
                        bool required, bool instance)
{
    runtimeLock.assertLocked();

    if (!mlist) return;
    if (mlist->isFixedUp()) return;

    const char **extTypes = proto->extendedMethodTypes();
    fixupMethodList(mlist, true/*always copy for simplicity*/,
                    !extTypes/*sort if no extended method types*/);

    if (extTypes) {
        // Sort method list and extended method types together.
        // fixupMethodList() can't do this.
        // fixme COW stomp
        uint32_t count = mlist->count;
        uint32_t prefix;
        uint32_t junk;
        getExtendedTypesIndexesForMethod(proto, &mlist->get(0),
                                         required, instance, prefix, junk);
        for (uint32_t i = 0; i < count; i++) {
            for (uint32_t j = i+1; j < count; j++) {
                method_t& mi = mlist->get(i);
                method_t& mj = mlist->get(j);
                if (mi.name > mj.name) {
                    std::swap(mi, mj);
                    std::swap(extTypes[prefix+i], extTypes[prefix+j]);
                }
            }
        }
    }
}


/***********************************************************************
* fixupProtocol
* Fixes up all of a protocol's method lists.
**********************************************************************/
static void
fixupProtocol(protocol_t *proto)
{
    runtimeLock.assertLocked();

    if (proto->protocols) {
        for (uintptr_t i = 0; i < proto->protocols->count; i++) {
            protocol_t *sub = remapProtocol(proto->protocols->list[i]);
            if (!sub->isFixedUp()) fixupProtocol(sub);
        }
    }

    fixupProtocolMethodList(proto, proto->instanceMethods, YES, YES);
    fixupProtocolMethodList(proto, proto->classMethods, YES, NO);
    fixupProtocolMethodList(proto, proto->optionalInstanceMethods, NO, YES);
    fixupProtocolMethodList(proto, proto->optionalClassMethods, NO, NO);

    // fixme memory barrier so we can check this with no lock
    proto->setFixedUp();
}


/***********************************************************************
* fixupProtocolIfNeeded
* Fixes up all of a protocol's method lists if they aren't fixed up already.
* Locking: write-locks runtimeLock.
**********************************************************************/
static void
fixupProtocolIfNeeded(protocol_t *proto)
{
    runtimeLock.assertUnlocked();
    ASSERT(proto);

    if (!proto->isFixedUp()) {
        mutex_locker_t lock(runtimeLock);
        fixupProtocol(proto);
    }
}


static method_list_t *
getProtocolMethodList(protocol_t *proto, bool required, bool instance)
{
    method_list_t **mlistp = nil;
    if (required) {
        if (instance) {
            mlistp = &proto->instanceMethods;
        } else {
            mlistp = &proto->classMethods;
        }
    } else {
        if (instance) {
            mlistp = &proto->optionalInstanceMethods;
        } else {
            mlistp = &proto->optionalClassMethods;
        }
    }

    return *mlistp;
}


/***********************************************************************
* protocol_getMethod_nolock
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static method_t *
protocol_getMethod_nolock(protocol_t *proto, SEL sel,
                          bool isRequiredMethod, bool isInstanceMethod,
                          bool recursive)
{
    runtimeLock.assertLocked();

    if (!proto || !sel) return nil;

    ASSERT(proto->isFixedUp());

    method_list_t *mlist =
        getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);
    if (mlist) {
        method_t *m = search_method_list(mlist, sel);
        if (m) return m;
    }

    if (recursive && proto->protocols) {
        method_t *m;
        for (uint32_t i = 0; i < proto->protocols->count; i++) {
            protocol_t *realProto = remapProtocol(proto->protocols->list[i]);
            m = protocol_getMethod_nolock(realProto, sel,
                                          isRequiredMethod, isInstanceMethod,
                                          true);
            if (m) return m;
        }
    }

    return nil;
}


/***********************************************************************
* protocol_getMethod
* fixme
* Locking: acquires runtimeLock
**********************************************************************/
Method
protocol_getMethod(protocol_t *proto, SEL sel, bool isRequiredMethod, bool isInstanceMethod, bool recursive)
{
    if (!proto) return nil;
    fixupProtocolIfNeeded(proto);

    mutex_locker_t lock(runtimeLock);
    return protocol_getMethod_nolock(proto, sel, isRequiredMethod,
                                     isInstanceMethod, recursive);
}


/***********************************************************************
* protocol_getMethodTypeEncoding_nolock
* Return the @encode string for the requested protocol method.
* Returns nil if the compiler did not emit any extended @encode data.
* Locking: runtimeLock must be held by the caller
**********************************************************************/
const char *
protocol_getMethodTypeEncoding_nolock(protocol_t *proto, SEL sel,
                                      bool isRequiredMethod,
                                      bool isInstanceMethod)
{
    runtimeLock.assertLocked();

    if (!proto) return nil;
    if (!proto->extendedMethodTypes()) return nil;

    ASSERT(proto->isFixedUp());

    method_t *m =
        protocol_getMethod_nolock(proto, sel,
                                  isRequiredMethod, isInstanceMethod, false);
    if (m) {
        uint32_t i = getExtendedTypesIndexForMethod(proto, m,
                                                    isRequiredMethod,
                                                    isInstanceMethod);
        return proto->extendedMethodTypes()[i];
    }

    // No method with that name. Search incorporated protocols.
    if (proto->protocols) {
        for (uintptr_t i = 0; i < proto->protocols->count; i++) {
            const char *enc =
                protocol_getMethodTypeEncoding_nolock(remapProtocol(proto->protocols->list[i]), sel, isRequiredMethod, isInstanceMethod);
            if (enc) return enc;
        }
    }

    return nil;
}


/***********************************************************************
* _protocol_getMethodTypeEncoding
* Return the @encode string for the requested protocol method.
* Returns nil if the compiler did not emit any extended @encode data.
* Locking: acquires runtimeLock
**********************************************************************/
const char *
_protocol_getMethodTypeEncoding(Protocol *proto_gen, SEL sel,
                                BOOL isRequiredMethod, BOOL isInstanceMethod)
{
    protocol_t *proto = newprotocol(proto_gen);

    if (!proto) return nil;
    fixupProtocolIfNeeded(proto);

    mutex_locker_t lock(runtimeLock);
    return protocol_getMethodTypeEncoding_nolock(proto, sel,
                                                 isRequiredMethod,
                                                 isInstanceMethod);
}


/***********************************************************************
* protocol_t::demangledName
* Returns the (Swift-demangled) name of the given protocol.
* Locking: none
**********************************************************************/
const char *
protocol_t::demangledName()
{
    ASSERT(hasDemangledNameField());

    if (! _demangledName) {
        char *de = copySwiftV1DemangledName(mangledName, true/*isProtocol*/);
        if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangledName),
                                               (void**)&_demangledName))
        {
            if (de) free(de);
        }
    }
    return _demangledName;
}


/***********************************************************************
* protocol_getName
* Returns the (Swift-demangled) name of the given protocol.
* Locking: runtimeLock must not be held by the caller
**********************************************************************/
const char *
protocol_getName(Protocol *proto)
{
    if (!proto) return "nil";
    else return newprotocol(proto)->demangledName();
}
/***********************************************************************
* protocol_getMethodDescription
* Returns the description of a named method.
* Locking: runtimeLock must not be held by the caller
**********************************************************************/
struct objc_method_description
protocol_getMethodDescription(Protocol *p, SEL aSel,
                              BOOL isRequiredMethod, BOOL isInstanceMethod)
{
    Method m =
        protocol_getMethod(newprotocol(p), aSel,
                           isRequiredMethod, isInstanceMethod, true);
    if (m) return *method_getDescription(m);
    else return (struct objc_method_description){nil, nil};
}
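/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* querying a protocol for a method description via the public API.
* Assumes the NSObject protocol is registered (it declares -description
* as a required instance method).
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*
*   void showDescriptionTypes(void)
*   {
*       Protocol *p = objc_getProtocol("NSObject");
*       struct objc_method_description d =
*           protocol_getMethodDescription(p, sel_registerName("description"),
*                                         YES/*required*/, YES/*instance*/);
*       if (d.name) printf("types: %s\n", d.types);
*   }
**********************************************************************/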
/***********************************************************************
* protocol_conformsToProtocol_nolock
* Returns YES if self conforms to other.
* Locking: runtimeLock must be held by the caller.
**********************************************************************/
static bool
protocol_conformsToProtocol_nolock(protocol_t *self, protocol_t *other)
{
    runtimeLock.assertLocked();

    if (!self  ||  !other) {
        return NO;
    }

    // protocols need not be fixed up

    if (0 == strcmp(self->mangledName, other->mangledName)) {
        return YES;
    }

    if (self->protocols) {
        uintptr_t i;
        for (i = 0; i < self->protocols->count; i++) {
            protocol_t *proto = remapProtocol(self->protocols->list[i]);
            if (other == proto) {
                return YES;
            }
            if (0 == strcmp(other->mangledName, proto->mangledName)) {
                return YES;
            }
            if (protocol_conformsToProtocol_nolock(proto, other)) {
                return YES;
            }
        }
    }

    return NO;
}


/***********************************************************************
* protocol_conformsToProtocol
* Returns YES if self conforms to other.
* Locking: acquires runtimeLock
**********************************************************************/
BOOL protocol_conformsToProtocol(Protocol *self, Protocol *other)
{
    mutex_locker_t lock(runtimeLock);
    return protocol_conformsToProtocol_nolock(newprotocol(self),
                                              newprotocol(other));
}


/***********************************************************************
* protocol_isEqual
* Return YES if two protocols are equal (i.e. conform to each other)
* Locking: acquires runtimeLock
**********************************************************************/
BOOL protocol_isEqual(Protocol *self, Protocol *other)
{
    if (self == other) return YES;
    if (!self  ||  !other) return NO;

    if (!protocol_conformsToProtocol(self, other)) return NO;
    if (!protocol_conformsToProtocol(other, self)) return NO;

    return YES;
}
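/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* conformance and equality checks through the public API. Assumes
* Foundation is loaded so the named protocols are registered;
* NSSecureCoding incorporates NSCoding in Foundation's declarations.
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*
*   void checkConformance(void)
*   {
*       Protocol *secure = objc_getProtocol("NSSecureCoding");
*       Protocol *coding = objc_getProtocol("NSCoding");
*       printf("conforms=%d  equal-to-self=%d\n",
*              protocol_conformsToProtocol(secure, coding),
*              protocol_isEqual(coding, coding));
*   }
**********************************************************************/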
/***********************************************************************
* protocol_copyMethodDescriptionList
* Returns descriptions of a protocol's methods.
* Locking: acquires runtimeLock
**********************************************************************/
struct objc_method_description *
protocol_copyMethodDescriptionList(Protocol *p,
                                   BOOL isRequiredMethod, BOOL isInstanceMethod,
                                   unsigned int *outCount)
{
    protocol_t *proto = newprotocol(p);
    struct objc_method_description *result = nil;
    unsigned int count = 0;

    if (!proto) {
        if (outCount) *outCount = 0;
        return nil;
    }

    fixupProtocolIfNeeded(proto);

    mutex_locker_t lock(runtimeLock);

    method_list_t *mlist =
        getProtocolMethodList(proto, isRequiredMethod, isInstanceMethod);

    if (mlist) {
        result = (struct objc_method_description *)
            calloc(mlist->count + 1, sizeof(struct objc_method_description));
        for (const auto& meth : *mlist) {
            result[count].name = meth.name;
            result[count].types = (char *)meth.types;
            count++;
        }
    }

    if (outCount) *outCount = count;
    return result;
}
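/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* listing a protocol's required instance methods. The returned block
* ends with a zeroed entry (see the calloc of count+1 above) and must
* be freed by the caller.
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*   #include <stdlib.h>
*
*   void listRequiredMethods(Protocol *proto)
*   {
*       unsigned int n = 0;
*       struct objc_method_description *descs =
*           protocol_copyMethodDescriptionList(proto, YES, YES, &n);
*       for (unsigned int i = 0; i < n; i++) {
*           printf("%s %s\n", sel_getName(descs[i].name), descs[i].types);
*       }
*       free(descs);
*   }
**********************************************************************/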
/***********************************************************************
* protocol_getProperty
* fixme
* Locking: runtimeLock must be held by the caller
**********************************************************************/
static property_t *
protocol_getProperty_nolock(protocol_t *proto, const char *name,
                            bool isRequiredProperty, bool isInstanceProperty)
{
    runtimeLock.assertLocked();

    if (!isRequiredProperty) {
        // Only required properties are currently supported.
        return nil;
    }

    property_list_t *plist = isInstanceProperty ?
        proto->instanceProperties : proto->classProperties();
    if (plist) {
        for (auto& prop : *plist) {
            if (0 == strcmp(name, prop.name)) {
                return &prop;
            }
        }
    }

    if (proto->protocols) {
        uintptr_t i;
        for (i = 0; i < proto->protocols->count; i++) {
            protocol_t *p = remapProtocol(proto->protocols->list[i]);

            property_t *prop =
                protocol_getProperty_nolock(p, name,
                                            isRequiredProperty,
                                            isInstanceProperty);
            if (prop) return prop;
        }
    }

    return nil;
}

objc_property_t protocol_getProperty(Protocol *p, const char *name,
                              BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    if (!p  ||  !name) return nil;

    mutex_locker_t lock(runtimeLock);
    return (objc_property_t)
        protocol_getProperty_nolock(newprotocol(p), name,
                                    isRequiredProperty, isInstanceProperty);
}


/***********************************************************************
* protocol_copyPropertyList
* protocol_copyPropertyList2
* fixme
* Locking: acquires runtimeLock
**********************************************************************/
static property_t **
copyPropertyList(property_list_t *plist, unsigned int *outCount)
{
    property_t **result = nil;
    unsigned int count = 0;

    if (plist) {
        count = plist->count;
    }

    if (count > 0) {
        result = (property_t **)malloc((count+1) * sizeof(property_t *));

        count = 0;
        for (auto& prop : *plist) {
            result[count++] = &prop;
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}

objc_property_t *
protocol_copyPropertyList2(Protocol *proto, unsigned int *outCount,
                           BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    if (!proto  ||  !isRequiredProperty) {
        // Optional properties are not currently supported.
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    property_list_t *plist = isInstanceProperty
        ? newprotocol(proto)->instanceProperties
        : newprotocol(proto)->classProperties();
    return (objc_property_t *)copyPropertyList(plist, outCount);
}

objc_property_t *
protocol_copyPropertyList(Protocol *proto, unsigned int *outCount)
{
    return protocol_copyPropertyList2(proto, outCount,
                                      YES/*required*/, YES/*instance*/);
}
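/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* enumerating a protocol's required instance properties.
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*   #include <stdlib.h>
*
*   void listProtocolProperties(Protocol *proto)
*   {
*       unsigned int n = 0;
*       objc_property_t *props = protocol_copyPropertyList(proto, &n);
*       for (unsigned int i = 0; i < n; i++) {
*           printf("%s\n", property_getName(props[i]));
*       }
*       free(props);
*   }
**********************************************************************/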
/***********************************************************************
* protocol_copyProtocolList
* Copies this protocol's incorporated protocols.
* Does not copy those protocols' incorporated protocols in turn.
* Locking: acquires runtimeLock
**********************************************************************/
Protocol * __unsafe_unretained *
protocol_copyProtocolList(Protocol *p, unsigned int *outCount)
{
    unsigned int count = 0;
    Protocol **result = nil;
    protocol_t *proto = newprotocol(p);

    if (!proto) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    if (proto->protocols) {
        count = (unsigned int)proto->protocols->count;
    }
    if (count > 0) {
        result = (Protocol **)malloc((count+1) * sizeof(Protocol *));

        unsigned int i;
        for (i = 0; i < count; i++) {
            result[i] = (Protocol *)remapProtocol(proto->protocols->list[i]);
        }
        result[i] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
/***********************************************************************
* objc_allocateProtocol
* Creates a new protocol. The protocol may not be used until
* objc_registerProtocol() is called.
* Returns nil if a protocol with the same name already exists.
* Locking: acquires runtimeLock
**********************************************************************/
Protocol *
objc_allocateProtocol(const char *name)
{
    mutex_locker_t lock(runtimeLock);

    if (getProtocol(name)) {
        return nil;
    }

    protocol_t *result = (protocol_t *)calloc(sizeof(protocol_t), 1);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
    result->initProtocolIsa(cls);
    result->size = sizeof(protocol_t);
    // fixme mangle the name if it looks swift-y?
    result->mangledName = strdupIfMutable(name);

    // fixme reserve name without installing

    return (Protocol *)result;
}


/***********************************************************************
* objc_registerProtocol
* Registers a newly-constructed protocol. The protocol is now
* ready for use and immutable.
* Locking: acquires runtimeLock
**********************************************************************/
void objc_registerProtocol(Protocol *proto_gen)
{
    protocol_t *proto = newprotocol(proto_gen);

    mutex_locker_t lock(runtimeLock);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class oldcls = (Class)&OBJC_CLASS_$___IncompleteProtocol;
    extern objc_class OBJC_CLASS_$_Protocol;
    Class cls = (Class)&OBJC_CLASS_$_Protocol;

    if (proto->ISA() == cls) {
        _objc_inform("objc_registerProtocol: protocol '%s' was already "
                     "registered!", proto->nameForLogging());
        return;
    }
    if (proto->ISA() != oldcls) {
        _objc_inform("objc_registerProtocol: protocol '%s' was not allocated "
                     "with objc_allocateProtocol!", proto->nameForLogging());
        return;
    }

    // NOT initProtocolIsa(). The protocol object may already
    // have been retained and we must preserve that count.
    proto->changeIsa(cls);

    // Don't add this protocol if we already have it.
    // Should we warn on duplicates?
    if (getProtocol(proto->mangledName) == nil) {
        NXMapKeyCopyingInsert(protocols(), proto->mangledName, proto);
    }
}


/***********************************************************************
* protocol_addProtocol
* Adds an incorporated protocol to another protocol.
* No method enforcement is performed.
* `proto` must be under construction. `addition` must not.
* Locking: acquires runtimeLock
**********************************************************************/
void
protocol_addProtocol(Protocol *proto_gen, Protocol *addition_gen)
{
    protocol_t *proto = newprotocol(proto_gen);
    protocol_t *addition = newprotocol(addition_gen);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto_gen) return;
    if (!addition_gen) return;

    mutex_locker_t lock(runtimeLock);

    if (proto->ISA() != cls) {
        _objc_inform("protocol_addProtocol: modified protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }
    if (addition->ISA() == cls) {
        _objc_inform("protocol_addProtocol: added protocol '%s' is still "
                     "under construction!", addition->nameForLogging());
        return;
    }

    protocol_list_t *protolist = proto->protocols;
    if (!protolist) {
        protolist = (protocol_list_t *)
            calloc(1, sizeof(protocol_list_t)
                   + sizeof(protolist->list[0]));
    } else {
        protolist = (protocol_list_t *)
            realloc(protolist, protocol_list_size(protolist)
                    + sizeof(protolist->list[0]));
    }

    protolist->list[protolist->count++] = (protocol_ref_t)addition;
    proto->protocols = protolist;
}


/***********************************************************************
* protocol_addMethodDescription
* Adds a method to a protocol. The protocol must be under construction.
* Locking: acquires runtimeLock
**********************************************************************/
static void
protocol_addMethod_nolock(method_list_t*& list, SEL name, const char *types)
{
    if (!list) {
        list = (method_list_t *)calloc(sizeof(method_list_t), 1);
        list->entsizeAndFlags = sizeof(list->first);
        list->setFixedUp();
    } else {
        size_t size = list->byteSize() + list->entsize();
        list = (method_list_t *)realloc(list, size);
    }

    method_t& meth = list->get(list->count++);
    meth.name = name;
    meth.types = types ? strdupIfMutable(types) : "";
    meth.imp = nil;
}

void
protocol_addMethodDescription(Protocol *proto_gen, SEL name, const char *types,
                              BOOL isRequiredMethod, BOOL isInstanceMethod)
{
    protocol_t *proto = newprotocol(proto_gen);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto_gen) return;

    mutex_locker_t lock(runtimeLock);

    if (proto->ISA() != cls) {
        _objc_inform("protocol_addMethodDescription: protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }

    if (isRequiredMethod  &&  isInstanceMethod) {
        protocol_addMethod_nolock(proto->instanceMethods, name, types);
    } else if (isRequiredMethod  &&  !isInstanceMethod) {
        protocol_addMethod_nolock(proto->classMethods, name, types);
    } else if (!isRequiredMethod  &&  isInstanceMethod) {
        protocol_addMethod_nolock(proto->optionalInstanceMethods, name, types);
    } else /* !isRequiredMethod && !isInstanceMethod */ {
        protocol_addMethod_nolock(proto->optionalClassMethods, name, types);
    }
}


/***********************************************************************
* protocol_addProperty
* Adds a property to a protocol. The protocol must be under construction.
* Locking: acquires runtimeLock
**********************************************************************/
static void
protocol_addProperty_nolock(property_list_t *&plist, const char *name,
                            const objc_property_attribute_t *attrs,
                            unsigned int count)
{
    if (!plist) {
        plist = (property_list_t *)calloc(sizeof(property_list_t), 1);
        plist->entsizeAndFlags = sizeof(property_t);
    } else {
        plist = (property_list_t *)
            realloc(plist, sizeof(property_list_t)
                    + plist->count * plist->entsize());
    }

    property_t& prop = plist->get(plist->count++);
    prop.name = strdupIfMutable(name);
    prop.attributes = copyPropertyAttributeString(attrs, count);
}

void
protocol_addProperty(Protocol *proto_gen, const char *name,
                     const objc_property_attribute_t *attrs,
                     unsigned int count,
                     BOOL isRequiredProperty, BOOL isInstanceProperty)
{
    protocol_t *proto = newprotocol(proto_gen);

    extern objc_class OBJC_CLASS_$___IncompleteProtocol;
    Class cls = (Class)&OBJC_CLASS_$___IncompleteProtocol;

    if (!proto) return;
    if (!name) return;

    mutex_locker_t lock(runtimeLock);

    if (proto->ISA() != cls) {
        _objc_inform("protocol_addProperty: protocol '%s' is not "
                     "under construction!", proto->nameForLogging());
        return;
    }

    if (isRequiredProperty  &&  isInstanceProperty) {
        protocol_addProperty_nolock(proto->instanceProperties, name, attrs, count);
    }
    else if (isRequiredProperty  &&  !isInstanceProperty) {
        protocol_addProperty_nolock(proto->_classProperties, name, attrs, count);
    }
    //else if (!isRequiredProperty  &&  isInstanceProperty) {
    //    protocol_addProperty_nolock(proto->optionalInstanceProperties, name, attrs, count);
    //}
    //else /* !isRequiredProperty && !isInstanceProperty */ {
    //    protocol_addProperty_nolock(proto->optionalClassProperties, name, attrs, count);
    //}
}
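/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* building and registering a protocol at runtime. The protocol name and
* selector are illustrative assumptions; "v@:" encodes a void method
* taking only self and _cmd.
*
*   #include <objc/runtime.h>
*
*   Protocol *makeLoggingProtocol(void)
*   {
*       Protocol *p = objc_allocateProtocol("MyLogging");
*       if (!p) return objc_getProtocol("MyLogging");  // name already taken
*
*       protocol_addMethodDescription(p, sel_registerName("logSelf"),
*                                     "v@:", YES/*required*/, YES/*instance*/);
*       protocol_addProtocol(p, objc_getProtocol("NSObject"));
*       objc_registerProtocol(p);
*       return p;
*   }
**********************************************************************/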
static int
objc_getRealizedClassList_nolock(Class *buffer, int bufferLen)
{
    int count = 0;

    if (buffer) {
        int c = 0;
        foreach_realized_class([=, &count, &c](Class cls) {
            count++;
            if (c < bufferLen) {
                buffer[c++] = cls;
            }
            return true;
        });
    } else {
        foreach_realized_class([&count](Class cls) {
            count++;
            return true;
        });
    }

    return count;
}

static Class *
objc_copyRealizedClassList_nolock(unsigned int *outCount)
{
    Class *result = nil;
    unsigned int count = 0;

    foreach_realized_class([&count](Class cls) {
        count++;
        return true;
    });

    if (count > 0) {
        unsigned int c = 0;

        result = (Class *)malloc((1+count) * sizeof(Class));
        foreach_realized_class([=, &c](Class cls) {
            result[c++] = cls;
            return true;
        });
        result[c] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}

static void
class_getImpCache_nolock(Class cls, cache_t &cache, objc_imp_cache_entry *buffer, int len)
{
    bucket_t *buckets = cache.buckets();

    uintptr_t count = cache.capacity();
    uintptr_t index;
    int wpos = 0;

    for (index = 0; index < count && wpos < len; index += 1) {
        if (buckets[index].sel()) {
            buffer[wpos].imp = buckets[index].imp(cls);
            buffer[wpos].sel = buckets[index].sel();
            wpos++;
        }
    }
}

/***********************************************************************
* objc_getClassList
* Returns pointers to all classes.
* This requires all classes be realized, which is regretfully non-lazy.
* Locking: acquires runtimeLock
**********************************************************************/
int
objc_getClassList(Class *buffer, int bufferLen)
{
    mutex_locker_t lock(runtimeLock);

    realizeAllClasses();

    return objc_getRealizedClassList_nolock(buffer, bufferLen);
}
/***********************************************************************
* objc_copyRealizedClassList
* Returns pointers to all realized classes.
*
* outCount may be nil. *outCount is the number of classes returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: write-locks runtimeLock
**********************************************************************/
Class *
objc_copyRealizedClassList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    return objc_copyRealizedClassList_nolock(outCount);
}
/***********************************************************************
* objc_copyClassList
* Returns pointers to all classes.
* This requires all classes be realized, which is regretfully non-lazy.
*
* outCount may be nil. *outCount is the number of classes returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: write-locks runtimeLock
**********************************************************************/
Class *
objc_copyClassList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    realizeAllClasses();

    return objc_copyRealizedClassList_nolock(outCount);
}
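/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* enumerating every class known to the runtime. Note that this realizes
* all classes, which the comment above notes is regretfully non-lazy.
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*   #include <stdlib.h>
*
*   void listAllClasses(void)
*   {
*       unsigned int n = 0;
*       Class *classes = objc_copyClassList(&n);
*       for (unsigned int i = 0; i < n; i++) {
*           printf("%s\n", class_getName(classes[i]));
*       }
*       free(classes);
*   }
**********************************************************************/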
/***********************************************************************
* class_copyImpCache
* Returns the current content of the Class IMP Cache
*
* outCount may be nil. *outCount is the number of entries returned.
* If the returned array is not nil, it is nil-terminated and must be
* freed with free().
* Locking: write-locks cacheUpdateLock
**********************************************************************/
objc_imp_cache_entry *
class_copyImpCache(Class cls, int *outCount)
{
    objc_imp_cache_entry *buffer = nullptr;

#if CONFIG_USE_CACHE_LOCK
    mutex_locker_t lock(cacheUpdateLock);
#else
    mutex_locker_t lock(runtimeLock);
#endif

    cache_t &cache = cls->cache;
    int count = (int)cache.occupied();

    if (count) {
        buffer = (objc_imp_cache_entry *)calloc(1+count, sizeof(objc_imp_cache_entry));
        class_getImpCache_nolock(cls, cache, buffer, count);
    }

    if (outCount) *outCount = count;
    return buffer;
}


/***********************************************************************
* objc_copyProtocolList
* Returns pointers to all protocols.
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol * __unsafe_unretained *
objc_copyProtocolList(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    NXMapTable *protocol_map = protocols();

    // Find all the protocols from the pre-optimized images. These protocols
    // won't be in the protocol map.
    objc::DenseMap<const char*, Protocol*> preoptimizedProtocols;
    if (sharedCacheSupportsProtocolRoots()) {
        header_info *hi;
        for (hi = FirstHeader; hi; hi = hi->getNext()) {
            if (!hi->hasPreoptimizedProtocols())
                continue;

            size_t count, i;
            const protocol_t * const *protolist = _getObjc2ProtocolList(hi, &count);
            for (i = 0; i < count; i++) {
                const protocol_t* protocol = protolist[i];

                // Skip protocols we have in the run time map. These likely
                // correspond to protocols added dynamically which have the same
                // name as a protocol found later in a dlopen'ed shared cache image.
                if (NXMapGet(protocol_map, protocol->mangledName) != nil)
                    continue;

                // The protocols in the shared cache protolist point to their
                // original on-disk object, not the optimized one. We can use the name
                // to find the optimized one.
                Protocol* optimizedProto = getPreoptimizedProtocol(protocol->mangledName);
                preoptimizedProtocols.insert({ protocol->mangledName, optimizedProto });
            }
        }
    }

    unsigned int count = NXCountMapTable(protocol_map) + (unsigned int)preoptimizedProtocols.size();

    if (count == 0) {
        if (outCount) *outCount = 0;
        return nil;
    }

    Protocol **result = (Protocol **)malloc((count+1) * sizeof(Protocol*));

    unsigned int i = 0;
    Protocol *proto;
    const char *name;
    NXMapState state = NXInitMapState(protocol_map);
    while (NXNextMapState(protocol_map, &state,
                          (const void **)&name, (const void **)&proto))
    {
        result[i++] = proto;
    }

    // Add any protocols found in the pre-optimized table
    for (auto it : preoptimizedProtocols) {
        result[i++] = it.second;
    }

    result[i++] = nil;
    ASSERT(i == count+1);

    if (outCount) *outCount = count;
    return result;
}


/***********************************************************************
* objc_getProtocol
* Get a protocol by name, or return nil
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol *objc_getProtocol(const char *name)
{
    mutex_locker_t lock(runtimeLock);
    return getProtocol(name);
}


/***********************************************************************
* class_copyMethodList
* fixme
* Locking: read-locks runtimeLock
**********************************************************************/
Method *
class_copyMethodList(Class cls, unsigned int *outCount)
{
    unsigned int count = 0;
    Method *result = nil;

    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    ASSERT(cls->isRealized());

    count = cls->data()->methods.count();

    if (count > 0) {
        result = (Method *)malloc((count + 1) * sizeof(Method));

        count = 0;
        for (auto& meth : cls->data()->methods) {
            result[count++] = &meth;
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
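/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* dumping a class's own methods (category methods included, superclass
* methods excluded).
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*   #include <stdlib.h>
*
*   void dumpMethods(Class cls)
*   {
*       unsigned int n = 0;
*       Method *methods = class_copyMethodList(cls, &n);
*       for (unsigned int i = 0; i < n; i++) {
*           printf("%s %s\n",
*                  sel_getName(method_getName(methods[i])),
*                  method_getTypeEncoding(methods[i]));
*       }
*       free(methods);
*   }
**********************************************************************/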
/***********************************************************************
* class_copyIvarList
* fixme
* Locking: read-locks runtimeLock
**********************************************************************/
Ivar *
class_copyIvarList(Class cls, unsigned int *outCount)
{
    const ivar_list_t *ivars;
    Ivar *result = nil;
    unsigned int count = 0;

    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    ASSERT(cls->isRealized());

    if ((ivars = cls->data()->ro->ivars)  &&  ivars->count) {
        result = (Ivar *)malloc((ivars->count+1) * sizeof(Ivar));

        for (auto& ivar : *ivars) {
            if (!ivar.offset) continue;  // anonymous bitfield
            result[count++] = &ivar;
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
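/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* listing a class's instance variables with their offsets and type
* encodings.
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*   #include <stdlib.h>
*
*   void dumpIvars(Class cls)
*   {
*       unsigned int n = 0;
*       Ivar *ivars = class_copyIvarList(cls, &n);
*       for (unsigned int i = 0; i < n; i++) {
*           printf("%s  offset=%td  type=%s\n",
*                  ivar_getName(ivars[i]),
*                  ivar_getOffset(ivars[i]),
*                  ivar_getTypeEncoding(ivars[i]));
*       }
*       free(ivars);
*   }
**********************************************************************/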
/***********************************************************************
* class_copyPropertyList. Returns a heap block containing the
* properties declared in the class, or nil if the class
* declares no properties. Caller must free the block.
* Does not copy any superclass's properties.
* Locking: read-locks runtimeLock
**********************************************************************/
objc_property_t *
class_copyPropertyList(Class cls, unsigned int *outCount)
{
    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);
    ASSERT(cls->isRealized());

    auto rw = cls->data();

    property_t **result = nil;
    unsigned int count = rw->properties.count();
    if (count > 0) {
        result = (property_t **)malloc((count + 1) * sizeof(property_t *));

        count = 0;
        for (auto& prop : rw->properties) {
            result[count++] = &prop;
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return (objc_property_t *)result;
}


/***********************************************************************
* objc_class::getLoadMethod
* fixme
* Called only from add_class_to_loadable_list.
* Locking: runtimeLock must be read- or write-locked by the caller.
**********************************************************************/
IMP
objc_class::getLoadMethod()
{
    runtimeLock.assertLocked();

    const method_list_t *mlist;

    ASSERT(isRealized());
    ASSERT(ISA()->isRealized());
    ASSERT(!isMetaClass());
    ASSERT(ISA()->isMetaClass());

    mlist = ISA()->data()->ro->baseMethods();
    if (mlist) {
        for (const auto& meth : *mlist) {
            const char *name = sel_cname(meth.name);
            if (0 == strcmp(name, "load")) {
                return meth.imp;
            }
        }
    }

    return nil;
}


/***********************************************************************
* _category_getName
* Returns a category's name.
* Locking: none
**********************************************************************/
const char *
_category_getName(Category cat)
{
    return cat->name;
}


/***********************************************************************
* _category_getClassName
* Returns a category's class's name
* Called only from add_category_to_loadable_list and
* remove_category_from_loadable_list for logging purposes.
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
const char *
_category_getClassName(Category cat)
{
    runtimeLock.assertLocked();
    return remapClass(cat->cls)->nameForLogging();
}


/***********************************************************************
* _category_getClass
* Returns a category's class
* Called only by call_category_loads.
* Locking: read-locks runtimeLock
**********************************************************************/
Class
_category_getClass(Category cat)
{
    mutex_locker_t lock(runtimeLock);
    Class result = remapClass(cat->cls);
    ASSERT(result->isRealized());  // ok for call_category_loads' usage
    return result;
}


/***********************************************************************
* _category_getLoadMethod
* fixme
* Called only from add_category_to_loadable_list
* Locking: runtimeLock must be read- or write-locked by the caller
**********************************************************************/
IMP
_category_getLoadMethod(Category cat)
{
    runtimeLock.assertLocked();

    const method_list_t *mlist;

    mlist = cat->classMethods;
    if (mlist) {
        for (const auto& meth : *mlist) {
            const char *name = sel_cname(meth.name);
            if (0 == strcmp(name, "load")) {
                return meth.imp;
            }
        }
    }

    return nil;
}


/***********************************************************************
* category_t::propertiesForMeta
* Return a category's instance or class properties.
* hi is the image containing the category.
**********************************************************************/
property_list_t *
category_t::propertiesForMeta(bool isMeta, struct header_info *hi)
{
    if (!isMeta) return instanceProperties;
    else if (hi->info()->hasCategoryClassProperties()) return _classProperties;
    else return nil;
}


/***********************************************************************
* class_copyProtocolList
* fixme
* Locking: read-locks runtimeLock
**********************************************************************/
Protocol * __unsafe_unretained *
class_copyProtocolList(Class cls, unsigned int *outCount)
{
    unsigned int count = 0;
    Protocol **result = nil;

    if (!cls) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    checkIsKnownClass(cls);
    ASSERT(cls->isRealized());

    count = cls->data()->protocols.count();

    if (count > 0) {
        result = (Protocol **)malloc((count+1) * sizeof(Protocol *));

        count = 0;
        for (const auto& proto : cls->data()->protocols) {
            result[count++] = (Protocol *)remapProtocol(proto);
        }
        result[count] = nil;
    }

    if (outCount) *outCount = count;
    return result;
}
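/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* listing the protocols a class itself adopts (superclass adoptions are
* not included).
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*   #include <stdlib.h>
*
*   void dumpAdoptedProtocols(Class cls)
*   {
*       unsigned int n = 0;
*       Protocol * __unsafe_unretained *protos = class_copyProtocolList(cls, &n);
*       for (unsigned int i = 0; i < n; i++) {
*           printf("%s\n", protocol_getName(protos[i]));
*       }
*       free(protos);
*   }
**********************************************************************/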
/***********************************************************************
* objc_copyImageNames
* Copies names of loaded images with ObjC contents.
*
* Locking: acquires runtimeLock
**********************************************************************/
const char **objc_copyImageNames(unsigned int *outCount)
{
    mutex_locker_t lock(runtimeLock);

    int HeaderCount = 0;
    for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
        HeaderCount++;
    }

#if TARGET_OS_WIN32
    const TCHAR **names = (const TCHAR **)
        malloc((HeaderCount+1) * sizeof(TCHAR *));
#else
    const char **names = (const char **)
        malloc((HeaderCount+1) * sizeof(char *));
#endif

    unsigned int count = 0;
    for (header_info *hi = FirstHeader; hi != nil; hi = hi->getNext()) {
#if TARGET_OS_WIN32
        if (hi->moduleName) {
            names[count++] = hi->moduleName;
        }
#else
        const char *fname = hi->fname();
        if (fname) {
            names[count++] = fname;
        }
#endif
    }
    names[count] = nil;

    if (count == 0) {
        // Return nil instead of empty list if there are no images
        free((void *)names);
        names = nil;
    }

    if (outCount) *outCount = count;
    return names;
}


/***********************************************************************
* copyClassNamesForImage_nolock
* Copies class names from the given image.
* Missing weak-import classes are omitted.
* Swift class names are demangled.
*
* Locking: runtimeLock must be held by the caller
**********************************************************************/
const char **
copyClassNamesForImage_nolock(header_info *hi, unsigned int *outCount)
{
    runtimeLock.assertLocked();
    ASSERT(hi);

    size_t count;
    classref_t const *classlist = _getObjc2ClassList(hi, &count);
    const char **names = (const char **)
        malloc((count+1) * sizeof(const char *));

    size_t shift = 0;
    for (size_t i = 0; i < count; i++) {
        Class cls = remapClass(classlist[i]);
        if (cls) {
            names[i-shift] = cls->demangledName();
        } else {
            shift++;  // ignored weak-linked class
        }
    }
    count -= shift;
    names[count] = nil;

    if (outCount) *outCount = (unsigned int)count;
    return names;
}


/***********************************************************************
* objc_copyClassNamesForImage
* Copies class names from the named image.
* The image name must be identical to dladdr's dli_fname value.
* Missing weak-import classes are omitted.
* Swift class names are demangled.
*
* Locking: acquires runtimeLock
**********************************************************************/
const char **
objc_copyClassNamesForImage(const char *image, unsigned int *outCount)
{
    if (!image) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Find the image.
    header_info *hi;
    for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
#if TARGET_OS_WIN32
        if (0 == wcscmp((TCHAR *)image, hi->moduleName)) break;
#else
        if (0 == strcmp(image, hi->fname())) break;
#endif
    }

    if (!hi) {
        if (outCount) *outCount = 0;
        return nil;
    }

    return copyClassNamesForImage_nolock(hi, outCount);
}
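/***********************************************************************
* Usage sketch (added for illustration, not part of the original source):
* walking every loaded image with Objective-C content and counting its
* classes.
*
*   #include <objc/runtime.h>
*   #include <stdio.h>
*   #include <stdlib.h>
*
*   void dumpClassesByImage(void)
*   {
*       unsigned int nImages = 0;
*       const char **images = objc_copyImageNames(&nImages);
*       for (unsigned int i = 0; i < nImages; i++) {
*           unsigned int nClasses = 0;
*           const char **names = objc_copyClassNamesForImage(images[i], &nClasses);
*           printf("%s: %u classes\n", images[i], nClasses);
*           free(names);
*       }
*       free(images);
*   }
**********************************************************************/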
/***********************************************************************
* objc_copyClassNamesForImageHeader
* Copies class names from the given image.
* Missing weak-import classes are omitted.
* Swift class names are demangled.
*
* Locking: acquires runtimeLock
**********************************************************************/
const char **
objc_copyClassNamesForImageHeader(const struct mach_header *mh, unsigned int *outCount)
{
    if (!mh) {
        if (outCount) *outCount = 0;
        return nil;
    }

    mutex_locker_t lock(runtimeLock);

    // Find the image.
    header_info *hi;
    for (hi = FirstHeader; hi != nil; hi = hi->getNext()) {
        if (hi->mhdr() == (const headerType *)mh) break;
    }

    if (!hi) {
        if (outCount) *outCount = 0;
        return nil;
    }

    return copyClassNamesForImage_nolock(hi, outCount);
}


/***********************************************************************
* saveTemporaryString
* Save a string in a thread-local FIFO buffer.
* This is suitable for temporary strings generated for logging purposes.
**********************************************************************/
static void
saveTemporaryString(char *str)
{
    // Fixed-size FIFO. We free the first string, shift
    // the rest, and add the new string to the end.
    _objc_pthread_data *data = _objc_fetch_pthread_data(true);
    if (data->printableNames[0]) {
        free(data->printableNames[0]);
    }
    int last = countof(data->printableNames) - 1;
    for (int i = 0; i < last; i++) {
        data->printableNames[i] = data->printableNames[i+1];
    }
    data->printableNames[last] = str;
}


/***********************************************************************
* objc_class::nameForLogging
* Returns the class's name, suitable for display.
* The returned memory is TEMPORARY. Print it or copy it immediately.
* Locking: none
**********************************************************************/
const char *
objc_class::nameForLogging()
{
    // Handle the easy case directly.
    if (isRealized()  ||  isFuture()) {
        if (data()->demangledName) return data()->demangledName;
    }

    char *result;

    const char *name = mangledName();
    char *de = copySwiftV1DemangledName(name);
    if (de) result = de;
    else result = strdup(name);

    saveTemporaryString(result);
    return result;
}


/***********************************************************************
* objc_class::demangledName
* If realize=false, the class must already be realized or future.
* Locking: runtimeLock may or may not be held by the caller.
**********************************************************************/
mutex_t DemangleCacheLock;
static objc::DenseSet<const char *> *DemangleCache;
const char *
objc_class::demangledName()
{
    // Return previously demangled name if available.
    if (isRealized()  ||  isFuture()) {
        if (data()->demangledName) return data()->demangledName;
    }

    // Try demangling the mangled name.
    const char *mangled = mangledName();
    char *de = copySwiftV1DemangledName(mangled);
    if (isRealized()  ||  isFuture()) {
        // Class is already realized or future.
        // Save demangling result in rw data.
        // We may not own runtimeLock so use an atomic operation instead.
        if (! OSAtomicCompareAndSwapPtrBarrier(nil, (void*)(de ?: mangled),
                                               (void**)&data()->demangledName))
        {
            if (de) free(de);
        }
        return data()->demangledName;
    }

    // Class is not yet realized.
    if (!de) {
        // Name is not mangled. Return it without caching.
        return mangled;
    }

    // Class is not yet realized and name is mangled.
    // Allocate the name but don't save it in the class.
    // Save the name in a side cache instead to prevent leaks.
    // When the class is actually realized we may allocate a second
    // copy of the name, but we don't care.
    // (Previously we would try to realize the class now and save the
    // name there, but realization is more complicated for Swift classes.)

    // Only objc_copyClassNamesForImage() should get here.
    // fixme lldb's calls to class_getName() can also get here when
    // interrogating the dyld shared cache. (rdar://27258517)
    // fixme runtimeLock.assertLocked();
    // fixme ASSERT(realize);

    const char *cached;
    {
        mutex_locker_t lock(DemangleCacheLock);
        if (!DemangleCache) {
            DemangleCache = new objc::DenseSet<const char *>{};
        }
        cached = *DemangleCache->insert(de).first;
    }
    if (cached != de) free(de);
    return cached;
}


/***********************************************************************
* class_getName
* fixme
* Locking: may acquire DemangleCacheLock
**********************************************************************/
const char *class_getName(Class cls)
{
    if (!cls) return "nil";
    // fixme lldb calls class_getName() on unrealized classes (rdar://27258517)
    // ASSERT(cls->isRealized()  ||  cls->isFuture());
    return cls->demangledName();
}


/***********************************************************************
* objc_debug_class_getNameRaw
* fixme
* Locking: none
**********************************************************************/
const char *objc_debug_class_getNameRaw(Class cls)
{
    if (!cls) return "nil";
    return cls->mangledName();
}


/***********************************************************************
* class_getVersion
* fixme
* Locking: none
**********************************************************************/
int
class_getVersion(Class cls)
{
    if (!cls) return 0;
    ASSERT(cls->isRealized());
    return cls->data()->version;
}


/***********************************************************************
* class_setVersion
* fixme
* Locking: none
**********************************************************************/
void
class_setVersion(Class cls, int version)
{
    if (!cls) return;
    ASSERT(cls->isRealized());
    cls->data()->version = version;
}
  4725. /***********************************************************************
  4726. * search_method_list_inline
  4727. **********************************************************************/
  4728. ALWAYS_INLINE static method_t *
  4729. findMethodInSortedMethodList(SEL key, const method_list_t *list)
  4730. {
  4731. ASSERT(list);
  4732. const method_t * const first = &list->first;
  4733. const method_t *base = first;
  4734. const method_t *probe;
  4735. uintptr_t keyValue = (uintptr_t)key;
  4736. uint32_t count;
  4737. for (count = list->count; count != 0; count >>= 1) {
  4738. probe = base + (count >> 1);
  4739. uintptr_t probeValue = (uintptr_t)probe->name;
  4740. if (keyValue == probeValue) {
  4741. // `probe` is a match.
  4742. // Rewind looking for the *first* occurrence of this value.
  4743. // This is required for correct category overrides.
  4744. while (probe > first && keyValue == (uintptr_t)probe[-1].name) {
  4745. probe--;
  4746. }
  4747. return (method_t *)probe;
  4748. }
  4749. if (keyValue > probeValue) {
  4750. base = probe + 1;
  4751. count--;
  4752. }
  4753. }
  4754. return nil;
  4755. }
  4756. ALWAYS_INLINE static method_t *
  4757. search_method_list_inline(const method_list_t *mlist, SEL sel)
  4758. {
  4759. int methodListIsFixedUp = mlist->isFixedUp();
  4760. int methodListHasExpectedSize = mlist->entsize() == sizeof(method_t);
  4761. if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
  4762. return findMethodInSortedMethodList(sel, mlist);
  4763. } else {
  4764. // Linear search of unsorted method list
  4765. for (auto& meth : *mlist) {
  4766. if (meth.name == sel) return &meth;
  4767. }
  4768. }
  4769. #if DEBUG
  4770. // sanity-check negative results
  4771. if (mlist->isFixedUp()) {
  4772. for (auto& meth : *mlist) {
  4773. if (meth.name == sel) {
  4774. _objc_fatal("linear search worked when binary search did not");
  4775. }
  4776. }
  4777. }
  4778. #endif
  4779. return nil;
  4780. }
  4781. NEVER_INLINE static method_t *
  4782. search_method_list(const method_list_t *mlist, SEL sel)
  4783. {
  4784. return search_method_list_inline(mlist, sel);
  4785. }
  4786. /***********************************************************************
  4787. * method_lists_contains_any
  4788. **********************************************************************/
  4789. static NEVER_INLINE bool
  4790. method_lists_contains_any(method_list_t **mlists, method_list_t **end,
  4791. SEL sels[], size_t selcount)
  4792. {
  4793. while (mlists < end) {
  4794. const method_list_t *mlist = *mlists++;
  4795. int methodListIsFixedUp = mlist->isFixedUp();
  4796. int methodListHasExpectedSize = mlist->entsize() == sizeof(method_t);
  4797. if (fastpath(methodListIsFixedUp && methodListHasExpectedSize)) {
  4798. for (size_t i = 0; i < selcount; i++) {
  4799. if (findMethodInSortedMethodList(sels[i], mlist)) {
  4800. return true;
  4801. }
  4802. }
  4803. } else {
  4804. for (auto& meth : *mlist) {
  4805. for (size_t i = 0; i < selcount; i++) {
  4806. if (meth.name == sels[i]) {
  4807. return true;
  4808. }
  4809. }
  4810. }
  4811. }
  4812. }
  4813. return false;
  4814. }
  4815. /***********************************************************************
  4816. * getMethodNoSuper_nolock
  4817. * fixme
  4818. * Locking: runtimeLock must be read- or write-locked by the caller
  4819. **********************************************************************/
  4820. static method_t *
  4821. getMethodNoSuper_nolock(Class cls, SEL sel)
  4822. {
  4823. runtimeLock.assertLocked();
  4824. ASSERT(cls->isRealized());
  4825. // fixme nil cls?
  4826. // fixme nil sel?
  4827. for (auto mlists = cls->data()->methods.beginLists(),
  4828. end = cls->data()->methods.endLists();
  4829. mlists != end;
  4830. ++mlists)
  4831. {
  4832. // <rdar://problem/46904873> getMethodNoSuper_nolock is the hottest
  4833. // caller of search_method_list, inlining it turns
  4834. // getMethodNoSuper_nolock into a frame-less function and eliminates
  4835. // any store from this codepath.
  4836. method_t *m = search_method_list_inline(*mlists, sel);
  4837. if (m) return m;
  4838. }
  4839. return nil;
  4840. }
  4841. /***********************************************************************
  4842. * getMethod_nolock
  4843. * fixme
  4844. * Locking: runtimeLock must be read- or write-locked by the caller
  4845. **********************************************************************/
  4846. static method_t *
  4847. getMethod_nolock(Class cls, SEL sel)
  4848. {
  4849. method_t *m = nil;
  4850. runtimeLock.assertLocked();
  4851. // fixme nil cls?
  4852. // fixme nil sel?
  4853. ASSERT(cls->isRealized());
  4854. while (cls && ((m = getMethodNoSuper_nolock(cls, sel))) == nil) {
  4855. cls = cls->superclass;
  4856. }
  4857. return m;
  4858. }
  4859. /***********************************************************************
  4860. * _class_getMethod
  4861. * fixme
  4862. * Locking: read-locks runtimeLock
  4863. **********************************************************************/
  4864. static Method _class_getMethod(Class cls, SEL sel)
  4865. {
  4866. mutex_locker_t lock(runtimeLock);
  4867. return getMethod_nolock(cls, sel);
  4868. }
  4869. /***********************************************************************
  4870. * class_getInstanceMethod. Return the instance method for the
  4871. * specified class and selector.
  4872. **********************************************************************/
  4873. Method class_getInstanceMethod(Class cls, SEL sel)
  4874. {
  4875. if (!cls || !sel) return nil;
  4876. // This deliberately avoids +initialize because it historically did so.
  4877. // This implementation is a bit weird because it's the only place that
  4878. // wants a Method instead of an IMP.
  4879. #warning fixme build and search caches
  4880. // Search method lists, try method resolver, etc.
  4881. lookUpImpOrForward(nil, sel, cls, LOOKUP_RESOLVER);
  4882. #warning fixme build and search caches
  4883. return _class_getMethod(cls, sel);
  4884. }
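// Illustrative usage (editor's note, not part of the runtime):
//   Method m = class_getInstanceMethod([NSString class], @selector(length));
//   if (m) {
//       IMP imp = method_getImplementation(m);
//       const char *types = method_getTypeEncoding(m);
//   }
// Class (+) methods are reached by asking the metaclass: class_getClassMethod()
// is equivalent to calling this on object_getClass(cls).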
  4885. /***********************************************************************
  4886. * resolveClassMethod
  4887. * Call +resolveClassMethod, looking for a method to be added to class cls.
  4888. * cls should be a metaclass.
  4889. * Does not check if the method already exists.
  4890. **********************************************************************/
  4891. static void resolveClassMethod(id inst, SEL sel, Class cls)
  4892. {
  4893. runtimeLock.assertUnlocked();
  4894. ASSERT(cls->isRealized());
  4895. ASSERT(cls->isMetaClass());
  4896. if (!lookUpImpOrNil(inst, @selector(resolveClassMethod:), cls)) {
  4897. // Resolver not implemented.
  4898. return;
  4899. }
  4900. Class nonmeta;
  4901. {
  4902. mutex_locker_t lock(runtimeLock);
  4903. nonmeta = getMaybeUnrealizedNonMetaClass(cls, inst);
  4904. // +initialize path should have realized nonmeta already
  4905. if (!nonmeta->isRealized()) {
  4906. _objc_fatal("nonmeta class %s (%p) unexpectedly not realized",
  4907. nonmeta->nameForLogging(), nonmeta);
  4908. }
  4909. }
  4910. BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
  4911. bool resolved = msg(nonmeta, @selector(resolveClassMethod:), sel);
  4912. // Cache the result (good or bad) so the resolver doesn't fire next time.
  4913. // +resolveClassMethod adds to self->ISA() a.k.a. cls
  4914. IMP imp = lookUpImpOrNil(inst, sel, cls);
  4915. if (resolved && PrintResolving) {
  4916. if (imp) {
  4917. _objc_inform("RESOLVE: method %c[%s %s] "
  4918. "dynamically resolved to %p",
  4919. cls->isMetaClass() ? '+' : '-',
  4920. cls->nameForLogging(), sel_getName(sel), imp);
  4921. }
  4922. else {
  4923. // Method resolver didn't add anything?
  4924. _objc_inform("RESOLVE: +[%s resolveClassMethod:%s] returned YES"
  4925. ", but no new implementation of %c[%s %s] was found",
  4926. cls->nameForLogging(), sel_getName(sel),
  4927. cls->isMetaClass() ? '+' : '-',
  4928. cls->nameForLogging(), sel_getName(sel));
  4929. }
  4930. }
  4931. }
  4932. /***********************************************************************
  4933. * resolveInstanceMethod
  4934. * Call +resolveInstanceMethod, looking for a method to be added to class cls.
  4935. * cls may be a metaclass or a non-meta class.
  4936. * Does not check if the method already exists.
  4937. **********************************************************************/
  4938. static void resolveInstanceMethod(id inst, SEL sel, Class cls)
  4939. {
  4940. runtimeLock.assertUnlocked();
  4941. ASSERT(cls->isRealized());
  4942. SEL resolve_sel = @selector(resolveInstanceMethod:);
  4943. if (!lookUpImpOrNil(cls, resolve_sel, cls->ISA())) {
  4944. // Resolver not implemented.
  4945. return;
  4946. }
  4947. BOOL (*msg)(Class, SEL, SEL) = (typeof(msg))objc_msgSend;
  4948. bool resolved = msg(cls, resolve_sel, sel);
  4949. // Cache the result (good or bad) so the resolver doesn't fire next time.
  4950. // +resolveInstanceMethod adds to self a.k.a. cls
  4951. IMP imp = lookUpImpOrNil(inst, sel, cls);
  4952. if (resolved && PrintResolving) {
  4953. if (imp) {
  4954. _objc_inform("RESOLVE: method %c[%s %s] "
  4955. "dynamically resolved to %p",
  4956. cls->isMetaClass() ? '+' : '-',
  4957. cls->nameForLogging(), sel_getName(sel), imp);
  4958. }
  4959. else {
  4960. // Method resolver didn't add anything?
  4961. _objc_inform("RESOLVE: +[%s resolveInstanceMethod:%s] returned YES"
  4962. ", but no new implementation of %c[%s %s] was found",
  4963. cls->nameForLogging(), sel_getName(sel),
  4964. cls->isMetaClass() ? '+' : '-',
  4965. cls->nameForLogging(), sel_getName(sel));
  4966. }
  4967. }
  4968. }
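// Illustrative client-side resolver (editor's note; the class and selector
// names are hypothetical). This is the shape of code the machinery above invokes:
//   static NSString *GreetingIMP(id self, SEL _cmd) { return @"hello"; }
//
//   + (BOOL)resolveInstanceMethod:(SEL)sel {
//       if (sel == @selector(greeting)) {
//           class_addMethod(self, sel, (IMP)GreetingIMP, "@@:");
//           return YES;
//       }
//       return [super resolveInstanceMethod:sel];
//   }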
  4969. /***********************************************************************
  4970. * resolveMethod_locked
  4971. * Call +resolveClassMethod or +resolveInstanceMethod.
  4972. *
  4973. * Called with the runtimeLock held to avoid pressure in the caller
4974. * Tail calls into lookUpImpOrForward, also to avoid pressure in the caller.
  4975. **********************************************************************/
  4976. static NEVER_INLINE IMP
  4977. resolveMethod_locked(id inst, SEL sel, Class cls, int behavior)
  4978. {
  4979. runtimeLock.assertLocked();
  4980. ASSERT(cls->isRealized());
  4981. runtimeLock.unlock();
  4982. if (! cls->isMetaClass()) {
  4983. // try [cls resolveInstanceMethod:sel]
  4984. resolveInstanceMethod(inst, sel, cls);
  4985. }
  4986. else {
  4987. // try [nonMetaClass resolveClassMethod:sel]
  4988. // and [cls resolveInstanceMethod:sel]
  4989. resolveClassMethod(inst, sel, cls);
  4990. if (!lookUpImpOrNil(inst, sel, cls)) {
  4991. resolveInstanceMethod(inst, sel, cls);
  4992. }
  4993. }
4994. // Chances are that calling the resolver has populated the cache,
4995. // so attempt to use it.
  4996. return lookUpImpOrForward(inst, sel, cls, behavior | LOOKUP_CACHE);
  4997. }
  4998. /***********************************************************************
  4999. * log_and_fill_cache
  5000. * Log this method call. If the logger permits it, fill the method cache.
5001. * cls is the class whose method cache should be filled.
  5002. * implementer is the class that owns the implementation in question.
  5003. **********************************************************************/
  5004. static void
  5005. log_and_fill_cache(Class cls, IMP imp, SEL sel, id receiver, Class implementer)
  5006. {
  5007. #if SUPPORT_MESSAGE_LOGGING
  5008. if (slowpath(objcMsgLogEnabled && implementer)) {
  5009. bool cacheIt = logMessageSend(implementer->isMetaClass(),
  5010. cls->nameForLogging(),
  5011. implementer->nameForLogging(),
  5012. sel);
  5013. if (!cacheIt) return;
  5014. }
  5015. #endif
  5016. cache_fill(cls, sel, imp, receiver);
  5017. }
  5018. /***********************************************************************
  5019. * lookUpImpOrForward.
  5020. * The standard IMP lookup.
  5021. * Without LOOKUP_INITIALIZE: tries to avoid +initialize (but sometimes fails)
  5022. * Without LOOKUP_CACHE: skips optimistic unlocked lookup (but uses cache elsewhere)
  5023. * Most callers should use LOOKUP_INITIALIZE and LOOKUP_CACHE
  5024. * inst is an instance of cls or a subclass thereof, or nil if none is known.
  5025. * If cls is an un-initialized metaclass then a non-nil inst is faster.
  5026. * May return _objc_msgForward_impcache. IMPs destined for external use
  5027. * must be converted to _objc_msgForward or _objc_msgForward_stret.
  5028. * If you don't want forwarding at all, use LOOKUP_NIL.
  5029. **********************************************************************/
  5030. IMP lookUpImpOrForward(id inst, SEL sel, Class cls, int behavior)
  5031. {
  5032. const IMP forward_imp = (IMP)_objc_msgForward_impcache;
  5033. IMP imp = nil;
  5034. Class curClass;
  5035. runtimeLock.assertUnlocked();
  5036. // Optimistic cache lookup
  5037. if (fastpath(behavior & LOOKUP_CACHE)) {
  5038. imp = cache_getImp(cls, sel);
  5039. if (imp) goto done_nolock;
  5040. }
  5041. // runtimeLock is held during isRealized and isInitialized checking
  5042. // to prevent races against concurrent realization.
  5043. // runtimeLock is held during method search to make
  5044. // method-lookup + cache-fill atomic with respect to method addition.
  5045. // Otherwise, a category could be added but ignored indefinitely because
  5046. // the cache was re-filled with the old value after the cache flush on
  5047. // behalf of the category.
  5048. runtimeLock.lock();
  5049. // We don't want people to be able to craft a binary blob that looks like
  5050. // a class but really isn't one and do a CFI attack.
  5051. //
  5052. // To make these harder we want to make sure this is a class that was
  5053. // either built into the binary or legitimately registered through
  5054. // objc_duplicateClass, objc_initializeClassPair or objc_allocateClassPair.
  5055. //
  5056. // TODO: this check is quite costly during process startup.
  5057. checkIsKnownClass(cls);
  5058. if (slowpath(!cls->isRealized())) {
  5059. cls = realizeClassMaybeSwiftAndLeaveLocked(cls, runtimeLock);
  5060. // runtimeLock may have been dropped but is now locked again
  5061. }
  5062. if (slowpath((behavior & LOOKUP_INITIALIZE) && !cls->isInitialized())) {
  5063. cls = initializeAndLeaveLocked(cls, inst, runtimeLock);
  5064. // runtimeLock may have been dropped but is now locked again
  5065. // If sel == initialize, class_initialize will send +initialize and
  5066. // then the messenger will send +initialize again after this
  5067. // procedure finishes. Of course, if this is not being called
  5068. // from the messenger then it won't happen. 2778172
  5069. }
  5070. runtimeLock.assertLocked();
  5071. curClass = cls;
5072. // The code used to look up the class's cache again right after
5073. // taking the lock, but for the vast majority of cases the evidence
5074. // shows that's a miss most of the time, hence a net time loss.
  5075. //
  5076. // The only codepath calling into this without having performed some
  5077. // kind of cache lookup is class_getInstanceMethod().
  5078. for (unsigned attempts = unreasonableClassCount();;) {
  5079. // curClass method list.
  5080. Method meth = getMethodNoSuper_nolock(curClass, sel);
  5081. if (meth) {
  5082. imp = meth->imp;
  5083. goto done;
  5084. }
  5085. if (slowpath((curClass = curClass->superclass) == nil)) {
  5086. // No implementation found, and method resolver didn't help.
  5087. // Use forwarding.
  5088. imp = forward_imp;
  5089. break;
  5090. }
  5091. // Halt if there is a cycle in the superclass chain.
  5092. if (slowpath(--attempts == 0)) {
  5093. _objc_fatal("Memory corruption in class list.");
  5094. }
  5095. // Superclass cache.
  5096. imp = cache_getImp(curClass, sel);
  5097. if (slowpath(imp == forward_imp)) {
  5098. // Found a forward:: entry in a superclass.
  5099. // Stop searching, but don't cache yet; call method
  5100. // resolver for this class first.
  5101. break;
  5102. }
  5103. if (fastpath(imp)) {
  5104. // Found the method in a superclass. Cache it in this class.
  5105. goto done;
  5106. }
  5107. }
  5108. // No implementation found. Try method resolver once.
  5109. if (slowpath(behavior & LOOKUP_RESOLVER)) {
  5110. behavior ^= LOOKUP_RESOLVER;
  5111. return resolveMethod_locked(inst, sel, cls, behavior);
  5112. }
  5113. done:
  5114. log_and_fill_cache(cls, imp, sel, inst, curClass);
  5115. runtimeLock.unlock();
  5116. done_nolock:
  5117. if (slowpath((behavior & LOOKUP_NIL) && imp == forward_imp)) {
  5118. return nil;
  5119. }
  5120. return imp;
  5121. }
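// Editor's note (illustrative, hedged): callers compose the behavior flags to
// get the lookup variant they need. For example, lookUpImpOrNil() used above
// is, roughly, a thin wrapper declared in objc-runtime-new.h along the lines of:
//   static inline IMP
//   lookUpImpOrNil(id obj, SEL sel, Class cls, int behavior = 0)
//   {
//       return lookUpImpOrForward(obj, sel, cls,
//                                 behavior | LOOKUP_CACHE | LOOKUP_NIL);
//   }
// i.e. it asks for the optimistic cache probe and for nil instead of the
// forwarding IMP when nothing is found.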
  5122. /***********************************************************************
  5123. * lookupMethodInClassAndLoadCache.
  5124. * Like lookUpImpOrForward, but does not search superclasses.
  5125. * Caches and returns objc_msgForward if the method is not found in the class.
  5126. **********************************************************************/
  5127. IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel)
  5128. {
  5129. Method meth;
  5130. IMP imp;
  5131. // fixme this is incomplete - no resolver, +initialize -
  5132. // but it's only used for .cxx_construct/destruct so we don't care
  5133. ASSERT(sel == SEL_cxx_construct || sel == SEL_cxx_destruct);
  5134. // Search cache first.
  5135. imp = cache_getImp(cls, sel);
  5136. if (imp) return imp;
  5137. // Cache miss. Search method list.
  5138. mutex_locker_t lock(runtimeLock);
  5139. meth = getMethodNoSuper_nolock(cls, sel);
  5140. if (meth) {
  5141. // Hit in method list. Cache it.
  5142. cache_fill(cls, sel, meth->imp, nil);
  5143. return meth->imp;
  5144. } else {
  5145. // Miss in method list. Cache objc_msgForward.
  5146. cache_fill(cls, sel, _objc_msgForward_impcache, nil);
  5147. return _objc_msgForward_impcache;
  5148. }
  5149. }
  5150. /***********************************************************************
  5151. * class_getProperty
  5152. * fixme
  5153. * Locking: read-locks runtimeLock
  5154. **********************************************************************/
  5155. objc_property_t class_getProperty(Class cls, const char *name)
  5156. {
  5157. if (!cls || !name) return nil;
  5158. mutex_locker_t lock(runtimeLock);
  5159. checkIsKnownClass(cls);
  5160. ASSERT(cls->isRealized());
  5161. for ( ; cls; cls = cls->superclass) {
  5162. for (auto& prop : cls->data()->properties) {
  5163. if (0 == strcmp(name, prop.name)) {
  5164. return (objc_property_t)&prop;
  5165. }
  5166. }
  5167. }
  5168. return nil;
  5169. }
  5170. /***********************************************************************
  5171. * Locking: fixme
  5172. **********************************************************************/
  5173. Class gdb_class_getClass(Class cls)
  5174. {
  5175. const char *className = cls->mangledName();
5176. if (!className || !strlen(className)) return Nil;
  5177. Class rCls = look_up_class(className, NO, NO);
  5178. return rCls;
  5179. }
  5180. Class gdb_object_getClass(id obj)
  5181. {
  5182. if (!obj) return nil;
  5183. return gdb_class_getClass(obj->getIsa());
  5184. }
  5185. /***********************************************************************
  5186. * Locking: write-locks runtimeLock
  5187. **********************************************************************/
  5188. void
  5189. objc_class::setInitialized()
  5190. {
  5191. Class metacls;
  5192. Class cls;
  5193. ASSERT(!isMetaClass());
  5194. cls = (Class)this;
  5195. metacls = cls->ISA();
  5196. mutex_locker_t lock(runtimeLock);
  5197. // Special cases:
  5198. // - NSObject AWZ class methods are default.
  5199. // - NSObject RR class and instance methods are default.
  5200. // - NSObject Core class and instance methods are default.
  5201. // adjustCustomFlagsForMethodChange() also knows these special cases.
  5202. // attachMethodLists() also knows these special cases.
  5203. objc::AWZScanner::scanInitializedClass(cls, metacls);
  5204. objc::RRScanner::scanInitializedClass(cls, metacls);
  5205. objc::CoreScanner::scanInitializedClass(cls, metacls);
  5206. // Update the +initialize flags.
  5207. // Do this last.
  5208. metacls->changeInfo(RW_INITIALIZED, RW_INITIALIZING);
  5209. }
  5210. void
  5211. objc_class::printInstancesRequireRawIsa(bool inherited)
  5212. {
  5213. ASSERT(PrintRawIsa);
  5214. ASSERT(instancesRequireRawIsa());
  5215. _objc_inform("RAW ISA: %s%s%s", nameForLogging(),
  5216. isMetaClass() ? " (meta)" : "",
  5217. inherited ? " (inherited)" : "");
  5218. }
  5219. /***********************************************************************
  5220. * Mark this class and all of its subclasses as requiring raw isa pointers
  5221. **********************************************************************/
  5222. void objc_class::setInstancesRequireRawIsaRecursively(bool inherited)
  5223. {
  5224. Class cls = (Class)this;
  5225. runtimeLock.assertLocked();
  5226. if (instancesRequireRawIsa()) return;
  5227. foreach_realized_class_and_subclass(cls, [=](Class c){
  5228. if (c->instancesRequireRawIsa()) {
  5229. return false;
  5230. }
  5231. c->setInstancesRequireRawIsa();
  5232. if (PrintRawIsa) c->printInstancesRequireRawIsa(inherited || c != cls);
  5233. return true;
  5234. });
  5235. }
  5236. /***********************************************************************
  5237. * Choose a class index.
  5238. * Set instancesRequireRawIsa if no more class indexes are available.
  5239. **********************************************************************/
  5240. void objc_class::chooseClassArrayIndex()
  5241. {
  5242. #if SUPPORT_INDEXED_ISA
  5243. Class cls = (Class)this;
  5244. runtimeLock.assertLocked();
  5245. if (objc_indexed_classes_count >= ISA_INDEX_COUNT) {
  5246. // No more indexes available.
  5247. ASSERT(cls->classArrayIndex() == 0);
  5248. cls->setInstancesRequireRawIsaRecursively(false/*not inherited*/);
  5249. return;
  5250. }
  5251. unsigned index = objc_indexed_classes_count++;
  5252. if (index == 0) index = objc_indexed_classes_count++; // index 0 is unused
  5253. classForIndex(index) = cls;
  5254. cls->setClassArrayIndex(index);
  5255. #endif
  5256. }
  5257. /***********************************************************************
  5258. * Update custom RR and AWZ when a method changes its IMP
  5259. **********************************************************************/
  5260. static void
  5261. adjustCustomFlagsForMethodChange(Class cls, method_t *meth)
  5262. {
  5263. objc::AWZScanner::scanChangedMethod(cls, meth);
  5264. objc::RRScanner::scanChangedMethod(cls, meth);
  5265. objc::CoreScanner::scanChangedMethod(cls, meth);
  5266. }
  5267. /***********************************************************************
  5268. * class_getIvarLayout
  5269. * Called by the garbage collector.
  5270. * The class must be nil or already realized.
  5271. * Locking: none
  5272. **********************************************************************/
  5273. const uint8_t *
  5274. class_getIvarLayout(Class cls)
  5275. {
  5276. if (cls) return cls->data()->ro->ivarLayout;
  5277. else return nil;
  5278. }
  5279. /***********************************************************************
  5280. * class_getWeakIvarLayout
  5281. * Called by the garbage collector.
  5282. * The class must be nil or already realized.
  5283. * Locking: none
  5284. **********************************************************************/
  5285. const uint8_t *
  5286. class_getWeakIvarLayout(Class cls)
  5287. {
  5288. if (cls) return cls->data()->ro->weakIvarLayout;
  5289. else return nil;
  5290. }
  5291. /***********************************************************************
  5292. * class_setIvarLayout
  5293. * Changes the class's ivar layout.
  5294. * nil layout means no unscanned ivars
  5295. * The class must be under construction.
  5296. * fixme: sanity-check layout vs instance size?
  5297. * fixme: sanity-check layout vs superclass?
  5298. * Locking: acquires runtimeLock
  5299. **********************************************************************/
  5300. void
  5301. class_setIvarLayout(Class cls, const uint8_t *layout)
  5302. {
  5303. if (!cls) return;
  5304. mutex_locker_t lock(runtimeLock);
  5305. checkIsKnownClass(cls);
  5306. // Can only change layout of in-construction classes.
  5307. // note: if modifications to post-construction classes were
  5308. // allowed, there would be a race below (us vs. concurrent object_setIvar)
  5309. if (!(cls->data()->flags & RW_CONSTRUCTING)) {
  5310. _objc_inform("*** Can't set ivar layout for already-registered "
  5311. "class '%s'", cls->nameForLogging());
  5312. return;
  5313. }
  5314. class_ro_t *ro_w = make_ro_writeable(cls->data());
  5315. try_free(ro_w->ivarLayout);
  5316. ro_w->ivarLayout = ustrdupMaybeNil(layout);
  5317. }
  5318. /***********************************************************************
  5319. * class_setWeakIvarLayout
  5320. * Changes the class's weak ivar layout.
  5321. * nil layout means no weak ivars
  5322. * The class must be under construction.
  5323. * fixme: sanity-check layout vs instance size?
  5324. * fixme: sanity-check layout vs superclass?
  5325. * Locking: acquires runtimeLock
  5326. **********************************************************************/
  5327. void
  5328. class_setWeakIvarLayout(Class cls, const uint8_t *layout)
  5329. {
  5330. if (!cls) return;
  5331. mutex_locker_t lock(runtimeLock);
  5332. checkIsKnownClass(cls);
  5333. // Can only change layout of in-construction classes.
  5334. // note: if modifications to post-construction classes were
  5335. // allowed, there would be a race below (us vs. concurrent object_setIvar)
  5336. if (!(cls->data()->flags & RW_CONSTRUCTING)) {
  5337. _objc_inform("*** Can't set weak ivar layout for already-registered "
  5338. "class '%s'", cls->nameForLogging());
  5339. return;
  5340. }
  5341. class_ro_t *ro_w = make_ro_writeable(cls->data());
  5342. try_free(ro_w->weakIvarLayout);
  5343. ro_w->weakIvarLayout = ustrdupMaybeNil(layout);
  5344. }
  5345. /***********************************************************************
  5346. * getIvar
  5347. * Look up an ivar by name.
  5348. * Locking: runtimeLock must be read- or write-locked by the caller.
  5349. **********************************************************************/
  5350. static ivar_t *getIvar(Class cls, const char *name)
  5351. {
  5352. runtimeLock.assertLocked();
  5353. const ivar_list_t *ivars;
  5354. ASSERT(cls->isRealized());
  5355. if ((ivars = cls->data()->ro->ivars)) {
  5356. for (auto& ivar : *ivars) {
  5357. if (!ivar.offset) continue; // anonymous bitfield
  5358. // ivar.name may be nil for anonymous bitfields etc.
  5359. if (ivar.name && 0 == strcmp(name, ivar.name)) {
  5360. return &ivar;
  5361. }
  5362. }
  5363. }
  5364. return nil;
  5365. }
  5366. /***********************************************************************
  5367. * _class_getClassForIvar
  5368. * Given a class and an ivar that is in it or one of its superclasses,
  5369. * find the actual class that defined the ivar.
  5370. **********************************************************************/
  5371. Class _class_getClassForIvar(Class cls, Ivar ivar)
  5372. {
  5373. mutex_locker_t lock(runtimeLock);
  5374. for ( ; cls; cls = cls->superclass) {
  5375. if (auto ivars = cls->data()->ro->ivars) {
  5376. if (ivars->containsIvar(ivar)) {
  5377. return cls;
  5378. }
  5379. }
  5380. }
  5381. return nil;
  5382. }
  5383. /***********************************************************************
  5384. * _class_getVariable
  5385. * fixme
  5386. * Locking: read-locks runtimeLock
  5387. **********************************************************************/
  5388. Ivar
  5389. _class_getVariable(Class cls, const char *name)
  5390. {
  5391. mutex_locker_t lock(runtimeLock);
  5392. for ( ; cls; cls = cls->superclass) {
  5393. ivar_t *ivar = getIvar(cls, name);
  5394. if (ivar) {
  5395. return ivar;
  5396. }
  5397. }
  5398. return nil;
  5399. }
  5400. /***********************************************************************
  5401. * class_conformsToProtocol
  5402. * fixme
  5403. * Locking: read-locks runtimeLock
  5404. **********************************************************************/
  5405. BOOL class_conformsToProtocol(Class cls, Protocol *proto_gen)
  5406. {
  5407. protocol_t *proto = newprotocol(proto_gen);
  5408. if (!cls) return NO;
  5409. if (!proto_gen) return NO;
  5410. mutex_locker_t lock(runtimeLock);
  5411. checkIsKnownClass(cls);
  5412. ASSERT(cls->isRealized());
  5413. for (const auto& proto_ref : cls->data()->protocols) {
  5414. protocol_t *p = remapProtocol(proto_ref);
  5415. if (p == proto || protocol_conformsToProtocol_nolock(p, proto)) {
  5416. return YES;
  5417. }
  5418. }
  5419. return NO;
  5420. }
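// Illustrative call (editor's note, not part of the runtime):
//   class_conformsToProtocol([NSArray class], @protocol(NSCopying));
// This checks only the protocols adopted by cls itself (plus protocols those
// protocols incorporate); callers that also want inherited conformances, such
// as -[NSObject conformsToProtocol:], are expected to walk the superclass
// chain themselves.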
  5421. /**********************************************************************
  5422. * addMethod
  5423. * fixme
  5424. * Locking: runtimeLock must be held by the caller
  5425. **********************************************************************/
  5426. static IMP
  5427. addMethod(Class cls, SEL name, IMP imp, const char *types, bool replace)
  5428. {
  5429. IMP result = nil;
  5430. runtimeLock.assertLocked();
  5431. checkIsKnownClass(cls);
  5432. ASSERT(types);
  5433. ASSERT(cls->isRealized());
  5434. method_t *m;
  5435. if ((m = getMethodNoSuper_nolock(cls, name))) {
  5436. // already exists
  5437. if (!replace) {
  5438. result = m->imp;
  5439. } else {
  5440. result = _method_setImplementation(cls, m, imp);
  5441. }
  5442. } else {
  5443. // fixme optimize
  5444. method_list_t *newlist;
  5445. newlist = (method_list_t *)calloc(sizeof(*newlist), 1);
  5446. newlist->entsizeAndFlags =
  5447. (uint32_t)sizeof(method_t) | fixed_up_method_list;
  5448. newlist->count = 1;
  5449. newlist->first.name = name;
  5450. newlist->first.types = strdupIfMutable(types);
  5451. newlist->first.imp = imp;
  5452. prepareMethodLists(cls, &newlist, 1, NO, NO);
  5453. cls->data()->methods.attachLists(&newlist, 1);
  5454. flushCaches(cls);
  5455. result = nil;
  5456. }
  5457. return result;
  5458. }
  5459. /**********************************************************************
  5460. * addMethods
  5461. * Add the given methods to a class in bulk.
  5462. * Returns the selectors which could not be added, when replace == NO and a
  5463. * method already exists. The returned selectors are NULL terminated and must be
  5464. * freed by the caller. They are NULL if no failures occurred.
  5465. * Locking: runtimeLock must be held by the caller
  5466. **********************************************************************/
  5467. static SEL *
  5468. addMethods(Class cls, const SEL *names, const IMP *imps, const char **types,
  5469. uint32_t count, bool replace, uint32_t *outFailedCount)
  5470. {
  5471. runtimeLock.assertLocked();
  5472. ASSERT(names);
  5473. ASSERT(imps);
  5474. ASSERT(types);
  5475. ASSERT(cls->isRealized());
  5476. method_list_t *newlist;
  5477. size_t newlistSize = method_list_t::byteSize(sizeof(method_t), count);
  5478. newlist = (method_list_t *)calloc(newlistSize, 1);
  5479. newlist->entsizeAndFlags =
  5480. (uint32_t)sizeof(method_t) | fixed_up_method_list;
  5481. newlist->count = 0;
  5482. method_t *newlistMethods = &newlist->first;
  5483. SEL *failedNames = nil;
  5484. uint32_t failedCount = 0;
  5485. for (uint32_t i = 0; i < count; i++) {
  5486. method_t *m;
  5487. if ((m = getMethodNoSuper_nolock(cls, names[i]))) {
  5488. // already exists
  5489. if (!replace) {
  5490. // report failure
  5491. if (failedNames == nil) {
  5492. // allocate an extra entry for a trailing NULL in case
  5493. // every method fails
  5494. failedNames = (SEL *)calloc(sizeof(*failedNames),
  5495. count + 1);
  5496. }
  5497. failedNames[failedCount] = m->name;
  5498. failedCount++;
  5499. } else {
  5500. _method_setImplementation(cls, m, imps[i]);
  5501. }
  5502. } else {
  5503. method_t *newmethod = &newlistMethods[newlist->count];
  5504. newmethod->name = names[i];
  5505. newmethod->types = strdupIfMutable(types[i]);
  5506. newmethod->imp = imps[i];
  5507. newlist->count++;
  5508. }
  5509. }
  5510. if (newlist->count > 0) {
  5511. // fixme resize newlist because it may have been over-allocated above.
  5512. // Note that realloc() alone doesn't work due to ptrauth.
  5513. method_t::SortBySELAddress sorter;
  5514. std::stable_sort(newlist->begin(), newlist->end(), sorter);
  5515. prepareMethodLists(cls, &newlist, 1, NO, NO);
  5516. cls->data()->methods.attachLists(&newlist, 1);
  5517. flushCaches(cls);
  5518. } else {
  5519. // Attaching the method list to the class consumes it. If we don't
  5520. // do that, we have to free the memory ourselves.
  5521. free(newlist);
  5522. }
  5523. if (outFailedCount) *outFailedCount = failedCount;
  5524. return failedNames;
  5525. }
  5526. BOOL
  5527. class_addMethod(Class cls, SEL name, IMP imp, const char *types)
  5528. {
  5529. if (!cls) return NO;
  5530. mutex_locker_t lock(runtimeLock);
  5531. return ! addMethod(cls, name, imp, types ?: "", NO);
  5532. }
  5533. IMP
  5534. class_replaceMethod(Class cls, SEL name, IMP imp, const char *types)
  5535. {
  5536. if (!cls) return nil;
  5537. mutex_locker_t lock(runtimeLock);
  5538. return addMethod(cls, name, imp, types ?: "", YES);
  5539. }
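// Illustrative usage (editor's note; `cls` and the selector are hypothetical):
//   IMP imp = imp_implementationWithBlock(^NSString *(id self) { return @"hi"; });
//   class_addMethod(cls, @selector(greeting), imp, "@@:");     // NO if cls already implements it
//   class_replaceMethod(cls, @selector(greeting), imp, "@@:"); // replaces, or adds if missing
// The type encoding "@@:" describes an object return plus the implicit
// self and _cmd arguments.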
  5540. SEL *
  5541. class_addMethodsBulk(Class cls, const SEL *names, const IMP *imps,
  5542. const char **types, uint32_t count,
  5543. uint32_t *outFailedCount)
  5544. {
  5545. if (!cls) {
  5546. if (outFailedCount) *outFailedCount = count;
  5547. return (SEL *)memdup(names, count * sizeof(*names));
  5548. }
  5549. mutex_locker_t lock(runtimeLock);
  5550. return addMethods(cls, names, imps, types, count, NO, outFailedCount);
  5551. }
  5552. void
  5553. class_replaceMethodsBulk(Class cls, const SEL *names, const IMP *imps,
  5554. const char **types, uint32_t count)
  5555. {
  5556. if (!cls) return;
  5557. mutex_locker_t lock(runtimeLock);
  5558. addMethods(cls, names, imps, types, count, YES, nil);
  5559. }
  5560. /***********************************************************************
  5561. * class_addIvar
  5562. * Adds an ivar to a class.
  5563. * Locking: acquires runtimeLock
  5564. **********************************************************************/
  5565. BOOL
  5566. class_addIvar(Class cls, const char *name, size_t size,
  5567. uint8_t alignment, const char *type)
  5568. {
  5569. if (!cls) return NO;
  5570. if (!type) type = "";
  5571. if (name && 0 == strcmp(name, "")) name = nil;
  5572. mutex_locker_t lock(runtimeLock);
  5573. checkIsKnownClass(cls);
  5574. ASSERT(cls->isRealized());
  5575. // No class variables
  5576. if (cls->isMetaClass()) {
  5577. return NO;
  5578. }
  5579. // Can only add ivars to in-construction classes.
  5580. if (!(cls->data()->flags & RW_CONSTRUCTING)) {
  5581. return NO;
  5582. }
  5583. // Check for existing ivar with this name, unless it's anonymous.
  5584. // Check for too-big ivar.
  5585. // fixme check for superclass ivar too?
  5586. if ((name && getIvar(cls, name)) || size > UINT32_MAX) {
  5587. return NO;
  5588. }
  5589. class_ro_t *ro_w = make_ro_writeable(cls->data());
  5590. // fixme allocate less memory here
  5591. ivar_list_t *oldlist, *newlist;
  5592. if ((oldlist = (ivar_list_t *)cls->data()->ro->ivars)) {
  5593. size_t oldsize = oldlist->byteSize();
  5594. newlist = (ivar_list_t *)calloc(oldsize + oldlist->entsize(), 1);
  5595. memcpy(newlist, oldlist, oldsize);
  5596. free(oldlist);
  5597. } else {
  5598. newlist = (ivar_list_t *)calloc(sizeof(ivar_list_t), 1);
  5599. newlist->entsizeAndFlags = (uint32_t)sizeof(ivar_t);
  5600. }
  5601. uint32_t offset = cls->unalignedInstanceSize();
  5602. uint32_t alignMask = (1<<alignment)-1;
  5603. offset = (offset + alignMask) & ~alignMask;
  5604. ivar_t& ivar = newlist->get(newlist->count++);
  5605. #if __x86_64__
  5606. // Deliberately over-allocate the ivar offset variable.
  5607. // Use calloc() to clear all 64 bits. See the note in struct ivar_t.
  5608. ivar.offset = (int32_t *)(int64_t *)calloc(sizeof(int64_t), 1);
  5609. #else
  5610. ivar.offset = (int32_t *)malloc(sizeof(int32_t));
  5611. #endif
  5612. *ivar.offset = offset;
  5613. ivar.name = name ? strdupIfMutable(name) : nil;
  5614. ivar.type = strdupIfMutable(type);
  5615. ivar.alignment_raw = alignment;
  5616. ivar.size = (uint32_t)size;
  5617. ro_w->ivars = newlist;
  5618. cls->setInstanceSize((uint32_t)(offset + size));
  5619. // Ivar layout updated in registerClass.
  5620. return YES;
  5621. }
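// Illustrative usage (editor's note; names are hypothetical): the `alignment`
// argument is the log2 of the ivar's byte alignment, as the (1<<alignment)
// mask above shows. Adding an id-sized ivar while the class is still under
// construction (after objc_allocateClassPair, before objc_registerClassPair):
//   class_addIvar(newCls, "_value", sizeof(id), log2(sizeof(id)), @encode(id));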
  5622. /***********************************************************************
  5623. * class_addProtocol
  5624. * Adds a protocol to a class.
  5625. * Locking: acquires runtimeLock
  5626. **********************************************************************/
  5627. BOOL class_addProtocol(Class cls, Protocol *protocol_gen)
  5628. {
  5629. protocol_t *protocol = newprotocol(protocol_gen);
  5630. if (!cls) return NO;
  5631. if (class_conformsToProtocol(cls, protocol_gen)) return NO;
  5632. mutex_locker_t lock(runtimeLock);
  5633. ASSERT(cls->isRealized());
  5634. // fixme optimize
  5635. protocol_list_t *protolist = (protocol_list_t *)
  5636. malloc(sizeof(protocol_list_t) + sizeof(protocol_t *));
  5637. protolist->count = 1;
  5638. protolist->list[0] = (protocol_ref_t)protocol;
  5639. cls->data()->protocols.attachLists(&protolist, 1);
  5640. // fixme metaclass?
  5641. return YES;
  5642. }
  5643. /***********************************************************************
  5644. * class_addProperty
  5645. * Adds a property to a class.
  5646. * Locking: acquires runtimeLock
  5647. **********************************************************************/
  5648. static bool
  5649. _class_addProperty(Class cls, const char *name,
  5650. const objc_property_attribute_t *attrs, unsigned int count,
  5651. bool replace)
  5652. {
  5653. if (!cls) return NO;
  5654. if (!name) return NO;
  5655. property_t *prop = class_getProperty(cls, name);
  5656. if (prop && !replace) {
  5657. // already exists, refuse to replace
  5658. return NO;
  5659. }
  5660. else if (prop) {
  5661. // replace existing
  5662. mutex_locker_t lock(runtimeLock);
  5663. try_free(prop->attributes);
  5664. prop->attributes = copyPropertyAttributeString(attrs, count);
  5665. return YES;
  5666. }
  5667. else {
  5668. mutex_locker_t lock(runtimeLock);
  5669. ASSERT(cls->isRealized());
  5670. property_list_t *proplist = (property_list_t *)
  5671. malloc(sizeof(*proplist));
  5672. proplist->count = 1;
  5673. proplist->entsizeAndFlags = sizeof(proplist->first);
  5674. proplist->first.name = strdupIfMutable(name);
  5675. proplist->first.attributes = copyPropertyAttributeString(attrs, count);
  5676. cls->data()->properties.attachLists(&proplist, 1);
  5677. return YES;
  5678. }
  5679. }
  5680. BOOL
  5681. class_addProperty(Class cls, const char *name,
  5682. const objc_property_attribute_t *attrs, unsigned int n)
  5683. {
  5684. return _class_addProperty(cls, name, attrs, n, NO);
  5685. }
  5686. void
  5687. class_replaceProperty(Class cls, const char *name,
  5688. const objc_property_attribute_t *attrs, unsigned int n)
  5689. {
  5690. _class_addProperty(cls, name, attrs, n, YES);
  5691. }
  5692. /***********************************************************************
  5693. * look_up_class
  5694. * Look up a class by name, and realize it.
  5695. * Locking: acquires runtimeLock
  5696. **********************************************************************/
  5697. static BOOL empty_getClass(const char *name, Class *outClass)
  5698. {
  5699. *outClass = nil;
  5700. return NO;
  5701. }
  5702. static ChainedHookFunction<objc_hook_getClass> GetClassHook{empty_getClass};
  5703. void objc_setHook_getClass(objc_hook_getClass newValue,
  5704. objc_hook_getClass *outOldValue)
  5705. {
  5706. GetClassHook.set(newValue, outOldValue);
  5707. }
  5708. Class
  5709. look_up_class(const char *name,
  5710. bool includeUnconnected __attribute__((unused)),
  5711. bool includeClassHandler __attribute__((unused)))
  5712. {
  5713. if (!name) return nil;
  5714. Class result;
  5715. bool unrealized;
  5716. {
  5717. runtimeLock.lock();
  5718. result = getClassExceptSomeSwift(name);
  5719. unrealized = result && !result->isRealized();
  5720. if (unrealized) {
  5721. result = realizeClassMaybeSwiftAndUnlock(result, runtimeLock);
  5722. // runtimeLock is now unlocked
  5723. } else {
  5724. runtimeLock.unlock();
  5725. }
  5726. }
  5727. if (!result) {
  5728. // Ask Swift about its un-instantiated classes.
  5729. // We use thread-local storage to prevent infinite recursion
  5730. // if the hook function provokes another lookup of the same name
  5731. // (for example, if the hook calls objc_allocateClassPair)
  5732. auto *tls = _objc_fetch_pthread_data(true);
  5733. // Stop if this thread is already looking up this name.
  5734. for (unsigned i = 0; i < tls->classNameLookupsUsed; i++) {
  5735. if (0 == strcmp(name, tls->classNameLookups[i])) {
  5736. return nil;
  5737. }
  5738. }
  5739. // Save this lookup in tls.
  5740. if (tls->classNameLookupsUsed == tls->classNameLookupsAllocated) {
  5741. tls->classNameLookupsAllocated =
  5742. (tls->classNameLookupsAllocated * 2 ?: 1);
  5743. size_t size = tls->classNameLookupsAllocated *
  5744. sizeof(tls->classNameLookups[0]);
  5745. tls->classNameLookups = (const char **)
  5746. realloc(tls->classNameLookups, size);
  5747. }
  5748. tls->classNameLookups[tls->classNameLookupsUsed++] = name;
  5749. // Call the hook.
  5750. Class swiftcls = nil;
  5751. if (GetClassHook.get()(name, &swiftcls)) {
  5752. ASSERT(swiftcls->isRealized());
  5753. result = swiftcls;
  5754. }
  5755. // Erase the name from tls.
  5756. unsigned slot = --tls->classNameLookupsUsed;
  5757. ASSERT(slot >= 0 && slot < tls->classNameLookupsAllocated);
  5758. ASSERT(name == tls->classNameLookups[slot]);
  5759. tls->classNameLookups[slot] = nil;
  5760. }
  5761. return result;
  5762. }
  5763. /***********************************************************************
  5764. * objc_duplicateClass
  5765. * fixme
  5766. * Locking: acquires runtimeLock
  5767. **********************************************************************/
  5768. Class
  5769. objc_duplicateClass(Class original, const char *name,
  5770. size_t extraBytes)
  5771. {
  5772. Class duplicate;
  5773. mutex_locker_t lock(runtimeLock);
  5774. checkIsKnownClass(original);
  5775. ASSERT(original->isRealized());
  5776. ASSERT(!original->isMetaClass());
  5777. duplicate = alloc_class_for_subclass(original, extraBytes);
  5778. duplicate->initClassIsa(original->ISA());
  5779. duplicate->superclass = original->superclass;
  5780. duplicate->cache.initializeToEmpty();
  5781. class_rw_t *rw = (class_rw_t *)calloc(sizeof(*original->data()), 1);
  5782. rw->flags = (original->data()->flags | RW_COPIED_RO | RW_REALIZING);
  5783. rw->version = original->data()->version;
  5784. rw->firstSubclass = nil;
  5785. rw->nextSiblingClass = nil;
  5786. duplicate->bits = original->bits;
  5787. duplicate->setData(rw);
  5788. rw->ro = original->data()->ro->duplicate();
  5789. *(char **)&rw->ro->name = strdupIfMutable(name);
  5790. rw->methods = original->data()->methods.duplicate();
  5791. // fixme dies when categories are added to the base
  5792. rw->properties = original->data()->properties;
  5793. rw->protocols = original->data()->protocols;
  5794. duplicate->chooseClassArrayIndex();
  5795. if (duplicate->superclass) {
  5796. addSubclass(duplicate->superclass, duplicate);
  5797. // duplicate->isa == original->isa so don't addSubclass() for it
  5798. } else {
  5799. addRootClass(duplicate);
  5800. }
  5801. // Don't methodize class - construction above is correct
  5802. addNamedClass(duplicate, duplicate->data()->ro->name);
  5803. addClassTableEntry(duplicate, /*addMeta=*/false);
  5804. if (PrintConnecting) {
  5805. _objc_inform("CLASS: realizing class '%s' (duplicate of %s) %p %p",
  5806. name, original->nameForLogging(),
  5807. (void*)duplicate, duplicate->data()->ro);
  5808. }
  5809. duplicate->clearInfo(RW_REALIZING);
  5810. return duplicate;
  5811. }
  5812. /***********************************************************************
  5813. * objc_initializeClassPair
  5814. * Locking: runtimeLock must be write-locked by the caller
  5815. **********************************************************************/
  5816. // &UnsetLayout is the default ivar layout during class construction
  5817. static const uint8_t UnsetLayout = 0;
  5818. static void objc_initializeClassPair_internal(Class superclass, const char *name, Class cls, Class meta)
  5819. {
  5820. runtimeLock.assertLocked();
  5821. class_ro_t *cls_ro_w, *meta_ro_w;
  5822. cls->setData((class_rw_t *)calloc(sizeof(class_rw_t), 1));
  5823. meta->setData((class_rw_t *)calloc(sizeof(class_rw_t), 1));
  5824. cls_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
  5825. meta_ro_w = (class_ro_t *)calloc(sizeof(class_ro_t), 1);
  5826. cls->data()->ro = cls_ro_w;
  5827. meta->data()->ro = meta_ro_w;
  5828. // Set basic info
  5829. cls->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
  5830. meta->data()->flags = RW_CONSTRUCTING | RW_COPIED_RO | RW_REALIZED | RW_REALIZING;
  5831. cls->data()->version = 0;
  5832. meta->data()->version = 7;
  5833. cls_ro_w->flags = 0;
  5834. meta_ro_w->flags = RO_META;
  5835. if (!superclass) {
  5836. cls_ro_w->flags |= RO_ROOT;
  5837. meta_ro_w->flags |= RO_ROOT;
  5838. }
  5839. if (superclass) {
  5840. uint32_t flagsToCopy = RW_FORBIDS_ASSOCIATED_OBJECTS;
  5841. cls->data()->flags |= superclass->data()->flags & flagsToCopy;
  5842. cls_ro_w->instanceStart = superclass->unalignedInstanceSize();
  5843. meta_ro_w->instanceStart = superclass->ISA()->unalignedInstanceSize();
  5844. cls->setInstanceSize(cls_ro_w->instanceStart);
  5845. meta->setInstanceSize(meta_ro_w->instanceStart);
  5846. } else {
  5847. cls_ro_w->instanceStart = 0;
  5848. meta_ro_w->instanceStart = (uint32_t)sizeof(objc_class);
  5849. cls->setInstanceSize((uint32_t)sizeof(id)); // just an isa
  5850. meta->setInstanceSize(meta_ro_w->instanceStart);
  5851. }
  5852. cls_ro_w->name = strdupIfMutable(name);
  5853. meta_ro_w->name = strdupIfMutable(name);
  5854. cls_ro_w->ivarLayout = &UnsetLayout;
  5855. cls_ro_w->weakIvarLayout = &UnsetLayout;
  5856. meta->chooseClassArrayIndex();
  5857. cls->chooseClassArrayIndex();
  5858. // This absolutely needs to be done before addSubclass
  5859. // as initializeToEmpty() clobbers the FAST_CACHE bits
  5860. cls->cache.initializeToEmpty();
  5861. meta->cache.initializeToEmpty();
  5862. #if FAST_CACHE_META
  5863. meta->cache.setBit(FAST_CACHE_META);
  5864. #endif
  5865. meta->setInstancesRequireRawIsa();
  5866. // Connect to superclasses and metaclasses
  5867. cls->initClassIsa(meta);
  5868. if (superclass) {
  5869. meta->initClassIsa(superclass->ISA()->ISA());
  5870. cls->superclass = superclass;
  5871. meta->superclass = superclass->ISA();
  5872. addSubclass(superclass, cls);
  5873. addSubclass(superclass->ISA(), meta);
  5874. } else {
  5875. meta->initClassIsa(meta);
  5876. cls->superclass = Nil;
  5877. meta->superclass = cls;
  5878. addRootClass(cls);
  5879. addSubclass(cls, meta);
  5880. }
  5881. addClassTableEntry(cls);
  5882. }
  5883. /***********************************************************************
  5884. * verifySuperclass
  5885. * Sanity-check the superclass provided to
  5886. * objc_allocateClassPair, objc_initializeClassPair, or objc_readClassPair.
  5887. **********************************************************************/
  5888. bool
  5889. verifySuperclass(Class superclass, bool rootOK)
  5890. {
  5891. if (!superclass) {
  5892. // Superclass does not exist.
  5893. // If subclass may be a root class, this is OK.
  5894. // If subclass must not be a root class, this is bad.
  5895. return rootOK;
  5896. }
  5897. // Superclass must be realized.
  5898. if (! superclass->isRealized()) return false;
  5899. // Superclass must not be under construction.
  5900. if (superclass->data()->flags & RW_CONSTRUCTING) return false;
  5901. return true;
  5902. }
  5903. /***********************************************************************
  5904. * objc_initializeClassPair
  5905. **********************************************************************/
  5906. Class objc_initializeClassPair(Class superclass, const char *name, Class cls, Class meta)
  5907. {
  5908. // Fail if the class name is in use.
  5909. if (look_up_class(name, NO, NO)) return nil;
  5910. mutex_locker_t lock(runtimeLock);
  5911. // Fail if the class name is in use.
  5912. // Fail if the superclass isn't kosher.
  5913. if (getClassExceptSomeSwift(name) ||
  5914. !verifySuperclass(superclass, true/*rootOK*/))
  5915. {
  5916. return nil;
  5917. }
  5918. objc_initializeClassPair_internal(superclass, name, cls, meta);
  5919. return cls;
  5920. }
  5921. /***********************************************************************
  5922. * objc_allocateClassPair
  5923. * fixme
  5924. * Locking: acquires runtimeLock
  5925. **********************************************************************/
  5926. Class objc_allocateClassPair(Class superclass, const char *name,
  5927. size_t extraBytes)
  5928. {
  5929. Class cls, meta;
  5930. // Fail if the class name is in use.
  5931. if (look_up_class(name, NO, NO)) return nil;
  5932. mutex_locker_t lock(runtimeLock);
  5933. // Fail if the class name is in use.
  5934. // Fail if the superclass isn't kosher.
  5935. if (getClassExceptSomeSwift(name) ||
  5936. !verifySuperclass(superclass, true/*rootOK*/))
  5937. {
  5938. return nil;
  5939. }
  5940. // Allocate new classes.
  5941. cls = alloc_class_for_subclass(superclass, extraBytes);
  5942. meta = alloc_class_for_subclass(superclass, extraBytes);
  5943. // fixme mangle the name if it looks swift-y?
  5944. objc_initializeClassPair_internal(superclass, name, cls, meta);
  5945. return cls;
  5946. }
  5947. /***********************************************************************
  5948. * objc_registerClassPair
  5949. * fixme
  5950. * Locking: acquires runtimeLock
  5951. **********************************************************************/
  5952. void objc_registerClassPair(Class cls)
  5953. {
  5954. mutex_locker_t lock(runtimeLock);
  5955. checkIsKnownClass(cls);
  5956. if ((cls->data()->flags & RW_CONSTRUCTED) ||
  5957. (cls->ISA()->data()->flags & RW_CONSTRUCTED))
  5958. {
  5959. _objc_inform("objc_registerClassPair: class '%s' was already "
  5960. "registered!", cls->data()->ro->name);
  5961. return;
  5962. }
  5963. if (!(cls->data()->flags & RW_CONSTRUCTING) ||
  5964. !(cls->ISA()->data()->flags & RW_CONSTRUCTING))
  5965. {
  5966. _objc_inform("objc_registerClassPair: class '%s' was not "
  5967. "allocated with objc_allocateClassPair!",
  5968. cls->data()->ro->name);
  5969. return;
  5970. }
  5971. // Clear "under construction" bit, set "done constructing" bit
  5972. cls->ISA()->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
  5973. cls->changeInfo(RW_CONSTRUCTED, RW_CONSTRUCTING | RW_REALIZING);
  5974. // Add to named class table.
  5975. addNamedClass(cls, cls->data()->ro->name);
  5976. }
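// Illustrative end-to-end sketch (editor's note; MyCountIMP and all names are
// hypothetical, and the "Q@:" encoding assumes a 64-bit NSUInteger):
//   Class cls = objc_allocateClassPair([NSObject class], "MyDynamicClass", 0);
//   class_addIvar(cls, "_count", sizeof(NSUInteger),
//                 log2(sizeof(NSUInteger)), @encode(NSUInteger));
//   class_addMethod(cls, @selector(count), (IMP)MyCountIMP, "Q@:");
//   objc_registerClassPair(cls);   // flips RW_CONSTRUCTING to RW_CONSTRUCTED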
  5977. /***********************************************************************
  5978. * objc_readClassPair()
  5979. * Read a class and metaclass as written by a compiler.
  5980. * Assumes the class and metaclass are not referenced by other things
  5981. * that might need to be fixed up (such as categories and subclasses).
  5982. * Does not call +load.
  5983. * Returns the class pointer, or nil.
  5984. *
  5985. * Locking: runtimeLock acquired by map_images
  5986. **********************************************************************/
  5987. Class objc_readClassPair(Class bits, const struct objc_image_info *info)
  5988. {
  5989. mutex_locker_t lock(runtimeLock);
  5990. // No info bits are significant yet.
  5991. (void)info;
  5992. // Fail if the superclass isn't kosher.
  5993. bool rootOK = bits->data()->flags & RO_ROOT;
  5994. if (!verifySuperclass(bits->superclass, rootOK)){
  5995. return nil;
  5996. }
  5997. // Duplicate classes are allowed, just like they are for image loading.
  5998. // readClass will complain about the duplicate.
  5999. Class cls = readClass(bits, false/*bundle*/, false/*shared cache*/);
  6000. if (cls != bits) {
  6001. // This function isn't allowed to remap anything.
  6002. _objc_fatal("objc_readClassPair for class %s changed %p to %p",
  6003. cls->nameForLogging(), bits, cls);
  6004. }
  6005. // The only client of this function is old Swift.
  6006. // Stable Swift won't use it.
  6007. // fixme once Swift in the OS settles we can assert(!cls->isSwiftStable()).
  6008. cls = realizeClassWithoutSwift(cls, nil);
  6009. return cls;
  6010. }
  6011. /***********************************************************************
  6012. * detach_class
  6013. * Disconnect a class from other data structures.
  6014. * Exception: does not remove the class from the +load list
  6015. * Call this before free_class.
  6016. * Locking: runtimeLock must be held by the caller.
  6017. **********************************************************************/
  6018. static void detach_class(Class cls, bool isMeta)
  6019. {
  6020. runtimeLock.assertLocked();
  6021. // categories not yet attached to this class
  6022. objc::unattachedCategories.eraseClass(cls);
  6023. // superclass's subclass list
  6024. if (cls->isRealized()) {
  6025. Class supercls = cls->superclass;
  6026. if (supercls) {
  6027. removeSubclass(supercls, cls);
  6028. } else {
  6029. removeRootClass(cls);
  6030. }
  6031. }
  6032. // class tables and +load queue
  6033. if (!isMeta) {
  6034. removeNamedClass(cls, cls->mangledName());
  6035. }
  6036. objc::allocatedClasses.get().erase(cls);
  6037. }
  6038. /***********************************************************************
  6039. * free_class
  6040. * Frees a class's data structures.
  6041. * Call this after detach_class.
  6042. * Locking: runtimeLock must be held by the caller
  6043. **********************************************************************/
  6044. static void free_class(Class cls)
  6045. {
  6046. runtimeLock.assertLocked();
  6047. if (! cls->isRealized()) return;
  6048. auto rw = cls->data();
  6049. auto ro = rw->ro;
  6050. cache_delete(cls);
  6051. for (auto& meth : rw->methods) {
  6052. try_free(meth.types);
  6053. }
  6054. rw->methods.tryFree();
  6055. const ivar_list_t *ivars = ro->ivars;
  6056. if (ivars) {
  6057. for (auto& ivar : *ivars) {
  6058. try_free(ivar.offset);
  6059. try_free(ivar.name);
  6060. try_free(ivar.type);
  6061. }
  6062. try_free(ivars);
  6063. }
  6064. for (auto& prop : rw->properties) {
  6065. try_free(prop.name);
  6066. try_free(prop.attributes);
  6067. }
  6068. rw->properties.tryFree();
  6069. rw->protocols.tryFree();
  6070. try_free(ro->ivarLayout);
  6071. try_free(ro->weakIvarLayout);
  6072. try_free(ro->name);
  6073. try_free(ro);
  6074. try_free(rw);
  6075. try_free(cls);
  6076. }
  6077. void objc_disposeClassPair(Class cls)
  6078. {
  6079. mutex_locker_t lock(runtimeLock);
  6080. checkIsKnownClass(cls);
  6081. if (!(cls->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)) ||
  6082. !(cls->ISA()->data()->flags & (RW_CONSTRUCTED|RW_CONSTRUCTING)))
  6083. {
  6084. // class not allocated with objc_allocateClassPair
  6085. // disposing still-unregistered class is OK!
  6086. _objc_inform("objc_disposeClassPair: class '%s' was not "
  6087. "allocated with objc_allocateClassPair!",
  6088. cls->data()->ro->name);
  6089. return;
  6090. }
  6091. if (cls->isMetaClass()) {
  6092. _objc_inform("objc_disposeClassPair: class '%s' is a metaclass, "
  6093. "not a class!", cls->data()->ro->name);
  6094. return;
  6095. }
  6096. // Shouldn't have any live subclasses.
  6097. if (cls->data()->firstSubclass) {
  6098. _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
  6099. "including '%s'!", cls->data()->ro->name,
  6100. cls->data()->firstSubclass->nameForLogging());
  6101. }
  6102. if (cls->ISA()->data()->firstSubclass) {
  6103. _objc_inform("objc_disposeClassPair: class '%s' still has subclasses, "
  6104. "including '%s'!", cls->data()->ro->name,
  6105. cls->ISA()->data()->firstSubclass->nameForLogging());
  6106. }
  6107. // don't remove_class_from_loadable_list()
  6108. // - it's not there and we don't have the lock
  6109. detach_class(cls->ISA(), YES);
  6110. detach_class(cls, NO);
  6111. free_class(cls->ISA());
  6112. free_class(cls);
  6113. }
  6114. /***********************************************************************
  6115. * objc_constructInstance
  6116. * Creates an instance of `cls` at the location pointed to by `bytes`.
  6117. * `bytes` must point to at least class_getInstanceSize(cls) bytes of
  6118. * well-aligned zero-filled memory.
  6119. * The new object's isa is set. Any C++ constructors are called.
  6120. * Returns `bytes` if successful. Returns nil if `cls` or `bytes` is
  6121. * nil, or if C++ constructors fail.
  6122. * Note: class_createInstance() and class_createInstances() preflight this.
  6123. **********************************************************************/
  6124. id
  6125. objc_constructInstance(Class cls, void *bytes)
  6126. {
  6127. if (!cls || !bytes) return nil;
  6128. id obj = (id)bytes;
  6129. // Read class's info bits all at once for performance
  6130. bool hasCxxCtor = cls->hasCxxCtor();
  6131. bool hasCxxDtor = cls->hasCxxDtor();
  6132. bool fast = cls->canAllocNonpointer();
  6133. if (fast) {
  6134. obj->initInstanceIsa(cls, hasCxxDtor);
  6135. } else {
  6136. obj->initIsa(cls);
  6137. }
  6138. if (hasCxxCtor) {
  6139. return object_cxxConstructFromClass(obj, cls, OBJECT_CONSTRUCT_NONE);
  6140. } else {
  6141. return obj;
  6142. }
  6143. }
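// Illustrative pairing (editor's note, not part of the runtime): a caller that
// manages its own storage typically does something like:
//   size_t size = class_getInstanceSize(cls);
//   void *bytes = calloc(1, size);           // well-aligned, zero-filled
//   id obj = objc_constructInstance(cls, bytes);
//   ...
//   objc_destructInstance(obj);              // runs C++ dtors, clears associations
//   free(bytes);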
  6144. /***********************************************************************
  6145. * class_createInstance
  6146. * fixme
  6147. * Locking: none
  6148. *
  6149. * Note: this function has been carefully written so that the fastpath
  6150. * takes no branch.
  6151. **********************************************************************/
static ALWAYS_INLINE id
_class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone,
                              int construct_flags = OBJECT_CONSTRUCT_NONE,
                              bool cxxConstruct = true,
                              size_t *outAllocatedSize = nil)
{
    ASSERT(cls->isRealized());

    // Read class's info bits all at once for performance
    bool hasCxxCtor = cxxConstruct && cls->hasCxxCtor();
    bool hasCxxDtor = cls->hasCxxDtor();
    bool fast = cls->canAllocNonpointer();
    size_t size;

    size = cls->instanceSize(extraBytes);
    if (outAllocatedSize) *outAllocatedSize = size;

    id obj;
    if (zone) {
        obj = (id)malloc_zone_calloc((malloc_zone_t *)zone, 1, size);
    } else {
        obj = (id)calloc(1, size);
    }
    if (slowpath(!obj)) {
        if (construct_flags & OBJECT_CONSTRUCT_CALL_BADALLOC) {
            return _objc_callBadAllocHandler(cls);
        }
        return nil;
    }

    if (!zone && fast) {
        obj->initInstanceIsa(cls, hasCxxDtor);
    } else {
        // Use raw pointer isa on the assumption that they might be
        // doing something weird with the zone or RR.
        obj->initIsa(cls);
    }

    if (fastpath(!hasCxxCtor)) {
        return obj;
    }

    construct_flags |= OBJECT_CONSTRUCT_FREE_ONFAILURE;
    return object_cxxConstructFromClass(obj, cls, construct_flags);
}

id
class_createInstance(Class cls, size_t extraBytes)
{
    if (!cls) return nil;
    return _class_createInstanceFromZone(cls, extraBytes, nil);
}

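// Usage sketch (editor's illustration): `extraBytes` asks for additional
// storage beyond the class's declared ivars; the extra space can be reached
// with object_getIndexedIvars(). Hypothetical class name; non-ARC code assumed.
//
//     id obj = class_createInstance(objc_getClass("SomeClass"), 16);
//     void *extra = object_getIndexedIvars(obj);   // 16 bytes of per-object scratch
//     // ... use obj and extra ...
//     object_dispose(obj);
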
NEVER_INLINE
id
_objc_rootAllocWithZone(Class cls, malloc_zone_t *zone __unused)
{
    // allocWithZone under __OBJC2__ ignores the zone parameter
    return _class_createInstanceFromZone(cls, 0, nil,
                                         OBJECT_CONSTRUCT_CALL_BADALLOC);
}

/***********************************************************************
* class_createInstances
* fixme
* Locking: none
**********************************************************************/
#if SUPPORT_NONPOINTER_ISA
#warning fixme optimize class_createInstances
#endif
unsigned
class_createInstances(Class cls, size_t extraBytes,
                      id *results, unsigned num_requested)
{
    return _class_createInstancesFromZone(cls, extraBytes, nil,
                                          results, num_requested);
}

/***********************************************************************
* object_copyFromZone
* fixme
* Locking: none
**********************************************************************/
static id
_object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    if (!oldObj) return nil;
    if (oldObj->isTaggedPointer()) return oldObj;

    // fixme this doesn't handle C++ ivars correctly (#4619414)

    Class cls = oldObj->ISA();
    size_t size;
    id obj = _class_createInstanceFromZone(cls, extraBytes, zone,
                                           OBJECT_CONSTRUCT_NONE, false, &size);
    if (!obj) return nil;

    // Copy everything except the isa, which was already set above.
    uint8_t *copyDst = (uint8_t *)obj + sizeof(Class);
    uint8_t *copySrc = (uint8_t *)oldObj + sizeof(Class);
    size_t copySize = size - sizeof(Class);
    memmove(copyDst, copySrc, copySize);

    fixupCopiedIvars(obj, oldObj);

    return obj;
}

/***********************************************************************
* object_copy
* fixme
* Locking: none
**********************************************************************/
id
object_copy(id oldObj, size_t extraBytes)
{
    return _object_copyFromZone(oldObj, extraBytes, malloc_default_zone());
}

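// Usage sketch (editor's illustration): object_copy() allocates a new instance
// of the receiver's class and copies its ivar storage into it, then runs
// fixupCopiedIvars() as shown above. Assumes non-ARC code and a live object
// `obj`; note the fixme above about C++ ivars.
//
//     id dup = object_copy(obj, 0);
//     // ... use dup ...
//     object_dispose(dup);
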
#if SUPPORT_ZONES

/***********************************************************************
* class_createInstanceFromZone
* fixme
* Locking: none
**********************************************************************/
id
class_createInstanceFromZone(Class cls, size_t extraBytes, void *zone)
{
    if (!cls) return nil;
    return _class_createInstanceFromZone(cls, extraBytes, zone);
}

/***********************************************************************
* object_copyFromZone
* fixme
* Locking: none
**********************************************************************/
id
object_copyFromZone(id oldObj, size_t extraBytes, void *zone)
{
    return _object_copyFromZone(oldObj, extraBytes, zone);
}

#endif

/***********************************************************************
* objc_destructInstance
* Destroys an instance without freeing memory.
* Calls C++ destructors.
* Calls ARC ivar cleanup.
* Removes associative references.
* Returns `obj`. Does nothing if `obj` is nil.
**********************************************************************/
void *objc_destructInstance(id obj)
{
    if (obj) {
        // Read all of the flags at once for performance.
        bool cxx = obj->hasCxxDtor();
        bool assoc = obj->hasAssociatedObjects();

        // This order is important.
        if (cxx) object_cxxDestruct(obj);
        if (assoc) _object_remove_assocations(obj);
        obj->clearDeallocating();
    }

    return obj;
}

/***********************************************************************
* object_dispose
* fixme
* Locking: none
**********************************************************************/
id
object_dispose(id obj)
{
    if (!obj) return nil;

    objc_destructInstance(obj);
    free(obj);

    return nil;
}

/***********************************************************************
* _objc_getFreedObjectClass
* fixme
* Locking: none
**********************************************************************/
Class _objc_getFreedObjectClass (void)
{
    return nil;
}

/***********************************************************************
* Tagged pointer objects.
*
* Tagged pointer objects store the class and the object value in the
* object pointer; the "pointer" does not actually point to anything.
*
* Tagged pointer objects currently use this representation:
* (LSB)
*  1 bit   set if tagged, clear if ordinary object pointer
*  3 bits  tag index
* 60 bits  payload
* (MSB)
* The tag index defines the object's class.
* The payload format is defined by the object's class.
*
* If the tag index is 0b111, the tagged pointer object uses an
* "extended" representation, allowing more classes but with smaller payloads:
* (LSB)
*  1 bit   set if tagged, clear if ordinary object pointer
*  3 bits  0b111
*  8 bits  extended tag index
* 52 bits  payload
* (MSB)
*
* Some architectures reverse the MSB and LSB in these representations.
*
* This representation is subject to change. Representation-agnostic SPI is:
* objc-internal.h for class implementers.
* objc-gdb.h for debuggers.
**********************************************************************/

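// Worked example (editor's illustration of the layout described above, LSB
// variant, ignoring the obfuscator introduced below). For a raw tagged-pointer
// value `ptr`:
//
//     bool      isTagged = ptr & 1;            // low bit set => tagged
//     unsigned  tag      = (ptr >> 1) & 0x7;   // 3-bit basic tag index
//     uintptr_t payload  = ptr >> 4;           // 60-bit payload
//
// When tag == 0b111, the next 8 bits select the extended class and the
// remaining 52 bits are the payload.
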
#if !SUPPORT_TAGGED_POINTERS

// These variables are always provided for debuggers.
uintptr_t objc_debug_taggedpointer_obfuscator = 0;
uintptr_t objc_debug_taggedpointer_mask = 0;
unsigned  objc_debug_taggedpointer_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_slot_mask = 0;
unsigned  objc_debug_taggedpointer_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_payload_rshift = 0;
Class objc_debug_taggedpointer_classes[1] = { nil };

uintptr_t objc_debug_taggedpointer_ext_mask = 0;
unsigned  objc_debug_taggedpointer_ext_slot_shift = 0;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = 0;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = 0;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = 0;
Class objc_debug_taggedpointer_ext_classes[1] = { nil };

static void
disableTaggedPointers() { }

static void
initializeTaggedPointerObfuscator(void) { }

#else

// The "slot" used in the class table and given to the debugger
// includes the is-tagged bit. This makes objc_msgSend faster.
// The "ext" representation doesn't do that.

uintptr_t objc_debug_taggedpointer_obfuscator;
uintptr_t objc_debug_taggedpointer_mask = _OBJC_TAG_MASK;
unsigned  objc_debug_taggedpointer_slot_shift = _OBJC_TAG_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_slot_mask = _OBJC_TAG_SLOT_MASK;
unsigned  objc_debug_taggedpointer_payload_lshift = _OBJC_TAG_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_payload_rshift = _OBJC_TAG_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_classes is defined in objc-msg-*.s

uintptr_t objc_debug_taggedpointer_ext_mask = _OBJC_TAG_EXT_MASK;
unsigned  objc_debug_taggedpointer_ext_slot_shift = _OBJC_TAG_EXT_SLOT_SHIFT;
uintptr_t objc_debug_taggedpointer_ext_slot_mask = _OBJC_TAG_EXT_SLOT_MASK;
unsigned  objc_debug_taggedpointer_ext_payload_lshift = _OBJC_TAG_EXT_PAYLOAD_LSHIFT;
unsigned  objc_debug_taggedpointer_ext_payload_rshift = _OBJC_TAG_EXT_PAYLOAD_RSHIFT;
// objc_debug_taggedpointer_ext_classes is defined in objc-msg-*.s

static void
disableTaggedPointers()
{
    objc_debug_taggedpointer_mask = 0;
    objc_debug_taggedpointer_slot_shift = 0;
    objc_debug_taggedpointer_slot_mask = 0;
    objc_debug_taggedpointer_payload_lshift = 0;
    objc_debug_taggedpointer_payload_rshift = 0;

    objc_debug_taggedpointer_ext_mask = 0;
    objc_debug_taggedpointer_ext_slot_shift = 0;
    objc_debug_taggedpointer_ext_slot_mask = 0;
    objc_debug_taggedpointer_ext_payload_lshift = 0;
    objc_debug_taggedpointer_ext_payload_rshift = 0;
}

// Returns a pointer to the class's storage in the tagged class arrays.
// Assumes the tag is a valid basic tag.
static Class *
classSlotForBasicTagIndex(objc_tag_index_t tag)
{
    uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                >> _OBJC_TAG_INDEX_SHIFT)
                               & _OBJC_TAG_INDEX_MASK);
    uintptr_t obfuscatedTag = tag ^ tagObfuscator;
    // Array index in objc_tag_classes includes the tagged bit itself
#if SUPPORT_MSB_TAGGED_POINTERS
    return &objc_tag_classes[0x8 | obfuscatedTag];
#else
    return &objc_tag_classes[(obfuscatedTag << 1) | 1];
#endif
}

// Returns a pointer to the class's storage in the tagged class arrays,
// or nil if the tag is out of range.
static Class *
classSlotForTagIndex(objc_tag_index_t tag)
{
    if (tag >= OBJC_TAG_First60BitPayload && tag <= OBJC_TAG_Last60BitPayload) {
        return classSlotForBasicTagIndex(tag);
    }

    if (tag >= OBJC_TAG_First52BitPayload && tag <= OBJC_TAG_Last52BitPayload) {
        int index = tag - OBJC_TAG_First52BitPayload;
        uintptr_t tagObfuscator = ((objc_debug_taggedpointer_obfuscator
                                    >> _OBJC_TAG_EXT_INDEX_SHIFT)
                                   & _OBJC_TAG_EXT_INDEX_MASK);
        return &objc_tag_ext_classes[index ^ tagObfuscator];
    }

    return nil;
}

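// Worked example (editor's illustration): ignoring the obfuscator, a basic tag
// T is stored at objc_tag_classes[(T << 1) | 1] on LSB-tagged platforms and at
// objc_tag_classes[0x8 | T] on MSB-tagged platforms; the "is tagged" bit is
// part of the index, so the dispatch code can index the table directly with
// the pointer's own tag bits.
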
/***********************************************************************
* initializeTaggedPointerObfuscator
* Initialize objc_debug_taggedpointer_obfuscator with randomness.
*
* The tagged pointer obfuscator is intended to make it more difficult
* for an attacker to construct a particular object as a tagged pointer,
* in the presence of a buffer overflow or other write control over some
* memory. The obfuscator is XORed with the tagged pointers when setting
* or retrieving payload values. It is filled with randomness on first
* use.
**********************************************************************/
static void
initializeTaggedPointerObfuscator(void)
{
    if (sdkIsOlderThan(10_14, 12_0, 12_0, 5_0, 3_0) ||
        // Set the obfuscator to zero for apps linked against older SDKs,
        // in case they're relying on the tagged pointer representation.
        DisableTaggedPointerObfuscation) {
        objc_debug_taggedpointer_obfuscator = 0;
    } else {
        // Pull random data into the variable, then shift away all non-payload bits.
        arc4random_buf(&objc_debug_taggedpointer_obfuscator,
                       sizeof(objc_debug_taggedpointer_obfuscator));
        objc_debug_taggedpointer_obfuscator &= ~_OBJC_TAG_MASK;
    }
}

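// Sketch (editor's illustration): the obfuscator is applied and removed with
// the same XOR, given a raw (unobfuscated) tagged-pointer value `raw`:
//
//     uintptr_t stored  = raw ^ objc_debug_taggedpointer_obfuscator;    // encode
//     uintptr_t decoded = stored ^ objc_debug_taggedpointer_obfuscator; // == raw
//
// Clearing _OBJC_TAG_MASK above keeps the "is tagged" bit out of the
// obfuscator, so obfuscation never turns a tagged pointer into an apparently
// ordinary pointer or vice versa.
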
/***********************************************************************
* _objc_registerTaggedPointerClass
* Set the class to use for the given tagged pointer index.
* Aborts if the tag is out of range, or if the tag is already
* used by some other class.
**********************************************************************/
void
_objc_registerTaggedPointerClass(objc_tag_index_t tag, Class cls)
{
    if (objc_debug_taggedpointer_mask == 0) {
        _objc_fatal("tagged pointers are disabled");
    }

    Class *slot = classSlotForTagIndex(tag);
    if (!slot) {
        _objc_fatal("tag index %u is invalid", (unsigned int)tag);
    }

    Class oldCls = *slot;

    if (cls && oldCls && cls != oldCls) {
        _objc_fatal("tag index %u used for two different classes "
                    "(was %p %s, now %p %s)", tag,
                    oldCls, oldCls->nameForLogging(),
                    cls, cls->nameForLogging());
    }

    *slot = cls;

    // Store a placeholder class in the basic tag slot that is
    // reserved for the extended tag space, if it isn't set already.
    // Do this lazily when the first extended tag is registered so
    // that old debuggers characterize bogus pointers correctly more often.
    if (tag < OBJC_TAG_First60BitPayload || tag > OBJC_TAG_Last60BitPayload) {
        Class *extSlot = classSlotForBasicTagIndex(OBJC_TAG_RESERVED_7);
        if (*extSlot == nil) {
            extern objc_class OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
            *extSlot = (Class)&OBJC_CLASS_$___NSUnrecognizedTaggedPointer;
        }
    }
}

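// Usage sketch (editor's illustration of this SPI, not a call made here):
// a framework that owns a basic tag registers its class once, after which the
// slot can be queried back. `stringTagClass` is hypothetical.
//
//     _objc_registerTaggedPointerClass(OBJC_TAG_NSString, stringTagClass);
//     ASSERT(_objc_getClassForTag(OBJC_TAG_NSString) == stringTagClass);
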
/***********************************************************************
* _objc_getClassForTag
* Returns the class that is using the given tagged pointer tag.
* Returns nil if no class is using that tag or the tag is out of range.
**********************************************************************/
Class
_objc_getClassForTag(objc_tag_index_t tag)
{
    Class *slot = classSlotForTagIndex(tag);
    if (slot) return *slot;
    else return nil;
}

#endif

#if SUPPORT_FIXUP

OBJC_EXTERN void objc_msgSend_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixup(void);
OBJC_EXTERN void objc_msgSend_stret_fixup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixup(void);
#endif

OBJC_EXTERN void objc_msgSend_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_fixedup(void);
OBJC_EXTERN void objc_msgSend_stret_fixedup(void);
OBJC_EXTERN void objc_msgSendSuper2_stret_fixedup(void);
#if defined(__i386__) || defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fpret_fixedup(void);
#endif
#if defined(__x86_64__)
OBJC_EXTERN void objc_msgSend_fp2ret_fixedup(void);
#endif

/***********************************************************************
* fixupMessageRef
* Repairs an old vtable dispatch call site.
* vtable dispatch itself is not supported.
**********************************************************************/
static void
fixupMessageRef(message_ref_t *msg)
{
    msg->sel = sel_registerName((const char *)msg->sel);

    if (msg->imp == &objc_msgSend_fixup) {
        if (msg->sel == @selector(alloc)) {
            msg->imp = (IMP)&objc_alloc;
        } else if (msg->sel == @selector(allocWithZone:)) {
            msg->imp = (IMP)&objc_allocWithZone;
        } else if (msg->sel == @selector(retain)) {
            msg->imp = (IMP)&objc_retain;
        } else if (msg->sel == @selector(release)) {
            msg->imp = (IMP)&objc_release;
        } else if (msg->sel == @selector(autorelease)) {
            msg->imp = (IMP)&objc_autorelease;
        } else {
            msg->imp = &objc_msgSend_fixedup;
        }
    }
    else if (msg->imp == &objc_msgSendSuper2_fixup) {
        msg->imp = &objc_msgSendSuper2_fixedup;
    }
    else if (msg->imp == &objc_msgSend_stret_fixup) {
        msg->imp = &objc_msgSend_stret_fixedup;
    }
    else if (msg->imp == &objc_msgSendSuper2_stret_fixup) {
        msg->imp = &objc_msgSendSuper2_stret_fixedup;
    }
#if defined(__i386__) || defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fpret_fixup) {
        msg->imp = &objc_msgSend_fpret_fixedup;
    }
#endif
#if defined(__x86_64__)
    else if (msg->imp == &objc_msgSend_fp2ret_fixup) {
        msg->imp = &objc_msgSend_fp2ret_fixedup;
    }
#endif
}

// SUPPORT_FIXUP
#endif

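// Sketch (editor's illustration): a legacy call site holds an {imp, sel} pair.
// Before fixup the pair is roughly { &objc_msgSend_fixup, "retain" }, where the
// sel field still holds a selector name string; fixupMessageRef() registers the
// selector and rewrites imp so the site later jumps straight to objc_retain, or
// to objc_msgSend_fixedup for selectors without a specialized entry point.
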
// ProKit SPI
static Class setSuperclass(Class cls, Class newSuper)
{
    Class oldSuper;

    runtimeLock.assertLocked();

    ASSERT(cls->isRealized());
    ASSERT(newSuper->isRealized());

    oldSuper = cls->superclass;
    removeSubclass(oldSuper, cls);
    removeSubclass(oldSuper->ISA(), cls->ISA());

    cls->superclass = newSuper;
    cls->ISA()->superclass = newSuper->ISA();
    addSubclass(newSuper, cls);
    addSubclass(newSuper->ISA(), cls->ISA());

    // Flush subclass's method caches.
    flushCaches(cls);
    flushCaches(cls->ISA());

    return oldSuper;
}

Class class_setSuperclass(Class cls, Class newSuper)
{
    mutex_locker_t lock(runtimeLock);
    return setSuperclass(cls, newSuper);
}

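// Usage sketch (editor's illustration): reparenting a dynamically created
// class. Hypothetical class names; per the "ProKit SPI" note above this is
// special-purpose and not generally recommended.
//
//     Class cls = objc_allocateClassPair([NSObject class], "MyDynamic", 0);
//     objc_registerClassPair(cls);
//     Class previous = class_setSuperclass(cls, [SomeOtherBase class]);
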
void runtime_init(void)
{
    objc::unattachedCategories.init(32);
    objc::allocatedClasses.init();
}

// __OBJC2__
#endif