/*
 * Copyright (c) 1999-2007 Apple Inc. All Rights Reserved.
 *
 * @APPLE_LICENSE_HEADER_START@
 *
 * This file contains Original Code and/or Modifications of Original Code
 * as defined in and that are subject to the Apple Public Source License
 * Version 2.0 (the 'License'). You may not use this file except in
 * compliance with the License. Please obtain a copy of the License at
 * http://www.opensource.apple.com/apsl/ and read it before using this
 * file.
 *
 * The Original Code and all software distributed under the License are
 * distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
 * EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
 * INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
 * Please see the License for the specific language governing rights and
 * limitations under the License.
 *
 * @APPLE_LICENSE_HEADER_END@
 */

/*
 * objc-private.h
 * Copyright 1988-1996, NeXT Software, Inc.
 */
#ifndef _OBJC_PRIVATE_H_
#define _OBJC_PRIVATE_H_
#include "objc-config.h"
/* Isolate ourselves from the definitions of id and Class in the compiler
 * and public headers.
 */
#ifdef _OBJC_OBJC_H_
#error include objc-private.h before other headers
#endif
#define OBJC_TYPES_DEFINED 1
#undef OBJC_OLD_DISPATCH_PROTOTYPES
#define OBJC_OLD_DISPATCH_PROTOTYPES 0
#include <cstddef> // for nullptr_t
#include <stdint.h>
#include <assert.h>
// An assert that's disabled for release builds but still ensures the expression compiles.
#ifdef NDEBUG
#define ASSERT(x) (void)sizeof(!(x))
#else
#define ASSERT(x) assert(x)
#endif
// `this` is never NULL in C++ unless we encounter UB, but checking for what's impossible
// is the point of these asserts, so disable the corresponding warning, and let's hope
// we will reach the assert despite the UB
#define ASSERT_THIS_NOT_NULL \
    _Pragma("clang diagnostic push") \
    _Pragma("clang diagnostic ignored \"-Wundefined-bool-conversion\"") \
    ASSERT(this) \
    _Pragma("clang diagnostic pop")
// An assert that's enabled in release builds.
#define RELEASE_ASSERT(x, message, ...) \
    do { \
        if (slowpath(!(x))) \
            _objc_fatal("Assertion failed: (%s) - " message, #x __VA_OPT__(,) __VA_ARGS__); \
    } while(0)
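// Illustrative usage of the two assert flavors (variable names below are
// hypothetical): ASSERT() still type-checks its argument in release builds
// but never evaluates it, while RELEASE_ASSERT() stays active everywhere and
// routes failures through _objc_fatal() with a formatted message.
//
//   ASSERT(obj != nil);                                          // debug-only
//   RELEASE_ASSERT(size <= maxSize, "size %zu too large", size); // always on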
// Generate an alias for a function
#define _OBJC_ALIAS_STR(x) #x
#define OBJC_DECLARE_FUNCTION_ALIAS(alias,orig) \
    asm(" .globl _" _OBJC_ALIAS_STR(alias) "\n" \
        " .set _" _OBJC_ALIAS_STR(alias) ",_" _OBJC_ALIAS_STR(orig) "\n")
struct objc_class;
struct objc_object;
struct category_t;
typedef struct objc_class *Class;
typedef struct objc_object *id;
typedef struct classref *classref_t;
namespace {
struct SideTable;
};
#include "isa.h"
union isa_t {
    isa_t() { }
    isa_t(uintptr_t value) : bits(value) { }

    uintptr_t bits;

private:
    // Accessing the class requires custom ptrauth operations, so
    // force clients to go through setClass/getClass by making this
    // private.
    Class cls;

public:
#if defined(ISA_BITFIELD)
    struct {
        ISA_BITFIELD;  // defined in isa.h
    };

#if ISA_HAS_INLINE_RC
    bool isDeallocating() const {
        return extra_rc == 0 && has_sidetable_rc == 0;
    }
    void setDeallocating() {
        extra_rc = 0;
        has_sidetable_rc = 0;
    }
#endif // ISA_HAS_INLINE_RC
#endif

    void setClass(Class cls, objc_object *obj);
    Class getClass(bool authenticated) const;
    Class getDecodedClass(bool authenticated) const;
};
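// Minimal sketch of how the accessors above are intended to be used
// (illustrative only; real call sites live in the runtime .mm files):
//
//   isa_t newisa(0);
//   newisa.setClass(cls, obj);         // stores (and signs, on ptrauth targets)
//   Class c = newisa.getClass(false);  // reads back without authentication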
struct objc_object {
private:
    char isa_storage[sizeof(isa_t)];

    isa_t &isa() { return *reinterpret_cast<isa_t *>(isa_storage); }
    const isa_t &isa() const { return *reinterpret_cast<const isa_t *>(isa_storage); }

public:
    // ISA() assumes this is NOT a tagged pointer object
    Class ISA(bool authenticated = false) const;

    // rawISA() assumes this is NOT a tagged pointer object or a non pointer ISA
    Class rawISA() const;

    // getIsa() allows this to be a tagged pointer object
    Class getIsa() const;

    uintptr_t isaBits() const;

    // initIsa() should be used to init the isa of new objects only.
    // If this object already has an isa, use changeIsa() for correctness.
    // initInstanceIsa(): objects with no custom RR/AWZ
    // initClassIsa(): class objects
    // initProtocolIsa(): protocol objects
    // initIsa(): other objects
    void initIsa(Class cls /*nonpointer=false*/);
    void initClassIsa(Class cls /*nonpointer=maybe*/);
    void initProtocolIsa(Class cls /*nonpointer=maybe*/);
    void initInstanceIsa(Class cls, bool hasCxxDtor);

    // changeIsa() should be used to change the isa of existing objects.
    // If this is a new object, use initIsa() for performance.
    Class changeIsa(Class newCls);

    bool hasNonpointerIsa() const;
    bool isTaggedPointer() const;
    bool isBasicTaggedPointer() const;
    bool isExtTaggedPointer() const;
    bool isClass() const;

    // object may have associated objects?
    bool hasAssociatedObjects() const;
    void setHasAssociatedObjects();

    // object may be weakly referenced?
    bool isWeaklyReferenced() const;
    void setWeaklyReferenced_nolock();

    // object may be uniquely referenced?
    bool isUniquelyReferenced() const;

    // object may have -.cxx_destruct implementation?
    bool hasCxxDtor() const;

    // Optimized calls to retain/release methods
    id retain();
    void release();
    id autorelease();

    // Implementations of retain/release methods
    id rootRetain();
    bool rootRelease();
    id rootAutorelease();
    bool rootTryRetain();
    bool rootReleaseShouldDealloc();
    uintptr_t rootRetainCount() const;

    // Implementation of dealloc methods
    bool rootIsDeallocating() const;
    void clearDeallocating();
    void rootDealloc();

private:
    void initIsa(Class newCls, bool nonpointer, bool hasCxxDtor);

    // Slow paths for inline control
    id rootAutorelease2();

#if SUPPORT_NONPOINTER_ISA
    // Controls what parts of root{Retain,Release} to emit/inline
    // - Full means the full (slow) implementation
    // - Fast means the fastpaths only
    // - FastOrMsgSend means the fastpaths but checking whether we should call
    //   -retain/-release or Swift, for the usage of objc_{retain,release}
    enum class RRVariant {
        Full,
        Fast,
        FastOrMsgSend,
    };

    // Unified retain count manipulation for nonpointer isa
    inline id rootRetain(bool tryRetain, RRVariant variant);
    inline bool rootRelease(bool performDealloc, RRVariant variant);
    id rootRetain_overflow(bool tryRetain);
    uintptr_t rootRelease_underflow(bool performDealloc);

    void clearDeallocating_slow();

    // Side table retain count overflow for nonpointer isa
    struct SidetableBorrow { size_t borrowed, remaining; };

    void sidetable_lock() const;
    void sidetable_unlock() const;

    void sidetable_moveExtraRC_nolock(size_t extra_rc, bool isDeallocating, bool weaklyReferenced);
    bool sidetable_addExtraRC_nolock(size_t delta_rc);
    SidetableBorrow sidetable_subExtraRC_nolock(size_t delta_rc);
    size_t sidetable_getExtraRC_nolock() const;
    void sidetable_clearExtraRC_nolock();
#endif

    // Side-table-only retain count
    bool sidetable_isDeallocating() const;
    void sidetable_clearDeallocating();

    bool sidetable_isWeaklyReferenced() const;
    void sidetable_setWeaklyReferenced_nolock();

    id sidetable_retain(bool locked = false);
    id sidetable_retain_slow(SideTable& table);

    uintptr_t sidetable_release(bool locked = false, bool performDealloc = true);
    uintptr_t sidetable_release_slow(SideTable& table, bool performDealloc = true);

    bool sidetable_tryRetain();

    uintptr_t sidetable_retainCount() const;
#if DEBUG
    bool sidetable_present() const;
#endif

    void performDealloc();
};
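// Hedged sketch of the intended call pattern (retainIfOrdinary is a
// hypothetical helper, not a runtime entry point): callers check for tagged
// pointers before using the optimized inline methods declared above.
//
//   static id retainIfOrdinary(id obj) {
//       if (!obj || obj->isTaggedPointer()) return obj;  // tagged pointers are immortal
//       return obj->retain();                            // inline fast path
//   }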
// signed_method_t is a dummy type that exists to distinguish between
// externally-visible Method values and internal method_t* values.
// Externally-visible Method values are signed on ptrauth archs.
// Use _method_auth and _method_sign to convert between them.
typedef struct signed_method_t *Method;
typedef struct ivar_t *Ivar;
typedef struct category_t *Category;
typedef struct property_t *objc_property_t;
// Settings from environment variables
typedef enum {
    Off = 0,
    On = 1,
    Fatal = 2
} option_value_t;
#define OPTION(var, def, env, help) extern option_value_t var;
#define INTERNAL_OPTION(var, def, env, help) extern option_value_t var;
#include "objc-env.h"
#undef OPTION
#undef INTERNAL_OPTION
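// Sketch of the X-macro expansion above (DebugSomething is a hypothetical
// entry; the real ones live in objc-env.h): a line such as
//
//   OPTION(DebugSomething, Off, "OBJC_DEBUG_SOMETHING", "describe the option")
//
// expands here to `extern option_value_t DebugSomething;`. Other translation
// units typically redefine OPTION before including objc-env.h again to build
// tables or default values from the same list.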
/* errors */
extern id(*badAllocHandler)(Class);
extern id _objc_callBadAllocHandler(Class cls) __attribute__((cold, noinline));
extern void __objc_error(id, const char *, ...) __attribute__((cold, format (printf, 2, 3), noreturn));
extern void _objc_inform(const char *fmt, ...) __attribute__((cold, format(printf, 1, 2)));
extern void _objc_inform_on_crash(const char *fmt, ...) __attribute__((cold, format (printf, 1, 2)));
extern void _objc_inform_now_and_on_crash(const char *fmt, ...) __attribute__((cold, format (printf, 1, 2)));
extern void _objc_inform_deprecated(const char *oldname, const char *newname) __attribute__((cold, noinline));
extern void inform_duplicate(const char *name, Class oldCls, Class cls);
// Public headers
#include "objc.h"
#include "runtime.h"
#include "objc-os.h"
#include "objc-abi.h"
#include "objc-api.h"
#include "objc-config.h"
#include "objc-internal.h"
#include "maptable.h"
#include "hashtable2.h"
/* Do not include message.h here. */
/* #include "message.h" */
#define __APPLE_API_PRIVATE
#include "objc-gdb.h"
#undef __APPLE_API_PRIVATE
// Private headers
#include "objc-ptrauth.h"
#include "objc-runtime-new.h"
#include "objc-references.h"
#include "objc-initialize.h"
#include "objc-loadmethod.h"
#include "objc-opt.h"
#define STRINGIFY(x) #x
#define STRINGIFY2(x) STRINGIFY(x)
__BEGIN_DECLS
/* preoptimization */
extern void preopt_init(void);
extern void disableSharedCacheProtocolOptimizations(void);
extern bool isPreoptimized(void);
extern bool noMissingWeakSuperclasses(void);
extern header_info *preoptimizedHinfoForHeader(const headerType *mhdr);
extern Protocol *getPreoptimizedProtocol(const char *name);
extern Protocol *getSharedCachePreoptimizedProtocol(const char *name);
extern unsigned getPreoptimizedClassUnreasonableCount();
extern Class getPreoptimizedClass(const char *name);
extern Class getPreoptimizedClassesWithMetaClass(Class metacls);
namespace objc {

struct SafeRanges {
private:
    struct Range {
        uintptr_t start;
        uintptr_t end;

        inline bool contains(uintptr_t ptr) const {
            uintptr_t m_start, m_end;
#if __arm64__
            // <rdar://problem/48304934> Force the compiler to use ldp
            // we really don't want 2 loads and 2 jumps.
            __asm__(
# if __LP64__
                    "ldp %x[one], %x[two], [%x[src]]"
# else
                    "ldp %w[one], %w[two], [%x[src]]"
# endif
                    : [one] "=r" (m_start), [two] "=r" (m_end)
                    : [src] "r" (this)
            );
#else
            m_start = start;
            m_end = end;
#endif
            return m_start <= ptr && ptr < m_end;
        }
    };

    struct Range shared_cache;
    struct Range *ranges;
    uint32_t count;
    uint32_t size : 31;
    uint32_t sorted : 1;

public:
    inline bool inSharedCache(uintptr_t ptr) const {
        return shared_cache.contains(ptr);
    }
    inline bool contains(uint16_t witness, uintptr_t ptr) const {
        return witness < count && ranges[witness].contains(ptr);
    }

    inline void setSharedCacheRange(uintptr_t start, uintptr_t end) {
        shared_cache = Range{start, end};
        add(start, end);
    }
    bool find(uintptr_t ptr, uint32_t &pos);
    void add(uintptr_t start, uintptr_t end);
    void remove(uintptr_t start, uintptr_t end);
};

extern struct SafeRanges dataSegmentsRanges;

static inline bool inSharedCache(uintptr_t ptr) {
    return dataSegmentsRanges.inSharedCache(ptr);
}

} // objc
struct header_info;
struct header_info_rw* getPreoptimizedHeaderRW(const struct header_info *const hdr);
bool hasSharedCacheDyldInfo();
typedef struct header_info {
private:
    // Note, this is no longer a pointer, but instead an offset to a pointer
    // from this location.
    intptr_t mhdr_offset;

    // Note, this is no longer a pointer, but instead an offset to a pointer
    // from this location.
    intptr_t info_offset;

    // Note, this is no longer a pointer, but instead an offset to a pointer
    // from this location.
    // This may not be present in old shared caches
    intptr_t dyld_info_offset;

    // Do not add fields without editing ObjCModernAbstraction.hpp
public:
    header_info_rw *getHeaderInfoRW() {
        header_info_rw *preopt = getPreoptimizedHeaderRW(this);
        if (preopt) return preopt;
        else return &rw_data[0];
    }

    const headerType *mhdr() const {
        return (const headerType *)(((intptr_t)&mhdr_offset) + mhdr_offset);
    }

    void setmhdr(const headerType *mhdr) {
        mhdr_offset = (intptr_t)mhdr - (intptr_t)&mhdr_offset;
    }

    const objc_image_info *info() const {
        return (const objc_image_info *)(((intptr_t)&info_offset) + info_offset);
    }

    void setinfo(const objc_image_info *info) {
        info_offset = (intptr_t)info - (intptr_t)&info_offset;
    }

    _dyld_section_location_info_t dyldInfo() const {
        // Shared cache images might not have the info, for now
        if ( isPreoptimized() ) {
            if ( !hasSharedCacheDyldInfo() )
                return NULL;
        }
        return (const _dyld_section_location_info_t)(((intptr_t)&dyld_info_offset) + dyld_info_offset);
    }

    void setdyldInfo(_dyld_section_location_info_t info) {
        dyld_info_offset = (intptr_t)info - (intptr_t)&dyld_info_offset;
    }

    // refs sections
    SEL *selrefs(size_t *outCount) const;
    message_ref_t *messagerefs(size_t *outCount) const;
    Class* classrefs(size_t *outCount) const;
    Class* superrefs(size_t *outCount) const;
    protocol_t ** protocolrefs(size_t *outCount) const;

    // list sections
    classref_t const *classlist(size_t *outCount) const;
    const classref_t *nlclslist(size_t *outCount) const;
    stub_class_t * const *stublist(size_t *outCount) const;
    category_t * const *catlist(size_t *outCount) const;
    category_t * const *catlist2(size_t *outCount) const;
    category_t * const *nlcatlist(size_t *outCount) const;
    protocol_t * const *protocollist(size_t *outCount) const;

    // misc sections
    bool hasForkOkSection() const;
    bool hasRawISASection() const;

    bool isLoaded() {
        return getHeaderInfoRW()->getLoaded();
    }

    void setLoaded(bool v) {
        getHeaderInfoRW()->setLoaded(v);
    }

    header_info *getNext() {
        return getHeaderInfoRW()->getNext();
    }

    void setNext(header_info *v) {
        getHeaderInfoRW()->setNext(v);
    }

    bool isBundle() {
        return mhdr()->filetype == MH_BUNDLE;
    }

    const char *fname() const {
        return dyld_image_path_containing_address(mhdr());
    }

    bool isPreoptimized() const;

private:
    // Images in the shared cache will have an empty array here while those
    // allocated at run time will allocate a single entry.
    header_info_rw rw_data[];
} header_info;
extern header_info *FirstHeader;
extern header_info *LastHeader;
extern header_info *LastHeaderRealizedAllClasses;
extern void appendHeader(header_info *hi);
extern void removeHeader(header_info *hi);
extern objc_image_info *
_getObjcImageInfo(const headerType *mhdr, _dyld_section_location_info_t info, size_t *outBytes);
struct mapped_image_info {
    header_info *hi;
    _dyld_objc_notify_mapped_info dyldInfo;

    bool dyldObjCRefsOptimized() {
        return dyldInfo.dyldObjCRefsOptimized && isPreoptimized();
    }

    // TODO: dyld will add a flag for this which we need to adopt.
    bool dyldCategoriesOptimized() {
        return false;
    }
};
// Mach-O segment and section names are 16 bytes and may be un-terminated.
static inline bool segnameEquals(const char *lhs, const char *rhs) {
    return 0 == strncmp(lhs, rhs, 16);
}

static inline bool segnameStartsWith(const char *segname, const char *prefix) {
    return 0 == strncmp(segname, prefix, strlen(prefix));
}

static inline bool sectnameEquals(const char *lhs, const char *rhs) {
    return segnameEquals(lhs, rhs);
}

static inline bool sectnameStartsWith(const char *sectname, const char *prefix) {
    return segnameStartsWith(sectname, prefix);
}
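// Illustrative calls (assumed call site walking a mach_header's load
// commands): compare Mach-O names without relying on NUL termination.
//
//   if (segnameStartsWith(seg->segname, "__DATA")) {
//       // matches __DATA, __DATA_CONST, __DATA_DIRTY, ...
//   }
//   if (sectnameEquals(sect->sectname, "__objc_classlist")) { /* ... */ }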
extern bool didCallDyldNotifyRegister;
/* selectors */
extern void sel_init(size_t selrefCount);
extern SEL sel_registerNameNoLock(const char *str, bool copy);
extern SEL _sel_searchBuiltins(const char *str);
extern SEL SEL_cxx_construct;
extern SEL SEL_cxx_destruct;
extern Class _calloc_class(size_t size);
/* method lookup */
enum {
    LOOKUP_INITIALIZE = 1,
    LOOKUP_RESOLVER = 2,
    LOOKUP_NIL = 4,
    LOOKUP_NOCACHE = 8,
};
extern IMP lookUpImpOrForward(id obj, SEL, Class cls, int behavior);
extern IMP lookUpImpOrForwardTryCache(id obj, SEL, Class cls, int behavior = 0);
extern IMP lookUpImpOrNilTryCache(id obj, SEL, Class cls, int behavior = 0);
extern IMP lookupMethodInClassAndLoadCache(Class cls, SEL sel);
struct IMPAndSEL {
    IMP imp;
    SEL sel;
};
extern IMPAndSEL _method_getImplementationAndName(Method m);
extern BOOL class_respondsToSelector_inst(id inst, SEL sel, Class cls);
extern Class class_initialize(Class cls, id inst);
extern bool objcMsgLogEnabled;
extern bool logMessageSend(bool isClassMethod,
                           const char *objectsClass,
                           const char *implementingClass,
                           SEL selector);
/* message dispatcher */
#if !OBJC_OLD_DISPATCH_PROTOTYPES
extern void _objc_msgForward_impcache(void);
#else
extern id _objc_msgForward_impcache(id, SEL, ...);
#endif
// Report an error gated on an environment variable. Based on the variable,
// uses _objc_inform or _objc_fatal. The format string and arguments are only
// evaluated when needed.
#define OBJC_DEBUG_OPTION_REPORT_ERROR(option, ...) \
    do { \
        if (option) \
            (option == Fatal ? _objc_fatal : _objc_inform)(__VA_ARGS__); \
    } while(0)
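// Illustrative usage (DebugSomething is a hypothetical option): with the
// option set to On the report goes through _objc_inform; with Fatal the
// process stops via _objc_fatal; with Off the arguments are never evaluated.
//
//   OBJC_DEBUG_OPTION_REPORT_ERROR(DebugSomething,
//                                  "class %s has a problem", name);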
/* magic */
extern Class _objc_getFreedObjectClass (void);
/* map table additions */
extern void *NXMapKeyCopyingInsert(NXMapTable *table, const void *key, const void *value);
extern void *NXMapKeyFreeingRemove(NXMapTable *table, const void *key);
/* hash table additions */
extern unsigned _NXHashCapacity(NXHashTable *table);
extern void _NXHashRehashToCapacity(NXHashTable *table, unsigned newCapacity);
/* property attribute parsing */
extern const char *copyPropertyAttributeString(const objc_property_attribute_t *attrs, unsigned int count);
extern objc_property_attribute_t *copyPropertyAttributeList(const char *attrs, unsigned int *outCount);
extern char *copyPropertyAttributeValue(const char *attrs, const char *name);
/* locking */
class recursive_mutex_locker_t : nocopy_t {
    recursive_mutex_t& lock;
public:
    recursive_mutex_locker_t(recursive_mutex_t& newLock)
        : lock(newLock) { lock.lock(); }
    ~recursive_mutex_locker_t() { lock.unlock(); }
};
/* Exceptions */
struct alt_handler_list;
extern void exception_init(void);
extern void _destroyAltHandlerList(struct alt_handler_list *list);
/* Class change notifications (gdb only for now) */
#define OBJC_CLASS_ADDED (1<<0)
#define OBJC_CLASS_REMOVED (1<<1)
#define OBJC_CLASS_IVARS_CHANGED (1<<2)
#define OBJC_CLASS_METHODS_CHANGED (1<<3)
extern void gdb_objc_class_changed(Class cls, unsigned long changes, const char *classname)
    __attribute__((noinline));
extern void environ_init(void);
extern void runtime_init(void);
extern void logReplacedMethod(const char *className, SEL s, bool isMeta, const char *catName, void *oldImp, void *newImp);
// objc per-thread storage
struct _objc_pthread_data {
    struct _objc_initializing_classes *initializingClasses; // for +initialize
    struct SyncCache *syncCache;           // for @synchronized
    struct alt_handler_list *handlerList;  // for exception alt handlers
    char *printableNames[4];               // temporary demangled names for logging
    const char **classNameLookups;         // for objc_getClass() hooks
    unsigned classNameLookupsAllocated;
    unsigned classNameLookupsUsed;

    // If you add new fields here, don't forget to update the destructor
    ~_objc_pthread_data();
};
extern _objc_pthread_data *_objc_fetch_pthread_data(bool create);
// encoding.h
extern unsigned int encoding_getNumberOfArguments(const char *typedesc);
extern unsigned int encoding_getSizeOfArguments(const char *typedesc);
extern unsigned int encoding_getArgumentInfo(const char *typedesc, unsigned int arg, const char **type, int *offset);
extern void encoding_getReturnType(const char *t, char *dst, size_t dst_len);
extern char * encoding_copyReturnType(const char *t);
extern void encoding_getArgumentType(const char *t, unsigned int index, char *dst, size_t dst_len);
extern char *encoding_copyArgumentType(const char *t, unsigned int index);
// sync.h
/// Different kinds of synchronization. The `_objc_sync_enter/exit_kind` calls
/// map each unique `(object, kind)` pair to a distinct lock. The `kind` allows
/// multiple distinct locks for the same object, used for different purposes.
enum class SyncKind {
    invalid,          // Don't use, raw value of 0 means something went wrong.
    atSynchronize,    // Used for @synchronized/objc_sync_enter/exit.
    classInitialize,  // Used for +initialize machinery.
};
extern void _destroySyncCache(struct SyncCache *cache);
extern void _objc_sync_exit_forked_child(id obj, SyncKind kind);
extern void _objc_sync_assert_locked(id obj, SyncKind kind);
extern void _objc_sync_assert_unlocked(id obj, SyncKind kind);
extern void _objc_sync_foreach_lock(void (^call)(id obj, SyncKind kind, recursive_mutex_t *mutex));
extern void _objc_sync_lock_atfork_prepare(void);
extern void _objc_sync_lock_atfork_parent(void);
extern void _objc_sync_lock_atfork_child(void);
extern int _objc_sync_enter_kind(id obj, SyncKind kind);
extern int _objc_sync_exit_kind(id obj, SyncKind kind);
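// Sketch of the (object, kind) pairing described above (illustrative only):
// the same object can hold the @synchronized lock and the +initialize lock
// independently, because the kind is part of the lock key.
//
//   _objc_sync_enter_kind(obj, SyncKind::classInitialize);
//   // ... run +initialize machinery on obj's class ...
//   _objc_sync_exit_kind(obj, SyncKind::classInitialize);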
// arr
extern void arr_init(void);
extern id objc_autoreleaseReturnValue(id obj);
// block trampolines
#if !TARGET_OS_EXCLAVEKIT
extern void _imp_implementationWithBlock_init(void);
extern IMP _imp_implementationWithBlockNoCopy(id block);
#endif
// layout.h
typedef struct {
    uint8_t *bits;
    size_t bitCount;
    size_t bitsAllocated;
    bool weak;
} layout_bitmap;
extern layout_bitmap layout_bitmap_create(const unsigned char *layout_string, size_t layoutStringInstanceSize, size_t instanceSize, bool weak);
extern layout_bitmap layout_bitmap_create_empty(size_t instanceSize, bool weak);
extern void layout_bitmap_free(layout_bitmap bits);
extern const unsigned char *layout_string_create(layout_bitmap bits);
extern void layout_bitmap_set_ivar(layout_bitmap bits, const char *type, size_t offset);
extern void layout_bitmap_grow(layout_bitmap *bits, size_t newCount);
extern void layout_bitmap_slide(layout_bitmap *bits, size_t oldPos, size_t newPos);
extern void layout_bitmap_slide_anywhere(layout_bitmap *bits, size_t oldPos, size_t newPos);
extern bool layout_bitmap_splat(layout_bitmap dst, layout_bitmap src,
                                size_t oldSrcInstanceSize);
extern bool layout_bitmap_or(layout_bitmap dst, layout_bitmap src, const char *msg);
extern bool layout_bitmap_clear(layout_bitmap dst, layout_bitmap src, const char *msg);
extern void layout_bitmap_print(layout_bitmap bits);
// fixme runtime
extern bool MultithreadedForkChild;
extern id objc_noop_imp(id self, SEL _cmd);
extern Class look_up_class(const char *aClassName, bool includeUnconnected, bool includeClassHandler);
extern bool is_root_ramdisk();
extern "C" void map_images(unsigned count, const struct _dyld_objc_notify_mapped_info infos[]);
extern void map_images_nolock(unsigned count,
                              const struct _dyld_objc_notify_mapped_info infos[],
                              bool *disabledClassROEnforcement);
extern void load_images(const struct _dyld_objc_notify_mapped_info* info);
extern void unmap_image(const char *path, const struct mach_header *mh);
extern void unmap_image_nolock(const struct mach_header *mh);
extern void _read_images(mapped_image_info infos[], uint32_t hCount, int totalClasses, int unoptimizedTotalClass);
void loadAllCategoriesIfNeeded(void);
extern void _unload_image(header_info *hi);
extern const header_info *_headerForClass(Class cls);
extern Class _class_remap(Class cls);
extern Ivar _class_getVariable(Class cls, const char *name);
extern unsigned _class_createInstances(Class cls, size_t extraBytes, id *results, unsigned num_requested);
extern const char *_category_getName(Category cat);
extern const char *_category_getClassName(Category cat);
extern Class _category_getClass(Category cat);
extern IMP _category_getLoadMethod(Category cat);
enum {
    OBJECT_CONSTRUCT_NONE = 0,
    OBJECT_CONSTRUCT_FREE_ONFAILURE = 1,
    OBJECT_CONSTRUCT_CALL_BADALLOC = 2,
};
extern id object_cxxConstructFromClass(id obj, Class cls, int flags);
extern void object_cxxDestruct(id obj);
extern void fixupCopiedIvars(id newObject, id oldObject);
extern Class _class_getClassForIvar(Class cls, Ivar ivar);
#define OBJC_WARN_DEPRECATED \
    do { \
        static int warned = 0; \
        if (!warned) { \
            warned = 1; \
            _objc_inform_deprecated(__FUNCTION__, NULL); \
        } \
    } while (0)

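// Illustrative usage (some_deprecated_api is hypothetical): dropped at the top
// of a deprecated entry point so the warning is emitted at most once per
// function, using __FUNCTION__ as the old name.
//
//   void some_deprecated_api(void) {
//       OBJC_WARN_DEPRECATED;
//       // ... old behavior ...
//   }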
__END_DECLS
#ifndef STATIC_ASSERT
#   define STATIC_ASSERT(x) _STATIC_ASSERT2(x, __LINE__)
#   define _STATIC_ASSERT2(x, line) _STATIC_ASSERT3(x, line)
#   define _STATIC_ASSERT3(x, line) \
        typedef struct { \
            int _static_assert[(x) ? 0 : -1]; \
        } _static_assert_ ## line __attribute__((unavailable))
#endif
#define countof(arr) (sizeof(arr) / sizeof((arr)[0]))
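// Illustrative checks (the array below is hypothetical): STATIC_ASSERT fails
// to compile when its condition is false, and countof() only works on true
// arrays, not on pointers.
//
//   STATIC_ASSERT(sizeof(void *) == sizeof(uintptr_t));
//   int levels[4];
//   STATIC_ASSERT(countof(levels) == 4);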
static __inline uint32_t _objc_strhash(const char *s) {
    uint32_t hash = 0;
    for (;;) {
        int a = *s++;
        if (0 == a) break;
        hash += (hash << 8) + a;
    }
    return hash;
}
#if __cplusplus
template <typename T>
static inline T log2u(T x) {
    return (x<2) ? 0 : log2u(x>>1)+1;
}

template <typename T>
static inline T exp2u(T x) {
    return (1 << x);
}

template <typename T>
static T exp2m1u(T x) {
    return (1 << x) - 1;
}
#endif
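// Example values (illustrative): log2u(8) == 3, log2u(9) == 3 (floor),
// exp2u(3) == 8, and exp2m1u(3) == 7, i.e. a mask of the low 3 bits.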
// Misalignment-safe integer types
__attribute__((aligned(1))) typedef uintptr_t unaligned_uintptr_t;
__attribute__((aligned(1))) typedef intptr_t unaligned_intptr_t;
__attribute__((aligned(1))) typedef uint64_t unaligned_uint64_t;
__attribute__((aligned(1))) typedef int64_t unaligned_int64_t;
__attribute__((aligned(1))) typedef uint32_t unaligned_uint32_t;
__attribute__((aligned(1))) typedef int32_t unaligned_int32_t;
__attribute__((aligned(1))) typedef uint16_t unaligned_uint16_t;
__attribute__((aligned(1))) typedef int16_t unaligned_int16_t;
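// Illustrative use (readU64 is a hypothetical helper): read a possibly
// misaligned 64-bit value out of a byte buffer through the unaligned typedef
// instead of a plain uint64_t *.
//
//   static uint64_t readU64(const uint8_t *p) {
//       return *(const unaligned_uint64_t *)p;
//   }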
// Global operator new and delete. We must not use any app overrides.
// This ALSO REQUIRES each of these be in libobjc's unexported symbol list.
#if __cplusplus && !defined(TEST_OVERRIDES_NEW)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Winline-new-delete"
#include <new>
inline void* operator new(std::size_t size) { return malloc(size); }
inline void* operator new[](std::size_t size) { return malloc(size); }
inline void* operator new(std::size_t size, const std::nothrow_t&) noexcept(true) { return malloc(size); }
inline void* operator new[](std::size_t size, const std::nothrow_t&) noexcept(true) { return malloc(size); }
inline void operator delete(void* p) noexcept(true) { free(p); }
inline void operator delete[](void* p) noexcept(true) { free(p); }
inline void operator delete(void* p, const std::nothrow_t&) noexcept(true) { free(p); }
inline void operator delete[](void* p, const std::nothrow_t&) noexcept(true) { free(p); }
#pragma clang diagnostic pop
#endif
// Overflow-detecting wrapper around malloc(n * m)
static inline void *alloc_overflow(size_t n, size_t m) {
    size_t total;
    if (__builtin_mul_overflow(n, m, &total))
        _objc_fatal("attempt to allocate %zu * %zu bytes would overflow", n, m);
    return malloc(total);
}
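// Illustrative call (count is hypothetical): replaces a bare
// malloc(count * sizeof(SEL)) so an attacker-controlled count can't silently
// wrap the multiplication.
//
//   SEL *sels = (SEL *)alloc_overflow(count, sizeof(SEL));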
class TimeLogger {
    uint64_t mStart;
    bool mRecord;
public:
    TimeLogger(bool record = true)
        : mStart(nanoseconds())
        , mRecord(record)
    { }

    void log(const char *msg) {
        if (mRecord) {
            uint64_t end = nanoseconds();
            _objc_inform("%.2f ms: %s", (end - mStart) / 1000000.0, msg);
            mStart = nanoseconds();
        }
    }
};
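// Illustrative usage (the option gate is hypothetical): each log() call
// reports the time since construction or since the previous log(), and does
// nothing when recording is disabled.
//
//   TimeLogger ts(PrintTimings == On);   // hypothetical option
//   // ... phase 1 ...
//   ts.log("phase 1 done");
//   // ... phase 2 ...
//   ts.log("phase 2 done");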
enum { CacheLineSize = 64 };
// StripedMap<T> is a map of void* -> T, sized appropriately
// for cache-friendly lock striping.
// For example, this may be used as StripedMap<spinlock_t>
// or as StripedMap<SomeStruct> where SomeStruct stores a spin lock.
template<typename T>
class StripedMap {
#if TARGET_OS_IPHONE && !TARGET_OS_SIMULATOR
    enum { StripeCount = 8 };
#else
    enum { StripeCount = 64 };
#endif

    struct PaddedT {
        T value alignas(CacheLineSize);
    };

    PaddedT array[StripeCount];

    static unsigned int indexForPointer(const void *p) {
        uintptr_t addr = reinterpret_cast<uintptr_t>(p);
        return ((addr >> 4) ^ (addr >> 9)) % StripeCount;
    }

public:
    T& operator[] (const void *p) {
        return array[indexForPointer(p)].value;
    }
    const T& operator[] (const void *p) const {
        return const_cast<StripedMap<T>&>(*this)[p];
    }
    // Shortcuts for StripedMaps of locks.
    void lockAll() {
        for (unsigned int i = 0; i < StripeCount; i++) {
            array[i].value.lock();
        }
    }

    void unlockAll() {
        for (unsigned int i = 0; i < StripeCount; i++) {
            array[i].value.unlock();
        }
    }

    void forceResetAll() {
        for (unsigned int i = 0; i < StripeCount; i++) {
            array[i].value.reset();
        }
    }

    void defineLockOrder() {
        for (unsigned int i = 1; i < StripeCount; i++) {
            lockdebug::lock_precedes_lock(&array[i-1].value, &array[i].value);
        }
    }

    void precedeLock(const void *newlock) {
        // assumes defineLockOrder is also called
        lockdebug::lock_precedes_lock(&array[StripeCount-1].value, newlock);
    }

    void succeedLock(const void *oldlock) {
        // assumes defineLockOrder is also called
        lockdebug::lock_precedes_lock(oldlock, &array[0].value);
    }

    const void *getLock(int i) {
        if (i < StripeCount) return &array[i].value;
        else return nil;
    }

    template<typename F>
    void forEach(F f) {
        for (int i = 0; i < StripeCount; i++) {
            f(array[i].value);
        }
    }

#if DEBUG
    StripedMap() {
        // Verify alignment expectations.
        uintptr_t base = (uintptr_t)&array[0].value;
        uintptr_t delta = (uintptr_t)&array[1].value - base;
        ASSERT(delta % CacheLineSize == 0);
        ASSERT(base % CacheLineSize == 0);
    }
#else
    constexpr StripedMap() {}
#endif
};
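// Illustrative sketch (ObjectLocks and withObjectLock are hypothetical):
// striping spinlocks by object address so that unrelated objects rarely
// contend on the same lock or cache line.
//
//   static StripedMap<spinlock_t> ObjectLocks;
//
//   static void withObjectLock(const void *obj, void (^body)(void)) {
//       spinlock_t &lock = ObjectLocks[obj];  // same obj -> same stripe
//       lock.lock();
//       body();
//       lock.unlock();
//   }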
// DisguisedPtr<T> acts like pointer type T*, except the
// stored value is disguised to hide it from tools like `leaks`.
// nil is disguised as itself so zero-filled memory works as expected,
// which means 0x80..00 is also disguised as itself but we don't care.
// Note that weak_entry_t knows about this encoding.
template <typename T>
class DisguisedPtr {
    void *value;

    static void *disguise(T* ptr) {
        return (void *)-(uintptr_t)ptr;
    }

    static T* undisguise(void *val) {
        return (T*)-(uintptr_t)val;
    }

public: