forked from vgorloff/swift-everywhere-toolchain
Showing 3 changed files with 374 additions and 1 deletion.
@@ -0,0 +1,372 @@
diff --git a/lib/IRGen/MetadataRequest.cpp b/lib/IRGen/MetadataRequest.cpp
index 0a83d8bb6ec..7c314016a12 100644
--- a/lib/IRGen/MetadataRequest.cpp
+++ b/lib/IRGen/MetadataRequest.cpp
@@ -2646,9 +2646,16 @@ emitMetadataAccessByMangledName(IRGenFunction &IGF, CanType type,
   unsigned mangledStringSize;
   std::tie(mangledString, mangledStringSize) =
     IGM.getTypeRef(type, CanGenericSignature(), MangledTypeRefRole::Metadata);
-
-  assert(mangledStringSize < 0x80000000u
-         && "2GB of mangled name ought to be enough for anyone");
+
+  // Android AArch64 reserves the top byte of the address for memory tagging
+  // since Android 11, so only use the bottom 23 bits to store this size
+  // and the 24th bit to signal that there is a size.
+  if (IGM.Triple.isAndroid() && IGM.Triple.getArch() == llvm::Triple::aarch64)
+    assert(mangledStringSize < 0x00800001u &&
+           "8MB of mangled name ought to be enough for Android AArch64");
+  else
+    assert(mangledStringSize < 0x80000000u &&
+           "2GB of mangled name ought to be enough for anyone");

   // Get or create the cache variable if necessary.
   auto cache = IGM.getAddrOfTypeMetadataDemanglingCacheVariable(type,
@@ -2718,6 +2725,21 @@ emitMetadataAccessByMangledName(IRGenFunction &IGF, CanType type,
   auto contBB = subIGF.createBasicBlock("");
   llvm::Value *comparison = subIGF.Builder.CreateICmpSLT(load,
                                           llvm::ConstantInt::get(IGM.Int64Ty, 0));
+
+  // Check if the 24th bit is set on Android AArch64 and only instantiate the
+  // type metadata if it is, as otherwise it might be negative only because
+  // of the memory tag on Android.
+  if (IGM.Triple.isAndroid() &&
+      IGM.Triple.getArch() == llvm::Triple::aarch64) {
+
+    auto getBitAfterAndroidTag = subIGF.Builder.CreateAnd(
+        load, llvm::ConstantInt::get(IGM.Int64Ty, 0x0080000000000000));
+    auto checkNotAndroidTag = subIGF.Builder.CreateICmpNE(
+        getBitAfterAndroidTag, llvm::ConstantInt::get(IGM.Int64Ty, 0));
+
+    comparison = subIGF.Builder.CreateAnd(comparison, checkNotAndroidTag);
+  }
+
   comparison = subIGF.Builder.CreateExpect(comparison,
                                            llvm::ConstantInt::get(IGM.Int1Ty, 0));
   subIGF.Builder.CreateCondBr(comparison, isUnfilledBB, contBB);
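These two hunks change both the producer and the consumer of the demangling cache word: the mangled-name size must now fit in 23 bits, and the "unfilled" fast-path check can no longer rely on the sign bit alone, because a memory tag in the top byte of an instantiated metadata pointer also makes the loaded value negative. A minimal Swift sketch of the resulting check, with illustrative names (the compiler emits the equivalent IR rather than calling anything like this):

```swift
// Sketch only: models the branch the emitted IR takes before deciding whether
// the cache word still encodes a mangled-name reference or already holds a
// metadata pointer.
func cacheIsUnfilled(_ word: Int64, androidAArch64: Bool) -> Bool {
  // Default 64-bit rule: a negative word means "not instantiated yet".
  var unfilled = word < 0
  if androidAArch64 {
    // A tagged metadata pointer can also be negative, so additionally require
    // bit 55 (0x0080_0000_0000_0000), which the compiler now sets whenever it
    // stores a (<= 23-bit) size in the cache.
    unfilled = unfilled && (word & 0x0080_0000_0000_0000) != 0
  }
  return unfilled
}
```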
diff --git a/lib/IRGen/SwiftTargetInfo.cpp b/lib/IRGen/SwiftTargetInfo.cpp
index 81c0fce41bc..3049df39356 100644
--- a/lib/IRGen/SwiftTargetInfo.cpp
+++ b/lib/IRGen/SwiftTargetInfo.cpp
@@ -36,10 +36,17 @@ static void setToMask(SpareBitVector &bits, unsigned size, uint64_t mask) {
 /// Configures target-specific information for arm64 platforms.
 static void configureARM64(IRGenModule &IGM, const llvm::Triple &triple,
                            SwiftTargetInfo &target) {
-  setToMask(target.PointerSpareBits, 64,
-            SWIFT_ABI_ARM64_SWIFT_SPARE_BITS_MASK);
-  setToMask(target.ObjCPointerReservedBits, 64,
-            SWIFT_ABI_ARM64_OBJC_RESERVED_BITS_MASK);
+  if (triple.isAndroid()) {
+    setToMask(target.PointerSpareBits, 64,
+              SWIFT_ABI_ANDROID_ARM64_SWIFT_SPARE_BITS_MASK);
+    setToMask(target.ObjCPointerReservedBits, 64,
+              SWIFT_ABI_ANDROID_ARM64_OBJC_RESERVED_BITS_MASK);
+  } else {
+    setToMask(target.PointerSpareBits, 64,
+              SWIFT_ABI_ARM64_SWIFT_SPARE_BITS_MASK);
+    setToMask(target.ObjCPointerReservedBits, 64,
+              SWIFT_ABI_ARM64_OBJC_RESERVED_BITS_MASK);
+  }
   setToMask(target.IsObjCPointerBit, 64, SWIFT_ABI_ARM64_IS_OBJC_BIT);

   if (triple.isOSDarwin()) {
diff --git a/stdlib/public/SwiftShims/HeapObject.h b/stdlib/public/SwiftShims/HeapObject.h
index 5e165fd3d4e..6a568d77a23 100644
--- a/stdlib/public/SwiftShims/HeapObject.h
+++ b/stdlib/public/SwiftShims/HeapObject.h
@@ -157,12 +157,22 @@ static_assert(alignof(HeapObject) == alignof(void*),
 #endif
 #define _swift_abi_SwiftSpareBitsMask \
   (__swift_uintptr_t) SWIFT_ABI_ARM64_SWIFT_SPARE_BITS_MASK
+#if defined(__ANDROID__)
+#define _swift_abi_ObjCReservedBitsMask \
+  (__swift_uintptr_t) SWIFT_ABI_ANDROID_ARM64_OBJC_RESERVED_BITS_MASK
+#else
 #define _swift_abi_ObjCReservedBitsMask \
   (__swift_uintptr_t) SWIFT_ABI_ARM64_OBJC_RESERVED_BITS_MASK
+#endif
 #define _swift_abi_ObjCReservedLowBits \
   (unsigned) SWIFT_ABI_ARM64_OBJC_NUM_RESERVED_LOW_BITS
+#if defined(__ANDROID__)
+#define _swift_BridgeObject_TaggedPointerBits \
+  (__swift_uintptr_t) SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64 >> 8
+#else
 #define _swift_BridgeObject_TaggedPointerBits \
   (__swift_uintptr_t) SWIFT_ABI_DEFAULT_BRIDGEOBJECT_TAG_64
+#endif

 #elif defined(__powerpc64__)

diff --git a/stdlib/public/SwiftShims/System.h b/stdlib/public/SwiftShims/System.h
index 9656bf69fe1..c45e189a462 100644
--- a/stdlib/public/SwiftShims/System.h
+++ b/stdlib/public/SwiftShims/System.h
@@ -152,10 +152,19 @@
 /// Darwin reserves the low 4GB of address space.
 #define SWIFT_ABI_DARWIN_ARM64_LEAST_VALID_POINTER 0x100000000ULL

+// Android AArch64 reserves the top byte for pointer tagging since Android 11,
+// so shift the spare bits tag to the second byte and zero the ObjC tag.
+#define SWIFT_ABI_ANDROID_ARM64_SWIFT_SPARE_BITS_MASK 0x00F0000000000007ULL
+#define SWIFT_ABI_ANDROID_ARM64_OBJC_RESERVED_BITS_MASK 0x0000000000000000ULL
+
+#if defined(__ANDROID__) && defined(__aarch64__)
+#define SWIFT_ABI_ARM64_SWIFT_SPARE_BITS_MASK SWIFT_ABI_ANDROID_ARM64_SWIFT_SPARE_BITS_MASK
+#else
 // TBI guarantees the top byte of pointers is unused, but ARMv8.5-A
 // claims the bottom four bits of that for memory tagging.
 // Heap objects are eight-byte aligned.
 #define SWIFT_ABI_ARM64_SWIFT_SPARE_BITS_MASK 0xF000000000000007ULL
+#endif

 // Objective-C reserves just the high bit for tagged pointers.
 #define SWIFT_ABI_ARM64_OBJC_RESERVED_BITS_MASK 0x8000000000000000ULL
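Taken together with the SwiftTargetInfo.cpp and HeapObject.h hunks above, the new constants simply move Swift's spare-bit tag out of the byte Android now owns, drop the ObjC reserved bit entirely, and shift the BridgeObject tag down by the same one byte. A small standalone illustration (the constants are copied from the macros above; this snippet is not part of the patch):

```swift
// Illustration only: the Android masks must not overlap the OS-reserved top byte.
let topByte: UInt64          = 0xFF00_0000_0000_0000 // reserved for tagging on Android 11+
let defaultSpareBits: UInt64 = 0xF000_0000_0000_0007 // SWIFT_ABI_ARM64_SWIFT_SPARE_BITS_MASK
let androidSpareBits: UInt64 = 0x00F0_0000_0000_0007 // SWIFT_ABI_ANDROID_ARM64_SWIFT_SPARE_BITS_MASK

assert(defaultSpareBits & topByte != 0) // the generic arm64 mask lives in the tagged byte
assert(androidSpareBits & topByte == 0) // the Android mask has moved down one byte
```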
diff --git a/stdlib/public/core/KeyPath.swift b/stdlib/public/core/KeyPath.swift
index 832a6b7a67b..5d93c0e369c 100644
--- a/stdlib/public/core/KeyPath.swift
+++ b/stdlib/public/core/KeyPath.swift
@@ -1747,7 +1747,7 @@ internal struct KeyPathBuffer {
   internal mutating func pushRaw(size: Int, alignment: Int)
       -> UnsafeMutableRawBufferPointer {
     var baseAddress = buffer.baseAddress.unsafelyUnwrapped
-    var misalign = Int(bitPattern: baseAddress) % alignment
+    var misalign = Int(bitPattern: baseAddress) & (alignment - 1)
     if misalign != 0 {
       misalign = alignment - misalign
       baseAddress = baseAddress.advanced(by: misalign)
@@ -3242,7 +3242,7 @@ internal struct InstantiateKeyPathBuffer: KeyPathPatternVisitor {
   ) {
     let alignment = MemoryLayout<T>.alignment
     var baseAddress = destData.baseAddress.unsafelyUnwrapped
-    var misalign = Int(bitPattern: baseAddress) % alignment
+    var misalign = Int(bitPattern: baseAddress) & (alignment - 1)
     if misalign != 0 {
       misalign = alignment - misalign
       baseAddress = baseAddress.advanced(by: misalign)
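The KeyPath change looks unrelated but is driven by the same tagging concern: Int(bitPattern:) of a top-byte-tagged buffer address is negative, and Swift's % then returns a negative remainder, which the surrounding code would turn into a bogus misalignment adjustment. Masking with alignment - 1 is equivalent for the power-of-two alignments used here and is sign-agnostic. A worked example with a made-up tagged address:

```swift
// Example only; the address below is fabricated to have a nonzero top byte (tag).
let taggedAddress = Int64(bitPattern: 0xB400_0000_0000_1004)
let alignment: Int64 = 8

let viaRemainder = taggedAddress % alignment   // -4: the sign leaks into misalign
let viaMask = taggedAddress & (alignment - 1)  //  4: correct regardless of sign
```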
diff --git a/stdlib/public/core/SmallString.swift b/stdlib/public/core/SmallString.swift
index 1cab6141406..3330071f862 100644
--- a/stdlib/public/core/SmallString.swift
+++ b/stdlib/public/core/SmallString.swift
@@ -23,6 +23,9 @@
 //  ↑                          ↑
 //  first (leftmost) code unit  discriminator (incl. count)
 //
+// On Android AArch64, there is one less byte available because the discriminator
+// is stored in the penultimate code unit instead, to match where it's stored
+// for large strings.
 @frozen @usableFromInline
 internal struct _SmallString {
   @usableFromInline
@@ -78,6 +81,8 @@ extension _SmallString {
   internal static var capacity: Int {
 #if arch(i386) || arch(arm) || arch(arm64_32) || arch(wasm32)
     return 10
+#elseif os(Android) && arch(arm64)
+    return 14
 #else
     return 15
 #endif
@@ -111,7 +116,11 @@ extension _SmallString {
   // usage: it always clears the discriminator and count (in case it's full)
   @inlinable @inline(__always)
   internal var zeroTerminatedRawCodeUnits: RawBitPattern {
+#if os(Android) && arch(arm64)
+    let smallStringCodeUnitMask = ~UInt64(0xFFFF).bigEndian // zero last two bytes
+#else
     let smallStringCodeUnitMask = ~UInt64(0xFF).bigEndian // zero last byte
+#endif
     return (self._storage.0, self._storage.1 & smallStringCodeUnitMask)
   }

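Because the discriminator and small count now sit in the penultimate byte, the last two bytes of the second storage word are no longer available for code units: capacity drops from 15 to 14 and the zero-termination mask has to clear two bytes instead of one. A standalone illustration of the two masks, assuming a little-endian target (which Android AArch64 is):

```swift
// Illustration only: .bigEndian byte-swaps on a little-endian target, so the
// cleared bytes land at the high end of the word, i.e. the last bytes of the
// small string's in-memory representation.
let defaultMask = ~UInt64(0xFF).bigEndian    // keeps 15 code-unit bytes
let androidMask = ~UInt64(0xFFFF).bigEndian  // keeps 14 code-unit bytes

assert(defaultMask == 0x00FF_FFFF_FFFF_FFFF)
assert(androidMask == 0x0000_FFFF_FFFF_FFFF)
```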
diff --git a/stdlib/public/core/StringObject.swift b/stdlib/public/core/StringObject.swift
index b087e87f51e..88ff7fbf089 100644
--- a/stdlib/public/core/StringObject.swift
+++ b/stdlib/public/core/StringObject.swift
@@ -56,6 +56,11 @@
   can compile to a fused check-and-branch, even if that burns part of the
   encoding space.

+  On Android AArch64, we cannot use the top byte for large strings because it is
+  reserved by the OS for memory tagging since Android 11, so shift the
+  discriminator to the second byte instead. This burns one more byte on small
+  strings.
+
   On 32-bit platforms, we store an explicit discriminator (as a UInt8) with the
   same encoding as above, placed in the high bits. E.g. `b62` above is in
   `_discriminator`'s `b6`.
@@ -111,8 +116,13 @@ internal struct _StringObject {

   @inlinable @inline(__always)
   init(count: Int, variant: Variant, discriminator: UInt64, flags: UInt16) {
+#if os(Android) && arch(arm64)
+    _internalInvariant(discriminator & 0x00FF_0000_0000_0000 == discriminator,
+      "only the second byte can carry the discriminator and small count on Android AArch64")
+#else
     _internalInvariant(discriminator & 0xFF00_0000_0000_0000 == discriminator,
       "only the top byte can carry the discriminator and small count")
+#endif

     self._count = count
     self._variant = variant
@@ -349,7 +359,13 @@ extension _StringObject.Nibbles {
 extension _StringObject.Nibbles {
   // Mask for address bits, i.e. non-discriminator and non-extra high bits
   @inlinable @inline(__always)
-  static internal var largeAddressMask: UInt64 { return 0x0FFF_FFFF_FFFF_FFFF }
+  static internal var largeAddressMask: UInt64 {
+#if os(Android) && arch(arm64)
+    return 0xFF0F_FFFF_FFFF_FFFF
+#else
+    return 0x0FFF_FFFF_FFFF_FFFF
+#endif
+  }

   // Mask for address bits, i.e. non-discriminator and non-extra high bits
   @inlinable @inline(__always)
@@ -360,20 +376,32 @@ extension _StringObject.Nibbles {
   // Discriminator for small strings
   @inlinable @inline(__always)
   internal static func small(isASCII: Bool) -> UInt64 {
+#if os(Android) && arch(arm64)
+    return isASCII ? 0x00E0_0000_0000_0000 : 0x00A0_0000_0000_0000
+#else
     return isASCII ? 0xE000_0000_0000_0000 : 0xA000_0000_0000_0000
+#endif
   }

   // Discriminator for small strings
   @inlinable @inline(__always)
   internal static func small(withCount count: Int, isASCII: Bool) -> UInt64 {
     _internalInvariant(count <= _SmallString.capacity)
+#if os(Android) && arch(arm64)
+    return small(isASCII: isASCII) | UInt64(truncatingIfNeeded: count) &<< 48
+#else
     return small(isASCII: isASCII) | UInt64(truncatingIfNeeded: count) &<< 56
+#endif
   }

   // Discriminator for large, immortal, swift-native strings
   @inlinable @inline(__always)
   internal static func largeImmortal() -> UInt64 {
+#if os(Android) && arch(arm64)
+    return 0x0080_0000_0000_0000
+#else
     return 0x8000_0000_0000_0000
+#endif
   }

   // Discriminator for large, mortal (i.e. managed), swift-native strings
@@ -397,7 +425,11 @@ extension _StringObject {

   @inlinable @inline(__always)
   internal var isImmortal: Bool {
+#if os(Android) && arch(arm64)
+    return (discriminatedObjectRawBits & 0x0080_0000_0000_0000) != 0
+#else
     return (discriminatedObjectRawBits & 0x8000_0000_0000_0000) != 0
+#endif
   }

   @inlinable @inline(__always)
@@ -405,7 +437,11 @@ extension _StringObject {

   @inlinable @inline(__always)
   internal var isSmall: Bool {
+#if os(Android) && arch(arm64)
+    return (discriminatedObjectRawBits & 0x0020_0000_0000_0000) != 0
+#else
     return (discriminatedObjectRawBits & 0x2000_0000_0000_0000) != 0
+#endif
   }

   @inlinable @inline(__always)
@@ -419,7 +455,11 @@ extension _StringObject {
   // - Non-Cocoa shared strings
   @inlinable @inline(__always)
   internal var providesFastUTF8: Bool {
+#if os(Android) && arch(arm64)
+    return (discriminatedObjectRawBits & 0x0010_0000_0000_0000) == 0
+#else
     return (discriminatedObjectRawBits & 0x1000_0000_0000_0000) == 0
+#endif
   }

   @inlinable @inline(__always)
@@ -429,16 +469,26 @@ extension _StringObject {
   // conforms to `_AbstractStringStorage`
   @inline(__always)
   internal var hasStorage: Bool {
+#if os(Android) && arch(arm64)
+    return (discriminatedObjectRawBits & 0x00F0_0000_0000_0000) == 0
+#else
     return (discriminatedObjectRawBits & 0xF000_0000_0000_0000) == 0
+#endif
   }

   // Whether we are a mortal, native (tail-allocated) string
   @inline(__always)
   internal var hasNativeStorage: Bool {
+#if os(Android) && arch(arm64)
+    // Android uses the same logic as explained below for other platforms,
+    // except isSmall is at b53, so shift it to b61 first before proceeding.
+    let bits = ~(discriminatedObjectRawBits << 8) & self._countAndFlagsBits
+#else
     // b61 on the object means isSmall, and on countAndFlags means
     // isNativelyStored. We just need to check that b61 is 0 on the object and 1
     // on countAndFlags.
     let bits = ~discriminatedObjectRawBits & self._countAndFlagsBits
+#endif
     let result = bits & 0x2000_0000_0000_0000 != 0
     _internalInvariant(!result || hasStorage, "native storage needs storage")
     return result
@@ -466,7 +516,11 @@ extension _StringObject {
   @inline(__always)
   internal var largeIsCocoa: Bool {
     _internalInvariant(isLarge)
+#if os(Android) && arch(arm64)
+    return (discriminatedObjectRawBits & 0x0040_0000_0000_0000) != 0
+#else
     return (discriminatedObjectRawBits & 0x4000_0000_0000_0000) != 0
+#endif
   }

   // Whether this string is in one of our fastest representations:
@@ -535,7 +589,11 @@ extension _StringObject {

   @inlinable
   internal static func getSmallCount(fromRaw x: UInt64) -> Int {
+#if os(Android) && arch(arm64)
+    return Int(truncatingIfNeeded: (x & 0x000F_0000_0000_0000) &>> 48)
+#else
     return Int(truncatingIfNeeded: (x & 0x0F00_0000_0000_0000) &>> 56)
+#endif
   }

   @inlinable @inline(__always)
@@ -546,7 +604,11 @@ extension _StringObject {

   @inlinable
   internal static func getSmallIsASCII(fromRaw x: UInt64) -> Bool {
+#if os(Android) && arch(arm64)
+    return x & 0x0040_0000_0000_0000 != 0
+#else
     return x & 0x4000_0000_0000_0000 != 0
+#endif
   }
   @inlinable @inline(__always)
   internal var smallIsASCII: Bool {
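All of the StringObject hunks above are the same transformation applied to each accessor: every constant that used to select bits in the top byte is shifted right by eight bits, and shift counts of 56 become 48. A rough model of where the small-string discriminator and count end up under both layouts (helper and parameter names here are illustrative, not the stdlib's):

```swift
// Sketch only: models the bit placement described in the diff above.
func smallDiscriminator(count: Int, isASCII: Bool, androidAArch64: Bool) -> UInt64 {
  let shift: UInt64 = androidAArch64 ? 48 : 56    // second byte vs. top byte
  let kind: UInt64 = isASCII ? 0xE0 : 0xA0        // small-string discriminator bits
  return (kind << shift) | (UInt64(truncatingIfNeeded: count) << shift)
}

// A 5-code-unit ASCII small string:
//   generic arm64:   0xE500_0000_0000_0000
//   Android AArch64: 0x00E5_0000_0000_0000
```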
diff --git a/stdlib/public/runtime/HeapObject.cpp b/stdlib/public/runtime/HeapObject.cpp
index c6601e6d579..973d029d99c 100644
--- a/stdlib/public/runtime/HeapObject.cpp
+++ b/stdlib/public/runtime/HeapObject.cpp
@@ -66,6 +66,10 @@ static inline bool isValidPointerForNativeRetain(const void *p) {
   // arm64_32 is special since it has 32-bit pointers but __arm64__ is true.
   // Catch it early since __POINTER_WIDTH__ is generally non-portable.
   return p != nullptr;
+#elif defined(__ANDROID__) && defined(__aarch64__)
+  // Check the top of the second byte instead, since Android AArch64 reserves
+  // the top byte for its own pointer tagging since Android 11.
+  return (intptr_t)((uintptr_t)p << 8) > 0;
 #elif defined(__x86_64__) || defined(__arm64__) || defined(__aarch64__) || defined(_M_ARM64) || defined(__s390x__) || (defined(__powerpc64__) && defined(__LITTLE_ENDIAN__))
   // On these platforms, except s390x, the upper half of address space is reserved for the
   // kernel, so we can assume that pointer values in this range are invalid.
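The runtime check mirrors the same idea at the C++ level: rather than trusting the sign bit of the raw pointer, it discards the possibly-tagged top byte and tests the sign of what remains. A small Swift rendering of that logic, assuming the usual user/kernel address split (names are illustrative, not the runtime's):

```swift
// Sketch only: models the #elif branch added above for Android AArch64.
func isValidPointerForNativeRetain(_ bits: UInt64, androidAArch64: Bool) -> Bool {
  if androidAArch64 {
    // Shift out the tag byte; a user-space pointer keeps bit 55 clear, so the
    // shifted value stays positive, while a null pointer shifts to zero and fails.
    return Int64(bitPattern: bits << 8) > 0
  }
  // Generic 64-bit rule: the upper half of the address space belongs to the kernel.
  return Int64(bitPattern: bits) > 0
}
```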