Diffstat (limited to '0109-Backport-SME-aarch64-Rename-AARCH64_ISA-architecture.patch')
-rw-r--r-- 0109-Backport-SME-aarch64-Rename-AARCH64_ISA-architecture.patch | 157
1 file changed, 157 insertions(+), 0 deletions(-)
diff --git a/0109-Backport-SME-aarch64-Rename-AARCH64_ISA-architecture.patch b/0109-Backport-SME-aarch64-Rename-AARCH64_ISA-architecture.patch
new file mode 100644
index 0000000..9b541de
--- /dev/null
+++ b/0109-Backport-SME-aarch64-Rename-AARCH64_ISA-architecture.patch
@@ -0,0 +1,157 @@
+From 244780570ebc85c44806559ba165d4a70a2333d1 Mon Sep 17 00:00:00 2001
+From: Richard Sandiford <richard.sandiford@arm.com>
+Date: Thu, 29 Sep 2022 11:32:50 +0100
+Subject: [PATCH 010/157] [Backport][SME] aarch64: Rename AARCH64_ISA
+ architecture-level macros
+
+Reference: https://gcc.gnu.org/git/?p=gcc.git;a=commit;h=2a4788ac3bae1467b0379852d5a6690a8496d0c9
+
+All AARCH64_ISA_* architecture-level macros except AARCH64_ISA_V8_R
+are for the A profile: they cause __ARM_ARCH_PROFILE to be set to
+'A' and they are associated with architecture names like armv8.4-a.
+
+It's convenient for later patches if we make this explicit
+by adding an "A" to the name. Also, rather than add an underscore
+(as for V8_R) it's more convenient to add the profile directly
+to the number, like we already do in the ARCH_IDENT field of the
+aarch64-arches.def entries.
+
+gcc/
+ * config/aarch64/aarch64.h (AARCH64_ISA_V8_2, AARCH64_ISA_V8_3)
+ (AARCH64_ISA_V8_4, AARCH64_ISA_V8_5, AARCH64_ISA_V8_6)
+ (AARCH64_ISA_V9, AARCH64_ISA_V9_1, AARCH64_ISA_V9_2)
+ (AARCH64_ISA_V9_3): Add "A" to the end of the name.
+ (AARCH64_ISA_V8_R): Rename to AARCH64_ISA_V8R.
+ (TARGET_ARMV8_3, TARGET_JSCVT, TARGET_FRINT, TARGET_MEMTAG): Update
+ accordingly.
+ * common/config/aarch64/aarch64-common.cc
+ (aarch64_get_extension_string_for_isa_flags): Likewise.
+ * config/aarch64/aarch64-c.cc
+ (aarch64_define_unconditional_macros): Likewise.
+---
+ gcc/common/config/aarch64/aarch64-common.cc | 2 +-
+ gcc/config/aarch64/aarch64-c.cc | 4 +--
+ gcc/config/aarch64/aarch64.h | 28 ++++++++++-----------
+ 3 files changed, 17 insertions(+), 17 deletions(-)
+
+diff --git a/gcc/common/config/aarch64/aarch64-common.cc b/gcc/common/config/aarch64/aarch64-common.cc
+index 85ce8133b..3dc020f0c 100644
+--- a/gcc/common/config/aarch64/aarch64-common.cc
++++ b/gcc/common/config/aarch64/aarch64-common.cc
+@@ -506,7 +506,7 @@ aarch64_get_extension_string_for_isa_flags (uint64_t isa_flags,
+
+ Note that assemblers with Armv8-R AArch64 support should not have this
+ issue, so we don't need this fix when targeting Armv8-R. */
+- if ((isa_flags & AARCH64_ISA_CRC) && !AARCH64_ISA_V8_R)
++ if ((isa_flags & AARCH64_ISA_CRC) && !AARCH64_ISA_V8R)
+ isa_flag_bits |= AARCH64_ISA_CRC;
+
+ /* Pass Two:
+diff --git a/gcc/config/aarch64/aarch64-c.cc b/gcc/config/aarch64/aarch64-c.cc
+index 3d2fb5ec2..18c9b975b 100644
+--- a/gcc/config/aarch64/aarch64-c.cc
++++ b/gcc/config/aarch64/aarch64-c.cc
+@@ -64,7 +64,7 @@ aarch64_define_unconditional_macros (cpp_reader *pfile)
+ builtin_define ("__ARM_ARCH_8A");
+
+ builtin_define_with_int_value ("__ARM_ARCH_PROFILE",
+- AARCH64_ISA_V8_R ? 'R' : 'A');
++ AARCH64_ISA_V8R ? 'R' : 'A');
+ builtin_define ("__ARM_FEATURE_CLZ");
+ builtin_define ("__ARM_FEATURE_IDIV");
+ builtin_define ("__ARM_FEATURE_UNALIGNED");
+@@ -82,7 +82,7 @@ aarch64_update_cpp_builtins (cpp_reader *pfile)
+ {
+ aarch64_def_or_undef (flag_unsafe_math_optimizations, "__ARM_FP_FAST", pfile);
+
+- builtin_define_with_int_value ("__ARM_ARCH", AARCH64_ISA_V9 ? 9 : 8);
++ builtin_define_with_int_value ("__ARM_ARCH", AARCH64_ISA_V9A ? 9 : 8);
+
+ builtin_define_with_int_value ("__ARM_SIZEOF_MINIMAL_ENUM",
+ flag_short_enums ? 1 : 4);
+diff --git a/gcc/config/aarch64/aarch64.h b/gcc/config/aarch64/aarch64.h
+index 7c090c8f2..356a263b2 100644
+--- a/gcc/config/aarch64/aarch64.h
++++ b/gcc/config/aarch64/aarch64.h
+@@ -297,7 +297,7 @@
+ #define AARCH64_ISA_SIMD (aarch64_isa_flags & AARCH64_FL_SIMD)
+ #define AARCH64_ISA_LSE (aarch64_isa_flags & AARCH64_FL_LSE)
+ #define AARCH64_ISA_RDMA (aarch64_isa_flags & AARCH64_FL_RDMA)
+-#define AARCH64_ISA_V8_2 (aarch64_isa_flags & AARCH64_FL_V8_2)
++#define AARCH64_ISA_V8_2A (aarch64_isa_flags & AARCH64_FL_V8_2)
+ #define AARCH64_ISA_F16 (aarch64_isa_flags & AARCH64_FL_F16)
+ #define AARCH64_ISA_SVE (aarch64_isa_flags & AARCH64_FL_SVE)
+ #define AARCH64_ISA_SVE2 (aarch64_isa_flags & AARCH64_FL_SVE2)
+@@ -305,31 +305,31 @@
+ #define AARCH64_ISA_SVE2_BITPERM (aarch64_isa_flags & AARCH64_FL_SVE2_BITPERM)
+ #define AARCH64_ISA_SVE2_SHA3 (aarch64_isa_flags & AARCH64_FL_SVE2_SHA3)
+ #define AARCH64_ISA_SVE2_SM4 (aarch64_isa_flags & AARCH64_FL_SVE2_SM4)
+-#define AARCH64_ISA_V8_3 (aarch64_isa_flags & AARCH64_FL_V8_3)
++#define AARCH64_ISA_V8_3A (aarch64_isa_flags & AARCH64_FL_V8_3)
+ #define AARCH64_ISA_DOTPROD (aarch64_isa_flags & AARCH64_FL_DOTPROD)
+ #define AARCH64_ISA_AES (aarch64_isa_flags & AARCH64_FL_AES)
+ #define AARCH64_ISA_SHA2 (aarch64_isa_flags & AARCH64_FL_SHA2)
+-#define AARCH64_ISA_V8_4 (aarch64_isa_flags & AARCH64_FL_V8_4)
++#define AARCH64_ISA_V8_4A (aarch64_isa_flags & AARCH64_FL_V8_4)
+ #define AARCH64_ISA_SM4 (aarch64_isa_flags & AARCH64_FL_SM4)
+ #define AARCH64_ISA_SHA3 (aarch64_isa_flags & AARCH64_FL_SHA3)
+ #define AARCH64_ISA_F16FML (aarch64_isa_flags & AARCH64_FL_F16FML)
+ #define AARCH64_ISA_RCPC8_4 (aarch64_isa_flags & AARCH64_FL_RCPC8_4)
+ #define AARCH64_ISA_RNG (aarch64_isa_flags & AARCH64_FL_RNG)
+-#define AARCH64_ISA_V8_5 (aarch64_isa_flags & AARCH64_FL_V8_5)
++#define AARCH64_ISA_V8_5A (aarch64_isa_flags & AARCH64_FL_V8_5)
+ #define AARCH64_ISA_TME (aarch64_isa_flags & AARCH64_FL_TME)
+ #define AARCH64_ISA_MEMTAG (aarch64_isa_flags & AARCH64_FL_MEMTAG)
+-#define AARCH64_ISA_V8_6 (aarch64_isa_flags & AARCH64_FL_V8_6)
++#define AARCH64_ISA_V8_6A (aarch64_isa_flags & AARCH64_FL_V8_6)
+ #define AARCH64_ISA_I8MM (aarch64_isa_flags & AARCH64_FL_I8MM)
+ #define AARCH64_ISA_F32MM (aarch64_isa_flags & AARCH64_FL_F32MM)
+ #define AARCH64_ISA_F64MM (aarch64_isa_flags & AARCH64_FL_F64MM)
+ #define AARCH64_ISA_BF16 (aarch64_isa_flags & AARCH64_FL_BF16)
+ #define AARCH64_ISA_SB (aarch64_isa_flags & AARCH64_FL_SB)
+-#define AARCH64_ISA_V8_R (aarch64_isa_flags & AARCH64_FL_V8_R)
++#define AARCH64_ISA_V8R (aarch64_isa_flags & AARCH64_FL_V8_R)
+ #define AARCH64_ISA_PAUTH (aarch64_isa_flags & AARCH64_FL_PAUTH)
+-#define AARCH64_ISA_V9 (aarch64_isa_flags & AARCH64_FL_V9)
+-#define AARCH64_ISA_V9_1 (aarch64_isa_flags & AARCH64_FL_V9_1)
+-#define AARCH64_ISA_V9_2 (aarch64_isa_flags & AARCH64_FL_V9_2)
+-#define AARCH64_ISA_V9_3 (aarch64_isa_flags & AARCH64_FL_V9_3)
++#define AARCH64_ISA_V9A (aarch64_isa_flags & AARCH64_FL_V9)
++#define AARCH64_ISA_V9_1A (aarch64_isa_flags & AARCH64_FL_V9_1)
++#define AARCH64_ISA_V9_2A (aarch64_isa_flags & AARCH64_FL_V9_2)
++#define AARCH64_ISA_V9_3A (aarch64_isa_flags & AARCH64_FL_V9_3)
+ #define AARCH64_ISA_MOPS (aarch64_isa_flags & AARCH64_FL_MOPS)
+ #define AARCH64_ISA_LS64 (aarch64_isa_flags & AARCH64_FL_LS64)
+
+@@ -383,16 +383,16 @@
+ #define TARGET_SVE2_SM4 (TARGET_SVE2 && AARCH64_ISA_SVE2_SM4)
+
+ /* ARMv8.3-A features. */
+-#define TARGET_ARMV8_3 (AARCH64_ISA_V8_3)
++#define TARGET_ARMV8_3 (AARCH64_ISA_V8_3A)
+
+ /* Javascript conversion instruction from Armv8.3-a. */
+-#define TARGET_JSCVT (TARGET_FLOAT && AARCH64_ISA_V8_3)
++#define TARGET_JSCVT (TARGET_FLOAT && AARCH64_ISA_V8_3A)
+
+ /* Armv8.3-a Complex number extension to AdvSIMD extensions. */
+ #define TARGET_COMPLEX (TARGET_SIMD && TARGET_ARMV8_3)
+
+ /* Floating-point rounding instructions from Armv8.5-a. */
+-#define TARGET_FRINT (AARCH64_ISA_V8_5 && TARGET_FLOAT)
++#define TARGET_FRINT (AARCH64_ISA_V8_5A && TARGET_FLOAT)
+
+ /* TME instructions are enabled. */
+ #define TARGET_TME (AARCH64_ISA_TME)
+@@ -401,7 +401,7 @@
+ #define TARGET_RNG (AARCH64_ISA_RNG)
+
+ /* Memory Tagging instructions optional to Armv8.5 enabled through +memtag. */
+-#define TARGET_MEMTAG (AARCH64_ISA_V8_5 && AARCH64_ISA_MEMTAG)
++#define TARGET_MEMTAG (AARCH64_ISA_V8_5A && AARCH64_ISA_MEMTAG)
+
+ /* I8MM instructions are enabled through +i8mm. */
+ #define TARGET_I8MM (AARCH64_ISA_I8MM)
+--
+2.33.0
+
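
For context, the hunks in gcc/config/aarch64/aarch64-c.cc above are what turn the renamed AARCH64_ISA_V9A and AARCH64_ISA_V8R tests into the ACLE preprocessor macros __ARM_ARCH and __ARM_ARCH_PROFILE. Below is a minimal user-level sketch, not part of the patch series, showing how a program could observe those macros; the file name and -march value are only examples, assuming an AArch64 GCC built with this backport.

/* Minimal sketch, not part of the patch: print the ACLE architecture
   macros that aarch64-c.cc derives from AARCH64_ISA_V9A / AARCH64_ISA_V8R.
   Build with an AArch64 GCC, e.g. "gcc -march=armv9-a check.c".  */
#include <stdio.h>

int
main (void)
{
#if defined (__ARM_ARCH) && defined (__ARM_ARCH_PROFILE)
  /* __ARM_ARCH is 9 when AARCH64_ISA_V9A is set, 8 otherwise;
     __ARM_ARCH_PROFILE is 'R' for Armv8-R targets and 'A' otherwise.  */
  printf ("__ARM_ARCH=%d, __ARM_ARCH_PROFILE=%c\n",
          __ARM_ARCH, (char) __ARM_ARCH_PROFILE);
#else
  puts ("ACLE architecture macros not defined (not an AArch64 target?)");
#endif
  return 0;
}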