summaryrefslogtreecommitdiff
path: root/8002-internal-buildcfg-add-Kunpeng-atomic-optimize.patch
diff options
context:
space:
mode:
Diffstat (limited to '8002-internal-buildcfg-add-Kunpeng-atomic-optimize.patch')
-rw-r--r--8002-internal-buildcfg-add-Kunpeng-atomic-optimize.patch170
1 files changed, 170 insertions, 0 deletions
diff --git a/8002-internal-buildcfg-add-Kunpeng-atomic-optimize.patch b/8002-internal-buildcfg-add-Kunpeng-atomic-optimize.patch
new file mode 100644
index 0000000..41e3ac5
--- /dev/null
+++ b/8002-internal-buildcfg-add-Kunpeng-atomic-optimize.patch
@@ -0,0 +1,170 @@
+From ac00af10615ecb97fc91898d8e4142ea88830084 Mon Sep 17 00:00:00 2001
+From: jinye <jinye10@huawei.com>
+Date: Tue, 5 Aug 2025 20:20:08 +0800
+Subject: [PATCH] internal/buildcfg: add Kunpeng atomic optimize
+
+---
+ src/cmd/compile/internal/ssagen/intrinsics.go | 4 ++-
+ src/cmd/go/internal/work/gc.go | 3 ++
+ src/internal/buildcfg/cfg.go | 29 +++++++++++++------
+ src/internal/buildcfg/cfg_test.go | 2 +-
+ src/reflect/asm_arm64.s | 7 +++++
+ src/reflect/type.go | 10 +++++++
+ 6 files changed, 44 insertions(+), 11 deletions(-)
+
+diff --git a/src/cmd/compile/internal/ssagen/intrinsics.go b/src/cmd/compile/internal/ssagen/intrinsics.go
+index e4da86db..45a27428 100644
+--- a/src/cmd/compile/internal/ssagen/intrinsics.go
++++ b/src/cmd/compile/internal/ssagen/intrinsics.go
+@@ -380,7 +380,9 @@ func initIntrinsics(cfg *intrinsicBuildConfig) {
+ makeAtomicGuardedIntrinsicARM64common := func(op0, op1 ssa.Op, typ types.Kind, emit atomicOpEmitter, needReturn bool) intrinsicBuilder {
+
+ return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
+- if cfg.goarm64.LSE {
++ if buildcfg.GOARM64.LSE {
++ emit(s, n, args, op0, typ, needReturn)
++ } else if cfg.goarm64.LSE {
+ emit(s, n, args, op1, typ, needReturn)
+ } else {
+ // Target Atomic feature is identified by dynamic detection
+diff --git a/src/cmd/go/internal/work/gc.go b/src/cmd/go/internal/work/gc.go
+index 3a173efe..3ce12219 100644
+--- a/src/cmd/go/internal/work/gc.go
++++ b/src/cmd/go/internal/work/gc.go
+@@ -383,6 +383,9 @@ func asmArgs(a *Action, p *load.Package) []any {
+ if err == nil && g.LSE {
+ args = append(args, "-D", "GOARM64_LSE")
+ }
++ if err == nil && g.KPAtomicOpt {
++ args = append(args, "-D", "KPAtomicOpt")
++ }
+ }
+
+ return args
+diff --git a/src/internal/buildcfg/cfg.go b/src/internal/buildcfg/cfg.go
+index fca09bf8..e115d7f9 100644
+--- a/src/internal/buildcfg/cfg.go
++++ b/src/internal/buildcfg/cfg.go
+@@ -179,6 +179,8 @@ type Goarm64Features struct {
+ // * FEAT_SHA1, which includes the SHA1* instructions.
+ // * FEAT_SHA256, which includes the SHA256* instructions.
+ Crypto bool
++ // KPAtomicOpt enables the Kunpeng atomic optimization.
++ KPAtomicOpt bool
+ }
+
+ func (g Goarm64Features) String() string {
+@@ -189,17 +191,22 @@ func (g Goarm64Features) String() string {
+ if g.Crypto {
+ arm64Str += ",crypto"
+ }
++ if g.KPAtomicOpt {
++ arm64Str += ",kpatomicopt"
++ }
+ return arm64Str
+ }
+
+ func ParseGoarm64(v string) (g Goarm64Features, e error) {
+ const (
+- lseOpt = ",lse"
+- cryptoOpt = ",crypto"
++ lseOpt = ",lse"
++ cryptoOpt = ",crypto"
++ kpAtomicOpt = ",kpatomicopt"
+ )
+
+ g.LSE = false
+ g.Crypto = false
++ g.KPAtomicOpt = false
+ // We allow any combination of suffixes, in any order
+ for {
+ if strings.HasSuffix(v, lseOpt) {
+@@ -214,20 +221,24 @@ func ParseGoarm64(v string) (g Goarm64Features, e error) {
+ continue
+ }
+
++ if strings.HasSuffix(v, kpAtomicOpt) {
++ if os.Getenv("KP_AI_OPT") == "1" {
++ g.KPAtomicOpt = true
++ }
++ v = v[:len(v)-len(kpAtomicOpt)]
++ continue
++ }
++
+ break
+ }
+
+ switch v {
+- case "v8.0":
+- g.Version = v
+- case "v8.1", "v8.2", "v8.3", "v8.4", "v8.5", "v8.6", "v8.7", "v8.8", "v8.9",
++ case "v8.0", "v8.1", "v8.2", "v8.3", "v8.4", "v8.5", "v8.6", "v8.7", "v8.8", "v8.9",
+ "v9.0", "v9.1", "v9.2", "v9.3", "v9.4", "v9.5":
+ g.Version = v
+- // LSE extension is mandatory starting from 8.1
+- g.LSE = true
+ default:
+- e = fmt.Errorf("invalid GOARM64: must start with v8.{0-9} or v9.{0-5} and may optionally end in %q and/or %q",
+- lseOpt, cryptoOpt)
++ e = fmt.Errorf("invalid GOARM64: must start with v8.{0-9} or v9.{0-5} and may optionally end in %q, %q and/or %q",
++ lseOpt, cryptoOpt, kpAtomicOpt)
+ g.Version = DefaultGOARM64
+ }
+
+diff --git a/src/internal/buildcfg/cfg_test.go b/src/internal/buildcfg/cfg_test.go
+index 757270b7..1807ef89 100644
+--- a/src/internal/buildcfg/cfg_test.go
++++ b/src/internal/buildcfg/cfg_test.go
+@@ -69,7 +69,7 @@ func TestConfigFlags(t *testing.T) {
+ t.Errorf("Wrong parsing of GOARM64=v8.0,lse,crypto")
+ }
+ os.Setenv("GOARM64", "v9.0")
+- if goarm64().Version != "v9.0" || goarm64().LSE != true || goarm64().Crypto != false {
++ if goarm64().Version != "v9.0" || goarm64().LSE != false || goarm64().Crypto != false {
+ t.Errorf("Wrong parsing of GOARM64=v9.0")
+ }
+ }
+diff --git a/src/reflect/asm_arm64.s b/src/reflect/asm_arm64.s
+index 5e91e62a..23677a9e 100644
+--- a/src/reflect/asm_arm64.s
++++ b/src/reflect/asm_arm64.s
+@@ -19,6 +19,13 @@
+ #define LOCAL_RETVALID 40
+ #define LOCAL_REGARGS 48
+
++GLOBL ·kpAtomicOpt(SB),(NOPTR|WRAPPER),$1
++#ifndef KPAtomicOpt
++DATA ·kpAtomicOpt(SB)/1,$0
++#else
++DATA ·kpAtomicOpt(SB)/1,$1
++#endif
++
+ // The frame size of the functions below is
+ // 32 (args of callReflect) + 8 (bool + padding) + 392 (abi.RegArgs) = 432.
+
+diff --git a/src/reflect/type.go b/src/reflect/type.go
+index 0e41a6db..214fa4d2 100644
+--- a/src/reflect/type.go
++++ b/src/reflect/type.go
+@@ -1809,8 +1809,18 @@ func ChanOf(dir ChanDir, t Type) Type {
+
+ var funcTypes []Type
+ var funcTypesMutex sync.Mutex
++var funcTypesRWMutex sync.RWMutex
++var kpAtomicOpt bool
+
+ func initFuncTypes(n int) Type {
++ if kpAtomicOpt {
++ funcTypesRWMutex.RLock()
++ if n < len(funcTypes) && funcTypes[n] != nil {
++ defer funcTypesRWMutex.RUnlock()
++ return funcTypes[n]
++ }
++ funcTypesRWMutex.RUnlock()
++ }
+ funcTypesMutex.Lock()
+ defer funcTypesMutex.Unlock()
+ if n >= len(funcTypes) {
+--
+2.33.0
+