
Commit 4a7ed1f

cmd/compile: mark architecture-specific unsafe points
Introduce a mechanism for marking architecture-specific Ops unsafe, and mark the ops that use REGTMP on ARM64, since async preemption will use REGTMP as a temporary register in the injected call.

Change-Id: I8ff22e87d8f9cb10d02a2f0af7c12ad6d7d58f54
Reviewed-on: https://go-review.googlesource.com/c/go/+/203459
Run-TryBot: Cherry Zhang <cherryyz@google.com>
Reviewed-by: Austin Clements <austin@google.com>
1 parent 0f992b9 commit 4a7ed1f
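At a glance, the change threads a single flag from the op definitions to the liveness pass: an op declared with unsafePoint: true in the gen/*Ops.go files gets that flag in the generated opcode table, gen/main.go emits an Op.UnsafePoint() accessor, and markUnsafePoints in plive.go marks every SSA value whose op reports true. Below is a minimal, self-contained Go sketch of that flow; the types, op names, and values are simplified stand-ins for illustration, not the real ssa package.

// Simplified sketch of the mechanism this commit introduces; the real types
// (ssa.Op, opInfo, opcodeTable, Liveness) carry much more state.
package main

import "fmt"

// Op indexes a table of per-opcode metadata, as in cmd/compile/internal/ssa.
type Op int32

type opInfo struct {
	name        string
	unsafePoint bool // not safe for async preemption (e.g. uses REGTMP on ARM64)
}

var opcodeTable = [...]opInfo{
	{name: "LoweredAtomicAdd64", unsafePoint: true}, // marked in ARM64Ops.go
	{name: "Add64"},                                 // ordinary op, preemptible
}

// UnsafePoint mirrors the accessor that gen/main.go now emits into opGen.go.
func (o Op) UnsafePoint() bool { return opcodeTable[o].unsafePoint }

// value stands in for *ssa.Value; only the fields used below are included.
type value struct {
	ID int32
	Op Op
}

func main() {
	// The liveness pass (markUnsafePoints in plive.go) walks all values and
	// records the ones whose op is architecture-marked unsafe.
	values := []value{{ID: 1, Op: 0}, {ID: 2, Op: 1}}
	unsafePoints := map[int32]bool{} // stand-in for the bit vector in plive.go
	for _, v := range values {
		if v.Op.UnsafePoint() {
			unsafePoints[v.ID] = true
		}
	}
	fmt.Println(unsafePoints) // map[1:true]
}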

File tree: 5 files changed, +32 -8 lines

  src/cmd/compile/internal/gc/plive.go
  src/cmd/compile/internal/ssa/gen/ARM64Ops.go
  src/cmd/compile/internal/ssa/gen/main.go
  src/cmd/compile/internal/ssa/op.go
  src/cmd/compile/internal/ssa/opGen.go

src/cmd/compile/internal/gc/plive.go

Lines changed: 9 additions & 0 deletions

@@ -639,6 +639,15 @@ func (lv *Liveness) markUnsafePoints() {
 
 	lv.unsafePoints = bvalloc(int32(lv.f.NumValues()))
 
+	// Mark architecture-specific unsafe points.
+	for _, b := range lv.f.Blocks {
+		for _, v := range b.Values {
+			if v.Op.UnsafePoint() {
+				lv.unsafePoints.Set(int32(v.ID))
+			}
+		}
+	}
+
 	// Mark write barrier unsafe points.
 	for _, wbBlock := range lv.f.WBLoads {
 		if wbBlock.Kind == ssa.BlockPlain && len(wbBlock.Values) == 0 {

src/cmd/compile/internal/ssa/gen/ARM64Ops.go

Lines changed: 8 additions & 8 deletions

@@ -620,17 +620,17 @@ func init() {
 		// LDAXR (Rarg0), Rout
 		// STLXR Rarg1, (Rarg0), Rtmp
 		// CBNZ Rtmp, -2(PC)
-		{name: "LoweredAtomicExchange64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
-		{name: "LoweredAtomicExchange32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
+		{name: "LoweredAtomicExchange64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
+		{name: "LoweredAtomicExchange32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
 
 		// atomic add.
 		// *arg0 += arg1. arg2=mem. returns <new content of *arg0, memory>. auxint must be zero.
 		// LDAXR (Rarg0), Rout
 		// ADD Rarg1, Rout
 		// STLXR Rout, (Rarg0), Rtmp
 		// CBNZ Rtmp, -3(PC)
-		{name: "LoweredAtomicAdd64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
-		{name: "LoweredAtomicAdd32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true},
+		{name: "LoweredAtomicAdd64", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
+		{name: "LoweredAtomicAdd32", argLength: 3, reg: gpxchg, resultNotInArgs: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
 
 		// atomic add variant.
 		// *arg0 += arg1. arg2=mem. returns <new content of *arg0, memory>. auxint must be zero.
@@ -653,17 +653,17 @@
 		// STLXR Rarg2, (Rarg0), Rtmp
 		// CBNZ Rtmp, -4(PC)
 		// CSET EQ, Rout
-		{name: "LoweredAtomicCas64", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true},
-		{name: "LoweredAtomicCas32", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true},
+		{name: "LoweredAtomicCas64", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
+		{name: "LoweredAtomicCas32", argLength: 4, reg: gpcas, resultNotInArgs: true, clobberFlags: true, faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
 
 		// atomic and/or.
 		// *arg0 &= (|=) arg1. arg2=mem. returns <new content of *arg0, memory>. auxint must be zero.
 		// LDAXRB (Rarg0), Rout
 		// AND/OR Rarg1, Rout
 		// STLXRB Rout, (Rarg0), Rtmp
 		// CBNZ Rtmp, -3(PC)
-		{name: "LoweredAtomicAnd8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "AND", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true},
-		{name: "LoweredAtomicOr8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "ORR", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true},
+		{name: "LoweredAtomicAnd8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "AND", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
+		{name: "LoweredAtomicOr8", argLength: 3, reg: gpxchg, resultNotInArgs: true, asm: "ORR", typ: "(UInt8,Mem)", faultOnNilArg0: true, hasSideEffects: true, unsafePoint: true},
 
 		// LoweredWB invokes runtime.gcWriteBarrier. arg0=destptr, arg1=srcptr, arg2=mem, aux=runtime.gcWriteBarrier
 		// It saves all GP registers if necessary,
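All eight ops marked above lower to multi-instruction load-exclusive/store-exclusive loops that keep their loop state in Rtmp, which corresponds to the assembler temporary REGTMP that the commit message says the injected preemption call will also use. The comment block below is a rough sketch of the hazard, built from the LoweredAtomicAdd64 expansion quoted in the op comments above; the injected-call behavior is an assumption taken from the commit message, not something shown in this diff.

// LoweredAtomicAdd64 assembles to roughly:
//
//	LDAXR	(Rarg0), Rout       // load-exclusive
//	ADD	Rarg1, Rout
//	STLXR	Rout, (Rarg0), Rtmp // store-exclusive; Rtmp != 0 on failure
//	CBNZ	Rtmp, -3(PC)        // retry until the store succeeds
//
// If an async preemption call were injected between STLXR and CBNZ, it would
// (per the commit message) use REGTMP, i.e. Rtmp, as a temporary and clobber
// the retry status, so the whole sequence is declared unsafePoint and the
// runtime will not preempt at these instructions.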

src/cmd/compile/internal/ssa/gen/main.go

Lines changed: 5 additions & 0 deletions

@@ -63,6 +63,7 @@ type opData struct {
 	usesScratch    bool   // this op requires scratch memory space
 	hasSideEffects bool   // for "reasons", not to be eliminated. E.g., atomic store, #19182.
 	zeroWidth      bool   // op never translates into any machine code. example: copy, which may sometimes translate to machine code, is not zero-width.
+	unsafePoint    bool   // this op is an unsafe point, i.e. not safe for async preemption
 	symEffect      string // effect this op has on symbol in aux
 	scale          uint8  // amd64/386 indexed load scale
 }
@@ -325,6 +326,9 @@ func genOp() {
 			if v.zeroWidth {
 				fmt.Fprintln(w, "zeroWidth: true,")
 			}
+			if v.unsafePoint {
+				fmt.Fprintln(w, "unsafePoint: true,")
+			}
 			needEffect := strings.HasPrefix(v.aux, "Sym")
 			if v.symEffect != "" {
 				if !needEffect {
@@ -401,6 +405,7 @@
 
 	fmt.Fprintln(w, "func (o Op) SymEffect() SymEffect { return opcodeTable[o].symEffect }")
 	fmt.Fprintln(w, "func (o Op) IsCall() bool { return opcodeTable[o].call }")
+	fmt.Fprintln(w, "func (o Op) UnsafePoint() bool { return opcodeTable[o].unsafePoint }")
 
 	// generate registers
 	for _, a := range archs {

src/cmd/compile/internal/ssa/op.go

Lines changed: 1 addition & 0 deletions

@@ -36,6 +36,7 @@ type opInfo struct {
 	usesScratch    bool      // this op requires scratch memory space
 	hasSideEffects bool      // for "reasons", not to be eliminated. E.g., atomic store, #19182.
 	zeroWidth      bool      // op never translates into any machine code. example: copy, which may sometimes translate to machine code, is not zero-width.
+	unsafePoint    bool      // this op is an unsafe point, i.e. not safe for async preemption
 	symEffect      SymEffect // effect this op has on symbol in aux
 	scale          uint8     // amd64/386 indexed load scale
 }

src/cmd/compile/internal/ssa/opGen.go

Lines changed: 9 additions & 0 deletions
(Generated file; the diff is not rendered by default.)
