backend: stop allocating ValueDefinitions #2288

Merged 1 commit on Jul 10, 2024
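In short: rather than filling a `ssaValueDefinitions []SSAValueDefinition` slice during register assignment and handing out `*SSAValueDefinition` pointers, the compiler now assembles each definition on demand from the builder's `ValuesInfo` and `InstructionOfValue`, and every consumer takes it by value. A minimal sketch of the idea with simplified, made-up stand-in types (not the actual wazevo API):

```go
package main

import "fmt"

// Stand-ins for the real wazevo types, which live under
// internal/engine/wazevo/backend and .../ssa.
type value int

type instruction struct{ opcode string }

type ssaValueDefinition struct {
	v        value
	instr    *instruction // nil when the value is a block parameter
	refCount uint32
}

type compiler struct {
	instrOfValue map[value]*instruction // stand-in for ssa.Builder.InstructionOfValue
	refCounts    map[value]uint32       // stand-in for the RefCount of ssa.ValueInfo
}

// valueDefinition mirrors the new Compiler.ValueDefinition: the record is
// assembled on demand and returned by value, so it lives on the caller's
// stack instead of in a long-lived []SSAValueDefinition slice.
func (c *compiler) valueDefinition(v value) ssaValueDefinition {
	return ssaValueDefinition{
		v:        v,
		instr:    c.instrOfValue[v],
		refCount: c.refCounts[v],
	}
}

func main() {
	c := &compiler{
		instrOfValue: map[value]*instruction{1: {opcode: "Iadd"}},
		refCounts:    map[value]uint32{1: 1},
	}
	def := c.valueDefinition(1) // a plain copy; nothing escapes to the heap
	fmt.Println(def.instr.opcode, def.refCount)
}
```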
42 changes: 15 additions & 27 deletions internal/engine/wazevo/backend/compiler.go
@@ -69,7 +69,7 @@ type Compiler interface {
AllocateVReg(typ ssa.Type) regalloc.VReg

// ValueDefinition returns the definition of the given value.
-ValueDefinition(ssa.Value) *SSAValueDefinition
+ValueDefinition(ssa.Value) SSAValueDefinition

// VRegOf returns the virtual register of the given ssa.Value.
VRegOf(value ssa.Value) regalloc.VReg
@@ -79,13 +79,13 @@ type Compiler interface {

// MatchInstr returns true if the given definition is from an instruction with the given opcode, the current group ID,
// and a refcount of 1. That means, the instruction can be merged/swapped within the current instruction group.
-MatchInstr(def *SSAValueDefinition, opcode ssa.Opcode) bool
+MatchInstr(def SSAValueDefinition, opcode ssa.Opcode) bool

// MatchInstrOneOf is the same as MatchInstr but for multiple opcodes. If it matches one of ssa.Opcode,
// this returns the opcode. Otherwise, this returns ssa.OpcodeInvalid.
//
// Note: caller should be careful to avoid excessive allocation on opcodes slice.
-MatchInstrOneOf(def *SSAValueDefinition, opcodes []ssa.Opcode) ssa.Opcode
+MatchInstrOneOf(def SSAValueDefinition, opcodes []ssa.Opcode) ssa.Opcode

// AddRelocationInfo appends the relocation information for the function reference at the current buffer offset.
AddRelocationInfo(funcRef ssa.FuncRef)
@@ -126,8 +126,7 @@ type compiler struct {
nextVRegID regalloc.VRegID
// ssaValueToVRegs maps ssa.ValueID to regalloc.VReg.
ssaValueToVRegs [] /* VRegID to */ regalloc.VReg
-// ssaValueDefinitions maps ssa.ValueID to its definition.
-ssaValueDefinitions []SSAValueDefinition
+ssaValuesInfo []ssa.ValueInfo
// returnVRegs is the list of virtual registers that store the return values.
returnVRegs []regalloc.VReg
varEdges [][2]regalloc.VReg
@@ -204,14 +203,10 @@ func (c *compiler) setCurrentGroupID(gid ssa.InstructionGroupID) {
// assignVirtualRegisters assigns a virtual register to each ssa.ValueID Valid in the ssa.Builder.
func (c *compiler) assignVirtualRegisters() {
builder := c.ssaBuilder
-refCounts := builder.ValuesInfo()
+c.ssaValuesInfo = builder.ValuesInfo()

-need := len(refCounts)
-if need >= len(c.ssaValueToVRegs) {
-c.ssaValueToVRegs = append(c.ssaValueToVRegs, make([]regalloc.VReg, need+1)...)
-}
-if need >= len(c.ssaValueDefinitions) {
-c.ssaValueDefinitions = append(c.ssaValueDefinitions, make([]SSAValueDefinition, need+1)...)
+if diff := len(c.ssaValuesInfo) - len(c.ssaValueToVRegs); diff > 0 {
+c.ssaValueToVRegs = append(c.ssaValueToVRegs, make([]regalloc.VReg, diff+1)...)
}

for blk := builder.BlockIteratorReversePostOrderBegin(); blk != nil; blk = builder.BlockIteratorReversePostOrderNext() {
@@ -222,7 +217,6 @@ func (c *compiler) assignVirtualRegisters() {
typ := p.Type()
vreg := c.AllocateVReg(typ)
c.ssaValueToVRegs[pid] = vreg
-c.ssaValueDefinitions[pid] = SSAValueDefinition{V: p}
c.ssaTypeOfVRegID[vreg.ID()] = p.Type()
}

@@ -235,23 +229,13 @@ func (c *compiler) assignVirtualRegisters() {
typ := r.Type()
vReg := c.AllocateVReg(typ)
c.ssaValueToVRegs[id] = vReg
-c.ssaValueDefinitions[id] = SSAValueDefinition{
-Instr: cur,
-V: r,
-RefCount: refCounts[id].RefCount,
-}
c.ssaTypeOfVRegID[vReg.ID()] = ssaTyp
}
for _, r := range rs {
id := r.ID()
ssaTyp := r.Type()
vReg := c.AllocateVReg(ssaTyp)
c.ssaValueToVRegs[id] = vReg
-c.ssaValueDefinitions[id] = SSAValueDefinition{
-Instr: cur,
-V: r,
-RefCount: refCounts[id].RefCount,
-}
c.ssaTypeOfVRegID[vReg.ID()] = ssaTyp
}
}
@@ -293,8 +277,12 @@ func (c *compiler) Init() {
}

// ValueDefinition implements Compiler.ValueDefinition.
-func (c *compiler) ValueDefinition(value ssa.Value) *SSAValueDefinition {
-return &c.ssaValueDefinitions[value.ID()]
+func (c *compiler) ValueDefinition(value ssa.Value) SSAValueDefinition {
+return SSAValueDefinition{
+V: value,
+Instr: c.ssaBuilder.InstructionOfValue(value),
+RefCount: c.ssaValuesInfo[value.ID()].RefCount,
+}
}

// VRegOf implements Compiler.VRegOf.
@@ -313,7 +301,7 @@ func (c *compiler) TypeOf(v regalloc.VReg) ssa.Type {
}

// MatchInstr implements Compiler.MatchInstr.
-func (c *compiler) MatchInstr(def *SSAValueDefinition, opcode ssa.Opcode) bool {
+func (c *compiler) MatchInstr(def SSAValueDefinition, opcode ssa.Opcode) bool {
instr := def.Instr
return def.IsFromInstr() &&
instr.Opcode() == opcode &&
@@ -322,7 +310,7 @@ func (c *compiler) MatchInstr(def *SSAValueDefinition, opcode ssa.Opcode) bool {
}

// MatchInstrOneOf implements Compiler.MatchInstrOneOf.
-func (c *compiler) MatchInstrOneOf(def *SSAValueDefinition, opcodes []ssa.Opcode) ssa.Opcode {
+func (c *compiler) MatchInstrOneOf(def SSAValueDefinition, opcodes []ssa.Opcode) ssa.Opcode {
instr := def.Instr
if !def.IsFromInstr() {
return ssa.OpcodeInvalid
6 changes: 3 additions & 3 deletions internal/engine/wazevo/backend/compiler_lower.go
@@ -152,12 +152,12 @@ func (c *compiler) lowerBlockArguments(args []ssa.Value, succ ssa.BasicBlock) {
src := args[i]

dstReg := c.VRegOf(dst)
-srcDef := c.ssaValueDefinitions[src.ID()]
-if srcDef.IsFromInstr() && srcDef.Instr.Constant() {
+srcInstr := c.ssaBuilder.InstructionOfValue(src)
+if srcInstr != nil && srcInstr.Constant() {
c.constEdges = append(c.constEdges, struct {
cInst *ssa.Instruction
dst regalloc.VReg
-}{cInst: srcDef.Instr, dst: dstReg})
+}{cInst: srcInstr, dst: dstReg})
} else {
srcReg := c.VRegOf(src)
// Even when the src=dst, insert the move so that we can keep such registers keep-alive.
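Note the pattern in the hunk above: with the per-value definition records gone, call sites that only care about the defining instruction query the builder directly, and a `nil` result (a block parameter has no defining instruction) replaces the old `IsFromInstr()` check. A toy model of that lookup, with made-up stand-in types:

```go
package main

import "fmt"

type instruction struct{ isConst bool }

func (i *instruction) constant() bool { return i.isConst }

// builder is a toy stand-in for ssa.Builder: it knows the defining
// instruction of each value; block parameters have none.
type builder struct{ defs map[int]*instruction }

func (b *builder) instructionOfValue(v int) *instruction { return b.defs[v] }

func main() {
	b := &builder{defs: map[int]*instruction{
		0: {isConst: true}, // v0: defined by a constant instruction
		// v1: a block parameter, so no entry.
	}}
	for v := 0; v <= 1; v++ {
		if instr := b.instructionOfValue(v); instr != nil && instr.constant() {
			fmt.Printf("v%d: inline the constant\n", v)
		} else {
			fmt.Printf("v%d: go through its virtual register\n", v)
		}
	}
}
```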
3 changes: 0 additions & 3 deletions internal/engine/wazevo/backend/compiler_lower_test.go
@@ -57,7 +57,6 @@ func TestCompiler_lowerBlockArguments(t *testing.T) {
}

c := newCompiler(context.Background(), m, builder)
-c.ssaValueDefinitions = []SSAValueDefinition{{Instr: i1}, {Instr: i2}, {Instr: f1}, {Instr: f2}}
c.ssaValueToVRegs = []regalloc.VReg{0, 1, 2, 3, 4, 5, 6, 7}
return c, []ssa.Value{i1.Return(), i2.Return(), f1.Return(), f2.Return()}, succ, func(t *testing.T) {
require.Equal(t, 4, len(insertedConstInstructions))
@@ -85,7 +84,6 @@ func TestCompiler_lowerBlockArguments(t *testing.T) {
insertMoves = append(insertMoves, struct{ src, dst regalloc.VReg }{src: src, dst: dst})
}}
c := newCompiler(context.Background(), m, builder)
-c.ssaValueDefinitions = []SSAValueDefinition{{}, {}, {}, {}}
c.ssaValueToVRegs = []regalloc.VReg{0, 1, 2, 3}
c.nextVRegID = 100 // Temporary reg should start with 100.
return c, []ssa.Value{v2, v1, v3 /* Swaps v1, v2 and pass v3 as-is. */}, blk, func(t *testing.T) {
@@ -123,7 +121,6 @@ func TestCompiler_lowerBlockArguments(t *testing.T) {
insertMoves = append(insertMoves, struct{ src, dst regalloc.VReg }{src: src, dst: dst})
}}
c := newCompiler(context.Background(), m, builder)
-c.ssaValueDefinitions = []SSAValueDefinition{{}, {}}
c.ssaValueToVRegs = []regalloc.VReg{0, 1}
return c, []ssa.Value{add.Return()}, blk, func(t *testing.T) {
require.Equal(t, 1, len(insertMoves))
2 changes: 1 addition & 1 deletion internal/engine/wazevo/backend/isa/amd64/lower_mem.go
@@ -130,7 +130,7 @@ func (m *machine) lowerAddendsToAmode(x, y addend, offBase uint32) *amode {
}
}

-func (m *machine) lowerAddend(x *backend.SSAValueDefinition) addend {
+func (m *machine) lowerAddend(x backend.SSAValueDefinition) addend {
if !x.IsFromInstr() {
return addend{m.c.VRegOf(x.V), 0, 0}
}
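For `lowerAddend`, the interesting consequence is that `IsFromInstr()` must keep working on a plain value, with no pointer to compare against nil. Presumably it inspects the `Instr` field, which the new `ValueDefinition` leaves nil for block parameters; a toy illustration of that assumption with stand-in types:

```go
package main

import "fmt"

type instruction struct{}

// ssaValueDefinition is a stand-in; the real backend.SSAValueDefinition
// carries V, Instr, and RefCount as shown in the diffs above.
type ssaValueDefinition struct {
	instr *instruction
}

// isFromInstr works on the value receiver: block parameters get a nil
// instr, so the zero value of the struct correctly reports false.
func (d ssaValueDefinition) isFromInstr() bool { return d.instr != nil }

func main() {
	param := ssaValueDefinition{}                           // block parameter
	fromInstr := ssaValueDefinition{instr: &instruction{}}  // instruction result
	fmt.Println(param.isFromInstr(), fromInstr.isFromInstr()) // false true
}
```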
52 changes: 26 additions & 26 deletions internal/engine/wazevo/backend/isa/amd64/lower_mem_test.go
@@ -34,9 +34,9 @@ func TestMachine_lowerToAddressMode(t *testing.T) {
iadd := b.AllocateInstruction().AsIadd(iconst1.Return(), iconst2.Return()).Insert(b)
ptr = iadd.Return()
offset = 3
-ctx.definitions[iconst1.Return()] = &backend.SSAValueDefinition{Instr: iconst1}
-ctx.definitions[iconst2.Return()] = &backend.SSAValueDefinition{Instr: iconst2}
-ctx.definitions[ptr] = &backend.SSAValueDefinition{Instr: iadd}
+ctx.definitions[iconst1.Return()] = backend.SSAValueDefinition{Instr: iconst1}
+ctx.definitions[iconst2.Return()] = backend.SSAValueDefinition{Instr: iconst2}
+ctx.definitions[ptr] = backend.SSAValueDefinition{Instr: iadd}
return
},
insts: []string{
@@ -52,10 +52,10 @@ func TestMachine_lowerToAddressMode(t *testing.T) {
iadd := b.AllocateInstruction().AsIadd(iconst1.Return(), p).Insert(b)
ptr = iadd.Return()
offset = 3
-ctx.definitions[iconst1.Return()] = &backend.SSAValueDefinition{Instr: iconst1}
+ctx.definitions[iconst1.Return()] = backend.SSAValueDefinition{Instr: iconst1}
ctx.vRegMap[p] = raxVReg
-ctx.definitions[p] = &backend.SSAValueDefinition{V: p}
-ctx.definitions[ptr] = &backend.SSAValueDefinition{Instr: iadd}
+ctx.definitions[p] = backend.SSAValueDefinition{V: p}
+ctx.definitions[ptr] = backend.SSAValueDefinition{Instr: iadd}
return
},
am: newAmodeImmReg(1+3, raxVReg),
@@ -69,10 +69,10 @@ func TestMachine_lowerToAddressMode(t *testing.T) {
ptr = iadd.Return()
offset = 3
ctx.vRegMap[p1] = raxVReg
-ctx.definitions[p1] = &backend.SSAValueDefinition{V: p1}
+ctx.definitions[p1] = backend.SSAValueDefinition{V: p1}
ctx.vRegMap[p2] = rcxVReg
-ctx.definitions[p2] = &backend.SSAValueDefinition{V: p2}
-ctx.definitions[ptr] = &backend.SSAValueDefinition{Instr: iadd}
+ctx.definitions[p2] = backend.SSAValueDefinition{V: p2}
+ctx.definitions[ptr] = backend.SSAValueDefinition{Instr: iadd}
return
},
am: newAmodeRegRegShift(3, raxVReg, rcxVReg, 0),
@@ -83,7 +83,7 @@ func TestMachine_lowerToAddressMode(t *testing.T) {
ptr = b.CurrentBlock().AddParam(b, ssa.TypeI64)
offset = 1 << 31
ctx.vRegMap[ptr] = raxVReg
-ctx.definitions[ptr] = &backend.SSAValueDefinition{V: ptr}
+ctx.definitions[ptr] = backend.SSAValueDefinition{V: ptr}
return
},
insts: []string{
@@ -98,8 +98,8 @@ func TestMachine_lowerToAddressMode(t *testing.T) {
in: func(ctx *mockCompiler, b ssa.Builder, m *machine) (ptr ssa.Value, offset uint32) {
iconst32 := b.AllocateInstruction().AsIconst32(123).Insert(b)
uextend := b.AllocateInstruction().AsUExtend(iconst32.Return(), 32, 64).Insert(b)
-ctx.definitions[iconst32.Return()] = &backend.SSAValueDefinition{Instr: iconst32}
-ctx.definitions[uextend.Return()] = &backend.SSAValueDefinition{Instr: uextend}
+ctx.definitions[iconst32.Return()] = backend.SSAValueDefinition{Instr: iconst32}
+ctx.definitions[uextend.Return()] = backend.SSAValueDefinition{Instr: uextend}
return uextend.Return(), 0
},
insts: []string{
@@ -114,9 +114,9 @@ func TestMachine_lowerToAddressMode(t *testing.T) {
iconst64 := b.AllocateInstruction().AsIconst64(2).Insert(b)
ishl := b.AllocateInstruction().AsIshl(p, iconst64.Return()).Insert(b)
ctx.vRegMap[p] = raxVReg
-ctx.definitions[p] = &backend.SSAValueDefinition{V: p}
-ctx.definitions[iconst64.Return()] = &backend.SSAValueDefinition{Instr: iconst64}
-ctx.definitions[ishl.Return()] = &backend.SSAValueDefinition{Instr: ishl}
+ctx.definitions[p] = backend.SSAValueDefinition{V: p}
+ctx.definitions[iconst64.Return()] = backend.SSAValueDefinition{Instr: iconst64}
+ctx.definitions[ishl.Return()] = backend.SSAValueDefinition{Instr: ishl}
return ishl.Return(), 1 << 30
},
insts: []string{
@@ -133,12 +133,12 @@ func TestMachine_lowerToAddressMode(t *testing.T) {
ishl := b.AllocateInstruction().AsIshl(p1, const2.Return()).Insert(b)
iadd := b.AllocateInstruction().AsIadd(p2, ishl.Return()).Insert(b)
ctx.vRegMap[p1] = raxVReg
-ctx.definitions[p1] = &backend.SSAValueDefinition{V: p1}
+ctx.definitions[p1] = backend.SSAValueDefinition{V: p1}
ctx.vRegMap[p2] = rcxVReg
-ctx.definitions[p2] = &backend.SSAValueDefinition{V: p2}
-ctx.definitions[const2.Return()] = &backend.SSAValueDefinition{Instr: const2}
-ctx.definitions[ishl.Return()] = &backend.SSAValueDefinition{Instr: ishl}
-ctx.definitions[iadd.Return()] = &backend.SSAValueDefinition{Instr: iadd}
+ctx.definitions[p2] = backend.SSAValueDefinition{V: p2}
+ctx.definitions[const2.Return()] = backend.SSAValueDefinition{Instr: const2}
+ctx.definitions[ishl.Return()] = backend.SSAValueDefinition{Instr: ishl}
+ctx.definitions[iadd.Return()] = backend.SSAValueDefinition{Instr: iadd}
return iadd.Return(), 1 << 30
},
am: newAmodeRegRegShift(1<<30, rcxVReg, raxVReg, 2),
@@ -179,7 +179,7 @@ func TestMachine_lowerAddendFromInstr(t *testing.T) {
name: "uextend const32",
in: func(ctx *mockCompiler, b ssa.Builder, m *machine) *ssa.Instruction {
iconst32 := b.AllocateInstruction().AsIconst32(123).Insert(b)
-ctx.definitions[iconst32.Return()] = &backend.SSAValueDefinition{Instr: iconst32}
+ctx.definitions[iconst32.Return()] = backend.SSAValueDefinition{Instr: iconst32}
return b.AllocateInstruction().AsUExtend(iconst32.Return(), 32, 64).Insert(b)
},
exp: addend{regalloc.VRegInvalid, 123, 0},
@@ -189,7 +189,7 @@ func TestMachine_lowerAddendFromInstr(t *testing.T) {
in: func(ctx *mockCompiler, b ssa.Builder, m *machine) *ssa.Instruction {
p := b.CurrentBlock().AddParam(b, ssa.TypeI32)
ctx.vRegMap[p] = raxVReg
-ctx.definitions[p] = &backend.SSAValueDefinition{V: p}
+ctx.definitions[p] = backend.SSAValueDefinition{V: p}
return b.AllocateInstruction().AsUExtend(p, 32, 64).Insert(b)
},
exp: addend{raxVReg, 0, 0},
@@ -199,7 +199,7 @@ func TestMachine_lowerAddendFromInstr(t *testing.T) {
in: func(ctx *mockCompiler, b ssa.Builder, m *machine) *ssa.Instruction {
p := b.CurrentBlock().AddParam(b, ssa.TypeI32)
ctx.vRegMap[p] = raxVReg
-ctx.definitions[p] = &backend.SSAValueDefinition{V: p}
+ctx.definitions[p] = backend.SSAValueDefinition{V: p}
return b.AllocateInstruction().AsUExtend(p, 32, 64).Insert(b)
},
exp: addend{raxVReg, 0, 0},
@@ -208,7 +208,7 @@ func TestMachine_lowerAddendFromInstr(t *testing.T) {
name: "sextend const32",
in: func(ctx *mockCompiler, b ssa.Builder, m *machine) *ssa.Instruction {
iconst32 := b.AllocateInstruction().AsIconst32(123).Insert(b)
-ctx.definitions[iconst32.Return()] = &backend.SSAValueDefinition{Instr: iconst32}
+ctx.definitions[iconst32.Return()] = backend.SSAValueDefinition{Instr: iconst32}
return b.AllocateInstruction().AsSExtend(iconst32.Return(), 32, 64).Insert(b)
},
exp: addend{regalloc.VRegInvalid, 123, 0},
@@ -218,7 +218,7 @@ func TestMachine_lowerAddendFromInstr(t *testing.T) {
in: func(ctx *mockCompiler, b ssa.Builder, m *machine) *ssa.Instruction {
p := b.CurrentBlock().AddParam(b, ssa.TypeI32)
ctx.vRegMap[p] = raxVReg
-ctx.definitions[p] = &backend.SSAValueDefinition{V: p}
+ctx.definitions[p] = backend.SSAValueDefinition{V: p}
return b.AllocateInstruction().AsSExtend(p, 32, 64).Insert(b)
},
exp: addend{raxVReg, 0, 0},
@@ -228,7 +228,7 @@ func TestMachine_lowerAddendFromInstr(t *testing.T) {
in: func(ctx *mockCompiler, b ssa.Builder, m *machine) *ssa.Instruction {
p := b.CurrentBlock().AddParam(b, ssa.TypeI32)
ctx.vRegMap[p] = raxVReg
-ctx.definitions[p] = &backend.SSAValueDefinition{V: p}
+ctx.definitions[p] = backend.SSAValueDefinition{V: p}
return b.AllocateInstruction().AsSExtend(p, 32, 64).Insert(b)
},
exp: addend{raxVReg, 0, 0},
15 changes: 9 additions & 6 deletions internal/engine/wazevo/backend/isa/amd64/machine.go
@@ -1601,21 +1601,24 @@ func (m *machine) lowerExitIfTrueWithCode(execCtx regalloc.VReg, cond ssa.Value,
jmpIf.asJmpIf(condFromSSAIntCmpCond(c).invert(), newOperandLabel(l))
}

-func (m *machine) tryLowerBandToFlag(x, y *backend.SSAValueDefinition) (ok bool) {
-var target *backend.SSAValueDefinition
+func (m *machine) tryLowerBandToFlag(x, y backend.SSAValueDefinition) (ok bool) {
+var target backend.SSAValueDefinition
+var got bool
if x.IsFromInstr() && x.Instr.Constant() && x.Instr.ConstantVal() == 0 {
if m.c.MatchInstr(y, ssa.OpcodeBand) {
target = y
+got = true
}
}

if y.IsFromInstr() && y.Instr.Constant() && y.Instr.ConstantVal() == 0 {
if m.c.MatchInstr(x, ssa.OpcodeBand) {
target = x
+got = true
}
}

-if target == nil {
+if !got {
return false
}

@@ -1947,9 +1950,9 @@ func (m *machine) lowerCall(si *ssa.Instruction) {

// callerGenVRegToFunctionArg is the opposite of GenFunctionArgToVReg, which is used to generate the
// caller side of the function call.
-func (m *machine) callerGenVRegToFunctionArg(a *backend.FunctionABI, argIndex int, reg regalloc.VReg, def *backend.SSAValueDefinition, stackSlotSize int64) {
+func (m *machine) callerGenVRegToFunctionArg(a *backend.FunctionABI, argIndex int, reg regalloc.VReg, def backend.SSAValueDefinition, stackSlotSize int64) {
arg := &a.Args[argIndex]
-if def != nil && def.IsFromInstr() {
+if def.IsFromInstr() {
// Constant instructions are inlined.
if inst := def.Instr; inst.Constant() {
m.insertLoadConstant(inst, reg)
@@ -2210,7 +2213,7 @@ func (m *machine) ResolveRelocations(refToBinaryOffset []int, binary []byte, rel
// CallTrampolineIslandInfo implements backend.Machine CallTrampolineIslandInfo.
func (m *machine) CallTrampolineIslandInfo(_ int) (_, _ int, _ error) { return }

-func (m *machine) lowerIcmpToFlag(xd, yd *backend.SSAValueDefinition, _64 bool) {
+func (m *machine) lowerIcmpToFlag(xd, yd backend.SSAValueDefinition, _64 bool) {
x := m.getOperand_Reg(xd)
y := m.getOperand_Mem_Imm32_Reg(yd)
cmp := m.allocateInstr().asCmpRmiR(true, y, x.reg(), _64)
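One detail worth calling out from the machine.go hunks above: a `*backend.SSAValueDefinition` could be compared against `nil` to tell whether a match was found, but a value type cannot, so `tryLowerBandToFlag` now tracks that with a separate `got` boolean. A reduced, hypothetical sketch of the same pattern:

```go
package main

import "fmt"

type def struct{ isZeroConst, isBand bool }

// pickBandOperand mimics the selection logic in tryLowerBandToFlag:
// since a struct value has no nil sentinel, an explicit boolean
// records whether either operand matched.
func pickBandOperand(x, y def) (target def, got bool) {
	if x.isZeroConst && y.isBand {
		target, got = y, true
	}
	if y.isZeroConst && x.isBand {
		target, got = x, true
	}
	return
}

func main() {
	if t, ok := pickBandOperand(def{isZeroConst: true}, def{isBand: true}); ok {
		fmt.Printf("matched band operand: %+v\n", t)
	} else {
		fmt.Println("no match")
	}
}
```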