diff --git a/README.md b/README.md index 07308995743..795046d436f 100644 --- a/README.md +++ b/README.md @@ -215,8 +215,8 @@ default. However, the following status covers what's currently possible with | sign-extension-ops | ✅ | | multi-value | ✅ | | JS-BigInt-integration | N/A | -| reference-types | ❌ | -| bulk-memory-operations | ❌ | +| reference-types | 👷‍♂️ | +| bulk-memory-operations | ✅ | | simd | ❌ | Note: While the above are specified in a WebAssembly GitHub repository, they diff --git a/internal/asm/arm64/impl.go b/internal/asm/arm64/impl.go index f9cfc25943c..a46f5a363df 100644 --- a/internal/asm/arm64/impl.go +++ b/internal/asm/arm64/impl.go @@ -2062,6 +2062,25 @@ func (a *AssemblerImpl) EncodeConstToRegister(n *NodeImpl) (err error) { 0b01_000000 | byte(c), 0b110_10011, }) + case LSL: + if c == 0 { + err = errors.New("LSL with zero constant should be optimized out") + return + } else if c < 0 || c > 63 { + err = fmt.Errorf("LSL requires immediate to be within 0 to 63, but got %d", c) + return + } + + // LSL(immediate) is an alias of UBFM + // https://developer.arm.com/documentation/ddi0596/2021-12/Base-Instructions/LSL--immediate---Logical-Shift-Left--immediate---an-alias-of-UBFM- + cb := byte(c) + a.Buf.Write([]byte{ + (dstRegBits << 5) | dstRegBits, + (0b111111-cb)<<2 | dstRegBits>>3, + 0b01_000000 | (64 - cb), + 0b110_10011, + }) + default: return errorEncodingUnsupported(n) } diff --git a/internal/integration_test/asm/arm64_debug/impl_test.go b/internal/integration_test/asm/arm64_debug/impl_test.go index 10bfb0287bc..f975bc5cb02 100644 --- a/internal/integration_test/asm/arm64_debug/impl_test.go +++ b/internal/integration_test/asm/arm64_debug/impl_test.go @@ -679,6 +679,10 @@ func TestAssemblerImpl_EncodeConstToRegister(t *testing.T) { inst: asm_arm64.MOVD, consts: consts64, }, + { + inst: asm_arm64.LSL, + consts: []int64{1, 2, 4, 16, 31, 32, 63}, + }, { inst: asm_arm64.LSR, consts: []int64{1, 2, 4, 16, 31, 32, 63}, @@ -694,7 +698,7 @@ func TestAssemblerImpl_EncodeConstToRegister(t *testing.T) { t.Run(asm_arm64.RegisterName(r), func(t *testing.T) { for _, c := range tc.consts { var cs = []int64{c} - if tc.inst != asm_arm64.LSR && c != 0 { + if tc.inst != asm_arm64.LSR && tc.inst != asm_arm64.LSL && c != 0 { cs = append(cs, -c) } for _, c := range cs { diff --git a/internal/integration_test/post1_0/bulk-memory-operations/spec_test.go b/internal/integration_test/post1_0/bulk-memory-operations/spec_test.go index a7f3e24738e..0619f89188a 100644 --- a/internal/integration_test/post1_0/bulk-memory-operations/spec_test.go +++ b/internal/integration_test/post1_0/bulk-memory-operations/spec_test.go @@ -7,6 +7,7 @@ import ( "github.com/tetratelabs/wazero" "github.com/tetratelabs/wazero/internal/testing/require" + "github.com/tetratelabs/wazero/internal/wasmruntime" ) // testCtx is an arbitrary, non-default context. Non-nil also prevents linter errors. 
@@ -17,23 +18,185 @@ func TestBulkMemoryOperations_JIT(t *testing.T) { t.Skip() } testBulkMemoryOperations(t, wazero.NewRuntimeConfigJIT) + testTableCopy(t, wazero.NewRuntimeConfigJIT) + testTableInit(t, wazero.NewRuntimeConfigJIT) + testElemDrop(t, wazero.NewRuntimeConfigJIT) } func TestBulkMemoryOperations_Interpreter(t *testing.T) { testBulkMemoryOperations(t, wazero.NewRuntimeConfigInterpreter) + testTableCopy(t, wazero.NewRuntimeConfigInterpreter) + testTableInit(t, wazero.NewRuntimeConfigInterpreter) + testElemDrop(t, wazero.NewRuntimeConfigInterpreter) } -// bulkMemoryOperationsWasm was compiled from testdata/bulk_memory_operations.wat -//go:embed testdata/bulk_memory_operations.wasm -var bulkMemoryOperationsWasm []byte +var ( + // bulkMemoryOperationsWasm was compiled from testdata/bulk_memory_operations.wat + //go:embed testdata/bulk_memory_operations.wasm + bulkMemoryOperationsWasm []byte + // tableCopyWasm was compiled from testdata/table_copy.wat + //go:embed testdata/table_copy.wasm + tableCopyWasm []byte + // tableInitWasm was compiled from testdata/table_init.wat + //go:embed testdata/table_init.wasm + tableInitWasm []byte + // elemDropWasm was compiled from testdata/elem_drop.wat + //go:embed testdata/elem_drop.wasm + elemDropWasm []byte +) -func testBulkMemoryOperations(t *testing.T, newRuntimeConfig func() *wazero.RuntimeConfig) { +func requireErrorOnBulkMemoryFeatureDisabled(t *testing.T, newRuntimeConfig func() *wazero.RuntimeConfig, bin []byte) { t.Run("disabled", func(t *testing.T) { // bulk-memory-operations is disabled by default. r := wazero.NewRuntimeWithConfig(newRuntimeConfig()) - _, err := r.InstantiateModuleFromCode(testCtx, bulkMemoryOperationsWasm) + _, err := r.InstantiateModuleFromCode(testCtx, bin) require.Error(t, err) }) +} + +func testTableCopy(t *testing.T, newRuntimeConfig func() *wazero.RuntimeConfig) { + t.Run("table.copy", func(t *testing.T) { + + requireErrorOnBulkMemoryFeatureDisabled(t, newRuntimeConfig, tableCopyWasm) + + r := wazero.NewRuntimeWithConfig(newRuntimeConfig().WithFeatureBulkMemoryOperations(true)) + mod, err := r.InstantiateModuleFromCode(testCtx, tableCopyWasm) + require.NoError(t, err) + defer mod.Close(testCtx) + + // Non-overlapping copy. 
+ _, err = mod.ExportedFunction("copy").Call(testCtx, 3, 0, 3) + require.NoError(t, err) + res, err := mod.ExportedFunction("call").Call(testCtx, 3) + require.NoError(t, err) + require.Equal(t, uint64(0), res[0]) + res, err = mod.ExportedFunction("call").Call(testCtx, 4) + require.NoError(t, err) + require.Equal(t, uint64(1), res[0]) + res, err = mod.ExportedFunction("call").Call(testCtx, 5) + require.NoError(t, err) + require.Equal(t, uint64(2), res[0]) + + // src > dest with overlap + _, err = mod.ExportedFunction("copy").Call(testCtx, 0, 1, 3) + require.NoError(t, err) + res, err = mod.ExportedFunction("call").Call(testCtx, 0) + require.NoError(t, err) + require.Equal(t, uint64(1), res[0]) + res, err = mod.ExportedFunction("call").Call(testCtx, 1) + require.NoError(t, err) + require.Equal(t, uint64(2), res[0]) + res, err = mod.ExportedFunction("call").Call(testCtx, 2) + require.NoError(t, err) + require.Equal(t, uint64(0), res[0]) + + // src < dest with overlap + _, err = mod.ExportedFunction("copy").Call(testCtx, 2, 0, 3) + require.NoError(t, err) + res, err = mod.ExportedFunction("call").Call(testCtx, 2) + require.NoError(t, err) + require.Equal(t, uint64(1), res[0]) + res, err = mod.ExportedFunction("call").Call(testCtx, 3) + require.NoError(t, err) + require.Equal(t, uint64(2), res[0]) + res, err = mod.ExportedFunction("call").Call(testCtx, 4) + require.NoError(t, err) + require.Equal(t, uint64(0), res[0]) + + // Copying end at limit should be fine. + _, err = mod.ExportedFunction("copy").Call(testCtx, 6, 8, 2) + require.NoError(t, err) + _, err = mod.ExportedFunction("copy").Call(testCtx, 8, 6, 2) + require.NoError(t, err) + + // Copying zero size at the end of region is valid. + _, err = mod.ExportedFunction("copy").Call(testCtx, 10, 0, 0) + require.NoError(t, err) + _, err = mod.ExportedFunction("copy").Call(testCtx, 0, 10, 0) + require.NoError(t, err) + + // Out of bounds with size zero on outside of table. + _, err = mod.ExportedFunction("copy").Call(testCtx, 11, 0, 0) + require.ErrorIs(t, err, wasmruntime.ErrRuntimeInvalidTableAccess) + _, err = mod.ExportedFunction("copy").Call(testCtx, 0, 11, 0) + require.ErrorIs(t, err, wasmruntime.ErrRuntimeInvalidTableAccess) + }) +} + +func testTableInit(t *testing.T, newRuntimeConfig func() *wazero.RuntimeConfig) { + t.Run("table.init", func(t *testing.T) { + requireErrorOnBulkMemoryFeatureDisabled(t, newRuntimeConfig, tableInitWasm) + + r := wazero.NewRuntimeWithConfig(newRuntimeConfig().WithFeatureBulkMemoryOperations(true)) + mod, err := r.InstantiateModuleFromCode(testCtx, tableInitWasm) + require.NoError(t, err) + defer mod.Close(testCtx) + + // Out of bounds access should raise the runtime error. + _, err = mod.ExportedFunction("init").Call(testCtx, 2, 0, 2) + require.ErrorIs(t, err, wasmruntime.ErrRuntimeInvalidTableAccess) + // And the table still not initialized. + _, err = mod.ExportedFunction("call").Call(testCtx, 2) + require.ErrorIs(t, err, wasmruntime.ErrRuntimeInvalidTableAccess) + + _, err = mod.ExportedFunction("init").Call(testCtx, 0, 1, 2) + require.NoError(t, err) + res, err := mod.ExportedFunction("call").Call(testCtx, 0) + require.NoError(t, err) + require.Equal(t, uint64(1), res[0]) + res, err = mod.ExportedFunction("call").Call(testCtx, 1) + require.NoError(t, err) + require.Equal(t, uint64(0), res[0]) + + // Initialization ending at the limit should be fine. + _, err = mod.ExportedFunction("init").Call(testCtx, 1, 2, 2) + require.NoError(t, err) + // Also, zero length at the end also fine. 
+ _, err = mod.ExportedFunction("init").Call(testCtx, 3, 0, 0) + require.NoError(t, err) + _, err = mod.ExportedFunction("init").Call(testCtx, 0, 4, 0) + require.NoError(t, err) + + // Initialization out side of table with size zero should be trap. + _, err = mod.ExportedFunction("init").Call(testCtx, 4, 0, 0) + require.ErrorIs(t, err, wasmruntime.ErrRuntimeInvalidTableAccess) + // Same goes for element. + _, err = mod.ExportedFunction("init").Call(testCtx, 0, 5, 0) + require.ErrorIs(t, err, wasmruntime.ErrRuntimeInvalidTableAccess) + }) +} + +func testElemDrop(t *testing.T, newRuntimeConfig func() *wazero.RuntimeConfig) { + t.Run("elem.drop", func(t *testing.T) { + requireErrorOnBulkMemoryFeatureDisabled(t, newRuntimeConfig, elemDropWasm) + + r := wazero.NewRuntimeWithConfig(newRuntimeConfig().WithFeatureBulkMemoryOperations(true)) + mod, err := r.InstantiateModuleFromCode(testCtx, elemDropWasm) + require.NoError(t, err) + defer mod.Close(testCtx) + + // Copying the passive element $a into index zero at the table. + _, err = mod.ExportedFunction("init_passive").Call(testCtx, 1) + require.NoError(t, err) + // Droppig same elements should be fine. + _, err = mod.ExportedFunction("drop_passive").Call(testCtx) + require.NoError(t, err) + _, err = mod.ExportedFunction("drop_passive").Call(testCtx) + require.NoError(t, err) + + // Size zero init access to the size zero (dropped) elements should be ok. + _, err = mod.ExportedFunction("init_passive").Call(testCtx, 0) + require.NoError(t, err) + + // Buf size must be zero for such dropped elements. + _, err = mod.ExportedFunction("init_passive").Call(testCtx, 1) + require.ErrorIs(t, err, wasmruntime.ErrRuntimeInvalidTableAccess) + }) +} + +func testBulkMemoryOperations(t *testing.T, newRuntimeConfig func() *wazero.RuntimeConfig) { + requireErrorOnBulkMemoryFeatureDisabled(t, newRuntimeConfig, bulkMemoryOperationsWasm) + t.Run("enabled", func(t *testing.T) { r := wazero.NewRuntimeWithConfig(newRuntimeConfig().WithFeatureBulkMemoryOperations(true)) diff --git a/internal/integration_test/post1_0/bulk-memory-operations/testdata/elem_drop.wasm b/internal/integration_test/post1_0/bulk-memory-operations/testdata/elem_drop.wasm new file mode 100644 index 00000000000..78c0ea32b5e Binary files /dev/null and b/internal/integration_test/post1_0/bulk-memory-operations/testdata/elem_drop.wasm differ diff --git a/internal/integration_test/post1_0/bulk-memory-operations/testdata/elem_drop.wat b/internal/integration_test/post1_0/bulk-memory-operations/testdata/elem_drop.wat new file mode 100644 index 00000000000..e7e3805436a --- /dev/null +++ b/internal/integration_test/post1_0/bulk-memory-operations/testdata/elem_drop.wat @@ -0,0 +1,16 @@ +(module + (table 1 funcref) + (func $f) + (elem $p funcref (ref.func $f)) + (elem $a (table 0) (i32.const 0) func $f) + + (func (export "drop_passive") (elem.drop $p)) + (func (export "init_passive") (param $len i32) + (table.init $p (i32.const 0) (i32.const 0) (local.get $len)) + ) + + (func (export "drop_active") (elem.drop $a)) + (func (export "init_active") (param $len i32) + (table.init $a (i32.const 0) (i32.const 0) (local.get $len)) + ) +) diff --git a/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_copy.wasm b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_copy.wasm new file mode 100644 index 00000000000..0797e746f45 Binary files /dev/null and b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_copy.wasm differ diff --git 
a/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_copy.wat b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_copy.wat new file mode 100644 index 00000000000..128c04016a4 --- /dev/null +++ b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_copy.wat @@ -0,0 +1,17 @@ +(module + (table 10 funcref) + (elem (i32.const 0) $zero $one $two) + (func $zero (result i32) (i32.const 0)) + (func $one (result i32) (i32.const 1)) + (func $two (result i32) (i32.const 2)) + + (func (export "copy") (param i32 i32 i32) + (table.copy + (local.get 0) + (local.get 1) + (local.get 2))) + + (func (export "call") (param i32) (result i32) + (call_indirect (result i32) + (local.get 0))) +) diff --git a/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_init.wasm b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_init.wasm new file mode 100644 index 00000000000..96814c06061 Binary files /dev/null and b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_init.wasm differ diff --git a/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_init.wat b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_init.wat new file mode 100644 index 00000000000..60032bea2b7 --- /dev/null +++ b/internal/integration_test/post1_0/bulk-memory-operations/testdata/table_init.wat @@ -0,0 +1,18 @@ +(module + (table 3 funcref) + (elem funcref + (ref.func $zero) (ref.func $one) (ref.func $zero) (ref.func $one)) + + (func $zero (result i32) (i32.const 0)) + (func $one (result i32) (i32.const 1)) + + (func (export "init") (param i32 i32 i32) + (table.init 0 + (local.get 0) + (local.get 1) + (local.get 2))) + + (func (export "call") (param i32) (result i32) + (call_indirect (result i32) + (local.get 0))) +) diff --git a/internal/integration_test/spectest/spectest.go b/internal/integration_test/spectest/spectest.go index 462df62b6b5..a5605e24b16 100644 --- a/internal/integration_test/spectest/spectest.go +++ b/internal/integration_test/spectest/spectest.go @@ -180,7 +180,7 @@ func (c command) expectedError() (err error) { err = wasmruntime.ErrRuntimeOutOfBoundsMemoryAccess case "indirect call type mismatch", "indirect call": err = wasmruntime.ErrRuntimeIndirectCallTypeMismatch - case "undefined element", "undefined": + case "undefined element", "undefined", "out of bounds table access": err = wasmruntime.ErrRuntimeInvalidTableAccess case "integer overflow": err = wasmruntime.ErrRuntimeIntegerOverflow diff --git a/internal/leb128/leb128_test.go b/internal/leb128/leb128_test.go index 59c2f734192..7a8bcc9a686 100644 --- a/internal/leb128/leb128_test.go +++ b/internal/leb128/leb128_test.go @@ -106,6 +106,7 @@ func TestDecodeUint32(t *testing.T) { {bytes: []byte{0x00}, exp: 0}, {bytes: []byte{0x04}, exp: 4}, {bytes: []byte{0x01}, exp: 1}, + {bytes: []byte{0x80, 0}, exp: 0}, {bytes: []byte{0x80, 0x7f}, exp: 16256}, {bytes: []byte{0xe5, 0x8e, 0x26}, exp: 624485}, {bytes: []byte{0x80, 0x80, 0x80, 0x4f}, exp: 165675008}, diff --git a/internal/modgen/modgen.go b/internal/modgen/modgen.go index e16c2d6c710..e65ae02c2ff 100644 --- a/internal/modgen/modgen.go +++ b/internal/modgen/modgen.go @@ -379,7 +379,7 @@ func (g *generator) genElementSection() { min := table.Min for i := uint32(0); i < g.numElements; i++ { // Elements can't exceed min of table. 
- indexes := make([]wasm.NullableIndex, g.nextRandom().Intn(int(min)+1)) + indexes := make([]*wasm.Index, g.nextRandom().Intn(int(min)+1)) for i := range indexes { v := uint32(g.nextRandom().Intn(numFuncs)) indexes[i] = &v diff --git a/internal/modgen/modgen_test.go b/internal/modgen/modgen_test.go index f072f47721e..7950b4b5d7c 100644 --- a/internal/modgen/modgen_test.go +++ b/internal/modgen/modgen_test.go @@ -587,7 +587,7 @@ func TestGenerator_startSection(t *testing.T) { } func TestGenerator_elementSection(t *testing.T) { - u32AsInitValue := func(in uint32) wasm.NullableIndex { + uint32Ptr := func(in uint32) *uint32 { return &in } @@ -616,7 +616,7 @@ func TestGenerator_elementSection(t *testing.T) { exps: []*wasm.ElementSegment{ { OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: leb128.EncodeInt32(98)}, - Init: []wasm.NullableIndex{u32AsInitValue(0), u32AsInitValue(50)}, + Init: []*wasm.Index{uint32Ptr(0), uint32Ptr(50)}, }, }, }, @@ -630,16 +630,16 @@ func TestGenerator_elementSection(t *testing.T) { exps: []*wasm.ElementSegment{ { OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: leb128.EncodeInt32(0)}, - Init: []wasm.NullableIndex{u32AsInitValue(25), u32AsInitValue(75)}, + Init: []*wasm.Index{uint32Ptr(25), uint32Ptr(75)}, }, { OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: leb128.EncodeInt32(99)}, - Init: []wasm.NullableIndex{u32AsInitValue(3)}, + Init: []*wasm.Index{uint32Ptr(3)}, }, { OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: leb128.EncodeInt32(90)}, - Init: []wasm.NullableIndex{u32AsInitValue(1), u32AsInitValue(2), u32AsInitValue(3), u32AsInitValue(4), - u32AsInitValue(5), u32AsInitValue(6), u32AsInitValue(7), u32AsInitValue(8), u32AsInitValue(9), u32AsInitValue(10)}, + Init: []*wasm.Index{uint32Ptr(1), uint32Ptr(2), uint32Ptr(3), uint32Ptr(4), + uint32Ptr(5), uint32Ptr(6), uint32Ptr(7), uint32Ptr(8), uint32Ptr(9), uint32Ptr(10)}, }, }, }, diff --git a/internal/testing/enginetest/enginetest.go b/internal/testing/enginetest/enginetest.go index 2d8b1ddd5e9..6442b6751ef 100644 --- a/internal/testing/enginetest/enginetest.go +++ b/internal/testing/enginetest/enginetest.go @@ -113,7 +113,7 @@ func RunTestEngine_NewModuleEngine_InitTable(t *testing.T, et EngineTester) { e := et.NewEngine(wasm.Features20191205) t.Run("no table elements", func(t *testing.T) { - table := &wasm.TableInstance{Min: 2, Table: make([]interface{}, 2)} + table := &wasm.TableInstance{Min: 2, References: make([]wasm.Reference, 2)} m := &wasm.Module{ TypeSection: []*wasm.FunctionType{}, FunctionSection: []uint32{}, @@ -128,10 +128,10 @@ func RunTestEngine_NewModuleEngine_InitTable(t *testing.T, et EngineTester) { require.NoError(t, err) // Since there are no elements to initialize, we expect the table to be nil. 
- require.Equal(t, table.Table, make([]interface{}, 2)) + require.Equal(t, table.References, make([]wasm.Reference, 2)) }) t.Run("module-defined function", func(t *testing.T) { - table := &wasm.TableInstance{Min: 2, Table: make([]interface{}, 2)} + table := &wasm.TableInstance{Min: 2, References: make([]wasm.Reference, 2)} m := &wasm.Module{ TypeSection: []*wasm.FunctionType{{}}, @@ -158,11 +158,11 @@ func RunTestEngine_NewModuleEngine_InitTable(t *testing.T, et EngineTester) { require.NoError(t, err) // The functions mapped to the table are defined in the same moduleEngine - require.Equal(t, table.Table, et.InitTable(me, table.Min, tableInit)) + require.Equal(t, table.References, et.InitTable(me, table.Min, tableInit)) }) t.Run("imported function", func(t *testing.T) { - table := &wasm.TableInstance{Min: 2, Table: make([]interface{}, 2)} + table := &wasm.TableInstance{Min: 2, References: make([]wasm.Reference, 2)} importedModule := &wasm.Module{ TypeSection: []*wasm.FunctionType{{}}, @@ -205,11 +205,11 @@ func RunTestEngine_NewModuleEngine_InitTable(t *testing.T, et EngineTester) { require.NoError(t, err) // A moduleEngine's compiled function slice includes its imports, so the offsets is absolute. - require.Equal(t, table.Table, et.InitTable(importing, table.Min, tableInit)) + require.Equal(t, table.References, et.InitTable(importing, table.Min, tableInit)) }) t.Run("mixed functions", func(t *testing.T) { - table := &wasm.TableInstance{Min: 2, Table: make([]interface{}, 2)} + table := &wasm.TableInstance{Min: 2, References: make([]wasm.Reference, 2)} importedModule := &wasm.Module{ TypeSection: []*wasm.FunctionType{{}}, @@ -261,7 +261,7 @@ func RunTestEngine_NewModuleEngine_InitTable(t *testing.T, et EngineTester) { require.NoError(t, err) // A moduleEngine's compiled function slice includes its imports, so the offsets are absolute. - require.Equal(t, table.Table, et.InitTable(importing, table.Min, tableInit)) + require.Equal(t, table.References, et.InitTable(importing, table.Min, tableInit)) }) } diff --git a/internal/wasm/binary/const_expr.go b/internal/wasm/binary/const_expr.go index ee0e2b0d5f2..0051ed335c5 100644 --- a/internal/wasm/binary/const_expr.go +++ b/internal/wasm/binary/const_expr.go @@ -32,6 +32,15 @@ func decodeConstantExpression(r *bytes.Reader) (*wasm.ConstantExpression, error) _, err = ieee754.DecodeFloat64(r) case wasm.OpcodeGlobalGet: _, _, err = leb128.DecodeUint32(r) + case wasm.OpcodeRefNull: + var reftype byte + reftype, err = r.ReadByte() + if reftype != wasm.RefTypeFuncref { + return nil, fmt.Errorf("ref.null instruction in constant expression must be of funcref type but was 0x%x", reftype) + } + case wasm.OpcodeRefFunc: + // Parsing index. + _, _, err = leb128.DecodeUint32(r) default: return nil, fmt.Errorf("%v for const expression opt code: %#x", ErrInvalidByte, b) } diff --git a/internal/wasm/binary/const_expr_test.go b/internal/wasm/binary/const_expr_test.go new file mode 100644 index 00000000000..efccf1cd6d6 --- /dev/null +++ b/internal/wasm/binary/const_expr_test.go @@ -0,0 +1,89 @@ +package binary + +import ( + "bytes" + "strconv" + "testing" + + "github.com/tetratelabs/wazero/internal/testing/require" + "github.com/tetratelabs/wazero/internal/wasm" +) + +func TestDecodeConstantExpression(t *testing.T) { + for i, tc := range []struct { + in []byte + exp *wasm.ConstantExpression + }{ + { + in: []byte{ + wasm.OpcodeRefFunc, + 0x80, 0, // Multi byte zero. 
+ wasm.OpcodeEnd, + }, + exp: &wasm.ConstantExpression{ + Opcode: wasm.OpcodeRefFunc, + Data: []byte{0x80, 0}, + }, + }, + { + in: []byte{ + wasm.OpcodeRefFunc, + 0x80, 0x80, 0x80, 0x4f, // 165675008 in varint encoding. + wasm.OpcodeEnd, + }, + exp: &wasm.ConstantExpression{ + Opcode: wasm.OpcodeRefFunc, + Data: []byte{0x80, 0x80, 0x80, 0x4f}, + }, + }, + { + in: []byte{ + wasm.OpcodeRefNull, + wasm.RefTypeFuncref, + wasm.OpcodeEnd, + }, + exp: &wasm.ConstantExpression{ + Opcode: wasm.OpcodeRefNull, + Data: []byte{ + wasm.RefTypeFuncref, + }, + }, + }, + // TODO: backfill more cases for const and global opcodes. + } { + tc := tc + t.Run(strconv.Itoa(i), func(t *testing.T) { + actual, err := decodeConstantExpression(bytes.NewReader(tc.in)) + require.NoError(t, err) + require.Equal(t, tc.exp, actual) + }) + } +} + +func TestDecodeConstantExpression_errors(t *testing.T) { + for _, tc := range []struct { + in []byte + expectedErr string + }{ + { + in: []byte{ + wasm.OpcodeRefFunc, + 0, + }, + expectedErr: "look for end opcode: EOF", + }, + { + in: []byte{ + wasm.OpcodeRefNull, + wasm.RefTypeExternref, + }, + expectedErr: "ref.null instruction in constant expression must be of funcref type but was 0x6f", + }, + // TODO: backfill more cases for const and global opcodes. + } { + t.Run(tc.expectedErr, func(t *testing.T) { + _, err := decodeConstantExpression(bytes.NewReader(tc.in)) + require.EqualError(t, err, tc.expectedErr) + }) + } +} diff --git a/internal/wasm/binary/element.go b/internal/wasm/binary/element.go index 02ccf46ceae..801e1a6866f 100644 --- a/internal/wasm/binary/element.go +++ b/internal/wasm/binary/element.go @@ -9,7 +9,7 @@ import ( "github.com/tetratelabs/wazero/internal/wasm" ) -func ensureElementTypeFuncRef(r *bytes.Reader) error { +func ensureElementKindFuncRef(r *bytes.Reader) error { elemKind, err := r.ReadByte() if err != nil { return fmt.Errorf("read element prefix: %w", err) @@ -20,13 +20,13 @@ func ensureElementTypeFuncRef(r *bytes.Reader) error { return nil } -func decodeElementInitValueVector(r *bytes.Reader) ([]wasm.NullableIndex, error) { +func decodeElementInitValueVector(r *bytes.Reader) ([]*wasm.Index, error) { vs, _, err := leb128.DecodeUint32(r) if err != nil { return nil, fmt.Errorf("get size of vector: %w", err) } - vec := make([]wasm.NullableIndex, vs) + vec := make([]*wasm.Index, vs) for i := range vec { u32, _, err := leb128.DecodeUint32(r) if err != nil { @@ -37,17 +37,37 @@ func decodeElementInitValueVector(r *bytes.Reader) ([]wasm.NullableIndex, error) return vec, nil } -func decodeElementConstExprVector(r *bytes.Reader) ([]wasm.NullableIndex, error) { - return nil, nil +func decodeElementConstExprVector(r *bytes.Reader) ([]*wasm.Index, error) { + vs, _, err := leb128.DecodeUint32(r) + if err != nil { + return nil, fmt.Errorf("get size of vector: %w", err) + } + vec := make([]*wasm.Index, vs) + for i := range vec { + expr, err := decodeConstantExpression(r) + if err != nil { + return nil, err + } + switch expr.Opcode { + case wasm.OpcodeRefFunc: + v, _, _ := leb128.DecodeUint32(bytes.NewReader(expr.Data)) + vec[i] = &v + case wasm.OpcodeRefNull: + // Translate the ref.null result into the null index, so there's nothing to do here. 
+ default: + return nil, fmt.Errorf("const expr must be either ref.null or ref.func but was %s", wasm.InstructionName(expr.Opcode)) + } + } + return vec, nil } -func decodeElementRefType(r *bytes.Reader) (ret wasm.ElemType, err error) { +func decodeElementRefType(r *bytes.Reader) (ret wasm.RefType, err error) { ret, err = r.ReadByte() if err != nil { err = fmt.Errorf("read element ref type: %w", err) return } - if ret != wasm.ElemTypeFuncref { + if ret != wasm.RefTypeFuncref { // TODO: this will be relaxed to accept externref after reference type proposal impl. err = errors.New("ref type must be funcref for element") } @@ -77,12 +97,12 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { return &wasm.ElementSegment{ OffsetExpr: expr, Init: init, - Type: wasm.ExternTypeFunc, - Mode: wasm.ElemModeActive, + Type: wasm.RefTypeFuncref, + Mode: wasm.ElementModeActive, }, nil case 1: // Prefix 1 requires funcref. - if err = ensureElementTypeFuncRef(r); err != nil { + if err = ensureElementKindFuncRef(r); err != nil { return nil, err } @@ -92,8 +112,8 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { } return &wasm.ElementSegment{ Init: init, - Type: wasm.ExternTypeFunc, - Mode: wasm.ElemModePassive, + Type: wasm.RefTypeFuncref, + Mode: wasm.ElementModePassive, }, nil case 2: tableIndex, _, err := leb128.DecodeUint32(r) @@ -112,7 +132,7 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { } // Prefix 2 requires funcref. - if err = ensureElementTypeFuncRef(r); err != nil { + if err = ensureElementKindFuncRef(r); err != nil { return nil, err } @@ -123,12 +143,12 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { return &wasm.ElementSegment{ OffsetExpr: expr, Init: init, - Type: wasm.ExternTypeFunc, - Mode: wasm.ElemModeActive, + Type: wasm.RefTypeFuncref, + Mode: wasm.ElementModeActive, }, nil case 3: // Prefix 3 requires funcref. 
- if err = ensureElementTypeFuncRef(r); err != nil { + if err = ensureElementKindFuncRef(r); err != nil { return nil, err } init, err := decodeElementInitValueVector(r) @@ -137,8 +157,8 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { } return &wasm.ElementSegment{ Init: init, - Type: wasm.ExternTypeFunc, - Mode: wasm.ElemModeDeclared, + Type: wasm.RefTypeFuncref, + Mode: wasm.ElementModeDeclarative, }, nil case 4: expr, err := decodeConstantExpression(r) @@ -154,22 +174,22 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { return &wasm.ElementSegment{ OffsetExpr: expr, Init: init, - Type: wasm.ExternTypeFunc, - Mode: wasm.ElemModeActive, + Type: wasm.RefTypeFuncref, + Mode: wasm.ElementModeActive, }, nil case 5: - init, err := decodeElementInitValueVector(r) + refType, err := decodeElementRefType(r) if err != nil { return nil, err } - refType, err := decodeElementRefType(r) + init, err := decodeElementConstExprVector(r) if err != nil { return nil, err } return &wasm.ElementSegment{ Init: init, Type: refType, - Mode: wasm.ElemModePassive, + Mode: wasm.ElementModePassive, }, nil case 6: tableIndex, _, err := leb128.DecodeUint32(r) @@ -200,7 +220,7 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { OffsetExpr: expr, Init: init, Type: refType, - Mode: wasm.ElemModeActive, + Mode: wasm.ElementModeActive, }, nil case 7: refType, err := decodeElementRefType(r) @@ -214,7 +234,7 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { return &wasm.ElementSegment{ Init: init, Type: refType, - Mode: wasm.ElemModeDeclared, + Mode: wasm.ElementModeDeclarative, }, nil default: return nil, fmt.Errorf("invalid element segment prefix: 0x%x", prefix) @@ -225,7 +245,7 @@ func decodeElementSegment(r *bytes.Reader) (*wasm.ElementSegment, error) { // // https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#element-section%E2%91%A0 func encodeElement(e *wasm.ElementSegment) (ret []byte) { - if e.Mode == wasm.ElemModeActive { + if e.Mode == wasm.ElementModeActive { // Currently multiple tables are not supported. ret = append(ret, leb128.EncodeInt32(0)...) ret = append(ret, encodeConstantExpression(e.OffsetExpr)...) @@ -233,6 +253,8 @@ func encodeElement(e *wasm.ElementSegment) (ret []byte) { for _, idx := range e.Init { ret = append(ret, leb128.EncodeInt32(int32(*idx))...) 
+ } else { + panic("TODO: support encoding for non-active elements.") + } return } diff --git a/internal/wasm/binary/element_test.go b/internal/wasm/binary/element_test.go new file mode 100644 index 00000000000..209c25d18f3 --- /dev/null +++ b/internal/wasm/binary/element_test.go @@ -0,0 +1,255 @@ +package binary + +import ( + "bytes" + "strconv" + "testing" + + "github.com/tetratelabs/wazero/internal/testing/require" + "github.com/tetratelabs/wazero/internal/wasm" +) + +func uint32Ptr(v uint32) *uint32 { + return &v +} + +func Test_ensureElementKindFuncRef(t *testing.T) { + require.NoError(t, ensureElementKindFuncRef(bytes.NewReader([]byte{0x0}))) + require.Error(t, ensureElementKindFuncRef(bytes.NewReader([]byte{0x1}))) +} + +func Test_decodeElementInitValueVector(t *testing.T) { + for i, tc := range []struct { + in []byte + exp []*wasm.Index + }{ + { + in: []byte{0}, + exp: []*wasm.Index{}, + }, + { + in: []byte{5, 1, 2, 3, 4, 5}, + exp: []*wasm.Index{uint32Ptr(1), uint32Ptr(2), uint32Ptr(3), uint32Ptr(4), uint32Ptr(5)}, + }, + } { + t.Run(strconv.Itoa(i), func(t *testing.T) { + actual, err := decodeElementInitValueVector(bytes.NewReader(tc.in)) + require.NoError(t, err) + require.Equal(t, tc.exp, actual) + }) + } +} + +func Test_decodeElementConstExprVector(t *testing.T) { + for i, tc := range []struct { + in []byte + exp []*wasm.Index + }{ + { + in: []byte{0}, + exp: []*wasm.Index{}, + }, + { + in: []byte{ + 2, // Two indexes. + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + wasm.OpcodeRefFunc, 100, wasm.OpcodeEnd, + }, + exp: []*wasm.Index{nil, uint32Ptr(100)}, + }, + { + in: []byte{ + 3, // Three indexes. + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + wasm.OpcodeRefFunc, + 0x80, 0x80, 0x80, 0x4f, // 165675008 in varint encoding. + wasm.OpcodeEnd, + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + }, + exp: []*wasm.Index{nil, uint32Ptr(165675008), nil}, + }, + } { + t.Run(strconv.Itoa(i), func(t *testing.T) { + actual, err := decodeElementConstExprVector(bytes.NewReader(tc.in)) + require.NoError(t, err) + require.Equal(t, tc.exp, actual) + }) + } +} + +func TestDecodeElementSegment(t *testing.T) { + for _, tc := range []struct { + name string + in []byte + exp *wasm.ElementSegment + }{ + { + name: "legacy", + in: []byte{ + 0, // Prefix (which was previously the table index, fixed to zero) + // Offset const expr. + wasm.OpcodeI32Const, 1, wasm.OpcodeEnd, + // Init vector. + 5, 1, 2, 3, 4, 5, + }, + exp: &wasm.ElementSegment{ + OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: []byte{1}}, + Init: []*wasm.Index{uint32Ptr(1), uint32Ptr(2), uint32Ptr(3), uint32Ptr(4), uint32Ptr(5)}, + Mode: wasm.ElementModeActive, + Type: wasm.RefTypeFuncref, + }, + }, + { + name: "legacy multi byte const expr data", + in: []byte{ + 0, // Prefix (which was previously the table index, fixed to zero) + // Offset const expr. + wasm.OpcodeI32Const, 0x80, 0, wasm.OpcodeEnd, + // Init vector. + 5, 1, 2, 3, 4, 5, + }, + exp: &wasm.ElementSegment{ + OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: []byte{0x80, 0}}, + Init: []*wasm.Index{uint32Ptr(1), uint32Ptr(2), uint32Ptr(3), uint32Ptr(4), uint32Ptr(5)}, + Mode: wasm.ElementModeActive, + Type: wasm.RefTypeFuncref, + }, + }, + { + + name: "passive value vector", + in: []byte{ + 1, // Prefix. + 0, // Elem kind must be fixed to zero. + // Init vector. 
+ 5, 1, 2, 3, 4, 5, + }, + exp: &wasm.ElementSegment{ + Init: []*wasm.Index{uint32Ptr(1), uint32Ptr(2), uint32Ptr(3), uint32Ptr(4), uint32Ptr(5)}, + Mode: wasm.ElementModePassive, + Type: wasm.RefTypeFuncref, + }, + }, + { + + name: "active with table index encoded.", + in: []byte{ + 2, // Prefix. + 0, // Table index which is fixed to zero until reference type proposal. + // Offset const expr. + wasm.OpcodeI32Const, 0x80, 0, wasm.OpcodeEnd, + 0, // Elem kind must be fixed to zero. + // Init vector. + 5, 1, 2, 3, 4, 5, + }, + exp: &wasm.ElementSegment{ + OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: []byte{0x80, 0}}, + Init: []*wasm.Index{uint32Ptr(1), uint32Ptr(2), uint32Ptr(3), uint32Ptr(4), uint32Ptr(5)}, + Mode: wasm.ElementModeActive, + Type: wasm.RefTypeFuncref, + }, + }, + { + name: "declarative", + in: []byte{ + 3, // Prefix. + 0, // Elem kind must be fixed to zero. + // Init vector. + 5, 1, 2, 3, 4, 5, + }, + exp: &wasm.ElementSegment{ + Init: []*wasm.Index{uint32Ptr(1), uint32Ptr(2), uint32Ptr(3), uint32Ptr(4), uint32Ptr(5)}, + Mode: wasm.ElementModeDeclarative, + Type: wasm.RefTypeFuncref, + }, + }, + { + name: "active const expr vector", + in: []byte{ + 4, // Prefix. + // Offset expr. + wasm.OpcodeI32Const, 0x80, 1, wasm.OpcodeEnd, + // Init const expr vector. + 3, // number of const expr. + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + wasm.OpcodeRefFunc, + 0x80, 0x80, 0x80, 0x4f, // 165675008 in varint encoding. + wasm.OpcodeEnd, + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + }, + exp: &wasm.ElementSegment{ + OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: []byte{0x80, 1}}, + Init: []*wasm.Index{nil, uint32Ptr(165675008), nil}, + Mode: wasm.ElementModeActive, + Type: wasm.RefTypeFuncref, + }, + }, + { + name: "passive const expr vector - funcref", + in: []byte{ + 5, // Prefix. + wasm.RefTypeFuncref, + // Init const expr vector. + 3, // number of const expr. + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + wasm.OpcodeRefFunc, + 0x80, 0x80, 0x80, 0x4f, // 165675008 in varint encoding. + wasm.OpcodeEnd, + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + }, + exp: &wasm.ElementSegment{ + Init: []*wasm.Index{nil, uint32Ptr(165675008), nil}, + Mode: wasm.ElementModePassive, + Type: wasm.RefTypeFuncref, + }, + }, + { + name: "active with table index and const expr vector", + in: []byte{ + 6, // Prefix. + 0, // Table index which is fixed to zero until reference type proposal. + // Offset expr. + wasm.OpcodeI32Const, 0x80, 1, wasm.OpcodeEnd, + wasm.RefTypeFuncref, + // Init const expr vector. + 3, // number of const expr. + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + wasm.OpcodeRefFunc, + 0x80, 0x80, 0x80, 0x4f, // 165675008 in varint encoding. + wasm.OpcodeEnd, + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + }, + exp: &wasm.ElementSegment{ + OffsetExpr: &wasm.ConstantExpression{Opcode: wasm.OpcodeI32Const, Data: []byte{0x80, 1}}, + Init: []*wasm.Index{nil, uint32Ptr(165675008), nil}, + Mode: wasm.ElementModeActive, + Type: wasm.RefTypeFuncref, + }, + }, + { + name: "declarative const expr vector", + in: []byte{ + 7, // Prefix. + wasm.RefTypeFuncref, + // Init const expr vector. + 2, // number of const expr. + wasm.OpcodeRefNull, wasm.RefTypeFuncref, wasm.OpcodeEnd, + wasm.OpcodeRefFunc, + 0x80, 0x80, 0x80, 0x4f, // 165675008 in varint encoding. 
+ wasm.OpcodeEnd, + }, + exp: &wasm.ElementSegment{ + Init: []*wasm.Index{nil, uint32Ptr(165675008)}, + Mode: wasm.ElementModeDeclarative, + Type: wasm.RefTypeFuncref, + }, + }, + } { + tc := tc + t.Run(tc.name, func(t *testing.T) { + actual, err := decodeElementSegment(bytes.NewReader(tc.in)) + require.NoError(t, err) + require.Equal(t, actual, tc.exp) + }) + } +} diff --git a/internal/wasm/binary/encoder_test.go b/internal/wasm/binary/encoder_test.go index 49e2b979cb7..90b9a3781c9 100644 --- a/internal/wasm/binary/encoder_test.go +++ b/internal/wasm/binary/encoder_test.go @@ -113,8 +113,8 @@ func TestModule_Encode(t *testing.T) { }, expected: append(append(Magic, version...), wasm.SectionIDTable, 0x04, // 4 bytes in this section - 0x01, // 1 table - wasm.ElemTypeFuncref, 0x0, 0x03, // func, only min: 3 + 0x01, // 1 table + wasm.RefTypeFuncref, 0x0, 0x03, // func, only min: 3 wasm.SectionIDMemory, 0x04, // 4 bytes in this section 0x01, // 1 memory 0x01, 0x01, 0x01, // min and max = 1 diff --git a/internal/wasm/binary/import.go b/internal/wasm/binary/import.go index e756107b07c..174cf9fed96 100644 --- a/internal/wasm/binary/import.go +++ b/internal/wasm/binary/import.go @@ -52,7 +52,7 @@ func encodeImport(i *wasm.Import) []byte { case wasm.ExternTypeFunc: data = append(data, leb128.EncodeUint32(i.DescFunc)...) case wasm.ExternTypeTable: - data = append(data, wasm.ElemTypeFuncref) + data = append(data, wasm.RefTypeFuncref) data = append(data, encodeLimitsType(i.DescTable.Min, i.DescTable.Max)...) case wasm.ExternTypeMemory: maxPtr := &i.DescMem.Max diff --git a/internal/wasm/binary/import_test.go b/internal/wasm/binary/import_test.go index c8cf4502471..81923ca9cf3 100644 --- a/internal/wasm/binary/import_test.go +++ b/internal/wasm/binary/import_test.go @@ -114,7 +114,7 @@ func TestEncodeImport(t *testing.T) { 0x02, 'm', 'y', 0x05, 't', 'a', 'b', 'l', 'e', wasm.ExternTypeTable, - wasm.ElemTypeFuncref, + wasm.RefTypeFuncref, 0x1, 0x1, 0x2, // Limit with max. 
}, }, diff --git a/internal/wasm/binary/section_test.go b/internal/wasm/binary/section_test.go index 522c93656ad..d5c1a306471 100644 --- a/internal/wasm/binary/section_test.go +++ b/internal/wasm/binary/section_test.go @@ -18,8 +18,8 @@ func TestTableSection(t *testing.T) { { name: "min and min with max", input: []byte{ - 0x01, // 1 table - wasm.ElemTypeFuncref, 0x01, 2, 3, // (table 2 3) + 0x01, // 1 table + wasm.RefTypeFuncref, 0x01, 2, 3, // (table 2 3) }, expected: &wasm.Table{Min: 2, Max: &three}, }, @@ -45,9 +45,9 @@ func TestTableSection_Errors(t *testing.T) { { name: "min and min with max", input: []byte{ - 0x02, // 2 tables - wasm.ElemTypeFuncref, 0x00, 0x01, // (table 1) - wasm.ElemTypeFuncref, 0x01, 0x02, 0x03, // (table 2 3) + 0x02, // 2 tables + wasm.RefTypeFuncref, 0x00, 0x01, // (table 1) + wasm.RefTypeFuncref, 0x01, 0x02, 0x03, // (table 2 3) }, expectedErr: "at most one table allowed in module, but read 2", }, diff --git a/internal/wasm/binary/table.go b/internal/wasm/binary/table.go index e81739e78b1..fbf8ab8572b 100644 --- a/internal/wasm/binary/table.go +++ b/internal/wasm/binary/table.go @@ -16,8 +16,8 @@ func decodeTable(r *bytes.Reader) (*wasm.Table, error) { return nil, fmt.Errorf("read leading byte: %v", err) } - if b != wasm.ElemTypeFuncref { - return nil, fmt.Errorf("invalid element type %#x != funcref(%#x)", b, wasm.ElemTypeFuncref) + if b != wasm.RefTypeFuncref { + return nil, fmt.Errorf("invalid element type %#x != funcref(%#x)", b, wasm.RefTypeFuncref) } min, max, err := decodeLimitsType(r) @@ -41,5 +41,5 @@ func decodeTable(r *bytes.Reader) (*wasm.Table, error) { // // See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#binary-table func encodeTable(i *wasm.Table) []byte { - return append([]byte{wasm.ElemTypeFuncref}, encodeLimitsType(i.Min, i.Max)...) + return append([]byte{wasm.RefTypeFuncref}, encodeLimitsType(i.Min, i.Max)...) 
} diff --git a/internal/wasm/binary/table_test.go b/internal/wasm/binary/table_test.go index 23e40bbb22f..1dbf4b2689b 100644 --- a/internal/wasm/binary/table_test.go +++ b/internal/wasm/binary/table_test.go @@ -21,27 +21,27 @@ func TestTableType(t *testing.T) { { name: "min 0", input: &wasm.Table{}, - expected: []byte{wasm.ElemTypeFuncref, 0x0, 0}, + expected: []byte{wasm.RefTypeFuncref, 0x0, 0}, }, { name: "min 0, max 0", input: &wasm.Table{Max: &zero}, - expected: []byte{wasm.ElemTypeFuncref, 0x1, 0, 0}, + expected: []byte{wasm.RefTypeFuncref, 0x1, 0, 0}, }, { name: "min largest", input: &wasm.Table{Min: max}, - expected: []byte{wasm.ElemTypeFuncref, 0x0, 0x80, 0x80, 0x80, 0x40}, + expected: []byte{wasm.RefTypeFuncref, 0x0, 0x80, 0x80, 0x80, 0x40}, }, { name: "min 0, max largest", input: &wasm.Table{Max: &max}, - expected: []byte{wasm.ElemTypeFuncref, 0x1, 0, 0x80, 0x80, 0x80, 0x40}, + expected: []byte{wasm.RefTypeFuncref, 0x1, 0, 0x80, 0x80, 0x80, 0x40}, }, { name: "min largest max largest", input: &wasm.Table{Min: max, Max: &max}, - expected: []byte{wasm.ElemTypeFuncref, 0x1, 0x80, 0x80, 0x80, 0x40, 0x80, 0x80, 0x80, 0x40}, + expected: []byte{wasm.RefTypeFuncref, 0x1, 0x80, 0x80, 0x80, 0x40, 0x80, 0x80, 0x80, 0x40}, }, } @@ -74,17 +74,17 @@ func TestDecodeTableType_Errors(t *testing.T) { }, { name: "max < min", - input: []byte{wasm.ElemTypeFuncref, 0x1, 0x80, 0x80, 0x4, 0}, + input: []byte{wasm.RefTypeFuncref, 0x1, 0x80, 0x80, 0x4, 0}, expectedErr: "table size minimum must not be greater than maximum", }, { name: "min > limit", - input: []byte{wasm.ElemTypeFuncref, 0x0, 0xff, 0xff, 0xff, 0xff, 0xf}, + input: []byte{wasm.RefTypeFuncref, 0x0, 0xff, 0xff, 0xff, 0xff, 0xf}, expectedErr: "table min must be at most 134217728", }, { name: "max > limit", - input: []byte{wasm.ElemTypeFuncref, 0x1, 0, 0xff, 0xff, 0xff, 0xff, 0xf}, + input: []byte{wasm.RefTypeFuncref, 0x1, 0, 0xff, 0xff, 0xff, 0xff, 0xf}, expectedErr: "table max must be at most 134217728", }, } diff --git a/internal/wasm/engine.go b/internal/wasm/engine.go index dc3a62fc118..a9371810c34 100644 --- a/internal/wasm/engine.go +++ b/internal/wasm/engine.go @@ -41,6 +41,7 @@ type ModuleEngine interface { // Call invokes a function instance f with given parameters. Call(ctx context.Context, m *CallContext, f *FunctionInstance, params ...uint64) (results []uint64, err error) - // TODO - GetFunctionReference(index Index) Reference + // CreateFuncElementInstnace creates an ElementInstance whose references are engine-specific function pointers + // corresponding to the given `indexes`. 
+ CreateFuncElementInstnace(indexes []*Index) *ElementInstance } diff --git a/internal/wasm/func_validation.go b/internal/wasm/func_validation.go index 0cb8174fb02..c39c0f018cc 100644 --- a/internal/wasm/func_validation.go +++ b/internal/wasm/func_validation.go @@ -759,7 +759,7 @@ func (m *Module) validateFunctionWithMaxStackValues( var params []ValueType switch miscOpcode { case OpcodeMiscMemoryInit, OpcodeMiscMemoryCopy, OpcodeMiscMemoryFill, OpcodeMiscDataDrop: - if memory == nil { + if miscOpcode != OpcodeMiscDataDrop && memory == nil { return fmt.Errorf("memory must exist for %s", MiscInstructionName(miscOpcode)) } if miscOpcode != OpcodeMiscDataDrop { @@ -805,10 +805,10 @@ func (m *Module) validateFunctionWithMaxStackValues( } } case OpcodeMiscTableInit, OpcodeMiscElemDrop, OpcodeMiscTableCopy: - if table == nil { - return fmt.Errorf("table must exist for %s", MiscInstructionName(miscOpcode)) - } if miscOpcode != OpcodeMiscElemDrop { + if table == nil { + return fmt.Errorf("table must exist for %s", MiscInstructionName(miscOpcode)) + } params = []ValueType{ValueTypeI32, ValueTypeI32, ValueTypeI32} } if miscOpcode == OpcodeMiscTableInit || miscOpcode == OpcodeMiscElemDrop { diff --git a/internal/wasm/func_validation_test.go b/internal/wasm/func_validation_test.go index 407d1947294..af52dbfd554 100644 --- a/internal/wasm/func_validation_test.go +++ b/internal/wasm/func_validation_test.go @@ -370,12 +370,6 @@ func TestModule_ValidateFunction_BulkMemoryOperations(t *testing.T) { expectedErr: "cannot pop the operand for memory.init: i32 missing", }, // data.drop - { - body: []byte{OpcodeMiscPrefix, OpcodeMiscDataDrop}, - flag: FeatureBulkMemoryOperations, - memory: nil, - expectedErr: "memory must exist for data.drop", - }, { body: []byte{OpcodeMiscPrefix, OpcodeMiscDataDrop}, flag: Features20191205, @@ -545,11 +539,6 @@ func TestModule_ValidateFunction_BulkMemoryOperations(t *testing.T) { expectedErr: "cannot pop the operand for table.init: i32 missing", }, // elem.drop - { - body: []byte{OpcodeMiscPrefix, OpcodeMiscElemDrop}, - flag: FeatureBulkMemoryOperations, - expectedErr: "table must exist for elem.drop", - }, { body: []byte{OpcodeMiscPrefix, OpcodeMiscElemDrop}, flag: Features20191205, diff --git a/internal/wasm/instruction.go b/internal/wasm/instruction.go index ab69bdb00c4..7ba4bbc3d2a 100644 --- a/internal/wasm/instruction.go +++ b/internal/wasm/instruction.go @@ -229,6 +229,22 @@ const ( OpcodeF32ReinterpretI32 Opcode = 0xbe OpcodeF64ReinterpretI64 Opcode = 0xbf + // OpcodeRefNull pushes a null reference value whose type is specified by the immediate. + // This is defined in the reference types proposal, but necessary for bulk-memory proposal as well. + // + // Currently only supported in the constant expression in element segments. + OpcodeRefNull = 0xd0 + // OpcodeRefIsNull pops a reference value, and pushes 1 if it is null, 0 otherwise. + // This is defined in the reference types proposal, but necessary for bulk-memory proposal as well. + // + // Currently not supported. + OpcodeRefIsNull = 0xd1 + // OpcodeRefFunc pushes a funcref value whose index equals the immediate to this opcode. + // This is defined in the reference types proposal, but necessary for bulk-memory proposal as well. + // + // Currently only supported in the constant expression in element segments. + OpcodeRefFunc = 0xd2 + // Below are toggled with FeatureSignExtensionOps // OpcodeI32Extend8S extends a signed 8-bit integer to a 32-bit integer. 
@@ -465,6 +481,10 @@ const ( OpcodeF32ReinterpretI32Name = "f32.reinterpret_i32" OpcodeF64ReinterpretI64Name = "f64.reinterpret_i64" + OpcodeRefNullName = "ref.null" + OpcodeRefIsNullName = "ref.is_null" + OpcodeRefFuncName = "ref.func" + // Below are toggled with FeatureSignExtensionOps OpcodeI32Extend8SName = "i32.extend8_s" @@ -650,6 +670,10 @@ var instructionNames = [256]string{ OpcodeF32ReinterpretI32: OpcodeF32ReinterpretI32Name, OpcodeF64ReinterpretI64: OpcodeF64ReinterpretI64Name, + OpcodeRefNull: OpcodeRefNullName, + OpcodeRefIsNull: OpcodeRefIsNullName, + OpcodeRefFunc: OpcodeRefFuncName, + // Below are toggled with FeatureSignExtensionOps OpcodeI32Extend8S: OpcodeI32Extend8SName, OpcodeI32Extend16S: OpcodeI32Extend16SName, diff --git a/internal/wasm/interpreter/interpreter.go b/internal/wasm/interpreter/interpreter.go index 85efeb287f6..29d2ce12453 100644 --- a/internal/wasm/interpreter/interpreter.go +++ b/internal/wasm/interpreter/interpreter.go @@ -237,7 +237,7 @@ func (e *engine) NewModuleEngine(name string, module *wasm.Module, importedFunct } for elemIdx, funcidx := range tableInit { // Initialize any elements with compiled functions - table.Table[elemIdx] = me.functions[funcidx] + table.References[elemIdx] = me.functions[funcidx] } return me, nil } @@ -509,11 +509,12 @@ func (e *engine) lowerIR(ir *wazeroir.CompilationResult) (*code, error) { case *wazeroir.OperationMemoryCopy: case *wazeroir.OperationMemoryFill: case *wazeroir.OperationTableInit: - panic("TODO: table.init unimplemented") + op.us = make([]uint64, 1) + op.us[0] = uint64(o.ElemIndex) case *wazeroir.OperationElemDrop: - panic("TODO: elem.drop unimplemented") + op.us = make([]uint64, 1) + op.us[0] = uint64(o.ElemIndex) case *wazeroir.OperationTableCopy: - panic("TODO: table.copy unimplemented") default: return nil, fmt.Errorf("unreachable: a bug in wazeroir engine") } @@ -535,9 +536,18 @@ func (me *moduleEngine) Name() string { return me.name } -// GetFunctionReference implements the same method as documented on wasm.ModuleEngine. -func (me *moduleEngine) GetFunctionReference(index wasm.Index) wasm.Reference { - return me.functions[index] +// CreateFuncElementInstnace implements the same method as documented on wasm.ModuleEngine. +func (me *moduleEngine) CreateFuncElementInstnace(indexes []*wasm.Index) *wasm.ElementInstance { + refs := make([]wasm.Reference, len(indexes)) + for i, index := range indexes { + if index != nil { + refs[i] = me.functions[*index] + } + } + return &wasm.ElementInstance{ + References: refs, + Type: wasm.RefTypeFuncref, + } } // Call implements the same method as documented on wasm.ModuleEngine. 
@@ -620,6 +630,7 @@ func (ce *callEngine) callNativeFunc(ctx context.Context, callCtx *wasm.CallCont typeIDs := f.source.Module.TypeIDs functions := f.source.Module.Engine.(*moduleEngine).functions dataInstances := f.source.Module.DataInstances + elementInstances := f.source.Module.ElementInstances ce.pushFrame(frame) bodyLen := uint64(len(frame.f.body)) for frame.pc < bodyLen { @@ -668,10 +679,10 @@ func (ce *callEngine) callNativeFunc(ctx context.Context, callCtx *wasm.CallCont case wazeroir.OperationKindCallIndirect: { offset := ce.popValue() - if offset >= uint64(len(table.Table)) { + if offset >= uint64(len(table.References)) { panic(wasmruntime.ErrRuntimeInvalidTableAccess) } - targetcode, ok := table.Table[offset].(*function) + targetcode, ok := table.References[offset].(*function) if !ok { panic(wasmruntime.ErrRuntimeInvalidTableAccess) } else if targetcode.source.TypeID != typeIDs[op.us[0]] { @@ -1743,6 +1754,33 @@ func (ce *callEngine) callNativeFunc(ctx context.Context, callCtx *wasm.CallCont } } frame.pc++ + case wazeroir.OperationKindTableInit: + elementInstance := elementInstances[op.us[0]] + copySize := ce.popValue() + inElementOffset := ce.popValue() + inTableOffset := ce.popValue() + if inElementOffset+copySize > uint64(len(elementInstance.References)) || + inTableOffset+copySize > uint64(len(table.References)) { + panic(wasmruntime.ErrRuntimeInvalidTableAccess) + } else if copySize != 0 { + copy(table.References[inTableOffset:inTableOffset+copySize], elementInstance.References[inElementOffset:]) + } + frame.pc++ + case wazeroir.OperationKindElemDrop: + elementInstances[op.us[0]].References = nil + frame.pc++ + case wazeroir.OperationKindTableCopy: + table := table.References + tableLen := uint64(len(table)) + copySize := ce.popValue() + sourceOffset := ce.popValue() + destinationOffset := ce.popValue() + if sourceOffset+copySize > tableLen || destinationOffset+copySize > tableLen { + panic(wasmruntime.ErrRuntimeInvalidTableAccess) + } else if copySize != 0 { + copy(table[destinationOffset:], table[sourceOffset:sourceOffset+copySize]) + } + frame.pc++ } } ce.popFrame() diff --git a/internal/wasm/jit/arch_arm64.s b/internal/wasm/jit/arch_arm64.s index d02e566cc9c..599614ac5fb 100644 --- a/internal/wasm/jit/arch_arm64.s +++ b/internal/wasm/jit/arch_arm64.s @@ -7,8 +7,8 @@ TEXT ·jitcall(SB),NOSPLIT|NOFRAME,$0-24 MOVD ce+8(FP),R0 // In arm64, return address is stored in R30 after jumping into the code. // We save the return address value into archContext.jitReturnAddress in Engine. - // Note that the const 136 drifts after editting Engine or archContext struct. See TestArchContextOffsetInEngine. - MOVD R30,136(R0) + // Note that the const 144 drifts after editing Engine or archContext struct. See TestArchContextOffsetInEngine. + MOVD R30,144(R0) // Load the address of *wasm.ModuleInstance into arm64CallingConventionModuleInstanceAddressRegister. MOVD moduleInstanceAddress+16(FP),R29 // Load the address of native code. diff --git a/internal/wasm/jit/compiler.go b/internal/wasm/jit/compiler.go index 20948a34e45..1749d1e5b7f 100644 --- a/internal/wasm/jit/compiler.go +++ b/internal/wasm/jit/compiler.go @@ -357,4 +357,19 @@ type compiler interface { // // https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/appendix/changes.html#bulk-memory-and-table-instructions compileMemoryFill() error + // compileTableInit adds instructions to perform operations corresponding to the wasm.OpcodeTableInit instruction in + // the bulk-memory-operations proposal. 
+ // + // https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/appendix/changes.html#bulk-memory-and-table-instructions + compileTableInit(*wazeroir.OperationTableInit) error + // compileTableCopy adds instructions to perform operations corresponding to the wasm.OpcodeTableCopy instruction in + // the bulk-memory-operations proposal. + // + // https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/appendix/changes.html#bulk-memory-and-table-instructions + compileTableCopy(*wazeroir.OperationTableCopy) error + // compileElemDrop adds instructions to perform operations corresponding to the wasm.OpcodeElemDrop instruction in + // the bulk-memory-operations proposal. + // + // https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/appendix/changes.html#bulk-memory-and-table-instructions + compileElemDrop(*wazeroir.OperationElemDrop) error } diff --git a/internal/wasm/jit/engine.go b/internal/wasm/jit/engine.go index ccd3b43e42a..a2f09821230 100644 --- a/internal/wasm/jit/engine.go +++ b/internal/wasm/jit/engine.go @@ -110,11 +110,14 @@ type ( // codesElement0Address is &moduleContext.engine.codes[0] as uintptr. codesElement0Address uintptr - // typeIDsElement0Address holds the &ModuleInstance.typeIDs[0] as uintptr. + // typeIDsElement0Address holds the &ModuleInstance.TypeIDs[0] as uintptr. typeIDsElement0Address uintptr - // dataInstancesElement0Address holds the &ModuleInstance.dataIntances[0] as uintptr. + // dataInstancesElement0Address holds the &ModuleInstance.DataInstances[0] as uintptr. dataInstancesElement0Address uintptr + + // elementInstancesElemen0Address holds the &ModuleInstance.ElementInstances[0] as uintptr. + elementInstancesElemen0Address uintptr } // valueStackContext stores the data to access engine.valueStack. @@ -232,23 +235,24 @@ const ( callEngineGlobalContextCallFrameStackPointerOffset = 32 // Offsets for callEngine moduleContext. - callEngineModuleContextModuleInstanceAddressOffset = 40 - callEngineModuleContextGlobalElement0AddressOffset = 48 - callEngineModuleContextMemoryElement0AddressOffset = 56 - callEngineModuleContextMemorySliceLenOffset = 64 - callEngineModuleContextTableElement0AddressOffset = 72 - callEngineModuleContextTableSliceLenOffset = 80 - callEngineModuleContextCodesElement0AddressOffset = 88 - callEngineModuleContextTypeIDsElement0AddressOffset = 96 - callEngineModuleContextDataInstancesElement0AddressOffset = 104 + callEngineModuleContextModuleInstanceAddressOffset = 40 + callEngineModuleContextGlobalElement0AddressOffset = 48 + callEngineModuleContextMemoryElement0AddressOffset = 56 + callEngineModuleContextMemorySliceLenOffset = 64 + callEngineModuleContextTableElement0AddressOffset = 72 + callEngineModuleContextTableSliceLenOffset = 80 + callEngineModuleContextCodesElement0AddressOffset = 88 + callEngineModuleContextTypeIDsElement0AddressOffset = 96 + callEngineModuleContextDataInstancesElement0AddressOffset = 104 + callEngineModuleContextElementInstancesElement0AddressOffset = 112 // Offsets for callEngine valueStackContext. - callEngineValueStackContextStackPointerOffset = 112 - callEngineValueStackContextStackBasePointerOffset = 120 + callEngineValueStackContextStackPointerOffset = 120 + callEngineValueStackContextStackBasePointerOffset = 128 // Offsets for callEngine exitContext. - callEngineExitContextJITCallStatusCodeOffset = 128 - callEngineExitContextBuiltinFunctionCallAddressOffset = 132 + callEngineExitContextJITCallStatusCodeOffset = 136 + callEngineExitContextBuiltinFunctionCallAddressOffset = 140 // Offsets for callFrame. 
callFrameDataSize = 32 @@ -264,12 +268,13 @@ const ( functionModuleInstanceAddressOffset = 24 // Offsets for wasm.ModuleInstance. - moduleInstanceGlobalsOffset = 48 - moduleInstanceMemoryOffset = 72 - moduleInstanceTableOffset = 80 - moduleInstanceEngineOffset = 120 - moduleInstanceTypeIDsOffset = 136 - moduleInstanceDataInstancesOffset = 160 + moduleInstanceGlobalsOffset = 48 + moduleInstanceMemoryOffset = 72 + moduleInstanceTableOffset = 80 + moduleInstanceEngineOffset = 120 + moduleInstanceTypeIDsOffset = 136 + moduleInstanceDataInstancesOffset = 160 + moduleInstanceElementInstancesOffset = 184 // Offsets for wasm.TableInstance. tableInstanceTableOffset = 0 @@ -288,7 +293,14 @@ const ( // Offsets for Go's interface. // https://research.swtch.com/interfaces // https://github.com/golang/go/blob/release-branch.go1.17/src/runtime/runtime2.go#L207-L210 - interfaceDataOffset = 8 + interfaceDataOffset = 8 + interfaceDataSizeLog2 = 4 + + // Consts for DataInstance. + dataInstanceStructSize = 24 + + // Consts for ElementInstance. + elementInsanceStructSize = 32 ) // jitCallStatusCode represents the result of `jitcall`. @@ -472,7 +484,7 @@ func (e *engine) NewModuleEngine(name string, module *wasm.Module, importedFunct } for elemIdx, funcidx := range tableInit { // Initialize any elements with compiled functions - table.Table[elemIdx] = me.functions[funcidx] + table.References[elemIdx] = me.functions[funcidx] } return me, nil } @@ -501,13 +513,23 @@ func (me *moduleEngine) Name() string { return me.name } -// GetFunctionReference implements the same method as documented on wasm.ModuleEngine. -func (me *moduleEngine) GetFunctionReference(index wasm.Index) wasm.Reference { - return me.functions[index] +// CreateFuncElementInstnace implements the same method as documented on wasm.ModuleEngine. +func (me *moduleEngine) CreateFuncElementInstnace(indexes []*wasm.Index) *wasm.ElementInstance { + refs := make([]wasm.Reference, len(indexes)) + for i, index := range indexes { + if index != nil { + refs[i] = me.functions[*index] + } + } + return &wasm.ElementInstance{ + References: refs, + Type: wasm.RefTypeFuncref, + } } // Call implements the same method as documented on wasm.ModuleEngine. func (me *moduleEngine) Call(ctx context.Context, callCtx *wasm.CallContext, f *wasm.FunctionInstance, params ...uint64) (results []uint64, err error) { // Note: The input parameters are pre-validated, so a compiled function is only absent on close. Updates to // code on close aren't locked, neither is this read. 
compiled := me.functions[f.Index] @@ -961,6 +983,12 @@ func compileWasmFunction(enabledFeatures wasm.Features, ir *wazeroir.Compilation err = compiler.compileMemoryCopy() case *wazeroir.OperationMemoryFill: err = compiler.compileMemoryFill() + case *wazeroir.OperationTableInit: + err = compiler.compileTableInit(o) + case *wazeroir.OperationTableCopy: + err = compiler.compileTableCopy(o) + case *wazeroir.OperationElemDrop: + err = compiler.compileElemDrop(o) } if err != nil { return nil, fmt.Errorf("operation %s: %w", op.Kind().String(), err) diff --git a/internal/wasm/jit/engine_test.go b/internal/wasm/jit/engine_test.go index e023d4e98de..8361c09d8fb 100644 --- a/internal/wasm/jit/engine_test.go +++ b/internal/wasm/jit/engine_test.go @@ -39,6 +39,7 @@ func TestJIT_VerifyOffsetValue(t *testing.T) { require.Equal(t, int(unsafe.Offsetof(ce.codesElement0Address)), callEngineModuleContextCodesElement0AddressOffset) require.Equal(t, int(unsafe.Offsetof(ce.typeIDsElement0Address)), callEngineModuleContextTypeIDsElement0AddressOffset) require.Equal(t, int(unsafe.Offsetof(ce.dataInstancesElement0Address)), callEngineModuleContextDataInstancesElement0AddressOffset) + require.Equal(t, int(unsafe.Offsetof(ce.elementInstancesElemen0Address)), callEngineModuleContextElementInstancesElement0AddressOffset) // Offsets for callEngine.valueStackContext require.Equal(t, int(unsafe.Offsetof(ce.stackPointer)), callEngineValueStackContextStackPointerOffset) @@ -73,16 +74,17 @@ func TestJIT_VerifyOffsetValue(t *testing.T) { require.Equal(t, int(unsafe.Offsetof(moduleInstance.Engine)), moduleInstanceEngineOffset) require.Equal(t, int(unsafe.Offsetof(moduleInstance.TypeIDs)), moduleInstanceTypeIDsOffset) require.Equal(t, int(unsafe.Offsetof(moduleInstance.DataInstances)), moduleInstanceDataInstancesOffset) + require.Equal(t, int(unsafe.Offsetof(moduleInstance.ElementInstances)), moduleInstanceElementInstancesOffset) var functionInstance wasm.FunctionInstance require.Equal(t, int(unsafe.Offsetof(functionInstance.TypeID)), functionInstanceTypeIDOffset) // Offsets for wasm.Table. var tableInstance wasm.TableInstance - require.Equal(t, int(unsafe.Offsetof(tableInstance.Table)), tableInstanceTableOffset) + require.Equal(t, int(unsafe.Offsetof(tableInstance.References)), tableInstanceTableOffset) // We add "+8" to get the length of Tables[0].Table // since the slice header is laid out as {Data uintptr, Len int64, Cap int64} on memory. - require.Equal(t, int(unsafe.Offsetof(tableInstance.Table)+8), tableInstanceTableLenOffset) + require.Equal(t, int(unsafe.Offsetof(tableInstance.References)+8), tableInstanceTableLenOffset) // Offsets for wasm.Memory var memoryInstance wasm.MemoryInstance @@ -102,6 +104,13 @@ func TestJIT_VerifyOffsetValue(t *testing.T) { data unsafe.Pointer } require.Equal(t, int(unsafe.Offsetof(eface.data)), interfaceDataOffset) + require.Equal(t, int(unsafe.Sizeof(eface)), 1<= 0; i--) dst[i] = src[i]; - c.assembler.CompileRegisterToRegister(amd64.CMPQ, destinationOffset.register, sourceOffset.register) destLowerThanSourceJump := c.assembler.CompileJump(amd64.JLS) + var scale int16 + var memToReg, regToMem asm.Instruction + if isTable { + // For tables, we move 8 bytes at once. 
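	// A table element is a Go interface{} value, i.e. two 8-byte words (type word + data word),
	// which is why interfaceDataSizeLog2 is 4 (1<<4 == 16 bytes per element). The copy below
	// therefore uses MOVQ with an 8-byte scale and doubles the element counter, so size*2
	// quad-word moves transfer size*16 bytes. A rough Go-level picture (illustrative only):
	//
	//	src := (*[2]uintptr)(unsafe.Pointer(&table[sourceOffset]))
	//	dst := (*[2]uintptr)(unsafe.Pointer(&table[destinationOffset]))
	//	*dst = *src // one element == two 8-byte moves
	//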
+ memToReg = amd64.MOVQ + regToMem = memToReg + scale = 8 + } else { + memToReg = amd64.MOVBQZX + regToMem = amd64.MOVB + scale = 1 + } + // If source offet < destination offset: for (i = size-1; i >= 0; i--) dst[i] = src[i]; var endJump asm.Node { @@ -3544,17 +3623,36 @@ func (c *amd64Compiler) compileMemoryCopy() error { // destinationOffset -= size. c.assembler.CompileRegisterToRegister(amd64.SUBQ, copySize.register, destinationOffset.register) + if isTable { + // Each element has 16 bytes = 2^4 = 1 << interfaceDataSizeLog2. + c.assembler.CompileConstToRegister(amd64.SHLQ, interfaceDataSizeLog2, sourceOffset.register) + c.assembler.CompileConstToRegister(amd64.SHLQ, interfaceDataSizeLog2, destinationOffset.register) + // destinationOffset += table buffer's absolute address. + c.assembler.CompileMemoryToRegister(amd64.ADDQ, + amd64ReservedRegisterForCallEngine, callEngineModuleContextTableElement0AddressOffset, sourceOffset.register) + // sourceOffset += table buffer's absolute address. + c.assembler.CompileMemoryToRegister(amd64.ADDQ, + amd64ReservedRegisterForCallEngine, callEngineModuleContextTableElement0AddressOffset, destinationOffset.register) + // We move 8 bytes at once (via MOVQ) so we need to double the counter (as each element is 16 byte). + c.assembler.CompileConstToRegister(amd64.SHLQ, 1, copySize.register) + } else { + // destinationOffset += memory buffer's absolute address. + c.assembler.CompileRegisterToRegister(amd64.ADDQ, amd64ReservedRegisterForMemory, destinationOffset.register) + // sourceOffset += memory buffer's absolute address. + c.assembler.CompileRegisterToRegister(amd64.ADDQ, amd64ReservedRegisterForMemory, sourceOffset.register) + } + beginCopyLoop := c.assembler.CompileStandAlone(amd64.NOP) // size -= 1 c.assembler.CompileNoneToRegister(amd64.DECQ, copySize.register) - c.assembler.CompileMemoryWithIndexToRegister(amd64.MOVBQZX, - sourceOffset.register, 0, copySize.register, 1, + c.assembler.CompileMemoryWithIndexToRegister(memToReg, + sourceOffset.register, 0, copySize.register, scale, tmp) - c.assembler.CompileRegisterToMemoryWithIndex(amd64.MOVB, + c.assembler.CompileRegisterToMemoryWithIndex(regToMem, tmp, - destinationOffset.register, 0, copySize.register, 1, + destinationOffset.register, 0, copySize.register, scale, ) c.assembler.CompileRegisterToConst(amd64.CMPQ, copySize.register, 0) @@ -3566,17 +3664,36 @@ func (c *amd64Compiler) compileMemoryCopy() error { // Else (destination offet < source offset): for (i = 0; i < size; i++) dst[counter-1-i] = src[counter-1-i]; c.assembler.SetJumpTargetOnNext(destLowerThanSourceJump) { + if isTable { + // Each element has 16 bytes = 2^4 = 1 << interfaceDataSizeLog2. + c.assembler.CompileConstToRegister(amd64.SHLQ, interfaceDataSizeLog2, sourceOffset.register) + c.assembler.CompileConstToRegister(amd64.SHLQ, interfaceDataSizeLog2, destinationOffset.register) + // sourceOffset += table buffer's absolute address. + c.assembler.CompileMemoryToRegister(amd64.ADDQ, + amd64ReservedRegisterForCallEngine, callEngineModuleContextTableElement0AddressOffset, sourceOffset.register) + // destinationOffset += table buffer's absolute address. + c.assembler.CompileMemoryToRegister(amd64.ADDQ, + amd64ReservedRegisterForCallEngine, callEngineModuleContextTableElement0AddressOffset, destinationOffset.register) + // We move 8 bytes at once (via MOVQ) so we need to double the counter (as each element is 16 byte). 
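	// This branch handles sourceOffset < destinationOffset, where an overlapping destination
	// could clobber source data that has not been read yet, so the copy runs backwards; the
	// branch further below copies forwards. The overall intent, in plain Go (illustrative only):
	//
	//	if sourceOffset < destinationOffset {
	//		for i := size; i > 0; i-- { // backwards
	//			dst[destinationOffset+i-1] = src[sourceOffset+i-1]
	//		}
	//	} else {
	//		for i := uint32(0); i < size; i++ { // forwards
	//			dst[destinationOffset+i] = src[sourceOffset+i]
	//		}
	//	}
	//
	// With that in mind, the shift right below converts the element counter into a quad-word
	// counter so the MOVQ loop walks the whole 16-byte elements.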
+ c.assembler.CompileConstToRegister(amd64.SHLQ, 1, copySize.register) + } else { + // destinationOffset += memory buffer's absolute address. + c.assembler.CompileRegisterToRegister(amd64.ADDQ, amd64ReservedRegisterForMemory, destinationOffset.register) + // sourceOffset += memory buffer's absolute address. + c.assembler.CompileRegisterToRegister(amd64.ADDQ, amd64ReservedRegisterForMemory, sourceOffset.register) + } + // Negate the counter. c.assembler.CompileNoneToRegister(amd64.NEGQ, copySize.register) beginCopyLoop := c.assembler.CompileStandAlone(amd64.NOP) - c.assembler.CompileMemoryWithIndexToRegister(amd64.MOVBQZX, - sourceOffset.register, 0, copySize.register, 1, + c.assembler.CompileMemoryWithIndexToRegister(memToReg, + sourceOffset.register, 0, copySize.register, scale, tmp) - c.assembler.CompileRegisterToMemoryWithIndex(amd64.MOVB, + c.assembler.CompileRegisterToMemoryWithIndex(regToMem, tmp, - destinationOffset.register, 0, copySize.register, 1, + destinationOffset.register, 0, copySize.register, scale, ) // size += 1 @@ -3657,6 +3774,42 @@ func (c *amd64Compiler) compileMemoryFill() error { return nil } +func (c *amd64Compiler) compileTableInit(o *wazeroir.OperationTableInit) error { + return c.compileInitImpl(true, o.ElemIndex) +} + +func (c *amd64Compiler) compileTableCopy(*wazeroir.OperationTableCopy) error { + return c.compileCopyImpl(true) +} + +func (c *amd64Compiler) compileElemDrop(o *wazeroir.OperationElemDrop) error { + tmp, err := c.allocateRegister(generalPurposeRegisterTypeInt) + if err != nil { + return err + } + + c.compileLoadElemInstanceAddress(o.ElemIndex, tmp) + + // Clears the content of ElementInstances[o.ElemIndex].References (== []interface{} type). + c.assembler.CompileConstToMemory(amd64.MOVQ, 0, tmp, 0) + c.assembler.CompileConstToMemory(amd64.MOVQ, 0, tmp, 8) + c.assembler.CompileConstToMemory(amd64.MOVQ, 0, tmp, 16) + return nil +} + +func (c *amd64Compiler) compileLoadElemInstanceAddress(elemIndex uint32, dst asm.Register) { + // dst = elemIndex * elementInsanceStructSize + c.assembler.CompileConstToRegister(amd64.MOVQ, int64(elemIndex)*elementInsanceStructSize, dst) + + // dst = &moduleInstance.ElementInstances[0] + dst + // = &moduleInstance.ElementInstances[0] + elemIndex*elementInsanceStructSize + // = &moduleInstance.ElementInstances[elemIndex] + c.assembler.CompileMemoryToRegister(amd64.ADDQ, + amd64ReservedRegisterForCallEngine, callEngineModuleContextElementInstancesElement0AddressOffset, + dst, + ) +} + // compileConstI32 implements compiler.compileConstI32 for the amd64 architecture. func (c *amd64Compiler) compileConstI32(o *wazeroir.OperationConstI32) error { c.maybeCompileMoveTopConditionalToFreeGeneralPurposeRegister() @@ -4373,6 +4526,7 @@ func (c *amd64Compiler) compileModuleContextInitialization() error { // * callEngine.moduleContext.codesElement0Address // * callEngine.moduleContext.typeIDsElement0Address // * callEngine.moduleContext.dataInstancesElement0Address + // * callEngine.moduleContext.elementInstancesElement0Address // Update globalElement0Address. 
// @@ -4473,6 +4627,22 @@ func (c *amd64Compiler) compileModuleContextInitialization() error { ) } + // Update callEngine.moduleContext.elementInstancesElement0Address + if c.ir.NeedsAccessToElementInstances { + // "tmpRegister = &moduleInstance.ElementInstnaces[0]" + c.assembler.CompileMemoryToRegister( + amd64.MOVQ, + amd64CallingConventionModuleInstanceAddressRegister, moduleInstanceElementInstancesOffset, + tmpRegister, + ) + // "callEngine.moduleContext.dataInstancesElement0Address = tmpRegister". + c.assembler.CompileRegisterToMemory( + amd64.MOVQ, + tmpRegister, + amd64ReservedRegisterForCallEngine, callEngineModuleContextElementInstancesElement0AddressOffset, + ) + } + c.locationStack.markRegisterUnused(regs...) // Set the jump target towards the next instruction for the case where module instance address hasn't changed. diff --git a/internal/wasm/jit/jit_impl_arm64.go b/internal/wasm/jit/jit_impl_arm64.go index 8568debee09..5331a7450d8 100644 --- a/internal/wasm/jit/jit_impl_arm64.go +++ b/internal/wasm/jit/jit_impl_arm64.go @@ -85,11 +85,11 @@ var ( const ( // arm64CallEngineArchContextJITCallReturnAddressOffset is the offset of archContext.jitCallReturnAddress in callEngine. - arm64CallEngineArchContextJITCallReturnAddressOffset = 136 + arm64CallEngineArchContextJITCallReturnAddressOffset = 144 // arm64CallEngineArchContextMinimum32BitSignedIntOffset is the offset of archContext.minimum32BitSignedIntAddress in callEngine. - arm64CallEngineArchContextMinimum32BitSignedIntOffset = 144 + arm64CallEngineArchContextMinimum32BitSignedIntOffset = 152 // arm64CallEngineArchContextMinimum64BitSignedIntOffset is the offset of archContext.minimum64BitSignedIntAddress in callEngine. - arm64CallEngineArchContextMinimum64BitSignedIntOffset = 152 + arm64CallEngineArchContextMinimum64BitSignedIntOffset = 160 ) func isZeroRegister(r asm.Register) bool { @@ -2861,8 +2861,21 @@ func (c *arm64Compiler) compileFloatConstant(is32bit bool, value uint64) error { return nil } -// compileMemoryInit implements compiler.compileMemoryInit for the amd64 architecture. +// compileMemoryInit implements compiler.compileMemoryInit for the arm64 architecture. func (c *arm64Compiler) compileMemoryInit(o *wazeroir.OperationMemoryInit) error { + return c.compileInitImpl(false, o.DataIndex) +} + +// compileInitImpl implements compileTableInit and compileMemoryInit. +// +// TODO: the compiled code in this function should be reused and compile at once as +// the code is independent of any module. +func (c *arm64Compiler) compileInitImpl(isTable bool, index uint32) error { + outOfBoundsErrorStatus := jitCallStatusCodeMemoryOutOfBounds + if isTable { + outOfBoundsErrorStatus = jitCallStatusCodeInvalidTableAccess + } + copySize, err := c.popValueOnRegister() if err != nil { return err @@ -2902,76 +2915,115 @@ func (c *arm64Compiler) compileMemoryInit(o *wazeroir.OperationMemoryInit) error c.assembler.CompileRegisterToRegister(arm64.ADD, copySize.register, destinationOffset.register) } - dataInstanceAddr, err := c.allocateRegister(generalPurposeRegisterTypeInt) + instanceAddr, err := c.allocateRegister(generalPurposeRegisterTypeInt) if err != nil { return err } - c.compileLoadDataInstanceAddress(o.DataIndex, dataInstanceAddr) + + if isTable { + c.compileLoadElemInstanceAddress(index, instanceAddr) + } else { + c.compileLoadDataInstanceAddress(index, instanceAddr) + } // Check data instance bounds. 
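	// The two checks that follow implement the trap conditions of memory.init / table.init:
	// the instruction traps when sourceOffset+size exceeds the passive data/element instance
	// length, or when destinationOffset+size exceeds the memory/table length (the offsets have
	// already had size added above). In Go terms, roughly (illustrative; trap() is shorthand
	// for exiting with outOfBoundsErrorStatus):
	//
	//	if sourceOffset+size > uint64(len(instance)) || destinationOffset+size > destLen {
	//		trap()
	//	}
	//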
c.assembler.CompileMemoryToRegister(arm64.MOVD, - dataInstanceAddr, 8, // DataInstance is []byte therefore the length is stored at offset 8. + instanceAddr, 8, // DataInstance and Element instance holds the length is stored at offset 8. arm64ReservedRegisterForTemporary) - // Check memory len >= destinationOffset. c.assembler.CompileTwoRegistersToNone(arm64.CMP, arm64ReservedRegisterForTemporary, sourceOffset.register) sourceBoundsOK := c.assembler.CompileJump(arm64.BLS) // If not, raise out of bounds memory access error. - c.compileExitFromNativeCode(jitCallStatusCodeMemoryOutOfBounds) + c.compileExitFromNativeCode(outOfBoundsErrorStatus) c.assembler.SetJumpTargetOnNext(sourceBoundsOK) // Check destination bounds. - c.assembler.CompileMemoryToRegister(arm64.MOVD, - arm64ReservedRegisterForCallEngine, callEngineModuleContextMemorySliceLenOffset, - arm64ReservedRegisterForTemporary) + if isTable { + c.assembler.CompileMemoryToRegister(arm64.MOVD, + arm64ReservedRegisterForCallEngine, callEngineModuleContextTableSliceLenOffset, + arm64ReservedRegisterForTemporary) + } else { + c.assembler.CompileMemoryToRegister(arm64.MOVD, + arm64ReservedRegisterForCallEngine, callEngineModuleContextMemorySliceLenOffset, + arm64ReservedRegisterForTemporary) + } - // Check memory len >= destinationOffset. c.assembler.CompileTwoRegistersToNone(arm64.CMP, arm64ReservedRegisterForTemporary, destinationOffset.register) destinationBoundsOK := c.assembler.CompileJump(arm64.BLS) // If not, raise out of bounds memory access error. - c.compileExitFromNativeCode(jitCallStatusCodeMemoryOutOfBounds) + c.compileExitFromNativeCode(outOfBoundsErrorStatus) // Otherwise, ready to copy the value from source to destination. c.assembler.SetJumpTargetOnNext(destinationBoundsOK) - // If the size equals zero, we can skip the entire instructions beflow. - c.assembler.CompileTwoRegistersToNone(arm64.CMP, arm64.REGZERO, copySize.register) - skipCopyJump := c.assembler.CompileJump(arm64.BEQ) + if !isZeroRegister(copySize.register) { + // If the size equals zero, we can skip the entire instructions beflow. + c.assembler.CompileTwoRegistersToNone(arm64.CMP, arm64.REGZERO, copySize.register) + skipCopyJump := c.assembler.CompileJump(arm64.BEQ) + + var movInst asm.Instruction + var movSize int64 + if isTable { + movInst = arm64.MOVD + movSize = 8 + + // arm64ReservedRegisterForTemporary = &Table[0] + c.assembler.CompileMemoryToRegister(arm64.MOVD, arm64ReservedRegisterForCallEngine, + callEngineModuleContextTableElement0AddressOffset, arm64ReservedRegisterForTemporary) + // destinationOffset = (destinationOffset<< interfaceDataySizeLog2) + arm64ReservedRegisterForTemporary + c.assembler.CompileLeftShiftedRegisterToRegister(arm64.ADD, + destinationOffset.register, interfaceDataSizeLog2, + arm64ReservedRegisterForTemporary, destinationOffset.register) + + // arm64ReservedRegisterForTemporary = &ElementInstance.References[0] + c.assembler.CompileMemoryToRegister(arm64.MOVD, instanceAddr, 0, arm64ReservedRegisterForTemporary) + // sourceOffset = (sourceOffset<< interfaceDataSizeLog2) + arm64ReservedRegisterForTemporary + c.assembler.CompileLeftShiftedRegisterToRegister(arm64.ADD, + sourceOffset.register, interfaceDataSizeLog2, + arm64ReservedRegisterForTemporary, sourceOffset.register) + + // copySize = copySize << interfaceDataSizeLog2 + c.assembler.CompileConstToRegister(arm64.LSL, interfaceDataSizeLog2, copySize.register) + } else { + movInst = arm64.MOVBU + movSize = 1 - // destinationOffset += memory buffer's absolute address. 
- c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, destinationOffset.register) + // destinationOffset += memory buffer's absolute address. + c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, destinationOffset.register) - // sourceOffset += data buffer's absolute address. - c.assembler.CompileMemoryToRegister(arm64.MOVD, dataInstanceAddr, 0, arm64ReservedRegisterForTemporary) - c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForTemporary, sourceOffset.register) + // sourceOffset += data buffer's absolute address. + c.assembler.CompileMemoryToRegister(arm64.MOVD, instanceAddr, 0, arm64ReservedRegisterForTemporary) + c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForTemporary, sourceOffset.register) - // Negate the counter. - c.assembler.CompileRegisterToRegister(arm64.NEG, copySize.register, copySize.register) + } - beginCopyLoop := c.assembler.CompileStandAlone(arm64.NOP) + // Negate the counter. + c.assembler.CompileRegisterToRegister(arm64.NEG, copySize.register, copySize.register) - // arm64ReservedRegisterForTemporary = [sourceOffset + (size.register)] - c.assembler.CompileMemoryWithRegisterOffsetToRegister(arm64.MOVBU, - sourceOffset.register, copySize.register, - arm64ReservedRegisterForTemporary) - // [destinationOffset + (size.register)] = arm64ReservedRegisterForTemporary. - c.assembler.CompileRegisterToMemoryWithRegisterOffset(arm64.MOVBU, - arm64ReservedRegisterForTemporary, - destinationOffset.register, copySize.register, - ) + beginCopyLoop := c.assembler.CompileStandAlone(arm64.NOP) - // Decrement the size coutner and if the value is still negative, continue the loop. - c.assembler.CompileConstToRegister(arm64.ADDS, 1, copySize.register) - c.assembler.CompileJump(arm64.BMI).AssignJumpTarget(beginCopyLoop) + // arm64ReservedRegisterForTemporary = [sourceOffset + (size.register)] + c.assembler.CompileMemoryWithRegisterOffsetToRegister(movInst, + sourceOffset.register, copySize.register, + arm64ReservedRegisterForTemporary) + // [destinationOffset + (size.register)] = arm64ReservedRegisterForTemporary. + c.assembler.CompileRegisterToMemoryWithRegisterOffset(movInst, + arm64ReservedRegisterForTemporary, + destinationOffset.register, copySize.register, + ) - c.markRegisterUnused(copySize.register, sourceOffset.register, - destinationOffset.register, dataInstanceAddr) + // Decrement the size coutner and if the value is still negative, continue the loop. 
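	// Loop idiom: copySize was negated above and both offsets already point one past the end of
	// the region, so each load/store addresses [end + negativeCounter]; ADDS then steps the
	// counter toward zero and BMI repeats while it is still negative. Roughly, in Go
	// (illustrative only, with total being the size in bytes):
	//
	//	for i := -int64(total); i != 0; i += movSize {
	//		dst[int64(destEnd)+i] = src[int64(srcEnd)+i]
	//	}
	//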
+ c.assembler.CompileConstToRegister(arm64.ADDS, movSize, copySize.register) + c.assembler.CompileJump(arm64.BMI).AssignJumpTarget(beginCopyLoop) - c.assembler.SetJumpTargetOnNext(skipCopyJump) + c.assembler.SetJumpTargetOnNext(skipCopyJump) + } + c.markRegisterUnused(copySize.register, sourceOffset.register, + destinationOffset.register, instanceAddr) return nil } @@ -2992,8 +3044,8 @@ func (c *arm64Compiler) compileDataDrop(o *wazeroir.OperationDataDrop) error { } func (c *arm64Compiler) compileLoadDataInstanceAddress(dataIndex uint32, dst asm.Register) { - // dst = dataIndex * 24 - c.assembler.CompileConstToRegister(arm64.MOVD, int64(dataIndex)*24, dst) + // dst = dataIndex * dataInstanceStructSize + c.assembler.CompileConstToRegister(arm64.MOVD, int64(dataIndex)*dataInstanceStructSize, dst) // arm64ReservedRegisterForTemporary = &moduleInstance.DataInstances[0] c.assembler.CompileMemoryToRegister(arm64.MOVD, @@ -3002,16 +3054,26 @@ func (c *arm64Compiler) compileLoadDataInstanceAddress(dataIndex uint32, dst asm ) // dst = arm64ReservedRegisterForTemporary + dst - // = &moduleInstance.DataInstances[0] + dataIndex*24 + // = &moduleInstance.DataInstances[0] + dataIndex*dataInstanceStructSize // = &moduleInstance.DataInstances[dataIndex] c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForTemporary, dst) } // compileMemoryCopy implements compiler.compileMemoryCopy for the arm64 architecture. +func (c *arm64Compiler) compileMemoryCopy() error { + return c.compileCopyImpl(false) +} + +// compileCopyImpl implements compileTableCopy and compileMemoryCopy. // // TODO: the compiled code in this function should be reused and compile at once as // the code is independent of any module. -func (c *arm64Compiler) compileMemoryCopy() error { +func (c *arm64Compiler) compileCopyImpl(isTable bool) error { + outOfBoundsErrorStatus := jitCallStatusCodeMemoryOutOfBounds + if isTable { + outOfBoundsErrorStatus = jitCallStatusCodeInvalidTableAccess + } + copySize, err := c.popValueOnRegister() if err != nil { return err @@ -3051,17 +3113,24 @@ func (c *arm64Compiler) compileMemoryCopy() error { c.assembler.CompileRegisterToRegister(arm64.ADD, copySize.register, destinationOffset.register) } - // arm64ReservedRegisterForTemporary = len(memoryInst.Buffer). - c.assembler.CompileMemoryToRegister(arm64.MOVD, - arm64ReservedRegisterForCallEngine, callEngineModuleContextMemorySliceLenOffset, - arm64ReservedRegisterForTemporary) + if isTable { + // arm64ReservedRegisterForTemporary = len(table.Table). + c.assembler.CompileMemoryToRegister(arm64.MOVD, + arm64ReservedRegisterForCallEngine, callEngineModuleContextTableSliceLenOffset, + arm64ReservedRegisterForTemporary) + } else { + // arm64ReservedRegisterForTemporary = len(memoryInst.Buffer). + c.assembler.CompileMemoryToRegister(arm64.MOVD, + arm64ReservedRegisterForCallEngine, callEngineModuleContextMemorySliceLenOffset, + arm64ReservedRegisterForTemporary) + } // Check memory len >= sourceOffset. c.assembler.CompileTwoRegistersToNone(arm64.CMP, arm64ReservedRegisterForTemporary, sourceOffset.register) sourceBoundsOK := c.assembler.CompileJump(arm64.BLS) // If not, raise out of bounds memory access error. - c.compileExitFromNativeCode(jitCallStatusCodeMemoryOutOfBounds) + c.compileExitFromNativeCode(outOfBoundsErrorStatus) // Otherwise, check memory len >= destinationOffset. 
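	// Note: when isTable is true the length loaded above came from
	// callEngineModuleContextTableSliceLenOffset (the table length in elements), so the same two
	// comparisons cover the trap conditions of both memory.copy and table.copy; only the exit
	// status differs (jitCallStatusCodeInvalidTableAccess vs jitCallStatusCodeMemoryOutOfBounds).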
c.assembler.SetJumpTargetOnNext(sourceBoundsOK) @@ -3070,79 +3139,132 @@ func (c *arm64Compiler) compileMemoryCopy() error { destinationBoundsOK := c.assembler.CompileJump(arm64.BLS) // If not, raise out of bounds memory access error. - c.compileExitFromNativeCode(jitCallStatusCodeMemoryOutOfBounds) + c.compileExitFromNativeCode(outOfBoundsErrorStatus) // Otherwise, ready to copy the value from source to destination. c.assembler.SetJumpTargetOnNext(destinationBoundsOK) - // If the size equals zero, we can skip the entire instructions beflow. - c.assembler.CompileTwoRegistersToNone(arm64.CMP, arm64.REGZERO, copySize.register) - skipCopyJump := c.assembler.CompileJump(arm64.BEQ) + var movInst asm.Instruction + var movSize int64 + if isTable { + movInst = arm64.MOVD + movSize = 8 + } else { + movInst = arm64.MOVBU + movSize = 1 - // sourceOffset += memory buffer's absolute address. - c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, sourceOffset.register) - // destinationOffset += memory buffer's absolute address. - c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, destinationOffset.register) + } - // If source offet < destination offset: for (i = size-1; i >= 0; i--) dst[i] = src[i]; - c.assembler.CompileTwoRegistersToNone(arm64.CMP, sourceOffset.register, destinationOffset.register) - destLowerThanSourceJump := c.assembler.CompileJump(arm64.BLS) - var endJump asm.Node - { - // sourceOffset -= size. - c.assembler.CompileRegisterToRegister(arm64.SUB, copySize.register, sourceOffset.register) - // destinationOffset -= size. - c.assembler.CompileRegisterToRegister(arm64.SUB, copySize.register, destinationOffset.register) + // If the size equals zero, we can skip the entire instructions beflow. + if !isZeroRegister(copySize.register) { + c.assembler.CompileTwoRegistersToNone(arm64.CMP, arm64.REGZERO, copySize.register) + skipCopyJump := c.assembler.CompileJump(arm64.BEQ) + + // If source offet < destination offset: for (i = size-1; i >= 0; i--) dst[i] = src[i]; + c.assembler.CompileTwoRegistersToNone(arm64.CMP, sourceOffset.register, destinationOffset.register) + destLowerThanSourceJump := c.assembler.CompileJump(arm64.BLS) + var endJump asm.Node + { + // sourceOffset -= size. + c.assembler.CompileRegisterToRegister(arm64.SUB, copySize.register, sourceOffset.register) + // destinationOffset -= size. + c.assembler.CompileRegisterToRegister(arm64.SUB, copySize.register, destinationOffset.register) + + if isTable { + // arm64ReservedRegisterForTemporary = &Table[0] + c.assembler.CompileMemoryToRegister(arm64.MOVD, arm64ReservedRegisterForCallEngine, + callEngineModuleContextTableElement0AddressOffset, arm64ReservedRegisterForTemporary) + // destinationOffset = (destinationOffset<< interfaceDataySizeLog2) + &Table[0] + c.assembler.CompileLeftShiftedRegisterToRegister(arm64.ADD, + destinationOffset.register, interfaceDataSizeLog2, + arm64ReservedRegisterForTemporary, destinationOffset.register) + // sourceOffset = (sourceOffset<< interfaceDataySizeLog2) + &Table[0] + c.assembler.CompileLeftShiftedRegisterToRegister(arm64.ADD, + sourceOffset.register, interfaceDataSizeLog2, + arm64ReservedRegisterForTemporary, sourceOffset.register) + + // copySize = copySize << interfaceDataSizeLog2 + c.assembler.CompileConstToRegister(arm64.LSL, interfaceDataSizeLog2, copySize.register) + } else { + // sourceOffset += memory buffer's absolute address. 
+ c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, sourceOffset.register) + // destinationOffset += memory buffer's absolute address. + c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, destinationOffset.register) + } - beginCopyLoop := c.assembler.CompileStandAlone(arm64.NOP) + beginCopyLoop := c.assembler.CompileStandAlone(arm64.NOP) - // size -= 1 - c.assembler.CompileConstToRegister(arm64.SUBS, 1, copySize.register) + // size -= 1 + c.assembler.CompileConstToRegister(arm64.SUBS, movSize, copySize.register) - // arm64ReservedRegisterForTemporary = [sourceOffset + (size.register)] - c.assembler.CompileMemoryWithRegisterOffsetToRegister(arm64.MOVBU, - sourceOffset.register, copySize.register, - arm64ReservedRegisterForTemporary) - // [destinationOffset + (size.register)] = arm64ReservedRegisterForTemporary. - c.assembler.CompileRegisterToMemoryWithRegisterOffset(arm64.MOVBU, - arm64ReservedRegisterForTemporary, - destinationOffset.register, copySize.register, - ) + // arm64ReservedRegisterForTemporary = [sourceOffset + (size.register)] + c.assembler.CompileMemoryWithRegisterOffsetToRegister(movInst, + sourceOffset.register, copySize.register, + arm64ReservedRegisterForTemporary) + // [destinationOffset + (size.register)] = arm64ReservedRegisterForTemporary. + c.assembler.CompileRegisterToMemoryWithRegisterOffset(movInst, + arm64ReservedRegisterForTemporary, + destinationOffset.register, copySize.register, + ) - // If the value on the copySize.register is not equal zero, continue the loop. - c.assembler.CompileJump(arm64.BNE).AssignJumpTarget(beginCopyLoop) + // If the value on the copySize.register is not equal zero, continue the loop. + c.assembler.CompileJump(arm64.BNE).AssignJumpTarget(beginCopyLoop) - // Otherwise, exit the loop. - endJump = c.assembler.CompileJump(arm64.B) - } + // Otherwise, exit the loop. + endJump = c.assembler.CompileJump(arm64.B) + } - // Else (destination offet < source offset): for (i = 0; i < size; i++) dst[counter-1-i] = src[counter-1-i]; - c.assembler.SetJumpTargetOnNext(destLowerThanSourceJump) - { - // Negate the counter. - c.assembler.CompileRegisterToRegister(arm64.NEG, copySize.register, copySize.register) + // Else (destination offet < source offset): for (i = 0; i < size; i++) dst[counter-1-i] = src[counter-1-i]; + c.assembler.SetJumpTargetOnNext(destLowerThanSourceJump) + { + + if isTable { + // arm64ReservedRegisterForTemporary = &Table[0] + c.assembler.CompileMemoryToRegister(arm64.MOVD, arm64ReservedRegisterForCallEngine, + callEngineModuleContextTableElement0AddressOffset, arm64ReservedRegisterForTemporary) + // destinationOffset = (destinationOffset<< interfaceDataySizeLog2) + &Table[0] + c.assembler.CompileLeftShiftedRegisterToRegister(arm64.ADD, + destinationOffset.register, interfaceDataSizeLog2, + arm64ReservedRegisterForTemporary, destinationOffset.register) + // sourceOffset = (sourceOffset<< interfaceDataySizeLog2) + &Table[0] + c.assembler.CompileLeftShiftedRegisterToRegister(arm64.ADD, + sourceOffset.register, interfaceDataSizeLog2, + arm64ReservedRegisterForTemporary, sourceOffset.register) + + // copySize = copySize << interfaceDataSizeLog2 + c.assembler.CompileConstToRegister(arm64.LSL, interfaceDataSizeLog2, copySize.register) + } else { + // sourceOffset += memory buffer's absolute address. + c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, sourceOffset.register) + // destinationOffset += memory buffer's absolute address. 
+ c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForMemory, destinationOffset.register) + } - beginCopyLoop := c.assembler.CompileStandAlone(arm64.NOP) + // Negate the counter. + c.assembler.CompileRegisterToRegister(arm64.NEG, copySize.register, copySize.register) - // arm64ReservedRegisterForTemporary = [sourceOffset + (size.register)] - c.assembler.CompileMemoryWithRegisterOffsetToRegister(arm64.MOVBU, - sourceOffset.register, copySize.register, - arm64ReservedRegisterForTemporary) - // [destinationOffset + (size.register)] = arm64ReservedRegisterForTemporary. - c.assembler.CompileRegisterToMemoryWithRegisterOffset(arm64.MOVBU, - arm64ReservedRegisterForTemporary, - destinationOffset.register, copySize.register, - ) + beginCopyLoop := c.assembler.CompileStandAlone(arm64.NOP) - // size += 1 - c.assembler.CompileConstToRegister(arm64.ADDS, 1, copySize.register) - c.assembler.CompileJump(arm64.BMI).AssignJumpTarget(beginCopyLoop) + // arm64ReservedRegisterForTemporary = [sourceOffset + (size.register)] + c.assembler.CompileMemoryWithRegisterOffsetToRegister(movInst, + sourceOffset.register, copySize.register, + arm64ReservedRegisterForTemporary) + // [destinationOffset + (size.register)] = arm64ReservedRegisterForTemporary. + c.assembler.CompileRegisterToMemoryWithRegisterOffset(movInst, + arm64ReservedRegisterForTemporary, + destinationOffset.register, copySize.register, + ) + + // size += 1 + c.assembler.CompileConstToRegister(arm64.ADDS, movSize, copySize.register) + c.assembler.CompileJump(arm64.BMI).AssignJumpTarget(beginCopyLoop) + } + c.assembler.SetJumpTargetOnNext(skipCopyJump, endJump) } // Mark all of the operand registers. c.markRegisterUnused(copySize.register, sourceOffset.register, destinationOffset.register) - c.assembler.SetJumpTargetOnNext(skipCopyJump, endJump) return nil } @@ -3227,6 +3349,45 @@ func (c *arm64Compiler) compileMemoryFill() error { return nil } +func (c *arm64Compiler) compileTableInit(o *wazeroir.OperationTableInit) error { + return c.compileInitImpl(true, o.ElemIndex) +} + +func (c *arm64Compiler) compileTableCopy(*wazeroir.OperationTableCopy) error { + return c.compileCopyImpl(true) +} + +func (c *arm64Compiler) compileElemDrop(o *wazeroir.OperationElemDrop) error { + tmp, err := c.allocateRegister(generalPurposeRegisterTypeInt) + if err != nil { + return err + } + + c.compileLoadElemInstanceAddress(o.ElemIndex, tmp) + + // Clears the content of ElementInstances[o.ElemIndex] (== []interface{} type). 
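	// The three 8-byte stores below zero the slice header (Data, Len, Cap) of
	// ElementInstances[o.ElemIndex].References, which is the engine-level equivalent of the Go
	// statement (illustrative):
	//
	//	moduleInstance.ElementInstances[elemIndex].References = nil
	//
	// so a later table.init on the dropped segment sees a zero-length instance and traps for any
	// non-zero size.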
+	c.assembler.CompileRegisterToMemory(arm64.MOVD, arm64.REGZERO, tmp, 0)
+	c.assembler.CompileRegisterToMemory(arm64.MOVD, arm64.REGZERO, tmp, 8)
+	c.assembler.CompileRegisterToMemory(arm64.MOVD, arm64.REGZERO, tmp, 16)
+	return nil
+}
+
+func (c *arm64Compiler) compileLoadElemInstanceAddress(elemIndex uint32, dst asm.Register) {
+	// dst = elemIndex * elementInsanceStructSize
+	c.assembler.CompileConstToRegister(arm64.MOVD, int64(elemIndex)*elementInsanceStructSize, dst)
+
+	// arm64ReservedRegisterForTemporary = &moduleInstance.ElementInstances[0]
+	c.assembler.CompileMemoryToRegister(arm64.MOVD,
+		arm64ReservedRegisterForCallEngine, callEngineModuleContextElementInstancesElement0AddressOffset,
+		arm64ReservedRegisterForTemporary,
+	)
+
+	// dst = arm64ReservedRegisterForTemporary + dst
+	//     = &moduleInstance.ElementInstances[0] + elemIndex*elementInsanceStructSize
+	//     = &moduleInstance.ElementInstances[elemIndex]
+	c.assembler.CompileRegisterToRegister(arm64.ADD, arm64ReservedRegisterForTemporary, dst)
+}
+
 func (c *arm64Compiler) pushZeroValue() {
 	c.pushValueLocationOnRegister(arm64.REGZERO)
 }
@@ -3460,6 +3621,7 @@ func (c *arm64Compiler) compileModuleContextInitialization() error {
 	// * callEngine.moduleContext.codesElement0Address
 	// * callEngine.moduleContext.typeIDsElement0Address
 	// * callEngine.moduleContext.dataInstancesElement0Address
+	// * callEngine.moduleContext.elementInstancesElement0Address
 	// Update globalElement0Address.
 	//
@@ -3619,6 +3781,22 @@ func (c *arm64Compiler) compileModuleContextInitialization() error {
 	)
 	}
+	// Update callEngine.moduleContext.elementInstancesElement0Address
+	if c.ir.NeedsAccessToElementInstances {
+		// "tmpX = &moduleInstance.ElementInstances[0]"
+		c.assembler.CompileMemoryToRegister(
+			arm64.MOVD,
+			arm64CallingConventionModuleInstanceAddressRegister, moduleInstanceElementInstancesOffset,
+			tmpX,
+		)
+		// "callEngine.moduleContext.elementInstancesElement0Address = tmpX".
+		c.assembler.CompileRegisterToMemory(
+			arm64.MOVD,
+			tmpX,
+			arm64ReservedRegisterForCallEngine, callEngineModuleContextElementInstancesElement0AddressOffset,
+		)
+	}
+
 	c.assembler.SetJumpTargetOnNext(brIfModuleUnchanged)
 	c.markRegisterUnused(regs...)
return nil diff --git a/internal/wasm/jit/jit_initialization_test.go b/internal/wasm/jit/jit_initialization_test.go index 617c15a6993..c039480f2ae 100644 --- a/internal/wasm/jit/jit_initialization_test.go +++ b/internal/wasm/jit/jit_initialization_test.go @@ -19,52 +19,73 @@ func TestCompiler_compileModuleContextInitialization(t *testing.T) { { name: "no nil", moduleInstance: &wasm.ModuleInstance{ - Globals: []*wasm.GlobalInstance{{Val: 100}}, - Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, - Table: &wasm.TableInstance{Table: make([]interface{}, 20)}, - TypeIDs: make([]wasm.FunctionTypeID, 10), - DataInstances: make([][]byte, 10), + Globals: []*wasm.GlobalInstance{{Val: 100}}, + Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, + Table: &wasm.TableInstance{References: make([]interface{}, 20)}, + TypeIDs: make([]wasm.FunctionTypeID, 10), + DataInstances: make([][]byte, 10), + ElementInstances: make([]wasm.ElementInstance, 10), }, }, { - name: "data instances", + name: "element instances nil", moduleInstance: &wasm.ModuleInstance{ - Globals: []*wasm.GlobalInstance{{Val: 100}}, - Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, - Table: &wasm.TableInstance{Table: make([]interface{}, 20)}, - TypeIDs: make([]wasm.FunctionTypeID, 10), - DataInstances: nil, + Globals: []*wasm.GlobalInstance{{Val: 100}}, + Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, + Table: &wasm.TableInstance{References: make([]interface{}, 20)}, + TypeIDs: make([]wasm.FunctionTypeID, 10), + DataInstances: make([][]byte, 10), + ElementInstances: nil, + }, + }, + { + name: "data instances nil", + moduleInstance: &wasm.ModuleInstance{ + Globals: []*wasm.GlobalInstance{{Val: 100}}, + Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, + Table: &wasm.TableInstance{References: make([]interface{}, 20)}, + TypeIDs: make([]wasm.FunctionTypeID, 10), + DataInstances: nil, + ElementInstances: make([]wasm.ElementInstance, 10), }, }, { name: "globals nil", moduleInstance: &wasm.ModuleInstance{ - Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, - Table: &wasm.TableInstance{Table: make([]interface{}, 20)}, - TypeIDs: make([]wasm.FunctionTypeID, 10), + Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, + Table: &wasm.TableInstance{References: make([]interface{}, 20)}, + TypeIDs: make([]wasm.FunctionTypeID, 10), + DataInstances: make([][]byte, 10), + ElementInstances: make([]wasm.ElementInstance, 10), }, }, { name: "memory nil", moduleInstance: &wasm.ModuleInstance{ - Globals: []*wasm.GlobalInstance{{Val: 100}}, - Table: &wasm.TableInstance{Table: make([]interface{}, 20)}, - TypeIDs: make([]wasm.FunctionTypeID, 10), + Globals: []*wasm.GlobalInstance{{Val: 100}}, + Table: &wasm.TableInstance{References: make([]interface{}, 20)}, + TypeIDs: make([]wasm.FunctionTypeID, 10), + DataInstances: make([][]byte, 10), + ElementInstances: make([]wasm.ElementInstance, 10), }, }, { name: "table nil", moduleInstance: &wasm.ModuleInstance{ - Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, - Table: &wasm.TableInstance{Table: nil}, - TypeIDs: make([]wasm.FunctionTypeID, 10), + Memory: &wasm.MemoryInstance{Buffer: make([]byte, 10)}, + Table: &wasm.TableInstance{References: nil}, + TypeIDs: make([]wasm.FunctionTypeID, 10), + DataInstances: make([][]byte, 10), + ElementInstances: make([]wasm.ElementInstance, 10), }, }, { name: "table empty", moduleInstance: &wasm.ModuleInstance{ - Table: &wasm.TableInstance{Table: make([]interface{}, 0)}, - TypeIDs: make([]wasm.FunctionTypeID, 10), + Table: 
&wasm.TableInstance{References: make([]interface{}, 0)}, + TypeIDs: make([]wasm.FunctionTypeID, 10), + DataInstances: make([][]byte, 10), + ElementInstances: make([]wasm.ElementInstance, 10), }, }, { @@ -85,9 +106,10 @@ func TestCompiler_compileModuleContextInitialization(t *testing.T) { ce := env.callEngine() ir := &wazeroir.CompilationResult{ - HasMemory: tc.moduleInstance.Memory != nil, - HasTable: tc.moduleInstance.Table != nil, - NeedsAccessToDataInstances: len(tc.moduleInstance.DataInstances) > 0, + HasMemory: tc.moduleInstance.Memory != nil, + HasTable: tc.moduleInstance.Table != nil, + NeedsAccessToDataInstances: len(tc.moduleInstance.DataInstances) > 0, + NeedsAccessToElementInstances: len(tc.moduleInstance.ElementInstances) > 0, } for _, g := range tc.moduleInstance.Globals { ir.Globals = append(ir.Globals, g.Type) @@ -126,7 +148,7 @@ func TestCompiler_compileModuleContextInitialization(t *testing.T) { } if tc.moduleInstance.Table != nil { - tableHeader := (*reflect.SliceHeader)(unsafe.Pointer(&tc.moduleInstance.Table.Table)) + tableHeader := (*reflect.SliceHeader)(unsafe.Pointer(&tc.moduleInstance.Table.References)) require.Equal(t, uint64(tableHeader.Len), ce.moduleContext.tableSliceLen) require.Equal(t, tableHeader.Data, ce.moduleContext.tableElement0Address) require.Equal(t, uintptr(unsafe.Pointer(&tc.moduleInstance.TypeIDs[0])), ce.moduleContext.typeIDsElement0Address) @@ -138,6 +160,12 @@ func TestCompiler_compileModuleContextInitialization(t *testing.T) { require.Equal(t, uintptr(unsafe.Pointer(&tc.moduleInstance.DataInstances[0])), ce.moduleContext.dataInstancesElement0Address) } + if len(tc.moduleInstance.ElementInstances) > 0 { + elementInstancesHeader := (*reflect.SliceHeader)(unsafe.Pointer(&tc.moduleInstance.ElementInstances)) + require.Equal(t, elementInstancesHeader.Data, ce.moduleContext.elementInstancesElemen0Address) + require.Equal(t, uintptr(unsafe.Pointer(&tc.moduleInstance.ElementInstances[0])), ce.moduleContext.elementInstancesElemen0Address) + } + require.Equal(t, uintptr(unsafe.Pointer(&me.functions[0])), ce.moduleContext.codesElement0Address) }) } diff --git a/internal/wasm/jit/jit_post1_0_test.go b/internal/wasm/jit/jit_post1_0_test.go index 680c52463bc..7dd251c5631 100644 --- a/internal/wasm/jit/jit_post1_0_test.go +++ b/internal/wasm/jit/jit_post1_0_test.go @@ -357,7 +357,7 @@ func TestCompiler_compileDataDrop(t *testing.T) { } func TestCompiler_compileMemoryInit(t *testing.T) { - dataInstances := [][]byte{ + dataInstances := []wasm.DataInstance{ nil, {1, 2, 3, 4, 5}, } @@ -436,3 +436,235 @@ func TestCompiler_compileMemoryInit(t *testing.T) { }) } } + +func TestCompiler_compileElemDrop(t *testing.T) { + origins := []wasm.ElementInstance{ + {References: []wasm.Reference{1}}, + {References: []wasm.Reference{2}}, + {References: []wasm.Reference{3}}, + {References: []wasm.Reference{4}}, + {References: []wasm.Reference{5}}, + } + + for i := 0; i < len(origins); i++ { + t.Run(strconv.Itoa(i), func(t *testing.T) { + env := newJITEnvironment() + + insts := make([]wasm.ElementInstance, len(origins)) + copy(insts, origins) + env.module().ElementInstances = insts + + // Verify assumption that before Drop instruction, all the element instances are not empty. 
+ for _, inst := range insts { + require.NotEqual(t, 0, len(inst.References)) + } + + compiler := env.requireNewCompiler(t, newCompiler, &wazeroir.CompilationResult{ + NeedsAccessToElementInstances: true, Signature: &wasm.FunctionType{}}) + + err := compiler.compilePreamble() + require.NoError(t, err) + + err = compiler.compileElemDrop(&wazeroir.OperationElemDrop{ + ElemIndex: uint32(i), + }) + require.NoError(t, err) + + // Generate the code under test. + err = compiler.compileReturnFunction() + require.NoError(t, err) + code, _, _, err := compiler.compile() + require.NoError(t, err) + + // Run code. + env.exec(code) + + require.Equal(t, jitCallStatusCodeReturned, env.jitStatus()) + + for j := 0; j < len(insts); j++ { + if i == j { + require.Equal(t, 0, len(env.module().ElementInstances[j].References)) + } else { + require.NotEqual(t, 0, len(env.module().ElementInstances[j].References)) + } + } + }) + } +} + +func TestCompiler_compileTableCopy(t *testing.T) { + const tableSize = 100 + for i, tc := range []struct { + sourceOffset, destOffset, size uint32 + requireOutOfBoundsError bool + }{ + {sourceOffset: 0, destOffset: 0, size: 0}, + {sourceOffset: 10, destOffset: 5, size: 10}, + {sourceOffset: 10, destOffset: 9, size: 1}, + {sourceOffset: 10, destOffset: 9, size: 2}, + {sourceOffset: 0, destOffset: 10, size: 10}, + {sourceOffset: 0, destOffset: 5, size: 10}, + {sourceOffset: 9, destOffset: 10, size: 10}, + {sourceOffset: 11, destOffset: 13, size: 4}, + {sourceOffset: 0, destOffset: 10, size: 5}, + {sourceOffset: 1, destOffset: 10, size: 5}, + {sourceOffset: 0, destOffset: 10, size: 1}, + {sourceOffset: 0, destOffset: 10, size: 0}, + {sourceOffset: 5, destOffset: 10, size: 10}, + {sourceOffset: 5, destOffset: 10, size: 5}, + {sourceOffset: 5, destOffset: 10, size: 1}, + {sourceOffset: 5, destOffset: 10, size: 0}, + {sourceOffset: 10, destOffset: 0, size: 10}, + {sourceOffset: 1, destOffset: 0, size: 2}, + {sourceOffset: 1, destOffset: 0, size: 20}, + {sourceOffset: 10, destOffset: 0, size: 5}, + {sourceOffset: 10, destOffset: 0, size: 1}, + {sourceOffset: 10, destOffset: 0, size: 0}, + {sourceOffset: tableSize, destOffset: 0, size: 1, requireOutOfBoundsError: true}, + {sourceOffset: tableSize + 1, destOffset: 0, size: 0, requireOutOfBoundsError: true}, + {sourceOffset: 0, destOffset: tableSize, size: 1, requireOutOfBoundsError: true}, + {sourceOffset: 0, destOffset: tableSize + 1, size: 0, requireOutOfBoundsError: true}, + {sourceOffset: tableSize - 99, destOffset: 0, size: 100, requireOutOfBoundsError: true}, + {sourceOffset: 0, destOffset: tableSize - 99, size: 100, requireOutOfBoundsError: true}, + } { + tc := tc + t.Run(strconv.Itoa(i), func(t *testing.T) { + env := newJITEnvironment() + compiler := env.requireNewCompiler(t, newCompiler, &wazeroir.CompilationResult{HasTable: true, Signature: &wasm.FunctionType{}}) + + err := compiler.compilePreamble() + require.NoError(t, err) + + // Compile operands. + err = compiler.compileConstI32(&wazeroir.OperationConstI32{Value: tc.destOffset}) + require.NoError(t, err) + err = compiler.compileConstI32(&wazeroir.OperationConstI32{Value: tc.sourceOffset}) + require.NoError(t, err) + err = compiler.compileConstI32(&wazeroir.OperationConstI32{Value: tc.size}) + require.NoError(t, err) + + err = compiler.compileTableCopy(&wazeroir.OperationTableCopy{}) + require.NoError(t, err) + + // Generate the code under test. 
+ err = compiler.compileReturnFunction() + require.NoError(t, err) + code, _, _, err := compiler.compile() + require.NoError(t, err) + + // Setup the table. + table := make([]wasm.Reference, tableSize) + env.setTable(table) + for i := 0; i < tableSize; i++ { + table[i] = byte(i) + } + + // Run code. + env.exec(code) + + if !tc.requireOutOfBoundsError { + exp := make([]wasm.Reference, tableSize) + for i := 0; i < tableSize; i++ { + exp[i] = byte(i) + } + copy(exp[tc.destOffset:], + exp[tc.sourceOffset:tc.sourceOffset+tc.size]) + + // Check the status code and the destination memory region. + require.Equal(t, jitCallStatusCodeReturned, env.jitStatus()) + require.Equal(t, exp, table) + } else { + require.Equal(t, jitCallStatusCodeInvalidTableAccess, env.jitStatus()) + } + }) + } +} + +func TestCompiler_compileTableInit(t *testing.T) { + elementInstances := []wasm.ElementInstance{ + {}, {References: []wasm.Reference{1, 2, 3, 4, 5}}, + } + + const tableSize = 100 + for i, tc := range []struct { + sourceOffset, destOffset uint32 + elemIndex uint32 + copySize uint32 + expOutOfBounds bool + }{ + {sourceOffset: 0, destOffset: 0, copySize: 0, elemIndex: 0}, + {sourceOffset: 0, destOffset: 0, copySize: 1, elemIndex: 0, expOutOfBounds: true}, + {sourceOffset: 1, destOffset: 0, copySize: 0, elemIndex: 0, expOutOfBounds: true}, + {sourceOffset: 0, destOffset: 0, copySize: 0, elemIndex: 1}, + {sourceOffset: 0, destOffset: 0, copySize: 5, elemIndex: 1}, + {sourceOffset: 0, destOffset: 0, copySize: 1, elemIndex: 1}, + {sourceOffset: 0, destOffset: 0, copySize: 3, elemIndex: 1}, + {sourceOffset: 0, destOffset: 1, copySize: 3, elemIndex: 1}, + {sourceOffset: 0, destOffset: 7, copySize: 4, elemIndex: 1}, + {sourceOffset: 1, destOffset: 7, copySize: 4, elemIndex: 1}, + {sourceOffset: 4, destOffset: 7, copySize: 1, elemIndex: 1}, + {sourceOffset: 5, destOffset: 7, copySize: 0, elemIndex: 1}, + {sourceOffset: 0, destOffset: 7, copySize: 5, elemIndex: 1}, + {sourceOffset: 1, destOffset: 0, copySize: 3, elemIndex: 1}, + {sourceOffset: 0, destOffset: 1, copySize: 4, elemIndex: 1}, + {sourceOffset: 1, destOffset: 1, copySize: 3, elemIndex: 1}, + {sourceOffset: 0, destOffset: 10, copySize: 5, elemIndex: 1}, + {sourceOffset: 0, destOffset: 0, copySize: 6, elemIndex: 1, expOutOfBounds: true}, + {sourceOffset: 6, destOffset: 0, copySize: 0, elemIndex: 1, expOutOfBounds: true}, + } { + tc := tc + t.Run(strconv.Itoa(i), func(t *testing.T) { + env := newJITEnvironment() + env.module().ElementInstances = elementInstances + + compiler := env.requireNewCompiler(t, newCompiler, &wazeroir.CompilationResult{ + NeedsAccessToElementInstances: true, HasTable: true, + Signature: &wasm.FunctionType{}}) + + err := compiler.compilePreamble() + require.NoError(t, err) + + // Compile operands. + err = compiler.compileConstI32(&wazeroir.OperationConstI32{Value: tc.destOffset}) + require.NoError(t, err) + err = compiler.compileConstI32(&wazeroir.OperationConstI32{Value: tc.sourceOffset}) + require.NoError(t, err) + err = compiler.compileConstI32(&wazeroir.OperationConstI32{Value: tc.copySize}) + require.NoError(t, err) + + err = compiler.compileTableInit(&wazeroir.OperationTableInit{ + ElemIndex: tc.elemIndex, + }) + require.NoError(t, err) + + // Setup the table. + table := make([]wasm.Reference, tableSize) + env.setTable(table) + for i := 0; i < tableSize; i++ { + table[i] = byte(i) + } + + // Generate the code under test. 
+ err = compiler.compileReturnFunction() + require.NoError(t, err) + code, _, _, err := compiler.compile() + require.NoError(t, err) + + // Run code. + env.exec(code) + + if !tc.expOutOfBounds { + exp := make([]wasm.Reference, tableSize) + for i := 0; i < tableSize; i++ { + exp[i] = byte(i) + } + if inst := elementInstances[tc.elemIndex]; inst.References != nil { + copy(exp[tc.destOffset:], inst.References[tc.sourceOffset:tc.sourceOffset+tc.copySize]) + } + require.Equal(t, exp, table) + } else { + require.Equal(t, jitCallStatusCodeInvalidTableAccess, env.jitStatus()) + } + }) + } +} diff --git a/internal/wasm/jit/jit_test.go b/internal/wasm/jit/jit_test.go index 4655b6a79d0..37f5ba773b3 100644 --- a/internal/wasm/jit/jit_test.go +++ b/internal/wasm/jit/jit_test.go @@ -86,7 +86,7 @@ func (j *jitEnv) getGlobal(index uint32) uint64 { } func (j *jitEnv) setTable(table []interface{}) { - j.moduleInstance.Table = &wasm.TableInstance{Table: table} + j.moduleInstance.Table = &wasm.TableInstance{References: table} } func (j *jitEnv) callFrameStackPeek() *callFrame { diff --git a/internal/wasm/module.go b/internal/wasm/module.go index 362f8707936..ffa1bce93a4 100644 --- a/internal/wasm/module.go +++ b/internal/wasm/module.go @@ -162,7 +162,7 @@ type Module struct { // Note: elementSegments retain Module.ElementSection order. Since an ElementSegment can overlap with another, order // preservation ensures a consistent initialization result. // See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#table-instances%E2%91%A0 - validatedElementSegments []*validatedElementSegment + validatedActiveElementSegments []*validatedActiveElementSegment // DataCountSection is the optional section and holds the number of data segments in the data section. // @@ -243,7 +243,7 @@ func (m *Module) Validate(enabledFeatures Features) error { return err } - if err = m.validateMemory(memory, globals); err != nil { + if err = m.validateMemory(memory, globals, enabledFeatures); err != nil { return err } @@ -253,7 +253,7 @@ func (m *Module) Validate(enabledFeatures Features) error { } } // No need to validate host functions as NewHostModule validates - if _, err = m.validateTable(); err != nil { + if _, err = m.validateTable(enabledFeatures); err != nil { return err } @@ -344,9 +344,12 @@ func (m *Module) funcDesc(sectionID SectionID, sectionIndex Index) string { return fmt.Sprintf("%s[%d] export[%s]", sectionIDName, sectionIndex, strings.Join(exportNames, ",")) } -func (m *Module) validateMemory(memory *Memory, globals []*GlobalType) error { - if len(m.DataSection) > 0 && memory == nil { - return fmt.Errorf("unknown memory") +func (m *Module) validateMemory(memory *Memory, globals []*GlobalType, enabledFeatures Features) error { + if !enabledFeatures.Get(FeatureBulkMemoryOperations) { + // As of bulk memory operations, data segments can exist without memory declarations. + if len(m.DataSection) > 0 && memory == nil { + return fmt.Errorf("unknown memory") + } } for _, d := range m.DataSection { @@ -577,8 +580,6 @@ func (m *Module) resolveFunction(moduleName string, f *FunctionInstance, funcIdx // See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#binary-index type Index = uint32 -type NullableIndex *uint32 - // FunctionType is a possibly empty function signature. 
// // See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#function-types%E2%91%A0 diff --git a/internal/wasm/module_test.go b/internal/wasm/module_test.go index 6f60ec829cc..45628fce187 100644 --- a/internal/wasm/module_test.go +++ b/internal/wasm/module_test.go @@ -496,7 +496,7 @@ func TestModule_validateFunctions(t *testing.T) { func TestModule_validateMemory(t *testing.T) { t.Run("data section exits but memory not declared", func(t *testing.T) { m := Module{DataSection: make([]*DataSegment, 1)} - err := m.validateMemory(nil, nil) + err := m.validateMemory(nil, nil, Features20191205) require.Error(t, err) require.Contains(t, "unknown memory", err.Error()) }) @@ -506,7 +506,7 @@ func TestModule_validateMemory(t *testing.T) { Opcode: OpcodeUnreachable, // Invalid! }, }}} - err := m.validateMemory(&Memory{}, nil) + err := m.validateMemory(&Memory{}, nil, Features20191205) require.EqualError(t, err, "calculate offset: invalid opcode for const expression: 0x0") }) t.Run("ok", func(t *testing.T) { @@ -517,7 +517,7 @@ func TestModule_validateMemory(t *testing.T) { Data: leb128.EncodeInt32(1), }, }}} - err := m.validateMemory(&Memory{}, nil) + err := m.validateMemory(&Memory{}, nil, Features20191205) require.NoError(t, err) }) } diff --git a/internal/wasm/store.go b/internal/wasm/store.go index 7a259ef293f..7da081a6ff7 100644 --- a/internal/wasm/store.go +++ b/internal/wasm/store.go @@ -79,13 +79,18 @@ type ( // This is only used by bulk memory operations. // // https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/exec/runtime.html#data-instances - DataInstances [][]byte + DataInstances []DataInstance // ElementInstances holds the element instance, and each holds the references to either functions // or external objects (unimplemented). ElementInstances []ElementInstance } + // DataInstance holds bytes corresponding to the data segment in a module. + // + // https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/exec/runtime.html#data-instances + DataInstance = []byte + // ExportInstance represents an exported instance in a Store. // The difference from the spec is that in wazero, a ExportInstance holds pointers // to the instances, rather than "addresses" (i.e. index to Store.Functions, Globals, etc) for convenience. @@ -186,7 +191,6 @@ func (m *ModuleInstance) addSections(module *Module, importedFunctions, function m.buildExports(module.ExportSection) m.buildDataInstances(module.DataSection) - m.buildElementInstances(module.ElementSection) } func (m *ModuleInstance) buildDataInstances(segments []*DataSegment) { @@ -198,16 +202,8 @@ func (m *ModuleInstance) buildDataInstances(segments []*DataSegment) { func (m *ModuleInstance) buildElementInstances(elements []*ElementSegment) { m.ElementInstances = make([]ElementInstance, len(elements)) for i, elm := range elements { - refs := make([]Reference, len(elm.Init)) - - if elm.Type == ElemTypeFuncref { - for i, nullableIndex := range elm.Init { - refs[i] = m.Engine.GetFunctionReference(*nullableIndex) - } - } - m.ElementInstances[i] = ElementInstance{ - References: refs, - Type: elm.Type, + if elm.Type == RefTypeFuncref { + m.ElementInstances[i] = *m.Engine.CreateFuncElementInstnace(elm.Init) } } } @@ -347,6 +343,9 @@ func (s *Store) Instantiate( return nil, fmt.Errorf("compilation failed: %w", err) } + // After engine creation, we can create the funcref element instances. + m.buildElementInstances(module.ElementSection) + // Now all the validation passes, we are safe to mutate memory instances (possibly imported ones). 
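	// (Ordering note: buildElementInstances has to run here, after engine creation, because a
	// funcref value is an engine-specific compiled-function reference that does not exist any
	// earlier; applyData below then writes the active data segments into the possibly imported
	// memory now that validation has passed.)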
m.applyData(module.DataSection) diff --git a/internal/wasm/store_test.go b/internal/wasm/store_test.go index dee9a1dc15b..3d2a521dff0 100644 --- a/internal/wasm/store_test.go +++ b/internal/wasm/store_test.go @@ -394,6 +394,11 @@ func (e *mockEngine) DeleteCompiledModule(*Module) {} // CompileModule implements the same method as documented on wasm.Engine. func (e *mockEngine) CompileModule(_ context.Context, _ *Module) error { return nil } +// CreateFuncElementInstnace implements the same method as documented on wasm.ModuleEngine. +func (me *mockModuleEngine) CreateFuncElementInstnace([]*Index) *ElementInstance { + return nil +} + // Name implements the same method as documented on wasm.ModuleEngine. func (e *mockModuleEngine) Name() string { return e.name diff --git a/internal/wasm/table.go b/internal/wasm/table.go index e321f38ca18..88f80e21077 100644 --- a/internal/wasm/table.go +++ b/internal/wasm/table.go @@ -11,22 +11,28 @@ import ( // Table describes the limits of (function) elements in a table. type Table = limitsType -// ElemType is fixed to ElemTypeFuncref until post 20191205 reference type is implemented. -type ElemType = byte +// RefType is fixed to RefTypeFuncref until post 20191205 reference type is implemented. +type RefType = byte const ( - ElemTypeFuncref ElemType = 0x70 + // RefTypeFuncref represents a reference to a function. + RefTypeFuncref RefType = 0x70 + // RefTypeExternref represents a reference to a host object, which is not currently supported in wazero. + RefTypeExternref RefType = 0x6f ) -// ElemMode represents a mode of element segment which is either active, passive or declarative. +// ElementMode represents a mode of element segment which is either active, passive or declarative. // // https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/syntax/modules.html#element-segments -type ElemMode = byte +type ElementMode = byte const ( - ElemModeActive ElemMode = iota - ElemModePassive - ElemModeDeclared + // ElementModeActive is the mode which requires the runtime to initialize table with the contents in .Init field combined with OffsetExpr. + ElementModeActive ElementMode = iota + // ElementModePassive is the mode which doesn't require the runtime to initialize table, and only used with OpcodeTableInitName. + ElementModePassive + // ElementModeDeclarative is introduced in reference-types proposal, but currently not used. + ElementModeDeclarative ) // ElementSegment are initialization instructions for a TableInstance @@ -38,21 +44,29 @@ type ElementSegment struct { OffsetExpr *ConstantExpression // Init indices are table elements relative to the result of OffsetExpr. - Init []NullableIndex + Init []*Index // Type holds the type of this element segment, which is the RefType in WebAssembly 2.0. - Type ElemType + Type RefType // Mode is the mode of this element segment. - Mode ElemMode + Mode ElementMode } -// TableInstance represents a table of (ElemTypeFuncref) elements in a module. +// IsActive returns true if the element segment is "active" mode which requires the runtime to initialize table +// with the contents in .Init field. +func (e *ElementSegment) IsActive() bool { + return e.Mode == ElementModeActive +} + +// TableInstance represents a table of (RefTypeFuncref) elements in a module. // // See https://www.w3.org/TR/2019/REC-wasm-core-1-20191205/#table-instances%E2%91%A0 type TableInstance struct { - // Table holds the Engine-specific compiled functions mapped by element index. 
- Table []interface{} + // References holds references whose type is either RefTypeFuncref or RefTypeExternref (unsupported). + // + // Currently only function references are supported. + References []Reference // Min is the minimum (function) elements in this table and cannot grow to accommodate ElementSegment. Min uint32 @@ -61,17 +75,25 @@ type TableInstance struct { Max *uint32 } +// ElementInstance represents an element instance in a module. +// +// See https://www.w3.org/TR/2022/WD-wasm-core-2-20220419/exec/runtime.html#table-instances type ElementInstance struct { + // References holds references whose type is either RefTypeFuncref or RefTypeExternref (unsupported). References []Reference - Type ElemType + // Type is the RefType of the references in this instance's References. + Type RefType } +// Reference is the runtime representation of RefType which is either RefTypeFuncref or RefTypeExternref (unsupported). +// +// Currently the content is a (possibly nil) pointer to the engine-specific struct which can only be used in indirect function calls. type Reference = interface{} -// validatedElementSegment is like ElementSegment except the inputs are expanded and validated based on defining module. +// validatedActiveElementSegment is like an ElementSegment of active mode except the inputs are expanded and validated based on the defining module. // // Note: The global imported at globalIdx may have an offset value that is out-of-bounds for the corresponding table. -type validatedElementSegment struct { +type validatedActiveElementSegment struct { // opcode is OpcodeGlobalGet or OpcodeI32Const opcode Opcode @@ -83,14 +105,14 @@ type validatedElementSegment struct { // init are a range of table elements whose values are positions in the function index namespace. This range // replaces any values in TableInstance.Table at an offset arg which is a constant if opcode == OpcodeI32Const or // derived from a globalIdx if opcode == OpcodeGlobalGet - init []NullableIndex + init []*Index } -// validateTable ensures any ElementSegment is valid. This caches results via Module.validatedElementSegments. +// validateTable ensures any ElementSegment is valid. This caches results via Module.validatedActiveElementSegments. // Note: limitsType are validated by decoders, so not re-validated here. -func (m *Module) validateTable() ([]*validatedElementSegment, error) { - if m.validatedElementSegments != nil { - return m.validatedElementSegments, nil +func (m *Module) validateTable(enabledFeatures Features) ([]*validatedActiveElementSegment, error) { + if m.validatedActiveElementSegments != nil { + return m.validatedActiveElementSegments, nil } t := m.TableSection @@ -104,11 +126,11 @@ func (m *Module) validateTable() ([]*validatedElementSegment, error) { } elementCount := m.SectionElementCount(SectionIDElement) - if elementCount > 0 && t == nil { + if !enabledFeatures.Get(FeatureBulkMemoryOperations) && elementCount > 0 && t == nil { return nil, fmt.Errorf("%s was defined, but not table", SectionIDName(SectionIDElement)) } - ret := make([]*validatedElementSegment, 0, elementCount) + ret := make([]*validatedActiveElementSegment, 0, elementCount) // Create bounds checks as these can err prior to instantiation funcCount := m.importCount(ExternTypeFunc) + m.SectionElementCount(SectionIDFunction) @@ -123,53 +145,55 @@ func (m *Module) validateTable() ([]*validatedElementSegment, error) { // Any offset applied is to the element, not the function index: validate here if the funcidx is sound.
for ei, funcIdx := range elem.Init { if funcIdx != nil && *funcIdx >= funcCount { - return nil, fmt.Errorf("%s[%d].init[%d] funcidx %d out of range", SectionIDName(SectionIDElement), idx, ei, funcIdx) + return nil, fmt.Errorf("%s[%d].init[%d] funcidx %d out of range", SectionIDName(SectionIDElement), idx, ei, *funcIdx) } } - // global.get needs to be discovered during initialization - oc := elem.OffsetExpr.Opcode - if oc == OpcodeGlobalGet { - globalIdx, _, err := leb128.DecodeUint32(bytes.NewReader(elem.OffsetExpr.Data)) - if err != nil { - return nil, fmt.Errorf("%s[%d] couldn't read global.get parameter: %w", SectionIDName(SectionIDElement), idx, err) - } else if err = m.verifyImportGlobalI32(SectionIDElement, idx, globalIdx); err != nil { - return nil, err - } + if elem.IsActive() { + // global.get needs to be discovered during initialization + oc := elem.OffsetExpr.Opcode + if oc == OpcodeGlobalGet { + globalIdx, _, err := leb128.DecodeUint32(bytes.NewReader(elem.OffsetExpr.Data)) + if err != nil { + return nil, fmt.Errorf("%s[%d] couldn't read global.get parameter: %w", SectionIDName(SectionIDElement), idx, err) + } else if err = m.verifyImportGlobalI32(SectionIDElement, idx, globalIdx); err != nil { + return nil, err + } - if initCount == 0 { - continue // Per https://github.com/WebAssembly/spec/issues/1427 init can be no-op, but validate anyway! - } + if initCount == 0 { + continue // Per https://github.com/WebAssembly/spec/issues/1427 init can be no-op, but validate anyway! + } - ret = append(ret, &validatedElementSegment{oc, globalIdx, elem.Init}) - } else if oc == OpcodeI32Const { - // Treat constants as signed as their interpretation is not yet known per /RATIONALE.md - o, _, err := leb128.DecodeInt32(bytes.NewReader(elem.OffsetExpr.Data)) - if err != nil { - return nil, fmt.Errorf("%s[%d] couldn't read i32.const parameter: %w", SectionIDName(SectionIDElement), idx, err) - } - offset := Index(o) + ret = append(ret, &validatedActiveElementSegment{oc, globalIdx, elem.Init}) + } else if oc == OpcodeI32Const { + // Treat constants as signed as their interpretation is not yet known per /RATIONALE.md + o, _, err := leb128.DecodeInt32(bytes.NewReader(elem.OffsetExpr.Data)) + if err != nil { + return nil, fmt.Errorf("%s[%d] couldn't read i32.const parameter: %w", SectionIDName(SectionIDElement), idx, err) + } + offset := Index(o) + + // Per https://github.com/WebAssembly/spec/blob/wg-1.0/test/core/elem.wast#L117 we must pass if imported + // table has set its min=0. Per https://github.com/WebAssembly/spec/blob/wg-1.0/test/core/elem.wast#L142, we + // have to do fail if module-defined min=0. + if !imported { + if err = checkSegmentBounds(t.Min, uint64(initCount)+uint64(offset), idx); err != nil { + return nil, err + } + } - // Per https://github.com/WebAssembly/spec/blob/wg-1.0/test/core/elem.wast#L117 we must pass if imported - // table has set its min=0. Per https://github.com/WebAssembly/spec/blob/wg-1.0/test/core/elem.wast#L142, we - // have to do fail if module-defined min=0. - if !imported { - if err = checkSegmentBounds(t.Min, uint64(initCount)+uint64(offset), idx); err != nil { - return nil, err + if initCount == 0 { + continue // Per https://github.com/WebAssembly/spec/issues/1427 init can be no-op, but validate anyway! } - } - if initCount == 0 { - continue // Per https://github.com/WebAssembly/spec/issues/1427 init can be no-op, but validate anyway! 
+ ret = append(ret, &validatedActiveElementSegment{oc, offset, elem.Init}) + } else { + return nil, fmt.Errorf("%s[%d] has an invalid const expression: %s", SectionIDName(SectionIDElement), idx, InstructionName(oc)) } - - ret = append(ret, &validatedElementSegment{oc, offset, elem.Init}) - } else { - return nil, fmt.Errorf("%s[%d] has an invalid const expression: %s", SectionIDName(SectionIDElement), idx, InstructionName(oc)) } } - m.validatedElementSegments = ret + m.validatedActiveElementSegments = ret return ret, nil } @@ -184,7 +208,7 @@ func (m *Module) buildTable(importedTable *TableInstance, importedGlobals []*Glo // The module defining the table is the one that sets its Min/Max etc. if m.TableSection != nil { t := m.TableSection - table = &TableInstance{Table: make([]interface{}, t.Min), Min: t.Min, Max: t.Max} + table = &TableInstance{References: make([]Reference, t.Min), Min: t.Min, Max: t.Max} } else { table = importedTable } @@ -192,7 +216,7 @@ func (m *Module) buildTable(importedTable *TableInstance, importedGlobals []*Glo return // no table } - elementSegments := m.validatedElementSegments + elementSegments := m.validatedActiveElementSegments if len(elementSegments) == 0 { return } diff --git a/internal/wasm/table_test.go b/internal/wasm/table_test.go index 320f77a6737..b096f871af6 100644 --- a/internal/wasm/table_test.go +++ b/internal/wasm/table_test.go @@ -8,6 +8,10 @@ import ( "github.com/tetratelabs/wazero/internal/testing/require" ) +func uint32Ptr(v uint32) *uint32 { + return &v +} + func TestStore_resolveImports_table(t *testing.T) { const moduleName = "test" const name = "target" @@ -54,22 +58,22 @@ func TestModule_validateTable(t *testing.T) { tests := []struct { name string input *Module - expected []*validatedElementSegment + expected []*validatedActiveElementSegment }{ { name: "empty", input: &Module{}, - expected: []*validatedElementSegment{}, + expected: []*validatedActiveElementSegment{}, }, { name: "min zero", input: &Module{TableSection: &Table{}}, - expected: []*validatedElementSegment{}, + expected: []*validatedActiveElementSegment{}, }, { name: "min/max", input: &Module{TableSection: &Table{1, &three}}, - expected: []*validatedElementSegment{}, + expected: []*validatedActiveElementSegment{}, }, { // See: https://github.com/WebAssembly/spec/issues/1427 name: "constant derived element offset=0 and no index", @@ -82,7 +86,7 @@ func TestModule_validateTable(t *testing.T) { {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}}, }, }, - expected: []*validatedElementSegment{}, + expected: []*validatedActiveElementSegment{}, }, { name: "constant derived element offset=0 and one index", @@ -94,12 +98,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 0, init: []uint32{0}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, { @@ -112,12 +116,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 0, init: []uint32{0}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 0, init: 
[]*Index{uint32Ptr(0)}}, }, }, { @@ -130,12 +134,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 0, init: []uint32{0}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, { @@ -148,12 +152,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, - Init: []Index{0, 2}, + Init: []*Index{uint32Ptr(0), uint32Ptr(2)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 1, init: []uint32{0, 2}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 1, init: []*Index{uint32Ptr(0), uint32Ptr(2)}}, }, }, { // See: https://github.com/WebAssembly/spec/issues/1427 @@ -170,7 +174,7 @@ func TestModule_validateTable(t *testing.T) { {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}}, }, }, - expected: []*validatedElementSegment{}, + expected: []*validatedActiveElementSegment{}, }, { name: "imported global derived element offset and one index", @@ -185,12 +189,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, { @@ -206,12 +210,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, { @@ -227,12 +231,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, { @@ -249,12 +253,12 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x1}}, - Init: []Index{0, 2}, + Init: []*Index{uint32Ptr(0), uint32Ptr(2)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 1, init: []uint32{0, 2}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 1, init: []*Index{uint32Ptr(0), uint32Ptr(2)}}, }, }, { @@ -271,17 +275,17 @@ func TestModule_validateTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, - Init: []Index{0, 2}, + Init: []*Index{uint32Ptr(0), uint32Ptr(2)}, }, { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x1}}, - Init: []Index{1, 
2}, + Init: []*Index{uint32Ptr(1), uint32Ptr(2)}, }, }, }, - expected: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 1, init: []uint32{0, 2}}, - {opcode: OpcodeGlobalGet, arg: 1, init: []uint32{1, 2}}, + expected: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 1, init: []*Index{uint32Ptr(0), uint32Ptr(2)}}, + {opcode: OpcodeGlobalGet, arg: 1, init: []*Index{uint32Ptr(1), uint32Ptr(2)}}, }, }, } @@ -290,13 +294,13 @@ func TestModule_validateTable(t *testing.T) { tc := tt t.Run(tc.name, func(t *testing.T) { - vt, err := tc.input.validateTable() + vt, err := tc.input.validateTable(Features20191205) require.NoError(t, err) require.Equal(t, tc.expected, vt) // Ensure it was cached. We have to use Equal not Same because this is a slice, not a pointer. - require.Equal(t, vt, tc.input.validatedElementSegments) - vt2, err := tc.input.validateTable() + require.Equal(t, vt, tc.input.validatedActiveElementSegments) + vt2, err := tc.input.validateTable(Features20191205) require.NoError(t, err) require.Equal(t, vt, vt2) }) @@ -320,7 +324,7 @@ func TestModule_validateTable_Errors(t *testing.T) { {OffsetExpr: &ConstantExpression{ Opcode: OpcodeI32Const, Data: leb128.EncodeUint64(math.MaxUint64), - }, Init: []Index{0}}, + }, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element[0] couldn't read i32.const parameter: overflows a 32-bit integer", @@ -333,7 +337,7 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeI64Const, Data: const0}, Init: []Index{0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeI64Const, Data: const0}, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element[0] has an invalid const expression: i64.const", @@ -345,7 +349,7 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, Init: []Index{0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element was defined, but not table", @@ -358,7 +362,7 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: leb128.EncodeInt32(2)}, Init: []Index{0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: leb128.EncodeInt32(2)}, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element[0].init exceeds min table size", @@ -371,8 +375,8 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, Init: []Index{0}}, - {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, Init: []Index{0, 0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, Init: []*Index{uint32Ptr(0)}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, Init: []*Index{uint32Ptr(0), uint32Ptr(0)}}, }, }, expectedErr: "element[1].init exceeds min table size", @@ -398,7 +402,7 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: 
&ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, Init: []Index{0, 1}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, Init: []*Index{uint32Ptr(0), uint32Ptr(1)}}, }, }, expectedErr: "element[0].init[1] funcidx 1 out of range", @@ -413,7 +417,7 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []Index{0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element was defined, but not table", @@ -429,7 +433,7 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []Index{0, 1}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []*Index{uint32Ptr(0), uint32Ptr(1)}}, }, }, expectedErr: "element[0].init[1] funcidx 1 out of range", @@ -445,7 +449,7 @@ func TestModule_validateTable_Errors(t *testing.T) { FunctionSection: []Index{0}, CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []Index{0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element[0] (global.get 0): import[0].global.ValType != i32", @@ -464,7 +468,7 @@ func TestModule_validateTable_Errors(t *testing.T) { {OffsetExpr: &ConstantExpression{ Opcode: OpcodeGlobalGet, Data: leb128.EncodeUint64(math.MaxUint64), - }, Init: []Index{0}}, + }, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element[0] couldn't read global.get parameter: overflows a 32-bit integer", @@ -478,7 +482,7 @@ func TestModule_validateTable_Errors(t *testing.T) { GlobalSection: []*Global{{Type: &GlobalType{ValType: ValueTypeI32}}}, // ignored as not imported CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []Index{0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element[0] (global.get 0): out of range of imported globals", @@ -495,7 +499,7 @@ func TestModule_validateTable_Errors(t *testing.T) { GlobalSection: []*Global{{Type: &GlobalType{ValType: ValueTypeI32}}}, // ignored as not imported CodeSection: []*Code{codeEnd}, ElementSection: []*ElementSegment{ - {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []Index{0}}, + {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, Init: []*Index{uint32Ptr(0)}}, }, }, expectedErr: "element[0] (global.get 0): out of range of imported globals", @@ -506,7 +510,7 @@ func TestModule_validateTable_Errors(t *testing.T) { tc := tt t.Run(tc.name, func(t *testing.T) { - _, err := tc.input.validateTable() + _, err := tc.input.validateTable(Features20191205) require.EqualError(t, err, tc.expectedErr) }) } @@ -528,24 +532,24 @@ func TestModule_buildTable(t *testing.T) { { name: "empty", module: &Module{ - validatedElementSegments: []*validatedElementSegment{}, + validatedActiveElementSegments: []*validatedActiveElementSegment{}, }, }, { name: "min zero", module: &Module{ - TableSection: 
&Table{}, - validatedElementSegments: []*validatedElementSegment{}, + TableSection: &Table{}, + validatedActiveElementSegments: []*validatedActiveElementSegment{}, }, - expectedTable: &TableInstance{Table: make([]interface{}, 0), Min: 0}, + expectedTable: &TableInstance{References: make([]Reference, 0), Min: 0}, }, { name: "min/max", module: &Module{ - TableSection: &Table{1, &three}, - validatedElementSegments: []*validatedElementSegment{}, + TableSection: &Table{1, &three}, + validatedActiveElementSegments: []*validatedActiveElementSegment{}, }, - expectedTable: &TableInstance{Table: make([]interface{}, 1), Min: 1, Max: &three}, + expectedTable: &TableInstance{References: make([]Reference, 1), Min: 1, Max: &three}, }, { // See: https://github.com/WebAssembly/spec/issues/1427 name: "constant derived element offset=0 and no index", @@ -557,9 +561,9 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ {OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}}, }, - validatedElementSegments: []*validatedElementSegment{}, + validatedActiveElementSegments: []*validatedActiveElementSegment{}, }, - expectedTable: &TableInstance{Table: make([]interface{}, 1), Min: 1}, + expectedTable: &TableInstance{References: make([]Reference, 1), Min: 1}, }, { name: "constant derived element offset=0 and one index", @@ -571,14 +575,14 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, - expectedTable: &TableInstance{Table: make([]interface{}, 1), Min: 1}, + expectedTable: &TableInstance{References: make([]Reference, 1), Min: 1}, expectedInit: map[Index]Index{0: 0}, }, { @@ -591,11 +595,11 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, }, @@ -609,11 +613,11 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, }, @@ -627,14 +631,14 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, - Init: []Index{0, 2}, + Init: []*Index{uint32Ptr(0), uint32Ptr(2)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 1, init: []uint32{0, 2}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 1, init: []*Index{uint32Ptr(0), uint32Ptr(2)}}, }, }, - expectedTable: &TableInstance{Table: 
make([]interface{}, 3), Min: 3}, + expectedTable: &TableInstance{References: make([]Reference, 3), Min: 3}, expectedInit: map[Index]Index{1: 0, 2: 2}, }, { // See: https://github.com/WebAssembly/spec/issues/1427 @@ -650,10 +654,10 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ {OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}}, }, - validatedElementSegments: []*validatedElementSegment{}, + validatedActiveElementSegments: []*validatedActiveElementSegment{}, }, importedGlobals: []*GlobalInstance{{Type: &GlobalType{ValType: ValueTypeI32}, Val: 1}}, - expectedTable: &TableInstance{Table: make([]interface{}, 1), Min: 1}, + expectedTable: &TableInstance{References: make([]Reference, 1), Min: 1}, }, { name: "imported global derived element offset and one index", @@ -668,15 +672,15 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, importedGlobals: []*GlobalInstance{{Type: &GlobalType{ValType: ValueTypeI32}, Val: 1}}, - expectedTable: &TableInstance{Table: make([]interface{}, 2), Min: 2}, + expectedTable: &TableInstance{References: make([]Reference, 2), Min: 2}, expectedInit: map[Index]Index{1: 0}, }, { @@ -692,15 +696,15 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, importedGlobals: []*GlobalInstance{{Type: &GlobalType{ValType: ValueTypeI32}, Val: 1}}, - importedTable: &TableInstance{Table: make([]interface{}, 2), Min: 2}, + importedTable: &TableInstance{References: make([]Reference, 2), Min: 2}, expectedInit: map[Index]Index{1: 0}, }, { @@ -716,15 +720,15 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, importedGlobals: []*GlobalInstance{{Type: &GlobalType{ValType: ValueTypeI32}, Val: 1}}, - importedTable: &TableInstance{Table: make([]interface{}, 2), Min: 2}, + importedTable: &TableInstance{References: make([]Reference, 2), Min: 2}, expectedInit: map[Index]Index{1: 0}, }, { @@ -741,18 +745,18 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x1}}, - Init: []Index{0, 2}, + Init: []*Index{uint32Ptr(0), uint32Ptr(2)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 1, init: []uint32{0, 2}}, + validatedActiveElementSegments: 
[]*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 1, init: []*Index{uint32Ptr(0), uint32Ptr(2)}}, }, }, importedGlobals: []*GlobalInstance{ {Type: &GlobalType{ValType: ValueTypeI64}, Val: 3}, {Type: &GlobalType{ValType: ValueTypeI32}, Val: 1}, }, - expectedTable: &TableInstance{Table: make([]interface{}, 3), Min: 3}, + expectedTable: &TableInstance{References: make([]Reference, 3), Min: 3}, expectedInit: map[Index]Index{1: 0, 2: 2}, }, { @@ -769,23 +773,23 @@ func TestModule_buildTable(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const1}, - Init: []Index{0, 2}, + Init: []*Index{uint32Ptr(0), uint32Ptr(2)}, }, { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x1}}, - Init: []Index{1, 2}, + Init: []*Index{uint32Ptr(1), uint32Ptr(2)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 1, init: []uint32{0, 2}}, - {opcode: OpcodeGlobalGet, arg: 1, init: []uint32{1, 2}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 1, init: []*Index{uint32Ptr(0), uint32Ptr(2)}}, + {opcode: OpcodeGlobalGet, arg: 1, init: []*Index{uint32Ptr(1), uint32Ptr(2)}}, }, }, importedGlobals: []*GlobalInstance{ {Type: &GlobalType{ValType: ValueTypeI64}, Val: 3}, {Type: &GlobalType{ValType: ValueTypeI32}, Val: 1}, }, - expectedTable: &TableInstance{Table: make([]interface{}, 3), Min: 3}, + expectedTable: &TableInstance{References: make([]Reference, 3), Min: 3}, expectedInit: map[Index]Index{1: 1, 2: 2}, }, } @@ -825,14 +829,14 @@ func TestModule_buildTable_Errors(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeI32Const, Data: const0}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeI32Const, arg: 2, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeI32Const, arg: 2, init: []*Index{uint32Ptr(0)}}, }, }, - importedTable: &TableInstance{Table: make([]interface{}, 2), Min: 2}, + importedTable: &TableInstance{References: make([]Reference, 2), Min: 2}, expectedErr: "element[0].init exceeds min table size", }, { @@ -848,11 +852,11 @@ func TestModule_buildTable_Errors(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, importedGlobals: []*GlobalInstance{{Type: &GlobalType{ValType: ValueTypeI32}, Val: 2}}, @@ -872,14 +876,14 @@ func TestModule_buildTable_Errors(t *testing.T) { ElementSection: []*ElementSegment{ { OffsetExpr: &ConstantExpression{Opcode: OpcodeGlobalGet, Data: []byte{0x0}}, - Init: []Index{0}, + Init: []*Index{uint32Ptr(0)}, }, }, - validatedElementSegments: []*validatedElementSegment{ - {opcode: OpcodeGlobalGet, arg: 0, init: []uint32{0}}, + validatedActiveElementSegments: []*validatedActiveElementSegment{ + {opcode: OpcodeGlobalGet, arg: 0, init: []*Index{uint32Ptr(0)}}, }, }, - importedTable: &TableInstance{Table: make([]interface{}, 2), Min: 2}, + importedTable: &TableInstance{References: make([]Reference, 2), Min: 2}, importedGlobals: 
[]*GlobalInstance{{Type: &GlobalType{ValType: ValueTypeI32}, Val: 2}}, expectedErr: "element[0].init exceeds min table size", }, diff --git a/internal/wazeroir/compiler.go b/internal/wazeroir/compiler.go index ff3a9a3b274..f88fbfb67d6 100644 --- a/internal/wazeroir/compiler.go +++ b/internal/wazeroir/compiler.go @@ -181,6 +181,8 @@ type CompilationResult struct { HasTable bool // NeedsAccessToDataInstances is true if the function needs access to data instances via memory.init or data.drop instructions. NeedsAccessToDataInstances bool + // NeedsAccessToElementInstances is true if the function needs access to element instances via table.init, table.copy, or elem.drop instructions. + NeedsAccessToElementInstances bool } func CompileFunctions(_ context.Context, enabledFeatures wasm.Features, module *wasm.Module) ([]*CompilationResult, error) { @@ -1529,24 +1531,26 @@ operatorSwitch: return fmt.Errorf("reading i32.const value: %v", err) } c.pc += num + // Read the table index, which is fixed to zero currently. + _, num, err = leb128.DecodeUint32(bytes.NewReader(c.body[c.pc+1:])) + if err != nil { + return fmt.Errorf("reading i32.const value: %v", err) + } + c.pc += num c.emit( &OperationTableInit{ElemIndex: elemIndex}, ) + c.result.NeedsAccessToElementInstances = true case wasm.OpcodeMiscElemDrop: elemIndex, num, err := leb128.DecodeUint32(bytes.NewReader(c.body[c.pc+1:])) if err != nil { return fmt.Errorf("reading i32.const value: %v", err) } c.pc += num - // Read the table index which is not used for now (until reference type proposal impl.) - _, num, err = leb128.DecodeUint32(bytes.NewReader(c.body[c.pc+1:])) - if err != nil { - return fmt.Errorf("reading i32.const value: %v", err) - } - c.pc += num c.emit( &OperationElemDrop{ElemIndex: elemIndex}, ) + c.result.NeedsAccessToElementInstances = true case wasm.OpcodeMiscTableCopy: // Read the source table index which is not used for now (until reference type proposal impl.) _, num, err := leb128.DecodeUint32(bytes.NewReader(c.body[c.pc+1:])) @@ -1564,6 +1568,7 @@ operatorSwitch: c.emit( &OperationTableCopy{}, ) + c.result.NeedsAccessToElementInstances = true default: return fmt.Errorf("unsupported misc instruction in wazeroir: 0x%x", op) } diff --git a/internal/wazeroir/operations.go b/internal/wazeroir/operations.go index 49512111b18..fa1b6040c09 100644 --- a/internal/wazeroir/operations.go +++ b/internal/wazeroir/operations.go @@ -998,5 +998,5 @@ type OperationTableCopy struct { } func (o *OperationTableCopy) Kind() OperationKind { - return OperationKindTableInit + return OperationKindTableCopy }
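For context on the semantics the OperationTableInit and OperationElemDrop operations above will implement, the following is a minimal, self-contained Go sketch of how table.init and elem.drop act on the TableInstance.References and ElementInstance types introduced in this change. It is illustrative only: the tableInit/elemDrop helpers, the simplified structs, and the error handling are assumptions made for the example, not wazero's engine code.

package main

import (
	"errors"
	"fmt"
)

// Reference mirrors the runtime representation used in this change: an opaque,
// possibly-nil value (only funcref for now).
type Reference = interface{}

// ElementInstance and TableInstance are simplified stand-ins for the structs
// declared in internal/wasm/table.go.
type ElementInstance struct{ References []Reference }
type TableInstance struct{ References []Reference }

var errOutOfBounds = errors.New("out of bounds table access")

// tableInit copies n references from the element instance (starting at srcOffset)
// into the table (starting at dstOffset). Out-of-bounds ranges fail, which a real
// engine would surface as a trap.
func tableInit(table *TableInstance, elem *ElementInstance, dstOffset, srcOffset, n uint32) error {
	if uint64(srcOffset)+uint64(n) > uint64(len(elem.References)) ||
		uint64(dstOffset)+uint64(n) > uint64(len(table.References)) {
		return errOutOfBounds
	}
	copy(table.References[dstOffset:dstOffset+n], elem.References[srcOffset:srcOffset+n])
	return nil
}

// elemDrop makes the element instance's contents unavailable to later table.init.
func elemDrop(elem *ElementInstance) { elem.References = nil }

func main() {
	table := &TableInstance{References: make([]Reference, 4)}
	elem := &ElementInstance{References: []Reference{"f0", "f1"}} // stand-ins for funcrefs

	// Corresponds to (table.init 0 (i32.const 1) (i32.const 0) (i32.const 2)).
	if err := tableInit(table, elem, 1, 0, 2); err != nil {
		panic(err)
	}
	fmt.Println(table.References) // [<nil> f0 f1 <nil>]

	// After elem.drop, a later table.init with a non-zero length is out of bounds.
	elemDrop(elem)
	fmt.Println(tableInit(table, elem, 0, 0, 1)) // out of bounds table access
}

Note that a zero-length table.init with in-bounds offsets does not trap even after elem.drop, which is why the bounds check in the sketch only rejects ranges that extend past the (now empty) element instance.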