From be340dd4ffc1bd26e23d7bd54d11782ed775e803 Mon Sep 17 00:00:00 2001
From: Zhi An Ng
Date: Fri, 30 Oct 2020 02:13:04 +0000
Subject: [PATCH] Implement v128.load32_zero and v128.load64_zero

The tests are adapted from load_extend tests.
---
 interpreter/runtime/memory.ml      |   6 +-
 interpreter/syntax/operators.ml    |   6 ++
 interpreter/syntax/types.ml        |   1 +
 interpreter/text/lexer.mll         |   4 +
 test/core/simd/simd_load_zero.wast | 154 +++++++++++++++++++++++++++++
 5 files changed, 169 insertions(+), 2 deletions(-)
 create mode 100644 test/core/simd/simd_load_zero.wast

diff --git a/interpreter/runtime/memory.ml b/interpreter/runtime/memory.ml
index ffb65432f..5d4ccfb49 100644
--- a/interpreter/runtime/memory.ml
+++ b/interpreter/runtime/memory.ml
@@ -133,9 +133,9 @@ let load_packed sz ext mem a o t =
 
 let load_simd_packed pack_size simd_load mem a o t =
   let n = packed_size pack_size in
-  assert (n <= Types.size t);
+  assert (n < Types.size t);
   let x = loadn mem a o n in
-  let b = Bytes.create 16 in
+  let b = Bytes.make 16 '\x00' in
   Bytes.set_int64_le b 0 x;
   let v = V128.of_bits (Bytes.to_string b) in
   match pack_size, simd_load with
@@ -149,6 +149,8 @@ let load_simd_packed pack_size simd_load mem a o t =
   | Pack16, PackSplat -> V128 (V128.I16x8.splat (I16.of_int_s (Int64.to_int x)))
   | Pack32, PackSplat -> V128 (V128.I32x4.splat (I32.of_int_s (Int64.to_int x)))
   | Pack64, PackSplat -> V128 (V128.I64x2.splat x)
+  | Pack32, PackZero -> V128 v
+  | Pack64, PackZero -> V128 v
   | _ -> assert false
 
 let store_packed sz mem a o v =
diff --git a/interpreter/syntax/operators.ml b/interpreter/syntax/operators.ml
index 34bffee9b..1a5a92b4f 100644
--- a/interpreter/syntax/operators.ml
+++ b/interpreter/syntax/operators.ml
@@ -238,6 +238,12 @@ let v128_load32_splat align offset =
   SimdLoad {ty= V128Type; align; offset; sz = Some (Pack32, PackSplat)}
 let v128_load64_splat align offset =
   SimdLoad {ty= V128Type; align; offset; sz = Some (Pack64, PackSplat)}
+
+let v128_load32_zero align offset =
+  SimdLoad {ty= V128Type; align; offset; sz = Some (Pack32, PackZero)}
+let v128_load64_zero align offset =
+  SimdLoad {ty= V128Type; align; offset; sz = Some (Pack64, PackZero)}
+
 let v128_store align offset =
   SimdStore {ty = V128Type; align; offset; sz = None}
 let v128_not = Unary (V128 V128Op.(V128 Not))
diff --git a/interpreter/syntax/types.ml b/interpreter/syntax/types.ml
index 4b440e196..9c912e4ba 100644
--- a/interpreter/syntax/types.ml
+++ b/interpreter/syntax/types.ml
@@ -23,6 +23,7 @@ type pack_simd =
   | Pack8x8 of extension
   | Pack16x4 of extension
   | Pack32x2 of extension
+  | PackZero
 
 
 (* Attributes *)
diff --git a/interpreter/text/lexer.mll b/interpreter/text/lexer.mll
index ae2cb2c98..b24310a2a 100644
--- a/interpreter/text/lexer.mll
+++ b/interpreter/text/lexer.mll
@@ -302,6 +302,10 @@ rule token = parse
     { LOAD (fun a o -> (v128_load32_splat (opt a 2)) o) }
   | "v128.load64_splat"
     { LOAD (fun a o -> (v128_load64_splat (opt a 3)) o) }
+  | "v128.load32_zero"
+    { LOAD (fun a o -> (v128_load32_zero (opt a 2)) o) }
+  | "v128.load64_zero"
+    { LOAD (fun a o -> (v128_load64_zero (opt a 3)) o) }
   | (ixx as t)".store"(mem_size as sz)
     { if t = "i32" && sz = "32" then error lexbuf "unknown operator";
       STORE (fun a o ->
diff --git a/test/core/simd/simd_load_zero.wast b/test/core/simd/simd_load_zero.wast
new file mode 100644
index 000000000..6276a6863
--- /dev/null
+++ b/test/core/simd/simd_load_zero.wast
@@ -0,0 +1,154 @@
+;; Load and Zero extend test cases
+
+(module
+  (memory 1)
+  (data (i32.const 0) "\00\01\02\03\04\05\06\07\08\09\0A\0B\0C\0D\0E\0F\80\81\82\83\84\85\86\87\88\89")
+  (data (i32.const 65520) "\0A\0B\0C\0D\0E\0F\80\81\82\83\84\85\86\87\88\89")
+
+  (func (export "v128.load32_zero") (param $0 i32) (result v128)
+    (v128.load32_zero (local.get $0))
+  )
+  (func (export "v128.load64_zero") (param $0 i32) (result v128)
+    (v128.load64_zero (local.get $0))
+  )
+
+  ;; load by a constant amount
+  (func (export "v128.load32_zero_const0") (result v128)
+    (v128.load32_zero (i32.const 0))
+  )
+  (func (export "v128.load64_zero_const8") (result v128)
+    (v128.load64_zero (i32.const 8))
+  )
+
+  ;; load data with different offset/align arguments
+  ;; load32_zero
+  (func (export "v128.load32_zero_offset0") (param $0 i32) (result v128)
+    (v128.load32_zero offset=0 (local.get $0))
+  )
+  (func (export "v128.load32_zero_align1") (param $0 i32) (result v128)
+    (v128.load32_zero align=1 (local.get $0))
+  )
+  (func (export "v128.load32_zero_offset0_align1") (param $0 i32) (result v128)
+    (v128.load32_zero offset=0 align=1 (local.get $0))
+  )
+  (func (export "v128.load32_zero_offset1_align1") (param $0 i32) (result v128)
+    (v128.load32_zero offset=1 align=1 (local.get $0))
+  )
+  (func (export "v128.load32_zero_offset10_align4") (param $0 i32) (result v128)
+    (v128.load32_zero offset=10 align=4 (local.get $0))
+  )
+  (func (export "v128.load64_zero_offset0") (param $0 i32) (result v128)
+    (v128.load64_zero offset=0 (local.get $0))
+  )
+  (func (export "v128.load64_zero_align1") (param $0 i32) (result v128)
+    (v128.load64_zero align=1 (local.get $0))
+  )
+  (func (export "v128.load64_zero_offset0_align1") (param $0 i32) (result v128)
+    (v128.load64_zero offset=0 align=1 (local.get $0))
+  )
+  (func (export "v128.load64_zero_offset1_align1") (param $0 i32) (result v128)
+    (v128.load64_zero offset=1 align=1 (local.get $0))
+  )
+  (func (export "v128.load64_zero_offset10_align4") (param $0 i32) (result v128)
+    (v128.load64_zero offset=10 align=4 (local.get $0))
+  )
+  (func (export "v128.load64_zero_offset20_align8") (param $0 i32) (result v128)
+    (v128.load64_zero offset=20 align=8 (local.get $0))
+  )
+)
+
+
+;; normal
+(assert_return (invoke "v128.load32_zero" (i32.const 0)) (v128.const i32x4 0x03020100 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load64_zero" (i32.const 0)) (v128.const i64x2 0x0706050403020100 0x0000000000000000))
+(assert_return (invoke "v128.load32_zero" (i32.const 10)) (v128.const i32x4 0x0D0C0B0A 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load64_zero" (i32.const 10)) (v128.const i64x2 0x81800F0E0D0C0B0A 0x0000000000000000))
+(assert_return (invoke "v128.load32_zero" (i32.const 20)) (v128.const i32x4 0x87868584 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load64_zero" (i32.const 20)) (v128.const i64x2 0x0000898887868584 0x0000000000000000))
+
+;; load by a constant amount
+(assert_return (invoke "v128.load32_zero_const0") (v128.const i32x4 0x03020100 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load64_zero_const8") (v128.const i64x2 0x0F0E0D0C0B0A0908 0x0000000000000000))
+
+;; load data with different offset/align arguments
+;; load32_zero
+(assert_return (invoke "v128.load32_zero_offset0" (i32.const 0)) (v128.const i32x4 0x03020100 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load32_zero_align1" (i32.const 1)) (v128.const i32x4 0x04030201 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load32_zero_offset0_align1" (i32.const 2)) (v128.const i32x4 0x05040302 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load32_zero_offset10_align4" (i32.const 3)) (v128.const i32x4 0x800F0E0D 0x00000000 0x00000000 0x00000000))
+
+;; load64_zero
+(assert_return (invoke "v128.load64_zero_offset0" (i32.const 0)) (v128.const i64x2 0x0706050403020100 0x0000000000000000))
+(assert_return (invoke "v128.load64_zero_align1" (i32.const 1)) (v128.const i64x2 0x0807060504030201 0x0000000000000000))
+(assert_return (invoke "v128.load64_zero_offset0_align1" (i32.const 2)) (v128.const i64x2 0x0908070605040302 0x0000000000000000))
+(assert_return (invoke "v128.load64_zero_offset10_align4" (i32.const 3)) (v128.const i64x2 0x84838281800F0E0D 0x0000000000000000))
+(assert_return (invoke "v128.load64_zero_offset20_align8" (i32.const 4)) (v128.const i64x2 0x0000000000008988 0x0000000000000000))
+
+;; out of bounds memory access
+(assert_trap (invoke "v128.load32_zero" (i32.const -1)) "out of bounds memory access")
+(assert_trap (invoke "v128.load64_zero" (i32.const -1)) "out of bounds memory access")
+
+(assert_trap (invoke "v128.load32_zero_offset1_align1" (i32.const -1)) "out of bounds memory access")
+(assert_trap (invoke "v128.load64_zero_offset1_align1" (i32.const -1)) "out of bounds memory access")
+
+;; type check
+(assert_invalid (module (memory 0) (func (result v128) (v128.load32_zero (f32.const 0)))) "type mismatch")
+(assert_invalid (module (memory 0) (func (result v128) (v128.load64_zero (f32.const 0)))) "type mismatch")
+
+;; Test operation with empty argument
+
+(assert_invalid
+  (module (memory 0)
+    (func $v128.load32_zero-arg-empty (result v128)
+      (v128.load32_zero)
+    )
+  )
+  "type mismatch"
+)
+(assert_invalid
+  (module (memory 0)
+    (func $v128.load64_zero-arg-empty (result v128)
+      (v128.load64_zero)
+    )
+  )
+  "type mismatch"
+)
+
+;; Unknown operator
+
+(assert_malformed (module quote "(memory 1) (func (drop (i16x8.load16x4_s (i32.const 0))))") "unknown operator")
+(assert_malformed (module quote "(memory 1) (func (drop (i16x8.load16x4_u (i32.const 0))))") "unknown operator")
+(assert_malformed (module quote "(memory 1) (func (drop (i32x4.load32x2_s (i32.const 0))))") "unknown operator")
+(assert_malformed (module quote "(memory 1) (func (drop (i32x4.load32x2_u (i32.const 0))))") "unknown operator")
+(assert_malformed (module quote "(memory 1) (func (drop (i64x2.load64x1_s (i32.const 0))))") "unknown operator")
+(assert_malformed (module quote "(memory 1) (func (drop (i64x2.load64x1_u (i32.const 0))))") "unknown operator")
+
+;; combination
+(module
+  (memory 1)
+  (data (i32.const 0) "\00\01\02\03\04\05\06\07\08\09\0A\0B\0C\0D\0E\0F\80\81\82\83\84\85\86\87\88\89")
+  (func (export "v128.load32_zero-in-block") (result v128)
+    (block (result v128) (block (result v128) (v128.load32_zero (i32.const 0))))
+  )
+  (func (export "v128.load64_zero-in-block") (result v128)
+    (block (result v128) (block (result v128) (v128.load64_zero (i32.const 1))))
+  )
+  (func (export "v128.load32_zero-as-br-value") (result v128)
+    (block (result v128) (br 0 (v128.load32_zero (i32.const 6))))
+  )
+  (func (export "v128.load64_zero-as-br-value") (result v128)
+    (block (result v128) (br 0 (v128.load64_zero (i32.const 7))))
+  )
+  (func (export "v128.load32_zero-extract_lane_s-operand") (result i32)
+    (i32x4.extract_lane 0 (v128.load32_zero (i32.const 12)))
+  )
+  (func (export "v128.load64_zero-extract_lane_s-operand") (result i64)
+    (i64x2.extract_lane 0 (v128.load64_zero (i32.const 13)))
+  )
+)
+(assert_return (invoke "v128.load32_zero-in-block") (v128.const i32x4 0x03020100 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load64_zero-in-block") (v128.const i64x2 0x0807060504030201 0x0000000000000000))
+(assert_return (invoke "v128.load32_zero-as-br-value") (v128.const i32x4 0x09080706 0x00000000 0x00000000 0x00000000))
+(assert_return (invoke "v128.load64_zero-as-br-value") (v128.const i64x2 0x0E0D0C0B0A090807 0x0000000000000000))
+(assert_return (invoke "v128.load32_zero-extract_lane_s-operand") (i32.const 0x0F0E0D0C))
+(assert_return (invoke "v128.load64_zero-extract_lane_s-operand") (i64.const 0x84838281800F0E0D))
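
Note (illustration only, not part of the patch): the semantics being implemented is "load the low 32 or 64 bits from memory and leave every remaining bit of the v128 result zero", which is why the memory.ml hunk switches the scratch buffer from Bytes.create 16 (uninitialized) to Bytes.make 16 '\x00' (zero-filled) before the loaded value is written into it. The standalone OCaml sketch below shows that behaviour outside the interpreter; load_bytes_le, load32_zero and the flat mem string are illustrative stand-ins for the interpreter's loadn/V128.of_bits machinery, not its actual API.

(* Little-endian read of n bytes starting at addr; a stand-in for loadn. *)
let load_bytes_le (mem : string) (addr : int) (n : int) : int64 =
  let x = ref 0L in
  for i = n - 1 downto 0 do
    x := Int64.logor (Int64.shift_left !x 8) (Int64.of_int (Char.code mem.[addr + i]))
  done;
  !x

(* v128.load32_zero-style result: the low 4 bytes come from memory, the other
   12 bytes of the 16-byte vector stay zero. *)
let load32_zero (mem : string) (addr : int) : bytes =
  let b = Bytes.make 16 '\x00' in            (* zero-filled, as in the patch *)
  Bytes.set_int64_le b 0 (load_bytes_le mem addr 4);
  b

let () =
  (* First 16 bytes of the test module's data segment. *)
  let mem = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0A\x0B\x0C\x0D\x0E\x0F" in
  let v = load32_zero mem 0 in
  (* Prints 0x03020100 0x00000000 0x00000000 0x00000000, i.e. the lanes
     expected by the first assert_return in simd_load_zero.wast. *)
  for lane = 0 to 3 do
    Printf.printf "0x%08lX " (Bytes.get_int32_le v (lane * 4))
  done;
  print_newline ()

Had the buffer been left uninitialized (Bytes.create 16), lanes 2 and 3 of the result could hold garbage, since Bytes.set_int64_le only overwrites the first 8 bytes; the zero-filled buffer is what gives v128.load32_zero and v128.load64_zero their "zero the rest" behaviour.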