@@ -2,155 +2,155 @@ use super::FuncTranslator;
 use wasmparser::VisitSimdOperator;

 impl VisitSimdOperator<'_> for FuncTranslator {
-    fn visit_v128_load(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load8x8_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load8x8_s(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load8x8_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load8x8_u(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load16x4_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load16x4_s(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load16x4_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load16x4_u(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load32x2_s(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load32x2_s(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load32x2_u(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load32x2_u(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load8_splat(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load8_splat(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load16_splat(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load16_splat(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load32_splat(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load32_splat(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load64_splat(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load64_splat(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load32_zero(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load32_zero(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load64_zero(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_load64_zero(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_store(&mut self, memarg: wasmparser::MemArg) -> Self::Output {
+    fn visit_v128_store(&mut self, _memarg: wasmparser::MemArg) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load8_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_load8_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load16_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_load16_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load32_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_load32_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_load64_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_load64_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_store8_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_store8_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_store16_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_store16_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_store32_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_store32_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_store64_lane(&mut self, memarg: wasmparser::MemArg, lane: u8) -> Self::Output {
+    fn visit_v128_store64_lane(&mut self, _memarg: wasmparser::MemArg, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_v128_const(&mut self, value: wasmparser::V128) -> Self::Output {
+    fn visit_v128_const(&mut self, _value: wasmparser::V128) -> Self::Output {
         todo!()
     }

-    fn visit_i8x16_shuffle(&mut self, lanes: [u8; 16]) -> Self::Output {
+    fn visit_i8x16_shuffle(&mut self, _lanes: [u8; 16]) -> Self::Output {
         todo!()
     }

-    fn visit_i8x16_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+    fn visit_i8x16_extract_lane_s(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i8x16_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+    fn visit_i8x16_extract_lane_u(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i8x16_replace_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_i8x16_replace_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i16x8_extract_lane_s(&mut self, lane: u8) -> Self::Output {
+    fn visit_i16x8_extract_lane_s(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i16x8_extract_lane_u(&mut self, lane: u8) -> Self::Output {
+    fn visit_i16x8_extract_lane_u(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i16x8_replace_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_i16x8_replace_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_i32x4_extract_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_i32x4_replace_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_i64x2_extract_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_i64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_i64x2_replace_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_f32x4_extract_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_f32x4_extract_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_f32x4_replace_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_f32x4_replace_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_f64x2_extract_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_f64x2_extract_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }

-    fn visit_f64x2_replace_lane(&mut self, lane: u8) -> Self::Output {
+    fn visit_f64x2_replace_lane(&mut self, _lane: u8) -> Self::Output {
         todo!()
     }
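For context, a minimal standalone sketch (hypothetical names, not taken from this crate) of the one change the diff applies everywhere: prefixing a parameter with an underscore marks it as intentionally unused, so the todo!() stubs compile without unused_variables warnings until each SIMD operator is actually translated.

    // Sketch only: MemArg, VisitLoad, and Translator are stand-ins, not wasmi/wasmparser items.
    #![allow(dead_code)]

    struct MemArg {
        offset: u64,
        align: u8,
    }

    trait VisitLoad {
        type Output;
        fn visit_v128_load(&mut self, memarg: MemArg) -> Self::Output;
    }

    struct Translator;

    impl VisitLoad for Translator {
        type Output = ();

        // `_memarg` tells rustc the argument is deliberately unused while the
        // body is still `todo!()`; without the underscore this stub would warn.
        fn visit_v128_load(&mut self, _memarg: MemArg) -> Self::Output {
            todo!()
        }
    }

    fn main() {
        // Calling `visit_v128_load` would panic with "not yet implemented";
        // the point is only that `_memarg` raises no unused_variables warning.
        let _translator = Translator;
        let _example_arg = MemArg { offset: 0, align: 0 };
    }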