@@ -64,4 +64,170 @@ entry:
ret double %abs
}

+ ; Verify nofpexcept is set to constrained conversions when ignoring exceptions
+ define void @fptoint_nofpexcept(ppc_fp128 %p, fp128 %m, i32* %addr1, i64* %addr2) {
+ ; CHECK-LABEL: name: fptoint_nofpexcept
+ ; CHECK: bb.0.entry:
+ ; CHECK: successors: %bb.1(0x40000000), %bb.2(0x40000000)
+ ; CHECK: liveins: $f1, $f2, $v2, $x7, $x8
+ ; CHECK: [[COPY:%[0-9]+]]:g8rc_and_g8rc_nox0 = COPY $x8
+ ; CHECK: [[COPY1:%[0-9]+]]:g8rc_and_g8rc_nox0 = COPY $x7
+ ; CHECK: [[COPY2:%[0-9]+]]:vrrc = COPY $v2
+ ; CHECK: [[COPY3:%[0-9]+]]:f8rc = COPY $f2
+ ; CHECK: [[COPY4:%[0-9]+]]:f8rc = COPY $f1
+ ; CHECK: %5:vrrc = nofpexcept XSCVQPSWZ [[COPY2]]
+ ; CHECK: [[COPY5:%[0-9]+]]:vslrc = COPY %5
+ ; CHECK: [[COPY6:%[0-9]+]]:vfrc = COPY [[COPY5]].sub_64
+ ; CHECK: [[MFVSRWZ:%[0-9]+]]:gprc = MFVSRWZ killed [[COPY6]]
+ ; CHECK: STW killed [[MFVSRWZ]], 0, [[COPY1]] :: (volatile store 4 into %ir.addr1)
+ ; CHECK: %8:vrrc = nofpexcept XSCVQPUWZ [[COPY2]]
+ ; CHECK: [[COPY7:%[0-9]+]]:vslrc = COPY %8
+ ; CHECK: [[COPY8:%[0-9]+]]:vfrc = COPY [[COPY7]].sub_64
+ ; CHECK: [[MFVSRWZ1:%[0-9]+]]:gprc = MFVSRWZ killed [[COPY8]]
+ ; CHECK: STW killed [[MFVSRWZ1]], 0, [[COPY1]] :: (volatile store 4 into %ir.addr1)
+ ; CHECK: %11:vrrc = nofpexcept XSCVQPSDZ [[COPY2]]
+ ; CHECK: %12:g8rc = nofpexcept MFVRD killed %11
+ ; CHECK: STD killed %12, 0, [[COPY]] :: (volatile store 8 into %ir.addr2)
+ ; CHECK: %13:vrrc = nofpexcept XSCVQPUDZ [[COPY2]]
+ ; CHECK: %14:g8rc = nofpexcept MFVRD killed %13
+ ; CHECK: STD killed %14, 0, [[COPY]] :: (volatile store 8 into %ir.addr2)
+ ; CHECK: [[MFFS:%[0-9]+]]:f8rc = MFFS implicit $rm
+ ; CHECK: MTFSB1 31, implicit-def $rm
+ ; CHECK: MTFSB0 30, implicit-def $rm
+ ; CHECK: %15:f8rc = nofpexcept FADD [[COPY3]], [[COPY4]], implicit $rm
+ ; CHECK: MTFSFb 1, [[MFFS]], implicit-def $rm
+ ; CHECK: %16:vsfrc = nofpexcept XSCVDPSXWS killed %15, implicit $rm
+ ; CHECK: [[MFVSRWZ2:%[0-9]+]]:gprc = MFVSRWZ killed %16
+ ; CHECK: STW killed [[MFVSRWZ2]], 0, [[COPY1]] :: (volatile store 4 into %ir.addr1)
+ ; CHECK: [[ADDIStocHA8_:%[0-9]+]]:g8rc_and_g8rc_nox0 = ADDIStocHA8 $x2, %const.0
+ ; CHECK: [[DFLOADf32_:%[0-9]+]]:vssrc = DFLOADf32 target-flags(ppc-toc-lo) %const.0, killed [[ADDIStocHA8_]] :: (load 4 from constant-pool)
+ ; CHECK: [[COPY9:%[0-9]+]]:f8rc = COPY [[DFLOADf32_]]
+ ; CHECK: [[FCMPOD:%[0-9]+]]:crrc = FCMPOD [[COPY4]], [[COPY9]]
+ ; CHECK: [[COPY10:%[0-9]+]]:crbitrc = COPY [[FCMPOD]].sub_eq
+ ; CHECK: [[XXLXORdpz:%[0-9]+]]:f8rc = XXLXORdpz
+ ; CHECK: [[FCMPOD1:%[0-9]+]]:crrc = FCMPOD [[COPY3]], [[XXLXORdpz]]
+ ; CHECK: [[COPY11:%[0-9]+]]:crbitrc = COPY [[FCMPOD1]].sub_lt
+ ; CHECK: [[CRAND:%[0-9]+]]:crbitrc = CRAND killed [[COPY10]], killed [[COPY11]]
+ ; CHECK: [[COPY12:%[0-9]+]]:crbitrc = COPY [[FCMPOD]].sub_eq
+ ; CHECK: [[COPY13:%[0-9]+]]:crbitrc = COPY [[FCMPOD]].sub_lt
+ ; CHECK: [[CRANDC:%[0-9]+]]:crbitrc = CRANDC killed [[COPY13]], killed [[COPY12]]
+ ; CHECK: [[CROR:%[0-9]+]]:crbitrc = CROR killed [[CRANDC]], killed [[CRAND]]
+ ; CHECK: [[LIS:%[0-9]+]]:gprc_and_gprc_nor0 = LIS 32768
+ ; CHECK: [[LI:%[0-9]+]]:gprc_and_gprc_nor0 = LI 0
+ ; CHECK: [[ISEL:%[0-9]+]]:gprc = ISEL [[LI]], [[LIS]], [[CROR]]
+ ; CHECK: BC [[CROR]], %bb.2
+ ; CHECK: bb.1.entry:
+ ; CHECK: successors: %bb.2(0x80000000)
+ ; CHECK: bb.2.entry:
+ ; CHECK: [[PHI:%[0-9]+]]:f8rc = PHI [[COPY9]], %bb.1, [[XXLXORdpz]], %bb.0
+ ; CHECK: ADJCALLSTACKDOWN 32, 0, implicit-def dead $r1, implicit $r1
+ ; CHECK: $f1 = COPY [[COPY4]]
+ ; CHECK: $f2 = COPY [[COPY3]]
+ ; CHECK: $f3 = COPY [[PHI]]
+ ; CHECK: $f4 = COPY [[XXLXORdpz]]
+ ; CHECK: BL8_NOP &__gcc_qsub, csr_ppc64_altivec, implicit-def dead $lr8, implicit $rm, implicit $f1, implicit $f2, implicit $f3, implicit $f4, implicit $x2, implicit-def $r1, implicit-def $f1, implicit-def $f2
+ ; CHECK: ADJCALLSTACKUP 32, 0, implicit-def dead $r1, implicit $r1
+ ; CHECK: [[COPY14:%[0-9]+]]:f8rc = COPY $f1
+ ; CHECK: [[COPY15:%[0-9]+]]:f8rc = COPY $f2
+ ; CHECK: [[MFFS1:%[0-9]+]]:f8rc = MFFS implicit $rm
+ ; CHECK: MTFSB1 31, implicit-def $rm
+ ; CHECK: MTFSB0 30, implicit-def $rm
+ ; CHECK: %37:f8rc = nofpexcept FADD [[COPY15]], [[COPY14]], implicit $rm
+ ; CHECK: MTFSFb 1, [[MFFS1]], implicit-def $rm
+ ; CHECK: %38:vsfrc = nofpexcept XSCVDPSXWS killed %37, implicit $rm
+ ; CHECK: [[MFVSRWZ3:%[0-9]+]]:gprc = MFVSRWZ killed %38
+ ; CHECK: [[XOR:%[0-9]+]]:gprc = XOR killed [[MFVSRWZ3]], killed [[ISEL]]
+ ; CHECK: STW killed [[XOR]], 0, [[COPY1]] :: (volatile store 4 into %ir.addr1)
+ ; CHECK: BLR8 implicit $lr8, implicit $rm
+ entry:
+ %conv1 = tail call i32 @llvm.experimental.constrained.fptosi.i32.f128(fp128 %m, metadata !"fpexcept.ignore") #0
+ store volatile i32 %conv1, i32* %addr1, align 4
+ %conv2 = tail call i32 @llvm.experimental.constrained.fptoui.i32.f128(fp128 %m, metadata !"fpexcept.ignore") #0
+ store volatile i32 %conv2, i32* %addr1, align 4
+ %conv3 = tail call i64 @llvm.experimental.constrained.fptosi.i64.f128(fp128 %m, metadata !"fpexcept.ignore") #0
+ store volatile i64 %conv3, i64* %addr2, align 8
+ %conv4 = tail call i64 @llvm.experimental.constrained.fptoui.i64.f128(fp128 %m, metadata !"fpexcept.ignore") #0
+ store volatile i64 %conv4, i64* %addr2, align 8
+
+ %conv5 = tail call i32 @llvm.experimental.constrained.fptosi.i32.ppcf128(ppc_fp128 %p, metadata !"fpexcept.ignore") #0
+ store volatile i32 %conv5, i32* %addr1, align 4
+ %conv6 = tail call i32 @llvm.experimental.constrained.fptoui.i32.ppcf128(ppc_fp128 %p, metadata !"fpexcept.ignore") #0
+ store volatile i32 %conv6, i32* %addr1, align 4
+ ret void
+ }
+
+ ; Verify nofpexcept is NOT set to constrained conversions
+ define signext i32 @q_to_i32(fp128 %m) #0 {
+ ; CHECK-LABEL: name: q_to_i32
+ ; CHECK: bb.0.entry:
+ ; CHECK: liveins: $v2
+ ; CHECK: [[COPY:%[0-9]+]]:vrrc = COPY $v2
+ ; CHECK: [[XSCVQPSWZ:%[0-9]+]]:vrrc = XSCVQPSWZ [[COPY]]
+ ; CHECK: [[COPY1:%[0-9]+]]:vslrc = COPY [[XSCVQPSWZ]]
+ ; CHECK: [[COPY2:%[0-9]+]]:vfrc = COPY [[COPY1]].sub_64
+ ; CHECK: [[MFVSRWZ:%[0-9]+]]:gprc = MFVSRWZ killed [[COPY2]]
+ ; CHECK: [[EXTSW_32_64_:%[0-9]+]]:g8rc = EXTSW_32_64 killed [[MFVSRWZ]]
+ ; CHECK: $x3 = COPY [[EXTSW_32_64_]]
+ ; CHECK: BLR8 implicit $lr8, implicit $rm, implicit $x3
+ entry:
+ %conv = tail call i32 @llvm.experimental.constrained.fptosi.i32.f128(fp128 %m, metadata !"fpexcept.strict") #0
+ ret i32 %conv
+ }
+
+ define i64 @q_to_i64(fp128 %m) #0 {
+ ; CHECK-LABEL: name: q_to_i64
+ ; CHECK: bb.0.entry:
+ ; CHECK: liveins: $v2
+ ; CHECK: [[COPY:%[0-9]+]]:vrrc = COPY $v2
+ ; CHECK: [[XSCVQPSDZ:%[0-9]+]]:vrrc = XSCVQPSDZ [[COPY]]
+ ; CHECK: [[MFVRD:%[0-9]+]]:g8rc = MFVRD killed [[XSCVQPSDZ]]
+ ; CHECK: $x3 = COPY [[MFVRD]]
+ ; CHECK: BLR8 implicit $lr8, implicit $rm, implicit $x3
+ entry:
+ %conv = tail call i64 @llvm.experimental.constrained.fptosi.i64.f128(fp128 %m, metadata !"fpexcept.strict") #0
+ ret i64 %conv
+ }
+
+ define i64 @q_to_u64(fp128 %m) #0 {
+ ; CHECK-LABEL: name: q_to_u64
+ ; CHECK: bb.0.entry:
+ ; CHECK: liveins: $v2
+ ; CHECK: [[COPY:%[0-9]+]]:vrrc = COPY $v2
+ ; CHECK: [[XSCVQPUDZ:%[0-9]+]]:vrrc = XSCVQPUDZ [[COPY]]
+ ; CHECK: [[MFVRD:%[0-9]+]]:g8rc = MFVRD killed [[XSCVQPUDZ]]
+ ; CHECK: $x3 = COPY [[MFVRD]]
+ ; CHECK: BLR8 implicit $lr8, implicit $rm, implicit $x3
+ entry:
+ %conv = tail call i64 @llvm.experimental.constrained.fptoui.i64.f128(fp128 %m, metadata !"fpexcept.strict") #0
+ ret i64 %conv
+ }
+
+ define zeroext i32 @q_to_u32(fp128 %m) #0 {
+ ; CHECK-LABEL: name: q_to_u32
+ ; CHECK: bb.0.entry:
+ ; CHECK: liveins: $v2
+ ; CHECK: [[COPY:%[0-9]+]]:vrrc = COPY $v2
+ ; CHECK: [[XSCVQPUWZ:%[0-9]+]]:vrrc = XSCVQPUWZ [[COPY]]
+ ; CHECK: [[COPY1:%[0-9]+]]:vslrc = COPY [[XSCVQPUWZ]]
+ ; CHECK: [[COPY2:%[0-9]+]]:vfrc = COPY [[COPY1]].sub_64
+ ; CHECK: [[MFVSRWZ:%[0-9]+]]:gprc = MFVSRWZ killed [[COPY2]]
+ ; CHECK: [[DEF:%[0-9]+]]:g8rc = IMPLICIT_DEF
+ ; CHECK: [[INSERT_SUBREG:%[0-9]+]]:g8rc = INSERT_SUBREG [[DEF]], killed [[MFVSRWZ]], %subreg.sub_32
+ ; CHECK: [[RLDICL:%[0-9]+]]:g8rc = RLDICL killed [[INSERT_SUBREG]], 0, 32
+ ; CHECK: $x3 = COPY [[RLDICL]]
+ ; CHECK: BLR8 implicit $lr8, implicit $rm, implicit $x3
+ entry:
+ %conv = tail call i32 @llvm.experimental.constrained.fptoui.i32.f128(fp128 %m, metadata !"fpexcept.strict") #0
+ ret i32 %conv
+ }
+
declare double @llvm.fabs.f64(double)
+
+ declare i32 @llvm.experimental.constrained.fptosi.i32.f128(fp128, metadata)
+ declare i64 @llvm.experimental.constrained.fptosi.i64.f128(fp128, metadata)
+ declare i64 @llvm.experimental.constrained.fptoui.i64.f128(fp128, metadata)
+ declare i32 @llvm.experimental.constrained.fptoui.i32.f128(fp128, metadata)
+ declare i32 @llvm.experimental.constrained.fptosi.i32.ppcf128(ppc_fp128, metadata)
+ declare i32 @llvm.experimental.constrained.fptoui.i32.ppcf128(ppc_fp128, metadata)
+
+ attributes #0 = { strictfp }