|
8 | 8 |
|
9 | 9 | ###############
|
10 | 10 | ### CAUTION!!!
|
11 |
| -### Be sure to put constant numpy arrays out of @ost.script() decorated fucntion. |
12 |
| -### Otherwise random values change each time eager mode is enter. |
| 11 | +### Be sure to put random-generated constant numpy arrays out of @ost.script() decorated functions. |
| 12 | +### Otherwise random values change each time eager mode is entered. |
13 | 13 | ### See discussions in https://github.com/microsoft/onnxscript/issues/1313
|
14 | 14 | ###############
|
15 | 15 |
|
@@ -364,4 +364,19 @@ def biased_matmul(x: ost.FLOAT[b, m, k]) -> ost.FLOAT[b, m, n]:
|
364 | 364 | matmul = op.MatMul(x, weight)
|
365 | 365 | bias = op.Constant(value=onnx.helper.make_tensor("", onnx.TensorProto.FLOAT, [n], bias_data))
|
366 | 366 | return op.Add(bias, matmul)
|
367 |
| -make_model_and_data(biased_matmul, np.random.rand(b, m, k).astype(np.float32), use_ort=True, ort_input_keys=["x"]) |
| 367 | +make_model_and_data(biased_matmul, np.random.rand(b, m, k).astype(np.float32)) |
| 368 | + |
| 369 | +''' Subgraph: [Input] -> Div<B=6> -> Clip<min=0, max=6> -> [Output] |
| 370 | +

| 371 | + Here the divisor B=6 and the Clip max=6 share the same Constant node. |
| 372 | +''' |
| 373 | + |
| 374 | +@ost.script() |
| 375 | +def clip_div_shared_constant(x: ost.FLOAT[1, 8, 12, 10]) -> ost.FLOAT[1, 8, 12, 10]: |
| 376 | + Constant_output_0 = op.Constant(value=onnx.helper.make_tensor("", onnx.TensorProto.FLOAT, [], np.array([0], dtype=np.float32))) |
| 377 | + Constant_1_output_0 = op.Constant(value=onnx.helper.make_tensor("", onnx.TensorProto.FLOAT, [], np.array([6], dtype=np.float32))) |
| 378 | + |
| 379 | + div = op.Div(x, Constant_1_output_0) |
| 380 | + clip = op.Clip(div, Constant_output_0, Constant_1_output_0) |
| 381 | + return clip |
| 382 | +make_model_and_data(clip_div_shared_constant, np.random.rand(1, 8, 12, 10).astype(np.float32)) |
0 commit comments