TestBatched.test_if_noelse_with_scalar.expect
graph(%a.1_data : Tensor
      %a.1_mask : Tensor
      %a.1_dims : Tensor
      %b_data : Tensor
      %b_mask : Tensor
      %b_dims : Tensor) {
  %6 : int = prim::Constant[value=1]()
  %7 : float = prim::Constant[value=0.1]()
  %8 : Float() = prim::NumToTensor(%7)
  %other : float = prim::TensorToNum(%8)
  %10 : Tensor = aten::gt(%a.1_data, %other)
  %11 : bool = prim::TensorToBool(%10)
  %12 : Long() = prim::NumToTensor(%6)
  %alpha : float = prim::TensorToNum(%12)
  %data : Tensor = aten::add(%a.1_data, %b_data, %alpha)
  %mask : Tensor = aten::mul(%a.1_mask, %b_mask)
  %dims : Tensor = aten::__or__(%a.1_dims, %b_dims)
  %17 : bool = prim::Constant[value=1]()
  %18 : int = prim::Constant[value=1]()
  %19 : Tensor = aten::type_as(%a.1_mask, %10)
  %data.2 : Tensor = aten::mul(%10, %19)
  %21 : int = aten::dim(%data.2)
  %22 : bool = aten::eq(%21, %18)
  %cond_data : Tensor, %cond_mask : Tensor = prim::If(%22)
    block0() {
      %25 : int = aten::dim(%data)
      %26 : int = aten::sub(%25, %18)
      %data.4 : Tensor = prim::Loop(%26, %17, %data.2)
        block0(%28 : int, %29 : Tensor) {
          %30 : int = aten::dim(%29)
          %data.3 : Tensor = aten::unsqueeze(%29, %30)
          -> (%17, %data.3)
        }
      %cond_data.1 : Tensor = aten::expand_as(%data.4, %data)
      %cond_mask.1 : Tensor = aten::expand_as(%data.4, %mask)
      -> (%cond_data.1, %cond_mask.1)
    }
    block1() {
      -> (%data.2, %data.2)
    }
  %res_data : Tensor = aten::where(%cond_data, %data, %a.1_data)
  %res_mask : Tensor = aten::where(%cond_mask, %mask, %a.1_mask)
  %res_dims : Tensor = aten::__or__(%dims, %a.1_dims)
  return (%res_data, %res_mask, %res_dims);
}
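
For context, this expect file records the batched-tensor lowering of a scripted `if` that has no `else` branch: the condition compares `a` against the scalar constant 0.1 (aten::gt), the taken branch computes `a + b` with alpha = 1 (aten::add), the condition mask is broadcast to the data's shape inside the prim::If/prim::Loop, and aten::where selects between the branch result and the original `a` for data, mask, and dims. Below is a minimal sketch of the kind of function that could produce such a graph; the function name, the eager-mode usage, and the values are assumptions for illustration, not taken from this file.

import torch

# A sketch, not the verbatim test: a single `if` with no `else`,
# comparing against the scalar 0.1 and adding `b` in the taken branch.
def single_if(a, b):
    if bool(a > 0.1):   # corresponds to aten::gt + prim::TensorToBool in the graph
        a = a + b       # corresponds to aten::add with alpha = 1
    return a

# Illustrative eager-mode call with one-element tensors (hypothetical values).
a = torch.tensor([0.5])
b = torch.tensor([2.0])
print(single_if(a, b))  # tensor([2.5]), since 0.5 > 0.1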