# --- patch hunk @@ -593,6 +593,66 @@ (context: def forward(self, x)) ---
# "clip": run a random 4-D tensor through the Clip model defined above
# and dump the sample input/output pair alongside the exported model.
input = Variable(torch.rand((1, 10, 2, 2)))
save_data_and_model('clip', input, model)

########### clip_init ###########

# Clip models whose bounds arrive as graph initializers (constant tensors)
# rather than as node attributes.
operation = "Clip"
min = -0.5  # NOTE: shadows builtins min/max; kept for consistency with this script's style
max = 0.5

# Value infos shared by all clip_init_* graphs below.
X = onnx.helper.make_tensor_value_info('input', onnx.TensorProto.FLOAT, [3, 4, 5])
MIN = onnx.helper.make_tensor_value_info('min', onnx.TensorProto.FLOAT, [1])
MAX = onnx.helper.make_tensor_value_info('max', onnx.TensorProto.FLOAT, [1])
Y = onnx.helper.make_tensor_value_info('output', onnx.TensorProto.FLOAT, [3, 4, 5])
# Initializers supplying the bounds; float32 explicitly, to match TensorProto.FLOAT.
MIN_INIT = onnx.helper.make_tensor("min", onnx.TensorProto.FLOAT, [1], np.array([min], dtype=np.float32))
MAX_INIT = onnx.helper.make_tensor("max", onnx.TensorProto.FLOAT, [1], np.array([max], dtype=np.float32))

# clip_init_min_max: both bounds provided via initializers.
name = "clip_init_min_max"
input = np.random.randn(3, 4, 5).astype(np.float32)
output = np.clip(input, min, max)

input_files = os.path.join("data", "input_" + name)
np.save(input_files, input.data)
output_files = os.path.join("data", "output_" + name)
np.save(output_files, np.ascontiguousarray(output.data))

node = onnx.helper.make_node(operation, inputs=['input', "min", "max"], outputs=['output'])
graph = onnx.helper.make_graph([node], name, [X, MIN, MAX], [Y], [MIN_INIT, MAX_INIT])
model = onnx.helper.make_model(graph, producer_name=name)
onnx.save(model, os.path.join("models", name + ".onnx"))

# clip_init_min: only the lower bound is supplied as an initializer;
# the optional "max" input slot is left empty ("" marks an absent input).
name = "clip_init_min"
input = np.random.randn(3, 4, 5).astype(np.float32)
output = np.clip(input, min, None)

input_files = os.path.join("data", "input_" + name)
output_files = os.path.join("data", "output_" + name)
np.save(input_files, input.data)
np.save(output_files, np.ascontiguousarray(output.data))

node = onnx.helper.make_node(operation, inputs=["input", "min", ""], outputs=["output"])
graph = onnx.helper.make_graph([node], name, [X, MIN], [Y], [MIN_INIT])
model = onnx.helper.make_model(graph, producer_name=name)
onnx.save(model, os.path.join("models", name + ".onnx"))

# clip_init_max: only the upper bound is supplied as an initializer;
# the optional "min" input slot is left empty ("" marks an absent input).
name = "clip_init_max"
input = np.random.randn(3, 4, 5).astype(np.float32)
output = np.clip(input, None, max)

input_files = os.path.join("data", "input_" + name)
output_files = os.path.join("data", "output_" + name)
np.save(input_files, input.data)
np.save(output_files, np.ascontiguousarray(output.data))

node = onnx.helper.make_node(operation, inputs=["input", "", "max"], outputs=["output"])
graph = onnx.helper.make_graph([node], name, [X, MAX], [Y], [MAX_INIT])
model = onnx.helper.make_model(graph, producer_name=name)
onnx.save(model, os.path.join("models", name + ".onnx"))

#################################

# "deconv3d": 3-D transposed convolution, no bias; scalar kernel/stride/padding
# args expand to (3, 3, 3) / (1, 1, 1) / (0, 0, 0) for the 3-D module.
input = Variable(torch.randn(1, 3, 6, 6, 6))
deconv = nn.ConvTranspose3d(3, 3, kernel_size=3, stride=1, padding=0, bias=False)
save_data_and_model("deconv3d", input, deconv)
# --- patch hunk @@ -1444,14 +1504,14 @@ (context: def forward(self, x)) ---
# Export the ReduceMax model defined above together with a sample input/output pair.
save_data_and_model("reduce_max", x, model)
14451505
class ReduceMax(nn.Module):
    """Reduce a tensor over one dimension with torch.max, dropping that dim."""

    def __init__(self, axes):
        super(ReduceMax, self).__init__()
        # Single dimension index to reduce over.
        self.axes = axes

    def forward(self, x):
        # torch.max(..., dim=...) returns a (values, indices) named tuple;
        # only the reduced values are needed here.
        reduced = torch.max(x, dim=self.axes, keepdim=False)
        return reduced.values
14551515
# Random 4-D sample fed to the ReduceMax module above.
x = Variable(torch.randn((1, 3, 2, 2)))
# (GitHub diff-view residue: "0 commit comments")