From fdba14ba633f5b732f887bb0ae1634116732a6ad Mon Sep 17 00:00:00 2001 From: Zihao Mu Date: Fri, 24 Jun 2022 06:35:30 +0800 Subject: [PATCH 1/2] Merge pull request #983 from zihaomu:gemm_onnx_bug_fix_branch34 generate gemm sample for 3.4 branch * generate gemm sample for 34 branch * update the original data and model. * update the same generate scrip. --- .../dnn/onnx/data/input_gemm_no_transB.npy | Bin 0 -> 208 bytes .../dnn/onnx/data/input_gemm_transB_0.npy | Bin 0 -> 208 bytes .../dnn/onnx/data/output_gemm_no_transB.npy | Bin 0 -> 152 bytes .../dnn/onnx/data/output_gemm_transB_0.npy | Bin 0 -> 152 bytes testdata/dnn/onnx/generate_onnx_models.py | 64 +++++++++++++++++- testdata/dnn/onnx/models/gemm_no_transB.onnx | 16 +++++ testdata/dnn/onnx/models/gemm_transB_0.onnx | Bin 0 -> 326 bytes 7 files changed, 78 insertions(+), 2 deletions(-) create mode 100644 testdata/dnn/onnx/data/input_gemm_no_transB.npy create mode 100644 testdata/dnn/onnx/data/input_gemm_transB_0.npy create mode 100644 testdata/dnn/onnx/data/output_gemm_no_transB.npy create mode 100644 testdata/dnn/onnx/data/output_gemm_transB_0.npy create mode 100644 testdata/dnn/onnx/models/gemm_no_transB.onnx create mode 100644 testdata/dnn/onnx/models/gemm_transB_0.onnx diff --git a/testdata/dnn/onnx/data/input_gemm_no_transB.npy b/testdata/dnn/onnx/data/input_gemm_no_transB.npy new file mode 100644 index 0000000000000000000000000000000000000000..b56cdfaa5d6a4764b192e630ea2352e42ce73c1c GIT binary patch literal 208 zcmbR27wQ`j$;eQ~P_3SlTAW;@Zl$1ZlV+l>qoAIaUsO_*m=~X4l#&V(cT3DEP6dh= zXCxM+0{I$7ItqpcnmP)#3giMV?s^{kWn$*`oBgEh%WJvqJzn0h3yf5?&*OezXXBk> zKWkH&eNoU6ySjB=_A64^?c=fq?Vo?nv5%^rXnV6@lP&MUr8bk*LhU_F-RqoAIaUsO_*m=~X4l#&V(cT3DEP6dh= zXCxM+0{I$7ItqpcnmP)#3giMV?s^{kWn$*`oBgEh%WJvqJzn0h3yf5?&*OezXXBk> zKWkH&eNoU6ySjB=_A64^?c=fq?Vo?nv5%^rXnV6@lP&MUr8bk*LhU_F-RqoAIaUsO_*m=~X4l#&V(cT3DEP6dh= wXCxM+0{I$7Its>`ItsN4WCJeoW$_LI50f2Ic%&Q{j0_wmx7#|HoELTg07T;=wEzGB literal 0 HcmV?d00001 diff --git a/testdata/dnn/onnx/data/output_gemm_transB_0.npy b/testdata/dnn/onnx/data/output_gemm_transB_0.npy new file mode 100644 index 0000000000000000000000000000000000000000..f9ea2ed37031d473956a71739cb8be2b4c293f99 GIT binary patch literal 152 zcmbR27wQ`j$;eQ~P_3SlTAW;@Zl$1ZlV+l>qoAIaUsO_*m=~X4l#&V(cT3DEP6dh= wXCxM+0{I$7Its>`ItsN4WCJeoW$_LI50f2Ic%&Q{j0_wmx7#|HoELTg07T;=wEzGB literal 0 HcmV?d00001 diff --git a/testdata/dnn/onnx/generate_onnx_models.py b/testdata/dnn/onnx/generate_onnx_models.py index eed78b9dd..b5ab95229 100644 --- a/testdata/dnn/onnx/generate_onnx_models.py +++ b/testdata/dnn/onnx/generate_onnx_models.py @@ -11,7 +11,7 @@ import onnxsim import google.protobuf.text_format import io - +from typing import Optional def assertExpected(s): if not (isinstance(s, str) or (sys.version_info[0] == 2 and isinstance(s, unicode))): @@ -73,6 +73,24 @@ def save_onnx_data_and_model(input, output, name, operation, *args, **kwargs): model = onnx.helper.make_model(graph, producer_name=name) onnx.save(model, models_files) +def save_data_and_onnx_model(name, input_np, output_np, onnx_model): + print(name + " input has sizes", input_np.shape) + input_files = os.path.join("data", "input_" + name) + np.save(input_files, input_np.data) + + print(name + " output has sizes", output_np.shape) + print() + output_files = os.path.join("data", "output_" + name) + np.save(output_files, np.ascontiguousarray(output_np.data)) + + models_files = os.path.join("models", name + ".onnx") + + onnx_model_pb = onnx._serialize(onnx_model) + model_def = assertONNXExpected(onnx_model_pb) 
+ with open(models_files, 'wb') as file: + file.write(model_def.SerializeToString()) + + def simplify(name, rename=False, **kwargs): model, check = onnxsim.simplify(name, **kwargs) assert check, "couldn't valide" @@ -1665,4 +1683,46 @@ def forward(self, a, b): save_data_and_model_multy_inputs('output_registration', model, a, b) model = onnx.load('models/output_registration.onnx') model.graph.node[0].name = model.graph.output[0].name -onnx.save(model, 'models/output_registration.onnx') \ No newline at end of file +onnx.save(model, 'models/output_registration.onnx') + +# ########################## GEMM ########################## +# The original code is : https://github.com/onnx/onnx/blob/main/onnx/backend/test/case/node/gemm.py +def gemm_reference_implementation(A: np.ndarray, B: np.ndarray, C: Optional[np.ndarray] = None, alpha: float = 1., beta: float = 1., transA: int = 0, + transB: int = 0) -> np.ndarray: + A = A if transA == 0 else A.T + B = B if transB == 0 else B.T + C = C if C is not None else np.array(0) + + Y = alpha * np.dot(A, B) + beta * C + + return Y + +## gemm without transB +input_np = np.random.rand(2, 10).astype("float32") +inputs = [onnx.helper.make_tensor_value_info("input1", onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[input_np.dtype], shape=input_np.shape)] + +weight_np = np.random.rand(10, 3).astype("float32") +weight_tensor = onnx.helper.make_tensor('weight_tensor', data_type=onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[weight_np.dtype], dims=weight_np.shape, vals=weight_np) + +outputs = [onnx.helper.make_tensor_value_info("output", onnx.TensorProto.FLOAT, shape=(2, 3))] + +nodes = [onnx.helper.make_node("Gemm", ["input1", "weight_tensor"], ["output"])] + +graph = onnx.helper.make_graph(nodes, + "gemm_test", + inputs, + outputs, initializer=[weight_tensor]) +gemm_model = onnx.helper.make_model(graph) +output_np = gemm_reference_implementation(input_np, weight_np) +save_data_and_onnx_model("gemm_no_transB", input_np, output_np, gemm_model) + +## gemm with transB = 0 + +nodes2 = [onnx.helper.make_node("Gemm", ["input1", "weight_tensor"], ["output"], transB=0)] +graph2 = onnx.helper.make_graph(nodes2, + "gemm_test", + inputs, + outputs, initializer=[weight_tensor]) +gemm_model2 = onnx.helper.make_model(graph2) +output_np = gemm_reference_implementation(input_np, weight_np) +save_data_and_onnx_model("gemm_transB_0", input_np, output_np, gemm_model2) \ No newline at end of file diff --git a/testdata/dnn/onnx/models/gemm_no_transB.onnx b/testdata/dnn/onnx/models/gemm_no_transB.onnx new file mode 100644 index 000000000..07e47ff22 --- /dev/null +++ b/testdata/dnn/onnx/models/gemm_no_transB.onnx @@ -0,0 +1,16 @@ +: +weight_node_outinput22"Constant* +value* +"xz?L?G>G?9=#?4>q?ڗ?N>s>.4F?>?<\?N?c?yq?Ƌ.?k>>2?v=9*?+?nW>A>>L8>B weight_tensor +' +input1 +weight_node_outoutput"Gemm gemm_testZ +input1 +  + + +b +output +  + +B \ No newline at end of file diff --git a/testdata/dnn/onnx/models/gemm_transB_0.onnx b/testdata/dnn/onnx/models/gemm_transB_0.onnx new file mode 100644 index 0000000000000000000000000000000000000000..46bf7fe4a06548842d95578eef3abc3b4004b4d7 GIT binary patch literal 326 zcmd@Hm`w7)f- z)qaxS6}zj&?RI)5ZuXr$FYO+;2-*w2ooN#zD`)SwNya|;y{vuZ%tHHP-Fo)ff;;S% zAG&Y%b-I!LpLJ!nmK(L~->lQNZ_f+2b7W<)yIjA}&c|Yxof9wA!6m7A#rZ`G7+JW? 
zxY)pMHsnJ0j1U{pM?l{wvACz^=4$bBv6U1h<`p|hFf3qX65>n;3dWbD7MDaxz>E;$ d Date: Mon, 11 Jul 2022 19:37:53 +0800 Subject: [PATCH 2/2] update the test case of Div --- .../dnn/onnx/data/input_div_test_1x1_0.npy | Bin 0 -> 144 bytes .../dnn/onnx/data/input_div_test_1x1_1.npy | Bin 0 -> 132 bytes .../dnn/onnx/data/output_div_test_1x1.npy | Bin 0 -> 144 bytes testdata/dnn/onnx/generate_onnx_models.py | 38 +++++++++++++++++- testdata/dnn/onnx/models/div_test_1x1.onnx | 16 ++++++++ 5 files changed, 53 insertions(+), 1 deletion(-) create mode 100644 testdata/dnn/onnx/data/input_div_test_1x1_0.npy create mode 100644 testdata/dnn/onnx/data/input_div_test_1x1_1.npy create mode 100644 testdata/dnn/onnx/data/output_div_test_1x1.npy create mode 100644 testdata/dnn/onnx/models/div_test_1x1.onnx diff --git a/testdata/dnn/onnx/data/input_div_test_1x1_0.npy b/testdata/dnn/onnx/data/input_div_test_1x1_0.npy new file mode 100644 index 0000000000000000000000000000000000000000..487769bcfa95be145d190cbe893bcb5aa2f3ea14 GIT binary patch literal 144 zcmbR27wQ`j$;eQ~P_3SlTAW;@Zl$1ZlV+l>qoAIaUsO_*m=~X4l#&V(cT3DEP6dh= nXCxM+0{I$-ItnJ5ItsN4WCJeldLH{_V&?Xn{iN*6Yq{+KGngO? literal 0 HcmV?d00001 diff --git a/testdata/dnn/onnx/data/input_div_test_1x1_1.npy b/testdata/dnn/onnx/data/input_div_test_1x1_1.npy new file mode 100644 index 0000000000000000000000000000000000000000..e3ffd0e064812c3766caa55263a9018d3825976d GIT binary patch literal 132 zcmbR27wQ`j$;eQ~P_3SlTAW;@Zl$1ZlV+l>qoAIaUsO_*m=~X4l#&V(cT3DEP6dh= bXCxM+0{I$-ItqrGItsN4WCJdbmpAMH$r~N8 literal 0 HcmV?d00001 diff --git a/testdata/dnn/onnx/data/output_div_test_1x1.npy b/testdata/dnn/onnx/data/output_div_test_1x1.npy new file mode 100644 index 0000000000000000000000000000000000000000..0192e0d459714fe813bc7ebacdc18c6c5d0e8b33 GIT binary patch literal 144 zcmbR27wQ`j$;eQ~P_3SlTAW;@Zl$1ZlV+l>qoAIaUsO_*m=~X4l#&V(cT3DEP6dh= oXCxM+0{I$-ItnJ5ItsN4WCO1D3rp?UMQ_-L$ZoUmSg^z%07!TuO8@`> literal 0 HcmV?d00001 diff --git a/testdata/dnn/onnx/generate_onnx_models.py b/testdata/dnn/onnx/generate_onnx_models.py index b5ab95229..ef6de05b6 100644 --- a/testdata/dnn/onnx/generate_onnx_models.py +++ b/testdata/dnn/onnx/generate_onnx_models.py @@ -90,6 +90,23 @@ def save_data_and_onnx_model(name, input_np, output_np, onnx_model): with open(models_files, 'wb') as file: file.write(model_def.SerializeToString()) +def save_data_and_onnx_model_multy_inputs(name, input_list, output_np, onnx_model): + for index in range(len(input_list)): + print(name + " input "+str(index)+" has sizes", input_list[index].shape) + input_files = os.path.join("data", "input_" + name + "_" + str(index)) + np.save(input_files, input_list[index]) + + print(name + " output has sizes", output_np.shape) + print() + output_files = os.path.join("data", "output_" + name) + np.save(output_files, np.ascontiguousarray(output_np.data)) + + models_files = os.path.join("models", name + ".onnx") + + onnx_model_pb = onnx._serialize(onnx_model) + model_def = assertONNXExpected(onnx_model_pb) + with open(models_files, 'wb') as file: + file.write(model_def.SerializeToString()) def simplify(name, rename=False, **kwargs): model, check = onnxsim.simplify(name, **kwargs) @@ -1725,4 +1742,23 @@ def gemm_reference_implementation(A: np.ndarray, B: np.ndarray, C: Optional[np.n outputs, initializer=[weight_tensor]) gemm_model2 = onnx.helper.make_model(graph2) output_np = gemm_reference_implementation(input_np, weight_np) -save_data_and_onnx_model("gemm_transB_0", input_np, output_np, gemm_model2) \ No newline at end of file 
+save_data_and_onnx_model("gemm_transB_0", input_np, output_np, gemm_model2) + +# ########################## DivBroadcast ########################## +input_np = np.random.rand(1, 4).astype("float32") +input2_np = np.random.rand(1, 1).astype(np.float32) +inputs = [onnx.helper.make_tensor_value_info("input1", onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[input_np.dtype], shape=input_np.shape), \ + onnx.helper.make_tensor_value_info("input2", onnx.mapping.NP_TYPE_TO_TENSOR_TYPE[input2_np.dtype], shape=input2_np.shape)] + +outputs = [onnx.helper.make_tensor_value_info("output", onnx.TensorProto.FLOAT, shape=(1, 4))] + +nodes = [onnx.helper.make_node("Div", ["input1", "input2"], ["output"])] + +graph = onnx.helper.make_graph(nodes, + "div_test", + inputs, + outputs) +onnx_model = onnx.helper.make_model(graph) + +output_np = input_np/input2_np +save_data_and_onnx_model_multy_inputs("div_test_1x1", [input_np, input2_np], output_np, onnx_model) \ No newline at end of file diff --git a/testdata/dnn/onnx/models/div_test_1x1.onnx b/testdata/dnn/onnx/models/div_test_1x1.onnx new file mode 100644 index 000000000..52eee842e --- /dev/null +++ b/testdata/dnn/onnx/models/div_test_1x1.onnx @@ -0,0 +1,16 @@ +:w + +input1 +input2output"Divdiv_testZ +input1 +  + +Z +input2 +  + +b +output +  + +B \ No newline at end of file