# Use this script to generate test data for the dnn module and TFLite models
import os
import numpy as np
import tensorflow as tf
import mediapipe as mp

import cv2 as cv

testdata = os.environ['OPENCV_TEST_DATA_PATH']

image = cv.imread(os.path.join(testdata, "cv", "shared", "lena.png"))
image = cv.cvtColor(image, cv.COLOR_BGR2RGB)

def run_tflite_model(model_name, inp_size):
    interpreter = tf.lite.Interpreter(model_name + ".tflite",
                                      experimental_preserve_all_tensors=True)
    interpreter.allocate_tensors()

    # Get input and output tensors.
    input_details = interpreter.get_input_details()
    output_details = interpreter.get_output_details()

    # Prepare the input: resize, add a batch dimension and scale to [0, 1]
    inp = cv.resize(image, inp_size)
    inp = np.expand_dims(inp, 0)
    inp = inp.astype(np.float32) / 255  # NHWC

    # Run the model
    interpreter.set_tensor(input_details[0]['index'], inp)
    interpreter.invoke()

    # Save every output tensor as a reference .npy file
    for details in output_details:
        out = interpreter.get_tensor(details['index'])  # Or use an intermediate tensor index (see the sketch below)
        out_name = details['name']
        np.save(f"{model_name}_out_{out_name}.npy", out)


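# Optional illustrative helper (a sketch, not called by this script): because the
# interpreter is created with experimental_preserve_all_tensors=True, intermediate
# tensors are still available after invoke() and can be dumped as extra references.
# The helper name and its layer_name argument are placeholders; the real tensor
# names come from interpreter.get_tensor_details().
def save_intermediate_tensor(interpreter, model_name, layer_name):
    for details in interpreter.get_tensor_details():
        if details['name'] == layer_name:
            out = interpreter.get_tensor(details['index'])
            np.save(f"{model_name}_out_{layer_name}.npy", out)
            return out
    raise ValueError(f"No tensor named {layer_name} in the model")

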
def run_mediapipe_solution(solution, inp_size):
    # Run a MediaPipe solution on the test image and save its mask as a reference
    with solution as selfie_segmentation:
        inp = cv.resize(image, inp_size)
        results = selfie_segmentation.process(inp)
        np.save("selfie_segmentation_out_activation_10.npy", results.segmentation_mask)

run_tflite_model("face_landmark", (192, 192))
run_tflite_model("face_detection_short_range", (128, 128))

run_mediapipe_solution(mp.solutions.selfie_segmentation.SelfieSegmentation(model_selection=0), (256, 256))
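
# Quick sanity check of the generated references (a sketch: it only lists the
# *_out_*.npy files written by the calls above and prints their shapes and dtypes).
for name in sorted(os.listdir(".")):
    if name.endswith(".npy") and "_out_" in name:
        ref = np.load(name)
        print(name, ref.shape, ref.dtype)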