diff --git a/README.md b/README.md
index 7756cbb..9787785 100644
--- a/README.md
+++ b/README.md
@@ -9,6 +9,7 @@ This is an unofficial Pytorch implementation of MVSNet
 ### Environment
 * python 3.6 (Anaconda)
 * pytorch 1.0.1
+* `pip install -r requirements.txt`
 
 ### Training
 
@@ -23,6 +24,14 @@
 * in ``test.sh``, set ``DTU_TESTING`` as your testing data path and ``CKPT_FILE`` as your checkpoint file. You can also download my [pretrained model](https://drive.google.com/file/d/1j2I_LNKb9JeCl6wdA7hh8z1WgVQZfLU9/view?usp=sharing).
 * Test MVSNet: ``./test.sh``
 
+### Depthmap Visualization
+
+ | |
+:---------------------------------------:|:---------------------------------------:|:---------------------------------------
+reference image |depth map (matplotlib) | depth map (opencv)
+
+Visualize the estimated depth map using `python visualize.py xxx.pfm`
+
 ### Fusion
 
 in ``eval.py``, I implemented a simple version of depth map fusion. Welcome contributions to improve the code.
@@ -36,3 +45,7 @@
 | PyTorch-MVSNet(D=192) | 0.4492 | 0.3796 | 0.4144 |
 
 Due to the memory limit, we only train the model with ``D=192``, the fusion code is also different from the original repo.
+
+### TODO
+1. add dataloader for [blendedmvs dataset](https://github.com/YoYo000/BlendedMVS)
+2. using [fusibile of mvsnet version](https://github.com/XYZ-qiyh/fusibile-mvsnet)
diff --git a/doc/depthmap.jpg b/doc/depthmap.jpg
new file mode 100644
index 0000000..fb3cb8d
Binary files /dev/null and b/doc/depthmap.jpg differ
diff --git a/doc/depthmap.png b/doc/depthmap.png
new file mode 100644
index 0000000..be8b179
Binary files /dev/null and b/doc/depthmap.png differ
diff --git a/doc/rgb.jpg b/doc/rgb.jpg
new file mode 100644
index 0000000..2a1e2e3
Binary files /dev/null and b/doc/rgb.jpg differ
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000..6abed53
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,4 @@
+opencv-python
+plyfile
+matplotlib
+tensorboardX
\ No newline at end of file
diff --git a/tools/fusibile_to_dtu_eval.py b/tools/fusibile_to_dtu_eval.py
new file mode 100644
index 0000000..bb79698
--- /dev/null
+++ b/tools/fusibile_to_dtu_eval.py
@@ -0,0 +1,36 @@
+# Convert output of fusibile to DTU evaluation format.
+# By: Jiayu Yang
+# Date: 2020-03-30
+
+import os
+from os import listdir
+
+fusibile_out_folder="../outputs-dtu/"
+dtu_eval_folder="../outputs-dtu/"
+
+if not os.path.isdir(dtu_eval_folder):
+    os.mkdir(dtu_eval_folder)
+
+scans = ["scan1", "scan4", "scan9", "scan10", "scan11",
+         "scan12", "scan13", "scan15", "scan23", "scan24",
+         "scan29", "scan32", "scan33", "scan34", "scan48",
+         "scan49", "scan62", "scan75", "scan77", "scan110",
+         "scan114", "scan118"]
+
+for scan in scans:
+    # Move ply to dtu eval folder and rename
+    scan_folder = os.path.join(fusibile_out_folder, scan, "points_mvsnet")
+    consis_folders = [f for f in listdir(scan_folder) if f.startswith('consistencyCheck-')]
+
+    consis_folders.sort()
+    consis_folder = consis_folders[-1]
+    source_ply = os.path.join(fusibile_out_folder, scan, "points_mvsnet", consis_folder, 'final3d_model.ply')
+    #print("source :{}".format(source_ply))
+    #source_ply = os.path.join(fusibile_out_folder,scan,'consistencyCheck/final3d_model.ply')
+    scan_idx = int(scan[4:])
+    target_ply = os.path.join(dtu_eval_folder,'mvsnet{:03d}_l3.ply'.format(scan_idx))
+
+    cmd = 'mv '+source_ply+' '+target_ply
+
+    print(cmd)
+    os.system(cmd)
diff --git a/tools/gray2color.py b/tools/gray2color.py
new file mode 100644
index 0000000..ebf0875
--- /dev/null
+++ b/tools/gray2color.py
@@ -0,0 +1,39 @@
+import numpy as np
+import cv2
+import argparse
+
+import sys
+sys.path.append("../datasets")
+from data_io import read_pfm#, write_depth_img
+
+
+def write_depth_img(filename, depth_image):
+    # Mask the array where equal to a given value
+    ma = np.ma.masked_equal(depth_image, 0.0, copy=False)
+    d_min = ma.min()
+    d_max = ma.max()
+    depth_n = 255.0 * (depth_image - d_min) / (d_max - d_min) # depth map normalize
+    depth_n = depth_n.astype(np.uint8)
+    out_depth_image = cv2.applyColorMap(depth_n, cv2.COLORMAP_JET) # applyColorMap
+    cv2.imwrite(filename, out_depth_image)
+
+
+if __name__ == "__main__":
+    # parse argument
+    parser = argparse.ArgumentParser()
+    parser.add_argument("depth_path")
+    args = parser.parse_args()
+    depth_path = args.depth_path
+
+    # read_pfm
+    depth_map, _ = read_pfm(depth_path)
+    print('depth shape: {}'.format(depth_map.shape))
+
+    ## photometric filter
+    #if False:
+    #    pfm_prob_path = depth_path.replace("depth_est", "confidence")
+    #    prob_map, _ = read_pfm(pfm_prob_path)
+    #    depth_map[prob_map < 0.9] = 0
+
+    # gray2color
+    write_depth_img(depth_path.replace(".pfm", ".jpg"), depth_map)
diff --git a/tools/visualize.py b/tools/visualize.py
new file mode 100644
index 0000000..12f00a5
--- /dev/null
+++ b/tools/visualize.py
@@ -0,0 +1,33 @@
+import numpy as np
+import cv2
+import argparse
+import matplotlib.pyplot as plt
+
+import sys
+sys.path.append("../datasets")
+from data_io import read_pfm
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('depth_path')
+    args = parser.parse_args()
+    depth_path = args.depth_path
+    if depth_path.endswith('npy'):
+        depth_image = np.load(depth_path)
+        depth_image = np.squeeze(depth_image)
+        print('value range: ', depth_image.min(), depth_image.max())
+        plt.imshow(depth_image, 'rainbow')
+        plt.show()
+    elif depth_path.endswith('pfm'):
+        depth_image, _ = read_pfm(depth_path)
+        print('depth shape: {}'.format(depth_image.shape))
+        ma = np.ma.masked_equal(depth_image, 0.0, copy=False)
+        print('value range: ', ma.min(), ma.max())
+        plt.imshow(depth_image, 'rainbow')
+        plt.show()
+    else:
+        depth_image = cv2.imread(depth_path)
+        ma = np.ma.masked_equal(depth_image, 0.0, copy=False)
+        print('value range: ', ma.min(), ma.max())
+        plt.imshow(depth_image)
+        plt.show()
diff --git a/tools/visualize_save.py b/tools/visualize_save.py
new file mode 100644
index 0000000..6f2bd67
--- /dev/null
+++ b/tools/visualize_save.py
@@ -0,0 +1,36 @@
+import numpy as np
+import cv2
+import argparse
+import matplotlib.pyplot as plt
+
+import sys
+sys.path.append("../datasets")
+from data_io import read_pfm
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('depth_path')
+    args = parser.parse_args()
+    depth_path = args.depth_path
+    if depth_path.endswith('npy'):
+        depth_image = np.load(depth_path)
+        depth_image = np.squeeze(depth_image)
+        print('value range: ', depth_image.min(), depth_image.max())
+        plt.imshow(depth_image, 'rainbow')
+        plt.show()
+    elif depth_path.endswith('pfm'):
+        depth_image, _ = read_pfm(depth_path)
+        print('depth shape: {}'.format(depth_image.shape))
+        ma = np.ma.masked_equal(depth_image, 0.0, copy=False)
+        print('value range: ', ma.min(), ma.max())
+        plt.imshow(depth_image, 'rainbow')
+        # plt.show()
+        plt.axis('off')
+        out_filename = depth_path.replace(".pfm", ".png")
+        plt.savefig(out_filename, bbox_inches='tight', pad_inches=0, dpi=300)
+    else:
+        depth_image = cv2.imread(depth_path)
+        ma = np.ma.masked_equal(depth_image, 0.0, copy=False)
+        print('value range: ', ma.min(), ma.max())
+        plt.imshow(depth_image)
+        plt.show()