merge normalLoader and dataLoader, add consistency checks
Also adjust the normals GT paths to match the output of generate_normals.py; the normals GT is now stored separately for the left and right cameras. Add an option to use the camera image path layout produced by the `raw_data_downloader.sh` script.
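
For orientation, here is a minimal sketch of the on-disk layout the merged loader expects, reconstructed from the path handling in the diff below. The dataset root, sequence name, and camera folder are placeholders for illustration, not values taken from this commit.

```python
from os.path import join

# Placeholder values, for illustration only (not from the commit):
data_dir = '/data/kitti'
seq = '2011_09_26_drive_0001_sync'
date = seq.split('_drive')[0]   # '2011_09_26'
cam_dir = 'image_02'            # left camera; 'image_03' is the right camera

# Camera images as laid out by raw_data_downloader.sh (used when separate_raw_dir=True):
print(join(data_dir, 'raw', date, seq, cam_dir, 'data'))
# With separate_raw_dir=False, images are instead read from
# data_depth_velodyne/train/<seq>/<cam_dir>/data.

# Sparse LiDAR depth:
print(join(data_dir, 'data_depth_velodyne/train', seq, 'proj_depth/velodyne_raw', cam_dir))
# Dense depth ground truth:
print(join(data_dir, 'data_depth_annotated/train', seq, 'proj_depth/groundtruth', cam_dir))
# Normals ground truth, now per camera, as written by generate_normals.py:
print(join(data_dir, 'normals_gt/train', seq, cam_dir))
```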
Showing 5 changed files with 68 additions and 121 deletions.
```diff
@@ -1,72 +1,82 @@
 from __future__ import print_function
 
 import os
 import os.path
-import numpy as np
+from os.path import join, exists
 
 ROOT_DIR = os.path.dirname(os.path.abspath(__file__))
 parent_path = os.path.dirname(ROOT_DIR)
 IMG_EXTENSIONS = [
     '.jpg', '.JPG', '.jpeg', '.JPEG',
     '.png', '.PNG', '.ppm', '.PPM', '.bmp', '.BMP',
 ]
 def is_image_file(filename):
     return any(filename.endswith(extension) for extension in IMG_EXTENSIONS)
 
-def dataloader(filepath):
+def dataloader(data_dir, separate_raw_dir=False):
     images = []
     lidars = []
-    depths = []
+    depths_gt = []
+    normals_gt = []
 
-    temp = filepath
-    filepathl = temp + 'data_depth_velodyne/train'
-    filepathd = temp + 'data_depth_annotated/train'
-    filepathgt = temp + 'gt/out/train'
+    if separate_raw_dir:
+        imgs_root = join(data_dir, 'raw')
+    sparse_depth_root = join(data_dir, 'data_depth_velodyne/train')
+    depth_gt_root = join(data_dir, 'data_depth_annotated/train')
+    normals_gt_root = join(data_dir, 'normals_gt/train')
 
-    seqs = [seq for seq in os.listdir(filepathl) if seq.find('sync') > -1]
-    left_fold = '/image_02/data'
-    right_fold = '/image_03/data'
-    lidar_foldl = '/proj_depth/velodyne_raw/image_02'
-    lidar_foldr = '/proj_depth/velodyne_raw/image_03'
-    depth_foldl = '/proj_depth/groundtruth/image_02'
-    depth_foldr = '/proj_depth/groundtruth/image_03'
+    seqs = sorted(seq for seq in os.listdir(sparse_depth_root) if seq.endswith('_sync'))
 
     for seq in seqs:
-        temp = os.path.join(filepathgt, seq)
-
-        imgsl = os.path.join(filepathl, seq) + left_fold
-        imagel = [os.path.join(imgsl, img) for img in os.listdir(temp)]
-        imagel.sort()
-        images = np.append(images, imagel)
-        imgsr = os.path.join(filepathl, seq) + right_fold
-        imager = [os.path.join(imgsr, img) for img in os.listdir(temp)]
-        imager.sort()
-        images = np.append(images, imager)
-
-        lids2l = os.path.join(filepathl, seq) + lidar_foldl
-        lidar2l = [os.path.join(lids2l, lid) for lid in os.listdir(temp)]
-        lidar2l.sort()
-        lidars = np.append(lidars, lidar2l)
-        lids2r = os.path.join(filepathl, seq) + lidar_foldr
-        lidar2r = [os.path.join(lids2r, lid) for lid in os.listdir(temp)]
-        lidar2r.sort()
-        lidars = np.append(lidars, lidar2r)
-
-        depsl = os.path.join(filepathd, seq) + depth_foldl
-        depthl = [os.path.join(depsl, dep) for dep in os.listdir(temp)]
-        depthl.sort()
-        depths = np.append(depths, depthl)
-        depsr = os.path.join(filepathd, seq) + depth_foldr
-        depthr = [os.path.join(depsr, dep) for dep in os.listdir(temp)]
-        depthr.sort()
-        depths = np.append(depths, depthr)
-
-    left_train = images
-    lidar2_train = lidars
-    depth_train = depths
-
-    return left_train,lidar2_train,depth_train
+        date = seq.split('_drive')[0]
+        for cam_dir in ('image_02', 'image_03'):
+            if separate_raw_dir:
+                imgs_path = join(imgs_root, date, seq, cam_dir, 'data')
+            else:
+                imgs_path = join(sparse_depth_root, seq, cam_dir, 'data')
+            lidars_path = join(sparse_depth_root, seq, 'proj_depth/velodyne_raw', cam_dir)
+            depth_gt_path = join(depth_gt_root, seq, 'proj_depth/groundtruth', cam_dir)
+            normals_gt_path = join(normals_gt_root, seq, cam_dir)
+
+            if not exists(imgs_path):
+                print("Warning: missing data dir", imgs_path)
+                continue
+            if not exists(lidars_path):
+                print("Warning: missing data dir", lidars_path)
+                continue
+            if not exists(depth_gt_path):
+                print("Warning: missing data dir", depth_gt_path)
+                continue
+            if not exists(normals_gt_path):
+                print("Warning: missing data dir", normals_gt_path)
+                continue
+
+            img_files = set(os.listdir(imgs_path))
+            lidar_files = set(os.listdir(lidars_path))
+            depth_gt_files = set(os.listdir(depth_gt_path))
+            normals_gt_files = set(os.listdir(normals_gt_path))
+
+            img_depth_diff = 14 if seq == '2011_09_26_drive_0009_sync' else 10
+            max_size = max(len(img_files) - img_depth_diff,
+                           len(lidar_files), len(depth_gt_files), len(normals_gt_files))
+            if len(img_files) - img_depth_diff < max_size:
+                print("Warning:", max_size + img_depth_diff - len(img_files), "files missing in", imgs_path)
+            if len(lidar_files) < max_size:
+                print("Warning:", max_size - len(lidar_files), "files missing in", lidars_path)
+            if len(depth_gt_files) < max_size:
+                print("Warning:", max_size - len(depth_gt_files), "files missing in", depth_gt_path)
+            if len(normals_gt_files) < max_size:
+                print("Warning:", max_size - len(normals_gt_files), "files missing in", normals_gt_path)
+
+            common_files = sorted(
+                img_files &
+                lidar_files &
+                depth_gt_files &
+                normals_gt_files
+            )
+            images += [join(imgs_path, img) for img in common_files]
+            lidars += [join(lidars_path, lid) for lid in common_files]
+            depths_gt += [join(depth_gt_path, dep) for dep in common_files]
+            normals_gt += [join(normals_gt_path, norm) for norm in common_files]
+
+    return images, lidars, normals_gt, depths_gt
 
 
 if __name__ == '__main__':
-    datapath = ''
+    import sys
+    from pprint import pprint
+
+    result = dataloader(sys.argv[1])
+    print("Found", len(result[0]), "samples")
+    pprint(list(zip(*result))[:3])
```
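A minimal usage sketch of the merged loader follows, assuming the file is importable as `dataloader` (the actual file name is not visible in this capture) and using a placeholder dataset root.

```python
# Assumptions: the merged loader is importable as `dataloader`, and '/data/kitti'
# stands in for the real dataset root; neither name comes from the commit itself.
from dataloader import dataloader

images, lidars, normals_gt, depths_gt = dataloader('/data/kitti', separate_raw_dir=True)

# The four lists are parallel: index i refers to the same frame in each list,
# because only filenames present in all four directories are kept.
assert len(images) == len(lidars) == len(normals_gt) == len(depths_gt)
print("Found", len(images), "aligned samples")
```

The `img_depth_diff` offset in the consistency check reflects that KITTI's annotated depth maps omit the first and last few frames of each raw drive, so the camera folder is expected to hold roughly ten more images than the ground-truth folders; a larger gap triggers a missing-files warning.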
One of the changed files was deleted by this commit; its diff is not shown.