Commit

added codes
zovelsanj committed Feb 7, 2024
1 parent 7be797d commit 9e8758b
Showing 20 changed files with 1,901 additions and 0 deletions.
20 changes: 20 additions & 0 deletions codes/.gitignore
@@ -0,0 +1,20 @@
analysis/
data/
color_palatte.png
azure kinect color.png
data.zip
**/__pycache__/
zi0vrcRW
ziVuEiKI
zinxpZy7
analysis2/
main*
_minted-main/
images/
../360-alignment/
../dummy.png
../experiments/test.py
../test.png
../test_realsense.png
../k4a-hdr-imaging/
stat/__pycache__/
11 changes: 11 additions & 0 deletions codes/config/azurekinect.json
@@ -0,0 +1,11 @@
{
"camera_fps" : "K4A_FRAMES_PER_SECOND_30",
"color_format" : "K4A_IMAGE_FORMAT_COLOR_MJPG",
"color_resolution" : "K4A_COLOR_RESOLUTION_720P",
"depth_delay_off_color_usec" : "0",
"depth_mode" : "K4A_DEPTH_MODE_NFOV_UNBINNED",
"disable_streaming_indicator" : "false",
"subordinate_delay_off_master_usec" : "0",
"synchronized_images_only" : "false",
"wired_sync_mode" : "K4A_WIRED_SYNC_MODE_STANDALONE"
}
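
This file matches the sensor-config schema consumed by Open3D's Azure Kinect wrapper: each value is the string name of a k4a SDK enum or a numeric setting. A minimal sketch of how such a file is typically read, assuming Open3D is the consumer (the recording script itself is not shown in this excerpt); the device index is illustrative:

# Hedged sketch: load codes/config/azurekinect.json via Open3D's Azure Kinect wrapper.
# Assumes Open3D was built with Azure Kinect support; device index 0 is illustrative.
import open3d as o3d

config = o3d.io.read_azure_kinect_sensor_config("codes/config/azurekinect.json")
sensor = o3d.io.AzureKinectSensor(config)
if not sensor.connect(0):
    raise RuntimeError("Failed to connect to Azure Kinect device 0")
rgbd = sensor.capture_frame(True)  # True = align depth to the color frame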
13 changes: 13 additions & 0 deletions codes/config/default_config.json
@@ -0,0 +1,13 @@
{
"camera_fps" : "K4A_FRAMES_PER_SECOND_30",
"color_format" : "K4A_IMAGE_FORMAT_COLOR_MJPG",
"color_resolution" : "K4A_COLOR_RESOLUTION_720P",
"depth_delay_off_color_usec" : "0",
"depth_mode" : "K4A_DEPTH_MODE_NFOV_2X2BINNED",
"disable_streaming_indicator" : "false",
"subordinate_delay_off_master_usec" : "0",
"synchronized_images_only" : "false",
"wired_sync_mode" : "K4A_WIRED_SYNC_MODE_STANDALONE",
"enable_auto_exposure": "false",
"exposure_time_us": 5
}
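
default_config.json repeats the Azure Kinect settings above but switches to the 2x2-binned depth mode and adds two exposure keys. Those last two keys (enable_auto_exposure, exposure_time_us) are not part of the stock Open3D sensor-config schema, so they are presumably parsed separately by the repo's own recording code. A minimal stdlib-only sketch, assuming plain JSON parsing, of splitting the custom keys from the standard ones:

# Hedged sketch: separate the repo-specific exposure keys from the standard k4a settings.
# The file path and the fallback defaults are illustrative.
import json

with open("codes/config/default_config.json") as f:
    cfg = json.load(f)

enable_auto_exposure = cfg.pop("enable_auto_exposure", "true") == "true"
exposure_time_us = int(cfg.pop("exposure_time_us", 0))
# cfg now contains only the stock enum-style settings shown above.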
16 changes: 16 additions & 0 deletions codes/config/realsense.json
@@ -0,0 +1,16 @@
{
"name": "Realsense bag file",
"path_dataset": "dataset/realsense.bag",
"path_intrinsic": "optional/read_from_bag_file.json",
"depth_max": 3.0,
"voxel_size": 0.05,
"depth_diff_max": 0.07,
"preference_loop_closure_odometry": 0.1,
"preference_loop_closure_registration": 5.0,
"tsdf_cubic_size": 3.0,
"icp_method": "color",
"global_registration": "ransac",
"python_multi_threading": true,
"resolution": [1280, 720],
"camera_fps": 30
}
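
realsense.json follows the layout used by Open3D's reconstruction-system examples (dataset path, TSDF/ICP parameters, registration method), extended here with the capture resolution and frame rate. A minimal sketch of reading it, assuming it is consumed as plain JSON; the variable names are illustrative:

# Hedged sketch: load codes/config/realsense.json and pull out a few settings.
import json

with open("codes/config/realsense.json") as f:
    config = json.load(f)

width, height = config["resolution"]     # 1280 x 720 color/depth stream
fps = config["camera_fps"]               # 30 frames per second
depth_max_m = config["depth_max"]        # truncate depth beyond 3.0 m
voxel_size_m = config["voxel_size"]      # 0.05 m voxels for downsampling/TSDF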
73 changes: 73 additions & 0 deletions codes/experiments/ambiance.py
@@ -0,0 +1,73 @@
import numpy as np
import argparse
# import matplotlib
# matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
from libs.experiments import Experiments
from libs.visualization import Visualization

def subplots(data_list, title, camera, label_list, path, heatmap=True):
num_rows, num_cols = Visualization.get_grid(len(data_list))
print(f"Rows: {num_rows}, Columns: {num_cols}")
fig = plt.figure(constrained_layout=True)
fig.suptitle('Per-Pixel Mean Distance Distribution')
subfigs = fig.subfigures(nrows=num_rows, ncols=1)

for row, subfig in enumerate(subfigs):
subfig.suptitle(f"({camera[row]})", fontsize=12)
axs = subfig.subplots(nrows=1, ncols=num_cols)
for col, ax in enumerate(axs):
if heatmap:
im = data_list[col+len(axs)*row].noise_visualization(ax=ax)
cbar = fig.colorbar(im, ax=ax,label=f"distance ({label_list[row]})")
cbar.ax.tick_params(labelsize=8)
else:
                Visualization.temporal_depth_variation(data_list[col+len(axs)*row])
if row==0:
ax.set_title(title[col])
if path is not None:
plt.savefig(path)
plt.show()

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=
"Realsense Recorder. Please select one of the optional arguments")
parser.add_argument("--input",
required=False,
nargs = '+',
help="set path to input color and depth directory. Give color path as 1st argument")
parser.add_argument("--npy",
required=False,
nargs = '*',
help="npy filename to export data or npy path to load data")
parser.add_argument("--export_depth",
action='store_true',
                        help="option to export depth data after selecting ROIs")
parser.add_argument("--fig_path",
required=False,
help="path to save plots")

args = parser.parse_args()

obj_list = []
# title_list = ["Indoor Ambient", "Indoor Controlled", "Outdoor Shadow", "Outdoor Sunlight"]
title_list = ["Controlled Environment", "Uncontrolled Environment"]
if len(args.npy)>1:
for i,file_name in enumerate(args.npy):
data=np.load(file_name)
if i<len(title_list): #pass RealSense .npy files first then AzureKinect
data=(data+0.0082)*1000
obj_list.append(Visualization(data,data.shape[1],data.shape[2]))
# print(len(obj_list))
subplots(obj_list, title_list, camera=["RealSense","AzureKinect"],label_list=['mm','mm'], path=args.fig_path)

else:
ROI_coordinates = None
experiment5 = Experiments(args.input, args.export_depth, args.npy)
if len(args.input)>1:
ROI_coordinates, depth_data = experiment5.get_depth_data()
print(f'depth_data.shape = {depth_data.shape}, ROI: {ROI_coordinates}')
else:
ROI_coordinates, _ = experiment5.get_depth_data()
print(f'Starting and ending coordinates of ROI: {ROI_coordinates}')
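
The Experiments and Visualization classes come from the repo's libs package, which is not shown in this excerpt, so only their call sites are visible. From those calls, each .npy file appears to hold a depth stack of shape (frames, height, width) in millimeters (after the RealSense offset/scale applied in the loop), and noise_visualization appears to render a per-pixel temporal statistic. A self-contained sketch of that kind of per-pixel mean-distance heatmap, under those assumptions and with an illustrative file name:

# Hedged sketch: per-pixel temporal statistics of a depth stack shaped (frames, H, W).
# "controlled.npy" is illustrative; libs.visualization is not part of this excerpt.
import numpy as np
import matplotlib.pyplot as plt

depth = np.load("controlled.npy")      # (frames, H, W), values in mm
pixel_mean = depth.mean(axis=0)        # mean distance per pixel over time
pixel_std = depth.std(axis=0)          # temporal noise per pixel

fig, (ax0, ax1) = plt.subplots(1, 2, constrained_layout=True)
im0 = ax0.imshow(pixel_mean)
fig.colorbar(im0, ax=ax0, label="mean distance (mm)")
im1 = ax1.imshow(pixel_std)
fig.colorbar(im1, ax=ax1, label="std (mm)")
fig.suptitle("Per-Pixel Mean Distance Distribution")
plt.show()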
74 changes: 74 additions & 0 deletions codes/experiments/camera_distance.py
@@ -0,0 +1,74 @@
import numpy as np
import argparse
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import sys
sys.path.append(".")
import os
from libs.experiments import Experiments
from libs.visualization import Visualization
from libs.type_convert import convert_float64_to_32

def plots(data_list, title_list, suptitle, camera, path, label_list, heatmap, warmup, offset, check_distribution):
if offset:
Visualization.subplots(data_list, title_list[1], suptitle[1], camera, path, label_list, heatmap, offset)
return
if warmup:
Visualization.subplots(data_list, title_list[0], suptitle[0], camera, path, label_list, heatmap)
else:
Visualization.subplots(data_list, title_list[1], suptitle[1], camera, path, label_list, heatmap, offset, check_distribution)

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=
"Realsense Recorder. Please select one of the optional arguments")
parser.add_argument("--input",
required=False,
nargs = '+',
help="set path to input color and depth directory. Give color path as 1st argument")
parser.add_argument("--npy",
required=False,
nargs = '*',
help="npy filename to export data or npy path to load data")
parser.add_argument("--export_depth",
action='store_true',
                        help="option to export depth data after selecting ROIs")
parser.add_argument("--camera_id",
type=int,
help="RealSense = 0 or AzureKinect = 1")
parser.add_argument("--fig_path",
required=False,
help="path to save plots")

args = parser.parse_args()

object_list = []
data_list = []
if len(args.npy)>1:
for i, file_name in enumerate(args.npy):
print(f'Loading {file_name}')
data=np.load(file_name)
convert_float64_to_32(data, out_dir=os.path.dirname(file_name), idx=[args.camera_id, i])
if args.camera_id==0:
data = (data+0.0082)*1000 #for RealSense

object_list.append(Visualization(data,[50,50],[100,100]))
data_list.append(data)
obj_list = [object_list, data_list]
title_list = [["Before Warmup", "After Warmup"], ["0.5m", "1m", "1.5m", "2m", "2.5m"]]
suptitles = ["Effect of Warmup Time", "Effect of Camera Distance"]
camera=["RealSense","AzureKinect"]
        label_list=['mm','mm'] # Replace 'mm' with 'm' if the metric is required in meters.

plots(obj_list, title_list, suptitles, camera=camera[args.camera_id], path=args.fig_path, label_list=label_list[args.camera_id], heatmap=False, warmup=False, offset=False, check_distribution=False)

else:
ROI_coordinates = None
experiment1 = Experiments(args.input, args.export_depth, args.npy)
if len(args.input)>1:
ROI_coordinates, depth_data = experiment1.get_depth_data()
print(f'depth_data.shape = {depth_data.shape}, ROI: {ROI_coordinates}')
else:
ROI_coordinates, _ = experiment1.get_depth_data()
print(f'Starting and ending coordinates of ROI: {ROI_coordinates}')
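
convert_float64_to_32 is imported from libs.type_convert, which is not included in this excerpt; only its call signature is visible above. A hedged sketch of what such a helper plausibly does (downcasting the loaded stack to float32 and saving it next to the source file); the function body and output naming here are assumptions, not the repo's actual implementation:

# Hedged sketch of a float64 -> float32 conversion helper; mirrors the call signature
# used above, but the body and output file name are illustrative assumptions.
import os
import numpy as np

def convert_float64_to_32(data, out_dir, idx):
    """Downcast a depth stack to float32 and save it, roughly halving disk usage."""
    camera_id, file_index = idx
    data32 = np.asarray(data, dtype=np.float32)
    out_path = os.path.join(out_dir, f"depth_float32_cam{camera_id}_{file_index}.npy")
    np.save(out_path, data32)
    return data32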
70 changes: 70 additions & 0 deletions codes/experiments/object_color.py
@@ -0,0 +1,70 @@
import numpy as np
import argparse
import os
import matplotlib.pyplot as plt
from libs.experiments import Experiments
from libs.visualization import Visualization

def plots(obj_list, title_list, suptitle, camera, path, label_list, heatmap=True):
if camera=="AzureKinect":
Visualization.subplots(obj_list, title_list[::-1], suptitle, camera, path, label_list, heatmap)
else:
Visualization.subplots(obj_list, title_list, suptitle, camera, path, label_list, heatmap)

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=
"Realsense Recorder. Please select one of the optional arguments")
parser.add_argument("--input",
required=False,
nargs = '+',
help="set path to input color and depth directory. Give color path as 1st argument")
parser.add_argument("--npy",
required=False,
help="npy filename to export data or npy path to read data")
parser.add_argument("--export_depth",
action='store_true',
help="option to view bounding box")
parser.add_argument("--camera_id",
type=int,
required=False,
help="RealSense = 0 or AzureKinect = 1")
parser.add_argument("--fig_path",
required=False,
help="path to save plots")

args = parser.parse_args()

ROI_coordinates = []
data = None
experiment3 = Experiments(args.input, args.export_depth, args.npy)
if len(args.input)>1:
ROI_coordinates, depth_data = experiment3.get_depth_data() # Depth values can be obtained from depth images only for Azure Kinect
print(f'depth_data.shape = {depth_data.shape}, ROI: {ROI_coordinates}')
path_splitted = os.path.split(args.input[1])
kinect_data_path = path_splitted[0]
data = np.load(os.path.join(kinect_data_path, args.npy))
if args.camera_id==0:
data = (data + (0.0084))*1000 #For RealSense

else:
ROI_coordinates, _ = experiment3.get_depth_data() # For RealSense Camera, only ROI coordinates can be obtained from color images
print(f'Starting and ending coordinates of ROI: {ROI_coordinates}')
data = np.load(args.npy)
data = (data + (0.0084))*1000
# data = data+0.0084

scaled_roi = Experiments.crop_pallette(ROI_coordinates)
print(f"scaled ROI: {scaled_roi}")
object_list = []
data_list = []
for i, roi in enumerate(scaled_roi):
object_list.append(Visualization(data, roi[0], roi[1]))
data_list.append(data)

obj_list = [object_list, data_list]
textures_list = ["Blue", "Green", "Red", "Black", "Yellow", "White"]
camera=["RealSense","AzureKinect"]
label_list=['mm','mm']

plots(obj_list, title_list=textures_list, suptitle="Effect of Color (Absorptivity)", camera=camera[args.camera_id], path=args.fig_path, label_list=label_list[args.camera_id])
59 changes: 59 additions & 0 deletions codes/experiments/object_motion.py
@@ -0,0 +1,59 @@
import numpy as np
import argparse
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import sys
sys.path.append(".")
from libs.experiments import Experiments
from libs.visualization import Visualization

if __name__ == "__main__":
parser = argparse.ArgumentParser(
description=
"Realsense Recorder. Please select one of the optional arguments")
parser.add_argument("--input",
required=False,
nargs = '+',
help="set path to input color and depth directory. Give color path as 1st argument")
parser.add_argument("--npy",
required=False,
nargs="*",
help="npy filename to export data or npy path to read data")
parser.add_argument("--export_depth",
action='store_true',
help="option to view bounding box")
parser.add_argument("--camera_id",
type=int,
help="RealSense = 0 or AzureKinect = 1")
parser.add_argument("--fig_path",
required=False,
help="path to save plots")

args = parser.parse_args()
object_list = []
data_list = []
if len(args.npy)>1:
for file_name in args.npy:
data=np.load(file_name)
if args.camera_id==0: #for RealSense
data = (data + (0.0084))*1000 #in case not added in realsense_recorder.py
object_list.append(Visualization(data,data.shape[1],data.shape[2]))
data_list.append(data)

obj_list = [object_list, data_list]
title_list = ["Static", "Motion"]
camera=["RealSense","AzureKinect"]
label_list='mm'
suptitle = "Effect of Motion"

Visualization.subplots(obj_list, title_list, suptitle, camera=camera[args.camera_id], path=None, label_list=label_list, heatmap=False)
else:
ROI_coordinates = None
experiment2 = Experiments(args.input, args.export_depth, args.npy)
if len(args.input)>1:
ROI_coordinates, depth_data = experiment2.get_depth_data()
print(f'depth_data.shape = {depth_data.shape}, ROI: {ROI_coordinates}')
else:
ROI_coordinates, _ = experiment2.get_depth_data()
print(f'Starting and ending coordinates of ROI: {ROI_coordinates}')