Added playback from file
JeremyBYU committed Jun 17, 2020
1 parent 5ea48d0 commit d1e8993
Showing 4 changed files with 33 additions and 19 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -4,6 +4,7 @@
*.bag
*.csv
.vscode
*.ini
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
28 changes: 20 additions & 8 deletions grounddetector/capture_ga.py
@@ -88,6 +88,11 @@ def create_pipeline(config: dict):
pipeline.start(rs_config)
profile = pipeline.get_active_profile()

if config['playback']['enabled']:
dev = profile.get_device()
playback = dev.as_playback()
playback.set_real_time(False)

# Processing blocks
filters = []
decimate = None
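
The added block above switches the RealSense device into playback mode when config['playback']['enabled'] is true. For context, a minimal sketch of a pipeline that reads from a recorded .bag file, assuming pyrealsense2; the enable_device_from_file call and the create_playback_pipeline name are illustrative and not part of this diff.

import pyrealsense2 as rs

def create_playback_pipeline(bag_path, repeat=False):
    # Configure the pipeline to read frames from a recorded .bag file
    pipeline = rs.pipeline()
    rs_config = rs.config()
    rs_config.enable_device_from_file(bag_path, repeat_playback=repeat)
    profile = pipeline.start(rs_config)
    # Disable real-time playback so frames are not dropped while processing
    playback = profile.get_device().as_playback()
    playback.set_real_time(False)
    return pipeline, profile
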
@@ -226,9 +231,8 @@ def get_polygon(depth_image: np.ndarray, config, ll_objects, h, w, intrinsics, *
postprocess=config['polygon']['postprocess'])
alg_timings.update(timings)

# return planes, obstacles, alg_timings, o3d_mesh
return planes, obstacles, alg_timings
# return planes, obstacles, timings, mesh, o3d_mesh, o3d_mesh_painted, arrow_o3d, all_poly_lines
# return polygons, points_rot, rm


def valid_frames(color_image, depth_image, depth_min_valid=0.5):
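
The body of valid_frames is not part of this diff. One plausible check, under the assumption that depth_image is a raw depth array and that depth_min_valid is the minimum fraction of non-zero depth pixels required (the real function may instead interpret it as a minimum depth in meters):

import numpy as np

def valid_frames_sketch(color_image, depth_image, depth_min_valid=0.5):
    # Accept a frame pair only if enough depth pixels carry a measurement
    if color_image is None or depth_image is None:
        return False
    valid_fraction = np.count_nonzero(depth_image) / depth_image.size
    return valid_fraction >= depth_min_valid
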
@@ -270,6 +274,7 @@ def capture(config, video=None):

# Long lived objects. These are the objects that hold all the algorithms for surface extraction.
# They need to be long lived (objects) because they hold state (thread scheduler, image datastructures, etc.)
# import ipdb; ipdb.set_trace()
ll_objects = dict()
ll_objects['pl'] = Polylidar3D(**config['polylidar'])
ll_objects['ga'] = GaussianAccumulatorS2(level=config['fastga']['level'])
@@ -281,6 +286,7 @@
out_vid = cv2.VideoWriter(video, cv2.VideoWriter_fourcc('M', 'J', 'P', 'G'), 20, (frame_width, frame_height))

all_records = []
counter = 0
try:
while True:
t00 = time.time()
@@ -295,10 +301,13 @@
logging.debug("Invalid Frames")
continue
t1 = time.time()
counter += 1
# if counter < 340:
# continue

try:
if config['show_polygon']:
# planes, obstacles, timings, mesh, o3d_mesh, o3d_mesh_painted, arrow_o3d, all_poly_lines = get_polygon(depth_image, config, ll_objects, **meta)
# planes, obstacles, timings, o3d_mesh = get_polygon(depth_image, config, ll_objects, **meta)
planes, obstacles, timings = get_polygon(depth_image, config, ll_objects, **meta)
timings['t_get_frames'] = (t0 - t00) * 1000
timings['t_check_frames'] = (t1 - t0) * 1000
@@ -323,6 +332,11 @@
cv2.imwrite(path.join(PICS_DIR, "{}_color.jpg".format(uid)), color_image_cv)
cv2.imwrite(path.join(PICS_DIR, "{}_stack.jpg".format(uid)), images)
if res == ord('m'):
# o3d.visualization.draw_geometries([o3d_mesh])
plt.imshow(np.asarray(ll_objects['ico'].image_to_vertex_idx))
plt.show()
plt.imshow(np.asarray(ll_objects['ico'].mask))
plt.show()
plt.imshow(np.asarray(ll_objects['ico'].image))
plt.show()
# all_lines = [line_mesh.cylinder_segments for line_mesh in all_poly_lines]
@@ -332,8 +346,9 @@
# # import ipdb; ipdb.set_trace()
# o3d.visualization.draw_geometries([axis, o3d_mesh_painted, arrow_o3d, *all_lines])
# print(timings)
logging.info(f"Get Frames: %.2f; Check Valid Frame: %.2f; Laplacian: %.2f; Bilateral: %.2f; Mesh: %.2f; FastGA: %.2f; Plane/Poly: %.2f; Filtering: %.2f",
timings['t_get_frames'], timings['t_check_frames'], timings['t_laplacian'], timings['t_bilateral'], timings['t_mesh'], timings['t_fastga_total'], timings['t_polylidar_planepoly'], timings['t_polylidar_filter'])
logging.info(f"Frame %d; Get Frames: %.2f; Check Valid Frame: %.2f; Laplacian: %.2f; Bilateral: %.2f; Mesh: %.2f; FastGA: %.2f; Plane/Poly: %.2f; Filtering: %.2f",
counter, timings['t_get_frames'], timings['t_check_frames'], timings['t_laplacian'], timings['t_bilateral'], timings['t_mesh'], timings['t_fastga_total'],
timings['t_polylidar_planepoly'], timings['t_polylidar_filter'])
except Exception as e:
logging.exception("Error!")
finally:
@@ -348,9 +363,6 @@
df.to_csv(config['save'].get('timings', 'data/timings.csv'))





def main():
parser = argparse.ArgumentParser(description="Captures ground plane and obstacles as polygons")
parser.add_argument('-c', '--config', help="Configuration file", default=DEFAULT_CONFIG_FILE)
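
At the end of capture() the per-frame timing records accumulated in all_records are written out with df.to_csv (shown above). A minimal sketch of that pattern, assuming pandas and that each record is a flat dict of millisecond timings; save_timings is an illustrative name.

import pandas as pd

def save_timings(all_records, out_path='data/timings.csv'):
    # Each record is one frame's timing breakdown, e.g. {'t_mesh': 4.2, ...}
    df = pd.DataFrame.from_records(all_records)
    df.to_csv(out_path, index=False)
    return df
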
22 changes: 11 additions & 11 deletions grounddetector/config/default.yaml
@@ -2,8 +2,8 @@
name: Default Settings for D435 extracting multiple surfaces using Polylidar3D
advanced: "grounddetector/config/json/high_accuracy.json" # JSON file path holding advanced settings
playback:
enabled: true
file: "data/test.bag"
enabled: false
file: "data/20200617_162944.bag"
repeat: false
save:
video: ""
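
These playback keys are what the create_pipeline change above consults. A minimal sketch of loading this file into the config dict, assuming PyYAML; load_config is an illustrative helper and the path simply mirrors this file's location.

import yaml

def load_config(path='grounddetector/config/default.yaml'):
    # Parse the YAML settings into a plain nested dict
    with open(path, 'r') as f:
        return yaml.safe_load(f)

config = load_config()
if config['playback']['enabled']:
    print('Reading frames from', config['playback']['file'])
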
@@ -42,25 +42,25 @@
kernel_size: 3
loops_bilateral: 3
sigma_length: 0.1
sigma_angle: 0.261
sigma_angle: 0.20
polylidar: # Parameters we send to polylidar. Determine plane and polygon extraction from point clouds.
alpha: 0.0 # must be set to 0.0 if using lmax
lmax: 0.1 # maximum distance between points in plane for spatial connection
z_thresh: 0.08 # enforce point to plane distance constraints during region growing.
lmax: 0.05 # maximum distance between points in plane for spatial connection
z_thresh: 0.0 # enforce point to plane distance constraints during region growing.
norm_thresh: 0.96 # Not used, set to the same as norm_thresh_min. Will deprecate later.
norm_thresh_min: 0.95 # triangles must have a minimum amount of planarity.
min_hole_vertices : 6 # minimum number of vertices in a hole to return
norm_thresh_min: 0.96 # triangles must have a minimum amount of planarity.
min_hole_vertices : 10 # minimum number of vertices in a hole to return
min_triangles: 500 # minimum number of triangles needed to make a plane
fastga: # Parameters used for dominant plane normal estimation
level: 3 # refinement level of the gaussian accumulator
down_sample_fraction: 0.12 # only use X% of triangle normals from mesh for integration, lower the faster
find_peaks_kwargs: # peak detection arguments
threshold_abs: 50 # [0-255], minimum value of normalized histogram of S2 to be a peak
min_distance: 1 # 1 = 3X3 kernel for peak detector. I recommend to not change
exclude_border: false
exclude_border: true
indices: false # must return mask
cluster_kwargs: # Agglomerative hierarchal clustering
t: 0.18 # min distance in 3D space of peaks (surface normals on sphere) before merging
t: 0.28 # min distance in 3D space of peaks (surface normals on sphere) before merging
criterion: 'distance'
average_filter: # A merge group must have at least x% of value in all of histogram, this doesn't have much meaning and will probably be deprecated
min_total_weight: 0.1
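
cluster_kwargs (t, criterion) match the arguments of SciPy's fcluster, which merges peak surface normals that lie close together on the sphere. A hedged sketch of that merging step, assuming peak_normals is an (N, 3) array of unit normals from the peak detector; the project's actual plumbing, including the peak_local_max call driven by find_peaks_kwargs, is not shown in this diff.

import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster

def merge_peak_normals(peak_normals, cluster_kwargs):
    # criterion='distance' cuts the dendrogram at distance t, merging nearby peaks
    Z = linkage(peak_normals, method='single')
    labels = fcluster(Z, **cluster_kwargs)
    merged = []
    for label in np.unique(labels):
        avg = peak_normals[labels == label].mean(axis=0)
        merged.append(avg / np.linalg.norm(avg))  # re-normalize the averaged normal
    return np.asarray(merged)

# e.g. merge_peak_normals(peaks, dict(t=0.28, criterion='distance'))
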
@@ -75,9 +75,9 @@
hole_vertices:
min: 6
plane_area:
min: .15 # m^2
min: .05 # m^2
# These parameters correspond to Shapely polygon geometry operations
positive_buffer: 0.005 # m, Positively expand polygon. Fills in small holes
negative_buffer: 0.03 # m, Negative buffer to polygon. Expands holes and constricts outer hull of polygon
negative_buffer: 0.01 # m, Negative buffer to polygon. Expands holes and constricts outer hull of polygon
simplify: 0.02 # m, simplify edges of polygon
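
positive_buffer, negative_buffer, and simplify correspond to standard Shapely polygon operations. A minimal sketch, assuming the polygon is already in metric coordinates; the exact order of operations in the project's filtering code is not shown in this diff.

from shapely.geometry import Polygon

def buffer_and_simplify(poly: Polygon, positive_buffer=0.005, negative_buffer=0.01, simplify=0.02):
    # Small positive buffer closes pinholes; the negative buffer constricts the
    # outer hull and widens real holes; simplify reduces boundary vertex count
    poly = poly.buffer(positive_buffer).buffer(-negative_buffer)
    return poly.simplify(simplify)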

1 change: 1 addition & 0 deletions grounddetector/helper_updated.py
@@ -156,6 +156,7 @@ def extract_all_dominant_plane_normals(tri_mesh, level=5, with_o3d=False, ga_=No
triangle_normals = np.asarray(tri_mesh.triangle_normals)
triangle_normals_ds = down_sample_normals(triangle_normals, **kwargs)

# np.savetxt('bad_normals.txt', triangle_normals_ds)
triangle_normals_ds_mat = MatX3d(triangle_normals_ds)
t1 = time.perf_counter()
ga.integrate(triangle_normals_ds_mat)
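
down_sample_normals itself is outside this diff. A plausible sketch, assuming it randomly keeps roughly down_sample_fraction of the triangle normals before they are handed to the Gaussian accumulator; the _sketch suffix marks it as illustrative.

import numpy as np

def down_sample_normals_sketch(triangle_normals, down_sample_fraction=0.12, **kwargs):
    # Keep a random subset of normals; integration cost scales with the count
    num_normals = triangle_normals.shape[0]
    num_keep = max(1, int(num_normals * down_sample_fraction))
    idx = np.random.choice(num_normals, size=num_keep, replace=False)
    return np.ascontiguousarray(triangle_normals[idx, :])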
