Skip to content

Commit

Permalink
Added scripts to build surf maps in parallel (nasa#484)
Browse files Browse the repository at this point in the history
Helper scripts for map creation, takes a list of bags or uses all the bags in a directory and generates individual surf maps for these.
  • Loading branch information
rsoussan authored May 13, 2022
1 parent 6e9f7b5 commit 7a3c611
Show file tree
Hide file tree
Showing 18 changed files with 360 additions and 106 deletions.
3 changes: 3 additions & 0 deletions localization/localization_common/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@ find_package(GTSAM REQUIRED)
# System dependencies are found with CMake's conventions
find_package(Eigen3 REQUIRED)

# Allow other packages to use python scripts from this package
catkin_python_setup()

catkin_package(
INCLUDE_DIRS include
LIBRARIES ${PROJECT_NAME} ${GTSAM_LIBRARIES} gtsam
Expand Down
Empty file.
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
# Copyright (c) 2017, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
#
# All rights reserved.
#
# The Astrobee platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import datetime
import glob
import os
import subprocess
import sys

import pandas as pd


# Forward errors so we can recover failures
# even when running commands through multiprocessing
# pooling
def full_traceback(func):
    """Decorator that re-raises exceptions with the formatted traceback
    embedded in the message.

    Needed because multiprocessing workers lose the original traceback when
    an exception crosses the process boundary; embedding it in the message
    preserves the failure context for the parent process.
    """
    import functools
    import traceback

    @functools.wraps(func)
    def tracing_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as err:
            detail = "{}\n\nOriginal {}".format(err, traceback.format_exc())
            raise type(err)(detail)

    return tracing_wrapper


def get_files(directory, file_string):
    """Return the paths in ``directory`` matching the glob ``file_string``."""
    pattern = os.path.join(directory, file_string)
    return glob.glob(pattern)


def get_files_recursive(directory, file_string):
    """Collect files matching the glob ``file_string`` from every immediate
    subdirectory of ``directory`` (one level deep only, despite the name)."""
    matched = []
    # os.walk's first yield lists only the direct children of directory.
    _, child_dirs, _ = next(os.walk(directory))
    for child in child_dirs:
        matched.extend(glob.glob(os.path.join(directory, child, file_string)))
    return matched


def create_directory(directory=None):
    """Create ``directory`` and return its path.

    When ``directory`` is None, a uniquely named timestamped directory is
    created in the current working directory. Exits the program with a
    non-zero status if the directory already exists, to avoid clobbering
    previous results.

    Returns:
        The path of the newly created directory.
    """
    if directory is None:
        directory = os.path.join(
            os.getcwd(), datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
        )
    if os.path.exists(directory):
        print(directory + " already exists!")
        # sys.exit(1) rather than bare exit(): exit() is injected by the
        # site module (not guaranteed) and would report success (status 0).
        sys.exit(1)
    os.makedirs(directory)
    return directory


def load_dataframe(files):
    """Read each CSV file and concatenate them into one pandas DataFrame."""
    return pd.concat([pd.read_csv(csv_file) for csv_file in files])


def run_command_and_save_output(command, output_filename, print_command=True):
    """Run ``command`` in a shell, logging stdout and stderr to ``output_filename``.

    Args:
        command: Full command string, including arguments.
        output_filename: File to write the command's combined output to.
        print_command: When True, echo the command before running it.
    """
    if print_command:
        print(command)
    # NOTE: shell=True is required because callers pass a single command
    # string (with arguments) rather than an argument list; callers must
    # therefore not pass untrusted input here.
    with open(output_filename, "w") as log_file:
        subprocess.call(command, stdout=log_file, stderr=log_file, shell=True)


def basename(filename):
    """Return the file name stripped of both directory path and extension."""
    name_only = os.path.basename(filename)
    root, _ = os.path.splitext(name_only)
    return root
9 changes: 9 additions & 0 deletions localization/localization_common/setup.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
from distutils.core import setup

from catkin_pkg.python_setup import generate_distutils_setup

# Standard catkin boilerplate: expose the localization_common python package
# (whose sources live under scripts/) so other catkin packages can import it.
# Must only be invoked by catkin via catkin_python_setup(), not run directly.
d = generate_distutils_setup(
    packages=["localization_common"], package_dir={"": "scripts"}
)

setup(**d)
53 changes: 53 additions & 0 deletions localization/sparse_mapping/build_map_from_multiple_bags.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
\page build_map_from_multiple_bags Build map from multiple bags
Several tools exist to ease the process of building a new map from a set of bagfiles. For more options/instructions for each tool,
use
`rosrun package_name tool_name.py -h`.


## Steps
### 1. Convert bags from bayer to rgb if necessary
Assuming the bags are all in the same directory, from that directory use:
`rosrun bag_processing convert_all_bayer_bags.py`

The original bayer bags can now be removed or moved elsewhere.

### 2. Splice bags using splice tool
For each bagfile, use the splice tool with:
`rosrun bag_processing splice_bag.py bag_name`

The mapping pipeline struggles with in place rotations, so try to splice a bagfile before and after the rotation if necessary.
If the rotation contains enough simultaneous translational movement, do not splice around the rotation since splicing could result in different segments of a movement no longer matching to each other when they are later merged if there is not enough overlap in their images.

If a bag contains multiple movements that contain sufficient overlap, these can be spliced to make the mapping process more efficient (smaller bags are faster to create maps for) and allow for more modular sanity checks (if movements are spliced and one fails to map well, the other may still be usable).

After splicing, the original bags can be removed or moved elsewhere.

### 3. Build individual maps for each spliced bag
Run
`rosrun sparse_mapping make_surf_maps.py`

This builds individual maps in parallel for each spliced bag in the directory. The mapping process first removes low movement images from a bagfile to prevent triangulation issues and make the mapping process more efficient, then builds a surf map (detects image features, matches features, runs incremental bundle adjustment using successive images, runs a final round of bundle adjustment on the whole map).


### 4. Merge maps
A common merge strategy for a set of movements is to identify the longest movement that covers the most of an area and incrementally merge in maps that have sufficient overlap with this. Use:
`rosrun sparse_mapping merge_maps larger_map_name smaller_map_name -output_map combined_map_name -num_image_overlaps_at_endpoints 1000000`

Sometimes an existing map is available that may contain areas not covered in the new bags. In this case, first merge the new movements together using the above strategy, then merge the resultant combined new movements map into the existing map.

### 5. Register map against real world
Since mapping is performed using monocular images, no set scale is provided and the map needs to be registered using defined real world points before being used for localization.

First copy the map that should be registered then run:
`rosrun sparse_mapping build_map -registration registration_points.pto registration_points.txt -output_map registered_map_name`

The creation of registration points is detailed in `build_map.md`, images from the map are used to manually select feature locations and 3D points for these features are selected using a 3D model of the mapped environment provided externally.

### 6. Verify the resulting map
Use:
`rosrun sparse_mapping run_graph_bag_and_plot_results bag_name map_name config_path --generate-image-features -r robot_config -w world_name`
to test localization with the map.

The bags used here should not have been used for map creation but should contain data in the area of the map. They additionally need IMU data; if this is not available, image registration can be tested using the sparse_mapping_pose_adder tool. Make sure to include the `--generate-image-features` option since image features in the bag are recorded using matches with whatever map was used when the bag was recorded.

This script creates a pdf plotting localization accuracy and map-based pose estimates. If the map is well made, localization pose estimates should be relatively smooth and consistent. Jumps in poses tend to indicate that portions of the map may not be well aligned.
3 changes: 2 additions & 1 deletion localization/sparse_mapping/readme.md
Original file line number Diff line number Diff line change
Expand Up @@ -496,8 +496,9 @@ reduced map with a small list of desired images which can be set with
-image_list, and then all images for which localization fails will be
added back to it.

\subpage build_map_from_multiple_bags
\subpage map_building
\subpage total_station
\subpage granite_lab_registration
\subpage faro
\subpage theia_map
\subpage theia_map
100 changes: 100 additions & 0 deletions localization/sparse_mapping/scripts/make_surf_map.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
#!/usr/bin/python
#
# Copyright (c) 2017, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
#
# All rights reserved.
#
# The Astrobee platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Extracts images, removes low movements images, and generates a map for a given input bagfile.
"""

import argparse
import os
import shutil
import sys

import localization_common.utilities as lu


def make_surf_map(
    bagfile,
    world,
    robot_name,
):
    """Build a SURF map for a single bagfile.

    Extracts nav cam images from the bag, removes low movement images, then
    runs the sparse mapping build_map pipeline on the remaining images. The
    output of each stage is logged to a text file named after the bag.

    Args:
        bagfile: Path to the input bagfile.
        world: ASTROBEE_WORLD value to build the map for (e.g. "iss").
        robot_name: Robot whose config file should be used (e.g. "bumble").
    """
    basename = lu.basename(bagfile)
    bag_images_dir = basename + "_bag_images"
    os.mkdir(bag_images_dir)
    bag_images_dir_path = os.path.abspath(bag_images_dir)
    extract_images_command = (
        "rosrun localization_node extract_image_bag "
        + bagfile
        + " -use_timestamp_as_image_name -image_topic /mgt/img_sampler/nav_cam/image_record -output_directory "
        + bag_images_dir_path
    )
    lu.run_command_and_save_output(
        extract_images_command, basename + "_extract_images.txt"
    )

    # Low movement images cause triangulation issues and slow down mapping.
    remove_low_movement_images_command = (
        "rosrun sparse_mapping remove_low_movement_images " + bag_images_dir_path
    )
    lu.run_command_and_save_output(
        remove_low_movement_images_command, basename + "_remove_low_movement_images.txt"
    )

    # Set environment variables consumed by the mapping tools.
    # NOTE(review): assumes an astrobee checkout at ~/astrobee/src/astrobee.
    home = os.path.expanduser("~")
    robot_config_file = os.path.join("config/robots", robot_name + ".config")
    astrobee_path = os.path.join(home, "astrobee/src/astrobee")
    os.environ["ASTROBEE_RESOURCE_DIR"] = os.path.join(astrobee_path, "resources")
    os.environ["ASTROBEE_CONFIG_DIR"] = os.path.join(astrobee_path, "config")
    # Bug fix: use the requested robot's config file. Previously this was
    # hard-coded to bumble.config, silently ignoring the robot_name argument.
    os.environ["ASTROBEE_ROBOT"] = os.path.join(astrobee_path, robot_config_file)
    os.environ["ASTROBEE_WORLD"] = world

    # Build map
    relative_bag_images_path = os.path.relpath(bag_images_dir)
    bag_images = os.path.join(relative_bag_images_path, "*.jpg")
    map_name = basename + ".map"
    build_map_command = (
        "rosrun sparse_mapping build_map "
        + bag_images
        + " -output_map "
        + map_name
        + " -feature_detection -feature_matching -track_building -incremental_ba -bundle_adjustment -histogram_equalization -num_subsequent_images 100000000"
    )
    lu.run_command_and_save_output(build_map_command, basename + "_build_map.txt")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument("bagfile", help="Input bagfile to generate map for.")
    parser.add_argument("-w", "--world", default="iss")
    parser.add_argument("-r", "--robot-name", default="bumble")

    args = parser.parse_args()
    if not os.path.isfile(args.bagfile):
        print("Bag file " + args.bagfile + " does not exist.")
        # Exit non-zero so shell scripts can detect the failure; previously
        # sys.exit() reported success (status 0) on a missing bag.
        sys.exit(1)

    # Use an absolute path since make_surf_map changes relative context by
    # creating and referencing output directories.
    bagfile = os.path.abspath(args.bagfile)
    make_surf_map(
        bagfile,
        args.world,
        args.robot_name,
    )
81 changes: 81 additions & 0 deletions localization/sparse_mapping/scripts/make_surf_maps.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
#!/usr/bin/python
#
# Copyright (c) 2017, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
#
# All rights reserved.
#
# The Astrobee platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Makes surf maps for the provided bagfiles. See make_surf_map.py script for more
details on surf map creation.
"""

import argparse
import csv
import glob
import itertools
import multiprocessing
import os
import sys

import localization_common.utilities as lu

# Import as to avoid same name as function in this script
import make_surf_map as mm


# Add traceback so errors are forwarded, otherwise
# some errors are suppressed due to the multiprocessing
# library call
@lu.full_traceback
def make_surf_map(bagfile, world, robot_name):
    """Build a surf map for bagfile; thin wrapper over make_surf_map.py.

    Decorated with full_traceback so errors raised in multiprocessing worker
    processes keep their original traceback in the exception message.
    """
    mm.make_surf_map(bagfile, world, robot_name)


# Helper that unpacks arguments and calls original function
# Aides running jobs in parallel as pool only supports
# passing a single argument to workers
def make_surf_map_helper(zipped_vals):
    """Unpack a (bagfile, world, robot_name) tuple and build its map.

    Pool.map only passes a single argument to each worker, so the per-job
    arguments are bundled into one tuple and unpacked here.
    """
    bagfile, world, robot_name = zipped_vals
    return make_surf_map(bagfile, world, robot_name)


def make_surf_maps(bags, world, robot_name, num_processes):
    """Build a surf map for each bag, running jobs in parallel.

    Args:
        bags: List of bagfile paths to build maps for.
        world: ASTROBEE_WORLD value passed to each mapping job.
        robot_name: Robot config name passed to each mapping job.
        num_processes: Number of worker processes to run concurrently.
    """
    # Use the pool as a context manager so worker processes are always
    # cleaned up, even when a mapping job raises; the original code never
    # closed or joined the pool.
    with multiprocessing.Pool(num_processes) as pool:
        pool.map(
            make_surf_map_helper,
            list(zip(bags, itertools.repeat(world), itertools.repeat(robot_name))),
        )


if __name__ == "__main__":
    arg_parser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )
    arg_parser.add_argument(
        "--bags",
        nargs="*",
        help="List of bags to make maps for. If none provided, all bags in the current directory are used.",
    )
    arg_parser.add_argument("-w", "--world", default="iss")
    arg_parser.add_argument("-r", "--robot-name", default="bumble")
    arg_parser.add_argument(
        "-p",
        "--num-processes",
        type=int,
        default=10,
        help="Number of concurrent processes to run, where each map creation job is assigned to one process.",
    )
    args = arg_parser.parse_args()
    # Fall back to every *.bag in the current directory when --bags is omitted.
    if args.bags is None:
        bags = glob.glob("*.bag")
    else:
        bags = args.bags
    make_surf_maps(bags, args.world, args.robot_name, args.num_processes)
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,7 @@ int main(int argc, char** argv) {
desc.add_options()("help,h", "produce help message")(
"image-directory", po::value<std::string>()->required(),
"Directory containing images. Images are assumed to be named in sequential order.")(
"--max-low-movement-mean-distance,m", po::value<double>(&max_low_movement_mean_distance)->default_value(0.1),
"--max-low-movement-mean-distance,m", po::value<double>(&max_low_movement_mean_distance)->default_value(0.15),
"Max mean distance for optical flow tracks between sequential images to be classified as a low movement pair.");
po::positional_options_description p;
p.add("image-directory", 1);
Expand Down
1 change: 0 additions & 1 deletion tools/localization_analysis/scripts/average_results.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import os

import pandas as pd
import utilities


def combined_results(csv_files):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,10 +27,10 @@
import sys

import bag_sweep
import localization_common.utilities as lu
import pandas as pd
import parameter_sweep
import plot_parameter_sweep_results
import utilities


def save_ranges(param_range_directory, output_directory):
Expand Down Expand Up @@ -123,7 +123,7 @@ def bag_and_parameter_sweep(graph_bag_params_list, output_dir):
if os.path.isdir(args.output_dir):
print(("Output directory " + args.output_dir + " already exists."))
sys.exit()
output_dir = utilities.create_directory(args.output_dir)
output_dir = lu.create_directory(args.output_dir)

graph_bag_params_list = bag_sweep.load_params(args.config_file)
bag_and_parameter_sweep(graph_bag_params_list, output_dir)
Expand Down
Loading

0 comments on commit 7a3c611

Please sign in to comment.