
Commit

add support for kinematics measurement on the RealSense device itself.
wei.wei committed Nov 29, 2023
1 parent 52d80ca commit 4fb634e
Showing 10 changed files with 413 additions and 55 deletions.
5 changes: 5 additions & 0 deletions CMakeLists.txt
@@ -13,6 +13,11 @@ find_package(realsense2 REQUIRED)
find_package(OpenGL REQUIRED)
find_package(glfw3 REQUIRED)

# prefer the GLVND libraries when CMake resolves OpenGL (policy CMP0072)
if (POLICY CMP0072)
  cmake_policy(SET CMP0072 NEW)
endif(POLICY CMP0072)


add_executable(${PROJECT_NAME}
main.cpp
65 changes: 54 additions & 11 deletions detec_aruco.py → detect_aruco.py
@@ -7,14 +7,17 @@ def detect_markers(image, aruco_dict_info, make_pair=True):
gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
_aruco_info = cv2.aruco.getPredefinedDictionary(aruco_dict_info)
parameters = cv2.aruco.DetectorParameters()
# use the newer ArucoDetector API
detector = cv2.aruco.ArucoDetector(_aruco_info, parameters)

corners, ids, rejected_img_points = cv2.aruco.detectMarkers(
gray, _aruco_info, parameters=parameters
)
if not make_pair:
return corners, ids, rejected_img_points
corners, ids, rejected_img_points = detector.detectMarkers(gray)
if ids is None:
return None
else:
return list(zip(ids, corners))
if not make_pair:
return corners, ids, rejected_img_points
else:
return list(zip(ids, corners))


def find_min_id_corners(pairs):
@@ -54,24 +57,53 @@ def build_intrinsic(intrinsic_param):
return np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1]])


def estimate_single_markers(corners, marker_len, cam_matrix, dist_coeffs):
marker_points = np.array(
[
[-marker_len / 2, marker_len / 2, 0],
[marker_len / 2, marker_len / 2, 0],
[marker_len / 2, -marker_len / 2, 0],
[-marker_len / 2, -marker_len / 2, 0],
],
dtype=np.float32,
)

_, rvec, tvec = cv2.solvePnP(marker_points, corners, cam_matrix, dist_coeffs)
return rvec, tvec


def get_ypr_and_translation(
corner, marker_len, intrinsic_matrix, dist_coef=np.zeros(5)
):
rvec, tvec, _ = cv2.aruco.estimatePoseSingleMarkers(
# OpenCV 4.8.0 removed `cv2.aruco.estimatePoseSingleMarkers`
# rvec, tvec, _ = cv2.aruco.estimatePoseSingleMarkers(
# corners=corner,
# markerLength=marker_len,
# cameraMatrix=intrinsic_matrix,
# distCoeffs=dist_coef,
# )
rvec, tvec = estimate_single_markers(
corners=corner,
markerLength=marker_len,
cameraMatrix=intrinsic_matrix,
distCoeffs=dist_coef,
marker_len=marker_len,
cam_matrix=intrinsic_matrix,
dist_coeffs=dist_coef,
)
# convert rvec to rotation matrix
rotvec, _ = cv2.Rodrigues(rvec)
# invert the world-to-camera transform to get camera-to-world
rot_cam2world = np.transpose(rotvec)
trans_cam2world = -rot_cam2world @ tvec[0].T
trans_cam2world = -rot_cam2world @ tvec
ypr = R2ypr(rot_cam2world)
return ypr, trans_cam2world


def process(image, dict_choose, marker_len, camera_matrix, dist_coef):
pairs = detect_markers(image, dict_choose)
corners = find_min_id_corners(pairs)
ypr, trans = get_ypr_and_translation(corners, marker_len, camera_matrix, dist_coef)
return ypr, trans


if __name__ == "__main__":
left_depature_path = "aruco_capture/left_departure.png"
right_depature_path = "aruco_capture/right_departure.png"
@@ -101,3 +133,14 @@ def get_ypr_and_translation(
ypr, trans = get_ypr_and_translation(corner, 0.176, camera_matrix, dist_coef)
print(ypr)
print(trans)

rootdir = "aruco_capture"
dict_choose = aruco_marker_dict["DICT_6X6_250"]
accept_names = []
for i in range(100, 201):
name = "frame_{:04d}.png".format(i)
pairs = detect_markers(cv2.imread(f"{rootdir}/{name}"), dict_choose)
if pairs is not None:
accept_names.append(name)

print(accept_names)
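
The hunk above replaces `cv2.aruco.estimatePoseSingleMarkers` with the hand-rolled `estimate_single_markers`, which calls `cv2.solvePnP` on the marker's four object points. The C++ side of the project faces the same removal on OpenCV 4.8; a minimal sketch of an equivalent helper, not part of this commit and with illustrative names, using the same object-point layout as the Python version above:

// Sketch only (not part of this commit): single-marker pose via cv::solvePnP,
// mirroring estimate_single_markers() from detect_aruco.py.
#include <opencv2/calib3d.hpp>
#include <opencv2/core.hpp>
#include <vector>

static bool estimate_single_marker(const std::vector<cv::Point2f>& corners,
                                   float marker_len,
                                   const cv::Mat& cam_matrix,
                                   const cv::Mat& dist_coeffs,
                                   cv::Vec3d& rvec, cv::Vec3d& tvec) {
  const float h = marker_len / 2.f;
  // Marker corners in the marker frame, in detector order:
  // top-left, top-right, bottom-right, bottom-left.
  const std::vector<cv::Point3f> object_points = {
      {-h, h, 0.f}, {h, h, 0.f}, {h, -h, 0.f}, {-h, -h, 0.f}};
  return cv::solvePnP(object_points, corners, cam_matrix, dist_coeffs, rvec, tvec);
}
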
47 changes: 6 additions & 41 deletions main.cpp
@@ -3,51 +3,16 @@
#include "src/rscvcapture.h"
#include <filesystem>
#include <vector>
#include <opencv2/core/version.hpp>
using namespace aruconavi;
namespace fs = std::filesystem;
using namespace std;

#define RED "\033[31m"
#define GREEN "\033[32m"
#define YELLOW "\033[33m"
#define BLUE "\033[34m"
#define MAGENTA "\033[35m"
#define CYAN "\033[36m"
#define WHITE "\033[37m"

int main() {
// string img_dir = "/Users/williamwei/Codes/aruco-navi/aruco_capture";
// vector<string> img_names;
// if (fs::is_directory(img_dir)) {
// for(const auto& entry : fs::directory_iterator(img_dir)) {
// img_names.emplace_back(entry.path().filename());
// }
// }
//
// for(const auto& name : img_names) {
// string full_nanme = img_dir + "/" + name;
//
// }
// string img_path =
// "/home/william/Codes/aruco-navi/aruco_capture/frame_0139.png"; Calc
// calculator(INTRINSICS640480, DISTORTION); cv::Mat image =
// cv::imread(img_path, cv::IMREAD_ANYCOLOR); calculator.process(image); cout
// << calculator.ypr << endl; cout << calculator.trans_cam2world << endl;
//
// cv::Mat image_copy;
// image.copyTo(image_copy);
// draw_coordinate_system(image_copy, calculator.camera_matrix_,
// calculator.dist_coef_, calculator.rvec_, calculator.tvec_);
// cv::imshow("detected coordinate system", image_copy);
// cv::waitKey(0);
// cv::destroyAllWindows();
// bool status = rs_with_cv_capture(1280, 720);
// if (status) {
// cout << "launch succeed!" << endl;
// } else {
// cout << "launch failed!" << endl;
// }
std::cout << "OpenCV version: " << CV_VERSION << std::endl;
bool status = rs_with_cv_capture(1280, 720);
if (status) {
cout << "launch succeed!" << endl;
} else {
cout << "launch failed!" << endl;
}
return 0;
}
3 changes: 3 additions & 0 deletions src/external_api/CMakeLists.txt
@@ -2,6 +2,8 @@ cmake_minimum_required(VERSION 3.5.1)

project(pyaruconavi)

set(CMAKE_EXPORT_COMPILE_COMMANDS ON)

find_package(OpenCV REQUIRED)
find_package(Eigen3 REQUIRED)
MESSAGE(" *** OpenCV_INCLUDE_DIRS : " ${OpenCV_INCLUDE_DIRS})
@@ -11,6 +13,7 @@ MESSAGE(" *** OpenCV_LIBS : " ${OpenCV_LIBS})
# specify your pybind11 location
# set(pybind11_DIR "/home/william/anaconda3/lib/python3.11/site-packages/pybind11/share/cmake/pybind11")
execute_process(
COMMAND conda activate base
COMMAND python ${CMAKE_CURRENT_LIST_DIR}/get_pybind11_cmake.py
OUTPUT_VARIABLE PYTHON_OUTPUT
)
3 changes: 0 additions & 3 deletions src/external_api/export_apis.h
@@ -26,9 +26,6 @@
#include <Eigen/Core>
#include <Eigen/Dense>
#include <cmath>
// #include "../detect.h"
// #include "../mathutil.h"
// #include "../constant.h"

struct Intrinsic {
float fx = 610.117;
109 changes: 109 additions & 0 deletions src/kinematics.cpp
@@ -0,0 +1,109 @@
/*
* Copyright (c) 2022-2023, William Wei. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "kinematics.h"
#include <iostream>

namespace aruconavi {
using float3 = vec3<float>;

bool check_imu_is_supported() {
bool found_gyro = false;
bool found_accel = false;
rs2::context ctx;
for (auto dev : ctx.query_devices()) {
// The same device should support gyro and accel
found_gyro = false;
found_accel = false;
for (auto sensor : dev.query_sensors()) {
for (auto profile : sensor.get_stream_profiles()) {
if (profile.stream_type() == RS2_STREAM_GYRO)
found_gyro = true;

if (profile.stream_type() == RS2_STREAM_ACCEL)
found_accel = true;
}
}
if (found_gyro && found_accel)
break;
}
return found_gyro && found_accel;
}

void RotationEstimator::process_gyro(rs2_vector gyro_data, double ts) {
if (first_gyro) {
first_gyro = false;
last_ts_gyro = ts;
}
// Holds the change in angle, as calculated from gyro
float3 gyro_angle;

gyro_angle.e[0] = gyro_data.x; // pitch
gyro_angle.e[1] = gyro_data.y; // yaw
gyro_angle.e[2] = gyro_data.z; // roll

// Compute the difference between arrival times of previous and current gyro
// frames
double dt_gyro = (ts - last_ts_gyro) / 1000.0;
last_ts_gyro = ts;

// Change in angle equals angular velocity * time elapsed since the last measurement
gyro_angle = gyro_angle * static_cast<float>(dt_gyro);
// Apply the calculated change of angle to the current angle (theta)
std::lock_guard<std::mutex> lock(theta_mutex);
theta.add(-gyro_angle.z(), -gyro_angle.y(), -gyro_angle.x());
}

void RotationEstimator::process_accel(rs2_vector accel_data) {
float3 accel_angle;
// Calculate rotation angle from accelerometer data
accel_angle.e[2] = atan2(accel_data.y, accel_data.z);
accel_angle.e[0] = atan2(accel_data.x, sqrt(accel_data.y * accel_data.y +
accel_data.z * accel_data.z));
// If it is the first iteration, set initial pose of camera according to
// accelerometer data (note the different handling for Y axis)
std::lock_guard<std::mutex> lock(theta_mutex);
if (first_accel) {
first_accel = false;
theta = accel_angle;
// Since we can't infer the angle around Y axis using accelerometer data,
// we'll use PI as a convention for the initial pose
theta.e[1] = M_PI;
} else {
/*
Apply Complementary Filter:
- high-pass filter = theta * alpha: allows short-duration signals to
pass through while filtering out signals that are steady over time, is
used to cancel out drift.
- low-pass filter = accel * (1- alpha): lets through long term
changes, filtering out short term fluctuations
*/
theta.e[0] = theta.e[0] * alpha + accel_angle.e[0] * (1 - alpha);
theta.e[2] = theta.e[2] * alpha + accel_angle.e[2] * (1 - alpha);
}
}

float3 RotationEstimator::get_theta() {
std::lock_guard<std::mutex> lock(theta_mutex);
return theta;
}

void RotationEstimator::print_theta() {
float3 cur_rot = get_theta();
std::cout << "realsense device rotate in yall: " << cur_rot.y() << "\n"
<< "realsense device rotate in pitch: " << cur_rot.x() << "\n"
<< "realsense device rotate in roll: " << cur_rot.z() << std::endl;
}
} // namespace aruconavi
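
For reference, `process_gyro` and `process_accel` together implement a standard complementary filter. Per axis, with \Delta t the spacing between gyro frames and \alpha = 0.98 as declared in kinematics.h:

\theta_t = \alpha (\theta_{t-1} + \omega_{gyro} \Delta t) + (1 - \alpha) \theta_{accel}

The fusion applies to the x (pitch) and z (roll) components only; the y (yaw) component is integrated from the gyro alone, since the accelerometer carries no heading information, which is why it is seeded with PI instead of being fused.
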
49 changes: 49 additions & 0 deletions src/kinematics.h
@@ -0,0 +1,49 @@
/*
* Copyright (c) 2022-2023, William Wei. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#ifndef ARUCO_NAVI_KINEMATICS_H
#define ARUCO_NAVI_KINEMATICS_H
#include <librealsense2/rs.hpp>
#include <mutex>
#include "mathtypes.h"

namespace aruconavi {

class RotationEstimator {
using float3 = vec3<float>;
// theta is the angle of camera rotation in x, y and z components
float3 theta;
std::mutex theta_mutex;
/* alpha indicates the part that gyro and accelerometer take in computation
of theta; higher alpha gives more weight to gyro, but too high values cause
drift; lower alpha gives more weight to accelerometer, which is more
sensitive to disturbances */
float alpha = 0.98f;
bool first_gyro = true;
bool first_accel = true;
// keeps the arrival time of previous gyro frame
double last_ts_gyro = 0;

public:
void process_gyro(rs2_vector gyro_data, double ts);
void process_accel(rs2_vector accel_data);
float3 get_theta();
void print_theta();
};

bool check_imu_is_supported();
} // namespace aruconavi
#endif // ARUCO_NAVI_KINEMATICS_H
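
How the estimator is driven is not shown in this diff (the capture wiring lives in files not expanded above). A minimal usage sketch following Intel's rs-motion sample; the function name and polling loop are illustrative, not part of this commit:

// Sketch only (not part of this commit): drive RotationEstimator from a
// librealsense motion callback, as in Intel's rs-motion sample.
#include <chrono>
#include <thread>
#include <librealsense2/rs.hpp>
#include "kinematics.h"

int run_imu_demo() {
  if (!aruconavi::check_imu_is_supported())
    return -1; // device has no gyro + accel pair

  aruconavi::RotationEstimator algo;
  rs2::pipeline pipe;
  rs2::config cfg;
  cfg.enable_stream(RS2_STREAM_GYRO, RS2_FORMAT_MOTION_XYZ32F);
  cfg.enable_stream(RS2_STREAM_ACCEL, RS2_FORMAT_MOTION_XYZ32F);

  // The callback runs on librealsense's internal thread; RotationEstimator
  // guards theta with a mutex, so reading it from elsewhere stays safe.
  pipe.start(cfg, [&](rs2::frame frame) {
    auto motion = frame.as<rs2::motion_frame>();
    if (!motion)
      return;
    auto type = motion.get_profile().stream_type();
    if (type == RS2_STREAM_GYRO)
      algo.process_gyro(motion.get_motion_data(), motion.get_timestamp());
    else if (type == RS2_STREAM_ACCEL)
      algo.process_accel(motion.get_motion_data());
  });

  for (int i = 0; i < 50; ++i) {
    std::this_thread::sleep_for(std::chrono::milliseconds(100));
    algo.print_theta();
  }
  pipe.stop();
  return 0;
}
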
20 changes: 20 additions & 0 deletions src/magic.h
@@ -0,0 +1,20 @@
/*
* Copyright (c) 2022-2023, William Wei. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#ifndef ARUCO_NAVI_MAGIC_H
#define ARUCO_NAVI_MAGIC_H
static auto anyone = [](auto&& k, auto&&... args) -> bool { return ((args == k) || ...); };
#endif // ARUCO_NAVI_MAGIC_H
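
`anyone` is a C++17 fold expression over `==`: `anyone(k, a, b, c)` expands to `(a == k) || (b == k) || (c == k)`, i.e. it reports whether the first argument equals any of the rest. A small usage sketch (the wrapper function below is illustrative):

// Sketch only: using the anyone() helper from magic.h.
#include <string>
#include "magic.h"

bool is_supported_image(const std::string& ext) {
  // True when ext equals any of the listed extensions.
  return anyone(ext, ".png", ".jpg", ".jpeg", ".bmp");
}
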
