# Using the Stereo RealSense T265
# Introduction
Data output by the realsense T265 sensor:
The stereo camera captures fisheye images at 848x800 resolution and 30 FPS;
IMU data (including a 6DoF pose, a 3DoF accelerometer, and a 3DoF gyroscope).
All of the above data is calibrated at the factory, and the calibration is stored on the sensor itself, so you can easily obtain the internal calibration parameters from the sensor's output.
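For example, here is a minimal sketch of how this can be done with the standard librealsense2 stream-profile API: it starts the two fisheye streams and reads out the left camera's intrinsics and the left-to-right extrinsics (the variable names `K` and `T` are just illustrative):

```
#include <librealsense2/rs.hpp>
#include <iostream>

int main()
{
    // Enable both fisheye streams so their profiles (and calibration) become available
    rs2::config cfg;
    cfg.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
    cfg.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);

    rs2::pipeline pipe;
    rs2::pipeline_profile profile = pipe.start(cfg);

    auto left  = profile.get_stream(RS2_STREAM_FISHEYE, 1).as<rs2::video_stream_profile>();
    auto right = profile.get_stream(RS2_STREAM_FISHEYE, 2).as<rs2::video_stream_profile>();

    // Factory intrinsics of the left fisheye camera (focal lengths, principal point, distortion)
    rs2_intrinsics K = left.get_intrinsics();
    // Factory extrinsics from the left to the right fisheye camera (rotation + translation)
    rs2_extrinsics T = left.get_extrinsics_to(right);

    std::cout << "left fisheye: fx=" << K.fx << " fy=" << K.fy
              << " ppx=" << K.ppx << " ppy=" << K.ppy << std::endl;
    std::cout << "left->right translation x=" << T.translation[0] << " m" << std::endl;

    pipe.stop();
    return 0;
}
```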
# Installation
It is recommended to follow the installation instructions from the librealsense repository on GitHub.
First register the server's public key:

```
sudo apt-key adv --keyserver keyserver.ubuntu.com --recv-key F6E65AC044F831AC80A06380C8B3A55A6F3EFCDE || sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-key F6E65AC044F831AC80A06380C8B3A55A6F3EFCDE
```

If a public-key error appears after running it, run

```
sudo gedit /etc/apt/sources.list
```

and update the download sources (see the LUG repo file generator at ustc.edu.cn).

Then add the repository. For Ubuntu 16:

```
sudo add-apt-repository "deb http://realsense-hw-public.s3.amazonaws.com/Debian/apt-repo xenial main" -u
```

For Ubuntu 18:

```
sudo add-apt-repository "deb http://realsense-hw-public.s3.amazonaws.com/Debian/apt-repo bionic main" -u
```

Then install the packages:

```
sudo apt-get install librealsense2-dkms librealsense2-utils librealsense2-dev librealsense2-dbg
```

Once the packages have installed successfully, connect your realsense T265 to a USB port (USB 3.0 or better), then run the following command in a terminal to see the sensor's output:
```
realsense-viewer
```
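If you prefer a quick check from code rather than the GUI, a minimal sketch using librealsense2's device enumeration looks like this (illustrative only; it simply lists whatever RealSense devices are attached):

```
#include <librealsense2/rs.hpp>
#include <iostream>

int main()
{
    // Query all RealSense devices currently connected to the machine
    rs2::context ctx;
    rs2::device_list devices = ctx.query_devices();

    std::cout << "Found " << devices.size() << " RealSense device(s)" << std::endl;
    for (rs2::device dev : devices)
    {
        // Print the model name and serial number of each device
        std::cout << dev.get_info(RS2_CAMERA_INFO_NAME) << "  S/N: "
                  << dev.get_info(RS2_CAMERA_INFO_SERIAL_NUMBER) << std::endl;
    }
    return 0;
}
```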
# Programming with the realsense T265
# Obtaining the pose by IMU integration
Once you have installed everything following the Installation section above, the library and header files needed for the realsense T265 are already on your system, so we can start programming right away. Below I show how to compute the sensor's motion trajectory from the IMU information it outputs; essentially, the pose (position and orientation) is produced by integrating the IMU's velocity and acceleration. Without further ado, here is the code.

```
// License: Apache 2.0. See LICENSE file in root directory.
// Copyright(c) 2019 Intel Corporation. All Rights Reserved.
#include <librealsense2/rs.hpp>
#include <iostream>
#include <iomanip>
#include <chrono>
#include <thread>
#include <mutex>
#include <math.h>
#include <float.h>

// Convert a rotation vector to a quaternion -- a conversion between two
// representations of rotation. (Don't worry if quaternions are new to you;
// they take a while to get used to.)
inline rs2_quaternion quaternion_exp(rs2_vector v)
{
    float x = v.x/2, y = v.y/2, z = v.z/2, th2, th = sqrtf(th2 = x*x + y*y + z*z);
    float c = cosf(th), s = th2 < sqrtf(120*FLT_EPSILON) ? 1-th2/6 : sinf(th)/th;
    rs2_quaternion Q = { s*x, s*y, s*z, c };
    return Q;
}

// Multiply two quaternions: compose one rotation after another.
inline rs2_quaternion quaternion_multiply(rs2_quaternion a, rs2_quaternion b)
{
    rs2_quaternion Q = {
        a.x * b.w + a.w * b.x - a.z * b.y + a.y * b.z,
        a.y * b.w + a.z * b.x + a.w * b.y - a.x * b.z,
        a.z * b.w - a.y * b.x + a.x * b.y + a.w * b.z,
        a.w * b.w - a.x * b.x - a.y * b.y - a.z * b.z,
    };
    return Q;
}

// Predict the pose (rotation and translation) by discrete Euler integration.
rs2_pose predict_pose(rs2_pose & pose, float dt_s)
{
    rs2_pose P = pose;
    P.translation.x = dt_s * (dt_s/2 * pose.acceleration.x + pose.velocity.x) + pose.translation.x;
    P.translation.y = dt_s * (dt_s/2 * pose.acceleration.y + pose.velocity.y) + pose.translation.y;
    P.translation.z = dt_s * (dt_s/2 * pose.acceleration.z + pose.velocity.z) + pose.translation.z;
    rs2_vector W = {
        dt_s * (dt_s/2 * pose.angular_acceleration.x + pose.angular_velocity.x),
        dt_s * (dt_s/2 * pose.angular_acceleration.y + pose.angular_velocity.y),
        dt_s * (dt_s/2 * pose.angular_acceleration.z + pose.angular_velocity.z),
    };
    P.rotation = quaternion_multiply(quaternion_exp(W), pose.rotation);
    return P;
}

int main(int argc, char * argv[]) try
{
    // Declare a realsense pipeline
    rs2::pipeline pipe;
    // Create a configuration object
    rs2::config cfg;
    // Tell the configuration that we want the sensor's POSE (6DOF) stream
    cfg.enable_stream(RS2_STREAM_POSE, RS2_FORMAT_6DOF);

    std::mutex mutex;
    // Callback invoked for every incoming frame
    auto callback = [&](const rs2::frame& frame)
    {
        std::lock_guard<std::mutex> lock(mutex);
        if (rs2::pose_frame fp = frame.as<rs2::pose_frame>())
        {
            rs2_pose pose_data = fp.get_pose_data();
            auto now = std::chrono::system_clock::now().time_since_epoch();
            double now_ms = std::chrono::duration_cast<std::chrono::milliseconds>(now).count();
            double pose_time_ms = fp.get_timestamp();
            float dt_s = static_cast<float>(std::max(0., (now_ms - pose_time_ms)/1000.));
            rs2_pose predicted_pose = predict_pose(pose_data, dt_s);
            std::cout << "Predicted " << std::fixed << std::setprecision(3) << dt_s*1000 << "ms "
                      << "Confidence: " << pose_data.tracker_confidence << " T: "
                      << predicted_pose.translation.x << " "
                      << predicted_pose.translation.y << " "
                      << predicted_pose.translation.z << " (meters) \r";
        }
    };

    // Start streaming; each new frame is handled in the callback
    rs2::pipeline_profile profiles = pipe.start(cfg, callback);
    std::cout << "started thread\n";

    while (true)
    {
        std::this_thread::sleep_for(std::chrono::milliseconds(100));
    }
    return EXIT_SUCCESS;
}
catch (const rs2::error & e)
{
    std::cerr << "RealSense error calling " << e.get_failed_function()
              << "(" << e.get_failed_args() << "):\n    " << e.what() << std::endl;
    return EXIT_FAILURE;
}
catch (const std::exception& e)
{
    std::cerr << e.what() << std::endl;
    return EXIT_FAILURE;
}
```

The source file involves a bit of math; you don't need to dig into it deeply, just read it through.
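If you do want a summary of that math, the update implemented by `predict_pose` above amounts to a constant-acceleration prediction over the time step $\Delta t$ (my notation: $\mathbf{p}$, $\mathbf{v}$, $\mathbf{a}$ are position, velocity and acceleration; $q$, $\boldsymbol{\omega}$, $\boldsymbol{\alpha}$ are the orientation quaternion, angular velocity and angular acceleration):

$$
\mathbf{p}_{t+\Delta t} = \mathbf{p}_t + \mathbf{v}_t\,\Delta t + \tfrac{1}{2}\,\mathbf{a}_t\,\Delta t^2,
\qquad
q_{t+\Delta t} = \exp\!\Big(\tfrac{1}{2}\big(\boldsymbol{\omega}_t\,\Delta t + \tfrac{1}{2}\,\boldsymbol{\alpha}_t\,\Delta t^2\big)\Big)\otimes q_t ,
$$

where $\exp(\cdot)$ is the quaternion exponential computed by `quaternion_exp` and $\otimes$ is the quaternion product computed by `quaternion_multiply`.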
Besides the source file, we also need a CMakeLists.txt. You can copy the one I wrote directly and it will work:

```
cmake_minimum_required(VERSION 3.1.0)
project(test)
set(CMAKE_BUILD_TYPE "release")

# Build the executable `test` from main.cpp
add_executable(test main.cpp)
# Link the realsense2 library into `test`
target_link_libraries(test realsense2)
```
Put the source file main.cpp and CMakeLists.txt in the same folder, then run the usual CMake build steps in order (e.g. `mkdir build && cd build && cmake .. && make`) to compile and generate the executable.
Finally, connect the realsense T265 to the computer (at least USB 3.0) and run the newly built program `test`; move the sensor around and you will see the pose output. If you get the error `error while loading shared libraries: librealsense2.so.2.34: cannot open shared object file: no such file or directory` at runtime, change the way librealsense2 is linked in CMakeLists.txt to the following:
```
find_package(realsense2 REQUIRED)
include_directories(${realsense2_INCLUDE_DIR})
target_link_libraries(test ${realsense2_LIBRARY})
```
# Displaying the stereo camera data with OpenCV
With the previous section as a basis, I will go straight to the code here.
```
#include <librealsense2/rs.hpp>
#include <opencv2/opencv.hpp>

int main()
{
    rs2::config cfg;
    // Request both fisheye streams in 8-bit grayscale
    cfg.enable_stream(RS2_STREAM_FISHEYE, 1, RS2_FORMAT_Y8);
    cfg.enable_stream(RS2_STREAM_FISHEYE, 2, RS2_FORMAT_Y8);

    rs2::pipeline pipe;
    pipe.start(cfg);

    rs2::frameset data;
    while (1)
    {
        // Block until a new synchronized set of frames arrives
        data = pipe.wait_for_frames();
        rs2::frame image_left  = data.get_fisheye_frame(1);
        rs2::frame image_right = data.get_fisheye_frame(2);
        if (!image_left || !image_right)
            break;

        // Wrap the raw frame buffers in OpenCV matrices (no copy)
        cv::Mat cv_image_left (cv::Size(848, 800), CV_8U, (void*)image_left.get_data(),  cv::Mat::AUTO_STEP);
        cv::Mat cv_image_right(cv::Size(848, 800), CV_8U, (void*)image_right.get_data(), cv::Mat::AUTO_STEP);

        cv::imshow("left",  cv_image_left);
        cv::imshow("right", cv_image_right);
        cv::waitKey(1);
    }
    return 0;
}
```
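A small design note on the code above: the 848x800 size is hard-coded to match the T265's fisheye resolution. If you would rather not rely on that constant, the size can be queried from the frame itself; here is a sketch of what the left `cv::Mat` line could look like instead (variable names are illustrative):

```
// Inside the loop: query the size of the incoming fisheye frame instead of hard-coding it
rs2::video_frame vf = image_left.as<rs2::video_frame>();
int width  = vf.get_width();   // 848 on the T265
int height = vf.get_height();  // 800 on the T265
cv::Mat cv_image_left(cv::Size(width, height), CV_8U, (void*)vf.get_data(), cv::Mat::AUTO_STEP);
```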
The corresponding CMakeLists.txt is as follows:

```
cmake_minimum_required(VERSION 3.1.0)
project(test)
set(CMAKE_BUILD_TYPE "release")

# Locate OpenCV and add its headers to the include path
find_package(OpenCV)
include_directories(${OpenCV_INCLUDE_DIRS})

add_executable(test main.cpp)
# Link both realsense2 and OpenCV into the executable
target_link_libraries(test realsense2 ${OpenCV_LIBS})
```