Commit 3824162b authored by 胡雪纯

[feat] add HmdCalibratorIndustry method

parent 70b4fbb9
......@@ -6,7 +6,7 @@ the useful function as following:
First, calibrate the tracking cameras (fisheye cameras) using the cpp file calib_fisheye_cam.cpp
Then, use HMDCLAIB::HmdCalibrator hmdcalibrator to calib, you can refer the test/perform_Interaction_calib.cpp :
Then, use HMDCLAIB::HmdCalibratorInteraction hmdcalibrator to calibrate; you can refer to test/perform_Interaction_calib.cpp:
1. interactively click the screen display and get the fisheye uv by using the function hmdcalibrator.GetCaibCorrespondence() (a usage sketch follows this excerpt)
......
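For orientation, a minimal usage sketch of the interactive flow described above (a sketch only: the path variables are placeholders, and it assumes PerformCalib is public with the signature added in this commit; see test/perform_Interaction_calib.cpp for the actual test):

    // minimal sketch of the interactive calibration flow (paths are placeholders)
    HMDCLAIB::HmdCalibratorInteraction hmdcalibrator(left_image_dir, right_image_dir, screen_uv_path);
    hmdcalibrator.GetCaibCorrespondence();               // interactive clicking writes tracking_stereo_uv.txt
    HMDCLAIB::TrackingCameraInfo cameraInfo;             // fill with the fisheye stereo calibration results
    HMDCLAIB::CalibResultInfo result_info;
    hmdcalibrator.PerformCalib(cameraInfo, result_info); // fills the eye intrinsics/extrinsics in result_info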
......@@ -21,6 +21,25 @@ namespace HMDCLAIB{
Eigen::Matrix4f T_rw;
};
struct EyeCameraInfo// industrial camera
{
cv::Size image_size;
Eigen::Matrix3f K_l;
Eigen::VectorXf D_l;
Eigen::Matrix3f K_r;
Eigen::VectorXf D_r;
Eigen::Matrix4f T_lw;
Eigen::Matrix4f T_rw;
};
struct GlassInfo
{
float hfov_l;// degree
float vfov_l;// degree
float hfov_r;// degree
float vfov_r;// degree
};
struct CalibResultInfo
{
Eigen::Matrix3f K_eyel;
......@@ -29,10 +48,15 @@ namespace HMDCLAIB{
Eigen::Matrix4f T_eyer_tracking;
};
class HmdCalibrator{
enum DeviceType{
left = 0,
right = 1
};
class HmdCalibratorInteraction{
public:
HmdCalibrator(const std::string& left_image_dir,const std::string& right_image_dir,const std::string& screen_uv_path, std::string image_type="png", std::string tracking_stereo_uv_path = "tracking_stereo_uv.txt"){
HmdCalibratorInteraction(const std::string& left_image_dir,const std::string& right_image_dir,const std::string& screen_uv_path, std::string image_type="png", std::string tracking_stereo_uv_path = "tracking_stereo_uv.txt"){
_left_image_dir = left_image_dir;
_right_image_dir = right_image_dir;
_screen_uv_path = screen_uv_path;
......@@ -41,7 +65,7 @@ namespace HMDCLAIB{
_screen_uv.clear();
_tracking_stereo_uv.clear();
}
~HmdCalibrator(){
~HmdCalibratorInteraction(){
_screen_uv.clear();
_tracking_stereo_uv.clear();
}
......@@ -73,4 +97,32 @@ namespace HMDCLAIB{
};
class HmdCalibratorIndustry{
public:
explicit HmdCalibratorIndustry(TrackingCameraInfo trackingcamerainfo, EyeCameraInfo eyecamerainfo, GlassInfo glassinfo){
_trackingcamerainfo = std::make_shared<TrackingCameraInfo>(trackingcamerainfo);
_eyecamerainfo = std::make_shared<EyeCameraInfo>(eyecamerainfo);
_glassinfo = std::make_shared<GlassInfo>(glassinfo);
}
~HmdCalibratorIndustry() = default;
private:
bool GetCaibCorrespondence(const cv::Mat& chessboard_screen,const cv::Mat& chessboard_eyecamera,cv::Size chessboard_pattern, DeviceType& type);
bool PerformCalib(CalibResultInfo& result_info);
Eigen::Matrix3f _H_screenl_eyel;
Eigen::Matrix3f _K_on_l;
Eigen::Matrix3f _K_off_l;
Eigen::Matrix3f _H_screenr_eyer;
Eigen::Matrix3f _K_on_r;
Eigen::Matrix3f _K_off_r;
std::shared_ptr<TrackingCameraInfo> _trackingcamerainfo;
std::shared_ptr<EyeCameraInfo> _eyecamerainfo;
std::shared_ptr<GlassInfo> _glassinfo;
};
}
\ No newline at end of file
......@@ -4,11 +4,14 @@
#include "RansacProjection.h"
namespace HMDCLAIB{
#define PI 3.14159265359
std::string _window_name;
cv::Mat _mouse_img,_mouse_tmp;
std::vector<cv::Point2f> _uvs_image;
bool HmdCalibrator::PerformCalib(const TrackingCameraInfo& TrackingCameraInfo, CalibResultInfo& result_info){
float degree2radian(float degree){
return degree*PI/180.0f;
}
bool HmdCalibratorInteraction::PerformCalib(const TrackingCameraInfo& TrackingCameraInfo, CalibResultInfo& result_info){
if(read_tracking_stereo_uv() && read_screen_uv()){
cv::Mat cvK_l,cvK_r;
cv::eigen2cv(TrackingCameraInfo.K_l, cvK_l);
......@@ -155,7 +158,7 @@ bool HmdCalibrator::PerformCalib(const TrackingCameraInfo& TrackingCameraInfo, C
return true;
}
void HmdCalibrator::on_mouse(int event, int x, int y, int flags, void *ustc) {
void HmdCalibratorInteraction::on_mouse(int event, int x, int y, int flags, void *ustc) {
static cv::Point pre_pt = cv::Point(-1, -1);// initial coordinates
static cv::Point cur_pt = cv::Point(-1, -1);// current coordinates
char temp[16];
......@@ -179,7 +182,7 @@ void HmdCalibrator::on_mouse(int event, int x, int y, int flags, void *ustc) {
}
}
bool HmdCalibrator::GetCaibCorrespondence(){
bool HmdCalibratorInteraction::GetCaibCorrespondence(){
std::ofstream fs_uv_path(_tracking_stereo_uv_path);
if(!fs_uv_path.is_open()){
std::cout<<"BIG ERRROR!!!: Can not create the txt file: "<<_tracking_stereo_uv_path<<std::endl;
......@@ -230,7 +233,7 @@ bool HmdCalibrator::GetCaibCorrespondence(){
return true;
}
bool HmdCalibrator::read_tracking_stereo_uv(){
bool HmdCalibratorInteraction::read_tracking_stereo_uv(){
std::ifstream file_in(_tracking_stereo_uv_path);
if(file_in.is_open()){
while (!file_in.eof()){
......@@ -265,7 +268,7 @@ bool HmdCalibrator::read_tracking_stereo_uv(){
return false;
}
bool HmdCalibrator::read_screen_uv(){
bool HmdCalibratorInteraction::read_screen_uv(){
std::ifstream file_in(_screen_uv_path);
if(file_in.is_open()){
while (!file_in.eof()){
......@@ -293,7 +296,7 @@ bool HmdCalibrator::read_screen_uv(){
}
return false;
}
Eigen::Vector2f HmdCalibrator::ReprojectError(const Eigen::Vector3f& pt,const Eigen::Matrix4f& Rtcw,
Eigen::Vector2f HmdCalibratorInteraction::ReprojectError(const Eigen::Vector3f& pt,const Eigen::Matrix4f& Rtcw,
const Eigen::Matrix3f& K_c,
const Eigen::Vector2f& uv_0) {
Eigen::Vector3f pt_cam = Rtcw.block<3,3>(0,0) * pt + Rtcw.block<3,1>(0,3);
......@@ -302,7 +305,7 @@ bool HmdCalibrator::read_screen_uv(){
return uv_0-uv_1.head<2>();
}
Eigen::Vector3f HmdCalibrator::TriangulatePoint(const Eigen::Vector2f uvl , const Eigen::Vector2f uvr,
Eigen::Vector3f HmdCalibratorInteraction::TriangulatePoint(const Eigen::Vector2f uvl , const Eigen::Vector2f uvr,
const Eigen::Matrix3f& K_l,const Eigen::Matrix3f& K_r,
const Eigen::Matrix4f& Rtlw,const Eigen::Matrix4f& Rtrw,
Eigen::Vector2f& diff0, Eigen::Vector2f& diff1) {
......@@ -326,5 +329,120 @@ bool HmdCalibrator::read_screen_uv(){
return pt3;
}
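// Industrial path: detect the same chessboard pattern in the screen image and in the industrial eye-camera
// image, refine both corner sets, and estimate with cv::findHomography the homography H_screen_eye that maps
// eye-camera pixels to screen pixels; the result is stored for the left or right device according to `type`.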
bool HmdCalibratorIndustry::GetCaibCorrespondence(const cv::Mat& chessboard_screen,const cv::Mat& chessboard_eyecamera,cv::Size chessboard_pattern, DeviceType& type){
cv::Mat gray_chessborad_screen;
cv::Mat gray_chessboard_eyecamera;
if(chessboard_screen.channels()>1){
cv::cvtColor(chessboard_screen,gray_chessborad_screen,cv::COLOR_RGB2GRAY);
}else{
gray_chessborad_screen = chessboard_screen;
}
if(chessboard_eyecamera.channels()>1){
cv::cvtColor(chessboard_eyecamera,gray_chessboard_eyecamera,cv::COLOR_RGB2GRAY);
}else{
gray_chessboard_eyecamera = chessboard_eyecamera;
}
std::vector<cv::Point2f> corners_screen;
bool found1 = cv::findChessboardCorners(gray_chessborad_screen, chessboard_pattern, corners_screen,
cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_FILTER_QUADS);
std::vector<cv::Point2f> corners_eyecamera;
bool found2 = cv::findChessboardCorners(gray_chessboard_eyecamera, chessboard_pattern, corners_eyecamera,
cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_FILTER_QUADS);
if (found1 && found2)
{
cv::cornerSubPix(gray_chessborad_screen, corners_screen, cv::Size(5, 5), cv::Size(-1, -1),cv::TermCriteria( cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.1));
cv::cornerSubPix(gray_chessboard_eyecamera, corners_eyecamera, cv::Size(5, 5), cv::Size(-1, -1),cv::TermCriteria( cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.1));
}else{
std::cout<<" cannot found chessboard in chessboard_screen or chessboard_eyecamera--> found gray_chessborad_screen: "<< found1<<" , found chessboard_eyecamera:"<< found2<<std::endl;
return false;
}
assert(corners_screen.size() == corners_eyecamera.size());
cv::Mat cv_H_screen_eye = cv::findHomography(corners_eyecamera,corners_screen);
Eigen::Matrix3f H_screen_eye;
cv::cv2eigen(cv_H_screen_eye,H_screen_eye);
if(type==DeviceType::left){
_H_screenl_eyel = H_screen_eye;
}else{
_H_screenr_eyer = H_screen_eye;
}
return true;
}
bool HmdCalibratorIndustry::PerformCalib(CalibResultInfo& result_info){
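// On-axis (display) intrinsics from the glass FOV: for an image of width W, tan(hfov/2) = (W/2)/fx,
// so fx = (W/2)/tan(hfov/2), and likewise fy = (H/2)/tan(vfov/2); the principal point is the image center.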
_K_on_l = Eigen::Matrix3f::Identity();
_K_on_l(0,0) = (float(IMAGEWIDTH)/2)/tan(degree2radian(_glassinfo->hfov_l/2));
_K_on_l(1,1) = (float(IMAGEHEIGHT)/2)/tan(degree2radian(_glassinfo->vfov_l/2));
_K_on_l(0,2) = float(IMAGEWIDTH)/2;
_K_on_l(1,2) = float(IMAGEHEIGHT)/2;
_K_on_r = Eigen::Matrix3f::Identity();
_K_on_r(0,0) = (float(IMAGEWIDTH)/2)/tan(degree2radian(_glassinfo->hfov_r/2));
_K_on_r(1,1) = (float(IMAGEHEIGHT)/2)/tan(degree2radian(_glassinfo->vfov_r/2));
_K_on_r(0,2) = float(IMAGEWIDTH)/2;
_K_on_r(1,2) = float(IMAGEHEIGHT)/2;
//T_eyel_w * T_w_trackingl
Eigen::Matrix4f T_eyel_trackingl = _eyecamerainfo->T_lw * (_trackingcamerainfo->T_lw).inverse();
Eigen::Matrix4f T_eyer_trackingr = _eyecamerainfo->T_rw * (_trackingcamerainfo->T_rw).inverse();
Eigen::Matrix4f P_eyel_trackingl = Eigen::Matrix4f::Identity();
P_eyel_trackingl.block<3,3>(0,0) = _H_screenl_eyel*_eyecamerainfo->K_l*T_eyel_trackingl.block<3,3>(0,0);
P_eyel_trackingl.block<3,1>(0,3) = _H_screenl_eyel*_eyecamerainfo->K_l*T_eyel_trackingl.block<3,1>(0,3);
Eigen::Matrix4f P_eyer_trackingr = Eigen::Matrix4f::Identity();
P_eyer_trackingr.block<3,3>(0,0) = _H_screenr_eyer*_eyecamerainfo->K_r*T_eyer_trackingr.block<3,3>(0,0);
P_eyer_trackingr.block<3,1>(0,3) = _H_screenr_eyer*_eyecamerainfo->K_r*T_eyer_trackingr.block<3,1>(0,3);
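// P_eye_tracking composes H_screen_eye * K_eye * [R|t]_eye_tracking, i.e. it maps points expressed in the
// tracking-camera frame all the way to screen pixels; that is why it must be decomposed back into the
// display intrinsics K_eye and the extrinsics T_eye_tracking below.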
/////////////////you need decompose "P_eye_tracking" to "K_eye", "T_eye_tracking"//////////////////////////
Eigen::Matrix3f H_Rt_l = _K_on_l.inverse()*_H_screenl_eyel*_eyecamerainfo->K_l;//
cv::Mat cv_H_Rt_l,cv_H_Rt_r;
cv::eigen2cv(H_Rt_l,cv_H_Rt_l);
Eigen::Matrix3f H_Rt_r = _K_on_r.inverse()*_H_screenr_eyer*_eyecamerainfo->K_r;//
cv::eigen2cv(H_Rt_r,cv_H_Rt_r);
cv::Mat K_identity = cv::Mat::eye(3,3,CV_32FC1);
std::vector<cv::Mat> cv_R_screenl_eyel,cv_R_screenr_eyer;
std::vector<cv::Mat> cv_t_screenl_eyel,cv_t_screenr_eyer;
std::vector<cv::Mat> cv_normal_screenl_eyel,cv_normal_screenr_eyer;// select the candidate with the smallest rotation angle
cv::decomposeHomographyMat(cv_H_Rt_l,K_identity,cv_R_screenl_eyel,cv_t_screenl_eyel,cv_normal_screenl_eyel);
cv::decomposeHomographyMat(cv_H_Rt_r,K_identity,cv_R_screenr_eyer,cv_t_screenr_eyer,cv_normal_screenr_eyer);
///////////////////////////////////////////////////////////////////////////////////////////////////////////
/////////////need select one of the R,t ///////////////
//TODO:@huxuechun
///////////////////////////////////////////////////////////////////////////////////////////////////////////
Eigen::Matrix3f R_screenl_eyel,R_screenr_eyer;
Eigen::Vector3f t_screenl_eyel,t_screenr_eyer;
//TODO:@huxuechun
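// One possible resolution of the TODO above (a sketch, not necessarily the author's intended rule):
// keep the cv::decomposeHomographyMat candidate whose rotation angle, recovered with cv::Rodrigues,
// is the smallest, following the "smallest rotation angle" note on the candidate vectors.
auto pick_smallest_rotation = [](const std::vector<cv::Mat>& Rs, const std::vector<cv::Mat>& ts,
                                 Eigen::Matrix3f& R_out, Eigen::Vector3f& t_out){
    double best_angle = 1e30;
    for(size_t i = 0; i < Rs.size(); ++i){
        cv::Mat rvec;
        cv::Rodrigues(Rs[i], rvec);// rotation vector; its norm is the rotation angle in radians
        double angle = cv::norm(rvec);
        if(angle < best_angle){
            best_angle = angle;
            Eigen::Matrix3d R_d; Eigen::Vector3d t_d;
            cv::cv2eigen(Rs[i], R_d);
            cv::cv2eigen(ts[i], t_d);
            R_out = R_d.cast<float>();
            t_out = t_d.cast<float>();
        }
    }
};
pick_smallest_rotation(cv_R_screenl_eyel, cv_t_screenl_eyel, R_screenl_eyel, t_screenl_eyel);
pick_smallest_rotation(cv_R_screenr_eyer, cv_t_screenr_eyer, R_screenr_eyer, t_screenr_eyer);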
///////////////////////////////////////////////////////////////////////////////////////////////////////////
Eigen::Matrix4f T_screenl_eyel,T_screenr_eyer;
T_screenl_eyel = Eigen::Matrix4f::Identity();
T_screenr_eyer = Eigen::Matrix4f::Identity();
T_screenl_eyel.block<3,3>(0,0) = R_screenl_eyel;
T_screenl_eyel.block<3,1>(0,3) = t_screenl_eyel;
T_screenr_eyer.block<3,3>(0,0) = R_screenr_eyer;
T_screenr_eyer.block<3,1>(0,3) = t_screenr_eyer;
////////////////////////////////////////////////////////////////////////////////////////////////////////////
_K_off_l = _K_on_l*R_screenl_eyel.inverse()*_K_on_l.inverse()*_H_screenl_eyel*_eyecamerainfo->K_l;
_K_off_r = _K_on_r*R_screenr_eyer.inverse()*_K_on_r.inverse()*_H_screenr_eyer*_eyecamerainfo->K_r;
////////////////////////////////////////////////////////////////////////////////////////////////////////////
result_info.K_eyel = _K_off_l;
result_info.K_eyer = _K_off_r;
Eigen::Matrix4f T_trackingr_trackingl = _trackingcamerainfo->T_rw * (_trackingcamerainfo->T_lw).inverse();//TODO:@huxuechun verify; derived here following the same T_xw convention as T_eyel_trackingl above
result_info.T_eyel_tracking = T_screenl_eyel*T_eyel_trackingl;
result_info.T_eyer_tracking = T_screenr_eyer*T_eyer_trackingr*T_trackingr_trackingl;
///////////////////////////////////////////////////////////////////////////////////////////////////////////
return true;
}
}
\ No newline at end of file
add_executable(calib_fisheye_cam calib_fisheye_cam.cpp)
target_link_libraries(calib_fisheye_cam
add_executable(calib_stereo_fisheye_cam ./calib_camera_tools/calib_stereo_fisheye_cam.cpp)
target_link_libraries(calib_stereo_fisheye_cam
CALIB_GLASS
${OpenCV_LIBS}
GlobImage
)
add_executable(calib_stereo_pinhole ./calib_camera_tools/calib_stereo_pinhole.cpp)
target_link_libraries(calib_stereo_pinhole
CALIB_GLASS
${OpenCV_LIBS}
GlobImage)
add_executable(calib_pinhole calib_pinhole.cpp)
target_link_libraries(calib_pinhole
add_executable(calib_mono_pinhole ./calib_camera_tools/calib_mono_pinhole.cpp)
target_link_libraries(calib_mono_pinhole
CALIB_GLASS
${OpenCV_LIBS}
GlobImage)
......
......@@ -89,8 +89,8 @@ int main(){
std::cout<<"error: "<<error<<std::endl;
std::cout << "K1" << Mat(K1)<<std::endl;
std::cout << "D1" << D1<<", size:"<<D1.size()<<" type: "<<D1.type()<<std::endl;
printf("Done Calibration\n");
printf("Starting Rectification\n");
printf("Done Single PineHole Calibration\n");
printf("Starting Single PineHole Rectification\n");
////////////show rectify/////////////////////////
cv::Mat remapx1,remapx2,remapy1,remapy2;
......
......@@ -102,8 +102,9 @@ int main(){
std::cout << "D2" << D2<<std::endl;
std::cout << "R" << Mat(R)<<std::endl;
std::cout << "T" << T<<std::endl;
printf("Done Calibration\n");
printf("Starting Rectification\n");
printf("Done FishEye Stereo Calibration\n");
printf("Starting FishEye Stereo Rectification\n");
cv::Mat R1, R2, P1, P2, Q;
cv::fisheye::stereoRectify(K1, D1, K2, D2, img1.size(), R, T, R1, R2, P1, P2,
Q, cv::CALIB_ZERO_DISPARITY, img1.size());
......
#include <iostream>
#include "GlobImage.h"
#include <opencv2/core/core.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <stdio.h>
using namespace std;
using namespace cv;
vector< vector< Point3d > > object_points;
vector< vector< Point2f > > imagePoints1, imagePoints2;
vector< Point2f > corners1, corners2;
vector< vector< Point2d > > left_img_points, right_img_points;
Mat img1, img2, gray1, gray2, spl1, spl2;
void load_image_points(int board_width, int board_height, float square_size, const std::vector<std::string>& left_file,
const std::vector<std::string>& right_file) {
Size board_size = Size(board_width, board_height);
int board_n = board_width * board_height;
for (int i = 0; i < left_file.size(); i++) {
img1 = imread(left_file[i]);
img2 = imread(right_file[i]);
cv::cvtColor(img1,gray1,cv::COLOR_RGB2GRAY);
cv::cvtColor(img2,gray2,cv::COLOR_RGB2GRAY);
bool found1 = false, found2 = false;
found1 = cv::findChessboardCorners(img1, board_size, corners1,
cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_FILTER_QUADS);
found2 = cv::findChessboardCorners(img2, board_size, corners2,
cv::CALIB_CB_ADAPTIVE_THRESH | cv::CALIB_CB_FILTER_QUADS);
if (found1)
{
std::cout<<"found left"<< left_file[i]<<std::endl;
cv::cornerSubPix(gray1, corners1, cv::Size(5, 5), cv::Size(-1, -1),cv::TermCriteria( cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.1));
cv::drawChessboardCorners(gray1, board_size, corners1, found1);
}
if (found2)
{
std::cout<<"found right"<< right_file[i]<<std::endl;
cv::cornerSubPix(gray2, corners2, cv::Size(5, 5), cv::Size(-1, -1),cv::TermCriteria( cv::TermCriteria::COUNT + cv::TermCriteria::EPS, 30, 0.1));
cv::drawChessboardCorners(gray2, board_size, corners2, found2);
}
vector<cv::Point3d> obj;
for( int r = 0; r < board_height; ++r )
for( int c = 0; c < board_width; ++c )
obj.push_back(Point3d(double( (float)c * square_size ), double( (float)r * square_size ), 0));
if (found1 && found2) {
cout << i << ". Found corners!" << endl;
imagePoints1.push_back(corners1);
imagePoints2.push_back(corners2);
object_points.push_back(obj);
if(i==left_file.size()-1){
cv::imshow("gray1",gray1);
cv::imshow("gray2",gray2);
cv::waitKey(0);
}
}
}
for (int i = 0; i < imagePoints1.size(); i++) {
vector< Point2d > v1, v2;
for (int j = 0; j < imagePoints1[i].size(); j++) {
v1.push_back(Point2d((double)imagePoints1[i][j].x, (double)imagePoints1[i][j].y));
v2.push_back(Point2d((double)imagePoints2[i][j].x, (double)imagePoints2[i][j].y));
}
left_img_points.push_back(v1);
right_img_points.push_back(v2);
}
}
int main(){
cv::String left_pattern = "/home/hxc/Camera/cam0/data/*.png";
cv::String right_pattern = "/home/hxc/Camera/cam1/data/*.png";
std::vector<std::string> left_file = GLOB::GlobImage::ReadImage(left_pattern);
std::vector<std::string> right_file = GLOB::GlobImage::ReadImage(right_pattern);
int board_width = 11;
int board_height = 8;
float square_size = 0.035;
load_image_points( board_width,board_height,square_size, left_file, right_file);
cv::Matx33d K1, K2, R;
cv::Vec3d T;
cv::Vec4d D1, D2;
int flag = 0;// note: the cv::fisheye::CALIB_* flags used by the fisheye tool do not apply to cv::stereoCalibrate, so no flags are set here
cv::Mat E,F;
double error = cv::stereoCalibrate(object_points, left_img_points, right_img_points,
K1, D1, K2, D2, img1.size(), R, T,E,F, flag,
cv::TermCriteria(3, 12, 0));
std::cout<<"error: "<<error<<std::endl;
std::cout << "K1" << Mat(K1)<<std::endl;
std::cout << "K2" << Mat(K2)<<std::endl;
std::cout << "D1" << D1<<std::endl;
std::cout << "D2" << D2<<std::endl;
std::cout << "R" << Mat(R)<<std::endl;
std::cout << "T" << T<<std::endl;
printf("Done PineHole Stereo Calibration\n");
printf("Starting PineHole Stereo Rectification\n");
cv::Mat R1, R2, P1, P2, Q;
cv::stereoRectify(K1, D1, K2, D2, img1.size(), R, T, R1, R2, P1, P2,
Q, cv::CALIB_ZERO_DISPARITY);
std::cout << "R1" << R1<<std::endl;
std::cout << "R2" << R2<<std::endl;
std::cout << "P1" << P1<<std::endl;
std::cout << "P2" << P2<<std::endl;
std::cout << "Q" << Q<<std::endl;
std::cout<<"baseline: "<<1/Q.at<double>(3,2)<<endl;
////////////show rectify/////////////////////////
cv::Mat remapx1,remapx2,remapy1,remapy2;
cv::initUndistortRectifyMap(K1, D1, R1, P1, img1.size(), CV_16SC2, remapx1, remapy1);
cv::initUndistortRectifyMap(K2, D2, R2, P2, img1.size(), CV_16SC2, remapx2, remapy2);
cv::Mat grayImageL,grayImageR;
cv::Mat rgbImageL = imread(left_file[0]);
cvtColor(rgbImageL, grayImageL, cv::COLOR_RGB2GRAY);
cv::Mat rgbImageR = imread(right_file[0]);
cvtColor(rgbImageR, grayImageR, cv::COLOR_RGB2GRAY);
imshow("ImageL Before Rectify", grayImageL);
imshow("ImageR Before Rectify", grayImageR);
cv::Mat rectifyImageL,rectifyImageR;
remap(grayImageL, rectifyImageL, remapx1, remapy1, INTER_LINEAR);
remap(grayImageR, rectifyImageR, remapx2, remapy2, INTER_LINEAR);
Mat rgbRectifyImageL, rgbRectifyImageR;
cvtColor(rectifyImageL, rgbRectifyImageL, cv::COLOR_GRAY2RGB);
cvtColor(rectifyImageR, rgbRectifyImageR, cv::COLOR_GRAY2RGB);
imshow("ImageL After Rectify", rgbRectifyImageL);
imshow("ImageR After Rectify", rgbRectifyImageR);
cv::Mat canvas;
int w, h;
w = img1.cols;
h = img1.rows;
canvas.create(h, w * 2, CV_8UC3);
cv::Mat canvasPart = canvas(Rect(w * 0, 0, w, h));
rgbRectifyImageL.copyTo(canvasPart);
canvasPart = canvas(Rect(w, 0, w, h));
rgbRectifyImageR.copyTo(canvasPart);
for (int i = 0; i < canvas.rows; i += 16)
cv::line(canvas, Point(0, i), Point(canvas.cols, i), Scalar(0, 255, 0), 1, 8);
cv::imshow("rectified", canvas);
cv::waitKey(0);
}
\ No newline at end of file
......@@ -4,7 +4,7 @@ int main(){
std::string left_image_dir = "/home/hxc/calib/mav0/cam0/data";
std::string right_image_dir = "/home/hxc/calib/mav0/cam1/data";
std::string screen_uv_path = "/home/hxc/calib/mav0/xy/data.txt";
HMDCLAIB::HmdCalibrator hmdcalibrator(left_image_dir,right_image_dir,screen_uv_path);
HMDCLAIB::HmdCalibratorInteraction hmdcalibrator(left_image_dir,right_image_dir,screen_uv_path);
hmdcalibrator.GetCaibCorrespondence();
HMDCLAIB::TrackingCameraInfo cameraInfo;
int mono_image_width = 640;
......
#include "calib_hmd.h"
int main(){
HMDCLAIB::TrackingCameraInfo trackingcameraInfo;
HMDCLAIB::EyeCameraInfo eyecameraInfo;//industry camera
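// Intended flow (a sketch only; in this commit the calibrator methods are still private and the info
// structs are not yet filled):
//   HMDCLAIB::GlassInfo glassInfo;
//   HMDCLAIB::HmdCalibratorIndustry calibrator(trackingcameraInfo, eyecameraInfo, glassInfo);
//   calibrator.GetCaibCorrespondence(chessboard_screen, chessboard_eyecamera, pattern, type);
//   calibrator.PerformCalib(result_info);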
}
\ No newline at end of file