Hi, there seems to be a small bug in the depth camera FOV calculation: with a 100°×100° depth camera, the frustum model drawn from the parameters is rectangular when it should be square. With the FOV set to 100×100, the author's original frustum is the red one shown in the figure: #273

Open
onlyliucat opened this issue Aug 4, 2023 · 0 comments

Hi, there seems to be a small bug in the depth camera FOV calculation. With a 100°×100° depth camera, the frustum model drawn from the parameters is rectangular, when it should be square. With the FOV set to 100×100, the author's original frustum is the red one shown in the figure:

[image] In the figure, green is the FOV displayed by OpenCV's viz module, and red is the FOV obtained by drawing the intersection lines of the author's six planes; the green one is correct. After modifying the FOV calculation:

[image] The result now coincides with the green frustum. The code that draws the camera model from the six plane parameters is as follows:
```cpp
// Frustum vertex labels: each vertex is the intersection of three of the six planes
// (indices refer to the remapped plane order in idx[] below).
// A: 0,3,4   B: 1,3,4   C: 1,2,4   D: 0,2,4
// E: 0,2,5   F: 1,2,5   G: 1,3,5   H: 0,3,5
// Polyline traversal order: A B C D E F G H A D E F C B G H E
bool drawModels(std::vector<cv::Point3f> pts, std::vector<cv::Vec3f> normals,
                std::string val_str = "WPolyLine")
{
    // Re-map plane indices so the polyline traverses the frustum edges correctly.
    // int idx[] = {0, 1, 2, 3, 4, 5, 6};
    // int idx[] = {0, 3, 1, 2, 4, 5};
    int idx[] = {0, 2, 3, 1, 4, 5};

    cv::Point3f p0, p1, p2, p3, p4, p5, p6, p7;
    int k1, k2, k3;

    k1 = idx[0], k2 = idx[3], k3 = idx[4];
    bool b0 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p0);
    k1 = idx[1], k2 = idx[3], k3 = idx[4];
    bool b1 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p1);
    k1 = idx[1], k2 = idx[2], k3 = idx[4];
    bool b2 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p2);
    k1 = idx[0], k2 = idx[2], k3 = idx[4];
    bool b3 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p3);
    k1 = idx[0], k2 = idx[2], k3 = idx[5];
    bool b4 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p4);
    k1 = idx[1], k2 = idx[2], k3 = idx[5];
    bool b5 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p5);
    k1 = idx[1], k2 = idx[3], k3 = idx[5];
    bool b6 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p6);
    k1 = idx[0], k2 = idx[3], k3 = idx[5];
    bool b7 = calculateIntersectionPoint(pts[k1], pts[k2], pts[k3], normals[k1], normals[k2], normals[k3], p7);

    // Trace the frustum edges as a single polyline through the eight vertices.
    std::vector<cv::Point3f> pp = {p0, p1, p2, p3, p4, p5, p6, p7, p0, p3, p4, p5, p2, p1, p6, p7, p4};
    if (b0 && b1 && b2 && b3 && b4 && b5 && b6 && b7)
    {
        cv::viz::WPolyLine pl(pp, cv::viz::Color::red());
        viewer_->showWidget(val_str, pl);
        return true;
    }
    return false;  // at least one triple of planes had no unique intersection point
}
```
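
The helper `calculateIntersectionPoint` is not included in the snippet above; its name and signature are inferred from the call sites, and the body below is an assumption. A minimal sketch, assuming each plane is described by a point on the plane and its normal, is the closed-form three-plane intersection:

```cpp
#include <cmath>
#include <opencv2/core.hpp>

// Hypothetical helper: intersect three planes, each given by a point on the plane
// and its normal. Returns false when the normals are (nearly) linearly dependent,
// i.e. the planes do not meet in a single point.
bool calculateIntersectionPoint(const cv::Point3f& pt1, const cv::Point3f& pt2, const cv::Point3f& pt3,
                                const cv::Vec3f& n1, const cv::Vec3f& n2, const cv::Vec3f& n3,
                                cv::Point3f& out)
{
    // Plane i: n_i . x = d_i, with d_i = n_i . p_i
    const float d1 = n1.dot(cv::Vec3f(pt1));
    const float d2 = n2.dot(cv::Vec3f(pt2));
    const float d3 = n3.dot(cv::Vec3f(pt3));

    const float denom = n1.dot(n2.cross(n3));  // scalar triple product
    if (std::abs(denom) < 1e-6f)
        return false;                          // planes (nearly) parallel or degenerate

    // Cramer's rule in cross-product form:
    // x = (d1 (n2 x n3) + d2 (n3 x n1) + d3 (n1 x n2)) / (n1 . (n2 x n3))
    const cv::Vec3f x = (n2.cross(n3) * d1 + n3.cross(n1) * d2 + n1.cross(n2) * d3) * (1.0f / denom);
    out = cv::Point3f(x[0], x[1], x[2]);
    return true;
}
```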

The modified code that computes the six planes of the camera model is as follows:

```cpp
// Calculate the 8 vertices of the frustum from the horizontal/vertical FOV and the
// near/far distances. The half-extent at distance d is d * tan(FOV / 2), so the
// full width/height is 2 * d * tan(FOV / 2).
float near_width  = 2 * _min_d * tan(_hFOV / 2);
float near_height = 2 * _min_d * tan(_vFOV / 2);
float far_width   = 2 * _max_d * tan(_hFOV / 2);
float far_height  = 2 * _max_d * tan(_vFOV / 2);

Eigen::Vector3f near_top_left(-near_width / 2, near_height / 2, _min_d);
Eigen::Vector3f near_top_right(near_width / 2, near_height / 2, _min_d);
Eigen::Vector3f near_bottom_left(-near_width / 2, -near_height / 2, _min_d);
Eigen::Vector3f near_bottom_right(near_width / 2, -near_height / 2, _min_d);

Eigen::Vector3f far_top_left(-far_width / 2, far_height / 2, _max_d);
Eigen::Vector3f far_top_right(far_width / 2, far_height / 2, _max_d);
Eigen::Vector3f far_bottom_left(-far_width / 2, -far_height / 2, _max_d);
Eigen::Vector3f far_bottom_right(far_width / 2, -far_height / 2, _max_d);

// Calculate the normal and an anchor point for each of the six planes.

// left plane
Eigen::Vector3f v_01 = near_bottom_left - near_top_left;
Eigen::Vector3f v_03 = far_top_left - near_top_left;
Eigen::Vector3f T_l = v_01.cross(v_03).normalized();
_plane_normals.push_back(VectorWithPt3D(T_l[0], T_l[1], T_l[2], near_top_left));

// top plane
v_01 = near_top_right - near_top_left;
v_03 = far_top_left - near_top_left;
Eigen::Vector3f T_t = -1 * v_01.cross(v_03).normalized();
_plane_normals.push_back(VectorWithPt3D(T_t[0], T_t[1], T_t[2], near_top_left));

// right plane
v_01 = near_top_right - near_bottom_right;
v_03 = far_top_right - near_top_right;
Eigen::Vector3f T_r = v_01.cross(v_03).normalized();
_plane_normals.push_back(VectorWithPt3D(T_r[0], T_r[1], T_r[2], near_bottom_right));

// bottom plane
v_01 = near_bottom_left - near_bottom_right;
v_03 = far_bottom_right - near_bottom_right;
Eigen::Vector3f T_b = -1 * v_01.cross(v_03).normalized();
_plane_normals.push_back(VectorWithPt3D(T_b[0], T_b[1], T_b[2], near_bottom_right));

// near plane
_plane_normals.push_back(VectorWithPt3D(0, 0, -1, near_top_left));

// far plane
_plane_normals.push_back(VectorWithPt3D(0, 0, 1, far_top_left));
```
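
As a quick sanity check of the geometry above (a standalone sketch, not part of the project code; the FOV and near/far values below are chosen arbitrarily to match the report), with `_hFOV == _vFOV == 100°` the near and far faces come out square, which matches the green OpenCV viz frustum:

```cpp
#include <cmath>
#include <cstdio>

int main()
{
    const float kPi = 3.14159265358979f;

    // Hypothetical parameters matching the report: 100 deg x 100 deg FOV.
    const float _hFOV  = 100.0f * kPi / 180.0f;
    const float _vFOV  = 100.0f * kPi / 180.0f;
    const float _min_d = 0.3f;
    const float _max_d = 4.0f;

    // Same formulas as above: full extent = 2 * distance * tan(FOV / 2).
    const float near_width  = 2 * _min_d * std::tan(_hFOV / 2);
    const float near_height = 2 * _min_d * std::tan(_vFOV / 2);
    const float far_width   = 2 * _max_d * std::tan(_hFOV / 2);
    const float far_height  = 2 * _max_d * std::tan(_vFOV / 2);

    // With equal horizontal and vertical FOV, both faces print as squares.
    std::printf("near: %.3f x %.3f  far: %.3f x %.3f\n",
                near_width, near_height, far_width, far_height);
    return 0;
}
```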

Originally posted by @onlyliucat in #238 (comment)
