Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Tried Undistorting but the output is crap #1815

Open
ImTheMan153 opened this issue Mar 23, 2024 · 1 comment
Open

Tried Undistorting but the output is crap #1815

ImTheMan153 opened this issue Mar 23, 2024 · 1 comment

Comments

@ImTheMan153
Copy link

Hello @dusty-nv,
I tried to use undistort my image by integrating cudawarpintrinsic function into gstcamera but the output starts to throw crap, i.e. it doesn't work as intended. I have attached my modifications to the create function in gstcamera.cpp as per your suggestions here CameraUndistortion. Please let me know if it is the right way to do it.

gstCamera.cpp:

#include "cudaWarp.h"
const float2 focalLength = make_float2(1445.386870f, 1426.720513f);
const float2 principalPoint = make_float2(808.692663f, 268.506265f);
const float4 distortionCoefficients = make_float4(-0.412884f, 0.084565f, 0.010779f, 0.001105f);
// Modified Capture function to include undistortion
bool gstCamera::Capture(void** output, imageFormat format, uint64_t timeout, int* status)
{
    // Original capture logic
    if (!output) RETURN_STATUS(ERROR);

    if (!mStreaming) {
        if (!Open()) RETURN_STATUS(ERROR);
    }

    int result = mBufferManager->Dequeue(output, format, timeout);
    if (result < 0) {
        LogError(LOG_GSTREAMER "gstCamera::Capture() -- an error occurred retrieving the next image buffer\n");
        RETURN_STATUS(ERROR);
    } else if (result == 0) {
        LogWarning(LOG_GSTREAMER "gstCamera::Capture() -- a timeout occurred waiting for the next image buffer\n");
        RETURN_STATUS(TIMEOUT);
    }

    mLastTimestamp = mBufferManager->GetLastTimestamp();
    mRawFormat = mBufferManager->GetRawFormat();

    // Assuming the format is compatible with CUDA undistortion
    // Allocate memory for the undistorted output frame
    float4* undistortedOutput;
    const int width = 640;  // Known width
    const int height = 480; // Known height
    cudaMalloc(&undistortedOutput, width * height * sizeof(float4));  // Assume 'width' and 'height' are available

    // Apply undistortion
    cudaError_t error = cudaWarpIntrinsic((uchar4*)*output, (uchar4*)undistortedOutput, width, height,
                                          focalLength, principalPoint, distortionCoefficients);

    if (error != cudaSuccess) {
        cudaFree(undistortedOutput);  // Clean up
        LogError(LOG_GSTREAMER "gstCamera::Capture() -- CUDA undistortion failed\n");
        RETURN_STATUS(ERROR);  // Add appropriate error handling
    }

    // Update output to point to the undistorted frame
    *output = undistortedOutput;

    RETURN_STATUS(OK);
}


// CaptureRGBA
bool gstCamera::CaptureRGBA(float** output, unsigned long timeout, bool zeroCopy) {
    mOptions.zeroCopy = zeroCopy;

    // Original capture logic to get the frame into 'output'
    if (!Capture((void**)output, IMAGE_RGBA32F, timeout, nullptr)) {
        return false; // Capture failed
    }

    // Assuming 'output' now holds the pointer to the captured frame
    // and the format is compatible with CUDA undistortion.
    // Note: Ensure your CUDA undistortion process is compatible with RGBA32F format.

    // Allocate memory for the undistorted output frame
    float4* undistortedOutput;
    const int width = 640;  // Assuming width is known, replace with dynamic width if available
    const int height = 480; // Assuming height is known, replace with dynamic height if available
    cudaMalloc(&undistortedOutput, width * height * sizeof(float4));

    // Apply undistortion
    cudaError_t error = cudaWarpIntrinsic((uchar4*)*output, (uchar4*)undistortedOutput, width, height,
                                          focalLength, principalPoint, distortionCoefficients);

    if (error != cudaSuccess) {
        cudaFree(undistortedOutput); // Clean up
        return false; // Handle error appropriately
    }

    // Free the original output memory if not zeroCopy
    if (!zeroCopy) {
        cudaFree(*output);
    }

    // Update output to point to the undistorted frame
    *output = (float*)undistortedOutput;

    return true; // Return true if everything succeeded
}

After this modification, I built the workspace and ran the videoSourcenode with ROS2. And the output was,

Output Video Screenshot:
Screenshot from 2024-03-23 19-17-39

@ImTheMan153
Copy link
Author

ImTheMan153 commented Mar 23, 2024

I also tried integrating inside the videosourcenode of ROS instead of gstreamer but I got the same output. I know that I am doing something wrong, so I would appreciate if you give me some inputs. Thanks.

node_video_source.cpp:

#include "ros_compat.h"
#include "image_converter.h"

// aquire and publish camera frame
#include "cuda_runtime.h"
#include <jetson-utils/cudaWarp.h> // Assuming this is where cudaWarpIntrinsic is defined
#include <jetson-utils/videoSource.h>



// globals	
videoSource* stream = NULL;
imageConverter* image_cvt = NULL;
Publisher<sensor_msgs::Image> image_pub = NULL;
const float2 focalLength = make_float2(1445.386870f, 1426.720513f);
const float2 principalPoint = make_float2(808.692663f, 268.506265f);
const float4 distortionCoefficients = make_float4(-0.412884f, 0.084565f, 0.010779f, 0.001105f);



bool aquireFrame()
{
    imageConverter::PixelType* nextFrame = NULL;

    // get the latest frame
    if( !stream->Capture(&nextFrame, 1000) )
    {
        ROS_ERROR("failed to capture next frame");
        return false;
    }

    const int width = stream->GetWidth();
    const int height = stream->GetHeight();
    float4* undistortedOutput = nullptr;
    cudaMalloc(&undistortedOutput, width * height * sizeof(float4)); // Allocate memory for the undistorted frame

    cudaError_t error = cudaWarpIntrinsic((uchar4*)nextFrame, (uchar4*)undistortedOutput, width, height,
                                          focalLength, principalPoint, distortionCoefficients);
    if (error != cudaSuccess) {
        cudaFree(undistortedOutput); // Clean up
        ROS_ERROR("CUDA undistortion failed");
        return false;
    }

    if( !image_cvt->Resize(width, height, imageConverter::ROSOutputFormat) )
    {
        ROS_ERROR("failed to resize camera image converter");
        cudaFree(undistortedOutput); // Clean up
        return false;
    }

    sensor_msgs::Image msg;
    if( !image_cvt->Convert(msg, imageConverter::ROSOutputFormat, (imageConverter::PixelType*)undistortedOutput) )
    {
        ROS_ERROR("failed to convert video stream frame to sensor_msgs::Image");
        cudaFree(undistortedOutput); // Clean up
        return false;
    }

    cudaFree(undistortedOutput); // Don't forget to free CUDA memory

    // publish the message
    msg.header.stamp = ROS_TIME_NOW();
    image_pub->publish(msg);
    ROS_DEBUG("published %ux%u video frame", width, height);
    
    return true;
}

Output:
Screenshot from 2024-03-23 19-46-33

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Labels
None yet
Projects
None yet
Development

No branches or pull requests

1 participant