1. 程式人生 > 其它 >Kinect-Dk獲取深度圖

Kinect-Dk獲取深度圖

基於Kinect-Dk的多聚焦影象融合

2021.7.16

本機環境

Visual Studio2019(需安裝c++拓展)

kinectSDK1.4.1

新建空白C++控制檯工程並新增原始檔Main.cpp

使用kinect-dk

1、下載SDK

此處由 GitHub 倉庫提供下載,選擇最新版本 1.4.1 即可。

這裡我建議安裝到C盤,會直接出現在根目錄下

SDK目錄下是一些標頭檔案

tools目錄下則是工具,總共有三種

Kinect檢視器可以啟動相機,或者開啟已經錄製好的視訊。並且展示KinectDk的三種模式,RGB模式,紅外模式,深度模式。

錄製器

主要使用命令列來執行,可以錄製一定長度的視訊。

命令列切換到tool目錄下

k4arecorder.exe -l 5 %TEMP%\output.mkv

即可錄製五秒視訊

完整引數介紹詳見此頁

https://docs.microsoft.com/zh-cn/azure/kinect-dk/azure-kinect-recorder

2、連線電源

在圖片中,type-c介面的線是資料介面,要連線到電腦上,要求USB3.0(如果連線不良可以檢查一下是否是使用了拓展塢,嘗試使用電腦自帶的USB3.0介面)

圓形介面的線是電源線,需要連線到電源。

專案部署

1、新建空白C++控制檯工程並新增原始檔Main.cpp

2、安裝 Azure Kinect NuGet 包

右鍵 引用 - 管理NuGet程式包

3、搜尋Microsoft.Azure.Kinect.Sensor,從列表中選擇該包並安裝。

4、同樣地,搜尋 OpenCV 套件,從列表中選擇並安裝(後續程式碼會用到 OpenCV)。

5、新增標頭檔案和庫檔案

5.1 加入標頭檔案k4a.h

直接右鍵 標頭檔案 - 新增 - 現有項,找到k4a.h匯入。預設路徑為C:\Program Files\Azure Kinect SDK v1.4.1\sdk\include\k4a

5.2 配置標頭檔案目錄和庫檔案目錄

右鍵自己的專案,選擇屬性,上方的平臺選擇 x64

  • C/C++ - 常規 - 附加包含目錄,加入SDK的include路徑
  • 連結器 - 常規 - 附加庫目錄,加入SDK的lib路徑
  • 連結器 - 輸入 - 附加依賴項,加入k4a.lib

    注意:每新增一項後,點選右下角應用。

專案執行

專案地址https://github.com/Applied-Energetic/KinectDK

在VS專案目錄下新建三個資料夾——rgb、depth、ir,輸出的圖片會儲存到這裡。

執行時點選本地Windows偵錯程式即可。

相機內參獲取

KinectDk內建深度相機,其焦距等引數為一個固定值,因此無法多聚焦,下面提供一種獲取焦距的方法。

獲取內參矩陣

/* Abort the process with a diagnostic if a k4a call does not return
 * K4A_RESULT_SUCCEEDED.
 * Fixes vs. original: the expansion is wrapped in do { } while (0) so the
 * macro behaves as a single statement (safe inside an un-braced if/else —
 * the original form broke with a dangling else), and the `result` argument
 * is parenthesized to avoid precedence surprises. */
#define VERIFY(result, error)                                                                                \
    do                                                                                                       \
    {                                                                                                        \
        if ((result) != K4A_RESULT_SUCCEEDED)                                                                \
        {                                                                                                    \
            printf("%s \n - (File: %s, Function: %s, Line: %d)\n", error, __FILE__, __FUNCTION__, __LINE__); \
            exit(1);                                                                                         \
        }                                                                                                    \
    } while (0)
int main() {

	uint32_t count = k4a_device_get_installed_count();
	if (count == 0)
	{
		printf("No k4a devices attached!\n");
		return 1;
	}

	// Open the first plugged in Kinect device
	k4a_device_t device = NULL;

	if (K4A_FAILED(k4a_device_open(K4A_DEVICE_DEFAULT, &device)))
	{
		printf("Failed to open k4a device!\n");
		return 1;
	}


	// Get the size of the serial number
	size_t serial_size = 0;
	k4a_device_get_serialnum(device, NULL, &serial_size);

	// Allocate memory for the serial, then acquire it
	char* serial = (char*)(malloc(serial_size));
	k4a_device_get_serialnum(device, serial, &serial_size);
	printf("Opened device: %s\n", serial);
	free(serial);

	// Configure a stream of 4096x3072 BRGA color data at 15 frames per second
	k4a_device_configuration_t config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
	config.camera_fps = K4A_FRAMES_PER_SECOND_15;
	config.color_format = K4A_IMAGE_FORMAT_COLOR_BGRA32;
	config.color_resolution = K4A_COLOR_RESOLUTION_3072P;

	// Start the camera with the given configuration
	if (K4A_FAILED(k4a_device_start_cameras(device, &config)))
	{
		printf("Failed to start cameras!\n");
		k4a_device_close(device);
		return 1;
	}


	k4a_calibration_t sensor_calibration;

	k4a_device_get_calibration(device, config.depth_mode, config.color_resolution, &sensor_calibration);
	VERIFY(k4a_device_get_calibration(device, config.depth_mode, config.color_resolution, &sensor_calibration),
		"Get depth camera calibration failed!");

	k4a_transformation_create(&sensor_calibration);	
	cout << k4a_transformation_create(&sensor_calibration)<<endl;

	// ...Camera capture and application specific code would go here...

	// Shut down the camera when finished with application logic
	k4a_device_stop_cameras(device);

	k4a_device_close(device);
	return 0;

}


#include <k4a/k4a.h>

#include <stdio.h>

#include <vector>
using namespace std;

#include "opencv2/core.hpp"
#include "opencv2/calib3d.hpp"
using namespace cv;

// Release the device handle, tolerating a NULL handle (e.g. when
// k4a_device_open failed before a handle was assigned).
static void clean_up(k4a_device_t device)
{
    if (device == NULL)
    {
        return;
    }
    k4a_device_close(device);
}

// Cross-checks Azure Kinect's own 3D->2D projection
// (k4a_calibration_3d_to_2d) against OpenCV's projectPoints, using the
// device's factory calibration converted to OpenCV's camera-matrix /
// distortion-coefficient format. Prints both projections for five test
// points plus the depth camera's intrinsic matrix.
// Returns 0 on success, 1 on device/calibration failure, 2 on bad usage.
//
// Fixes vs. original:
//  - the input points were passed to projectPoints via
//    *reinterpret_cast<vector<Point3f>*>(&points_3d), which violates strict
//    aliasing and silently depends on k4a_float3_t and Point3f sharing a
//    layout; they are now copied into a genuine vector<Point3f>.
//  - the `valid` flag from k4a_calibration_3d_to_2d was ignored; a warning
//    is now printed for points outside the depth camera's field of view.
//  - removed commented-out dead code.
int main(int argc, char** /*argv*/)
{
    uint32_t device_count = 0;
    k4a_device_t device = NULL;
    k4a_device_configuration_t config = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;

    if (argc != 1)
    {
        printf("Usage: opencv_example.exe\n");
        return 2;
    }

    device_count = k4a_device_get_installed_count();

    if (device_count == 0)
    {
        printf("No K4A devices found\n");
        return 1;
    }

    if (K4A_RESULT_SUCCEEDED != k4a_device_open(K4A_DEVICE_DEFAULT, &device))
    {
        printf("Failed to open device\n");
        clean_up(device);
        return 1;
    }

    // Only depth_mode and color_resolution matter for the calibration query;
    // the cameras are never started in this program.
    config.depth_mode = K4A_DEPTH_MODE_WFOV_2X2BINNED;
    config.color_resolution = K4A_COLOR_RESOLUTION_1080P;
    config.camera_fps = K4A_FRAMES_PER_SECOND_30;

    k4a_calibration_t calibration;
    if (K4A_RESULT_SUCCEEDED !=
        k4a_device_get_calibration(device, config.depth_mode, config.color_resolution, &calibration))
    {
        printf("Failed to get calibration\n");
        clean_up(device);
        return 1;
    }

    // Test points expressed in the color camera's 3D frame, in millimeters.
    vector<k4a_float3_t> points_3d = { { { 0.f, 0.f, 1000.f } },          // color camera center
                                       { { -1000.f, -1000.f, 1000.f } },  // color camera top left
                                       { { 1000.f, -1000.f, 1000.f } },   // color camera top right
                                       { { 1000.f, 1000.f, 1000.f } },    // color camera bottom right
                                       { { -1000.f, 1000.f, 1000.f } } }; // color camera bottom left

    // k4a project function: color-camera 3D -> depth-camera 2D pixels.
    vector<k4a_float2_t> k4a_points_2d(points_3d.size());
    for (size_t i = 0; i < points_3d.size(); i++)
    {
        int valid = 0;
        k4a_calibration_3d_to_2d(&calibration,
            &points_3d[i],
            K4A_CALIBRATION_TYPE_COLOR,
            K4A_CALIBRATION_TYPE_DEPTH,
            &k4a_points_2d[i],
            &valid);
        if (!valid)
        {
            printf("Warning: point %zu projects outside the depth camera FOV\n", i);
        }
    }

    // Converting the calibration data to OpenCV format:
    // extrinsic transformation from color to depth camera. The Mats wrap the
    // calibration struct's float arrays in place (no copy).
    Mat se3 =
        Mat(3, 3, CV_32FC1, calibration.extrinsics[K4A_CALIBRATION_TYPE_COLOR][K4A_CALIBRATION_TYPE_DEPTH].rotation);
    Mat r_vec = Mat(3, 1, CV_32FC1);
    Rodrigues(se3, r_vec); // rotation matrix -> Rodrigues rotation vector
    Mat t_vec =
        Mat(3, 1, CV_32F, calibration.extrinsics[K4A_CALIBRATION_TYPE_COLOR][K4A_CALIBRATION_TYPE_DEPTH].translation);

    // Intrinsic parameters of the depth camera, laid out as a row-major
    // 3x3 camera matrix [fx 0 cx; 0 fy cy; 0 0 1].
    k4a_calibration_intrinsic_parameters_t* intrinsics = &calibration.depth_camera_calibration.intrinsics.parameters;
    vector<float> _camera_matrix = {
        intrinsics->param.fx, 0.f, intrinsics->param.cx, 0.f, intrinsics->param.fy, intrinsics->param.cy, 0.f, 0.f, 1.f
    };
    Mat camera_matrix = Mat(3, 3, CV_32F, &_camera_matrix[0]);
    // Distortion coefficients in OpenCV's 8-term order: k1 k2 p1 p2 k3 k4 k5 k6.
    vector<float> _dist_coeffs = { intrinsics->param.k1, intrinsics->param.k2, intrinsics->param.p1,
                                   intrinsics->param.p2, intrinsics->param.k3, intrinsics->param.k4,
                                   intrinsics->param.k5, intrinsics->param.k6 };
    Mat dist_coeffs = Mat(8, 1, CV_32F, &_dist_coeffs[0]);

    // Copy the k4a points into a real vector<Point3f> for projectPoints
    // instead of reinterpret_cast'ing the vector (strict-aliasing fix).
    vector<Point3f> cv_points_3d;
    cv_points_3d.reserve(points_3d.size());
    for (const k4a_float3_t& p : points_3d)
    {
        cv_points_3d.push_back(Point3f(p.v[0], p.v[1], p.v[2]));
    }

    // OpenCV project function
    vector<Point2f> cv_points_2d(points_3d.size());
    projectPoints(cv_points_3d,
        r_vec,
        t_vec,
        camera_matrix,
        dist_coeffs,
        cv_points_2d);

    // Both projections should agree to within numeric noise.
    for (size_t i = 0; i < points_3d.size(); i++)
    {
        printf("3d point:\t\t\t(%.5f, %.5f, %.5f)\n", points_3d[i].v[0], points_3d[i].v[1], points_3d[i].v[2]);
        printf("OpenCV projectPoints:\t\t(%.5f, %.5f)\n", cv_points_2d[i].x, cv_points_2d[i].y);
        printf("k4a_calibration_3d_to_2d:\t(%.5f, %.5f)\n\n", k4a_points_2d[i].v[0], k4a_points_2d[i].v[1]);
    }

    cout << camera_matrix << endl;
    clean_up(device);
    return 0;
}

結論

1、目前還沒找到修改相機內參的方法,官方人員的回覆是內參不可修改。

issue連結如下

https://github.com/microsoft/Azure-Kinect-Sensor-SDK/issues/1640

The calibration matrices (intrin, extrin) are unchangable for a given mode (wide pov, narrow pov, etc.). There is no variable lens focus.

Dale Phurrough diablodale

Berlin, Germany

2、如果想要獲取多聚焦RGBD影象的話應該考慮更換一種裝置。

3、不更換裝置的前提下想要利用RGBD資料在影象融合領域進行訓練可能需要一種新的影象處理演算法。

引用

https://github.com/microsoft/Azure-Kinect-Sensor-SDK

此專案為KinectDk開發者倉庫,有問題可以在這裡詢問。