Table of Contents
- 1. Reference example

This post walks through the Sample_V1 example from the Percipio (圖漾) official website.

1. Reference example

The main addition is loading the camera parameter settings from the camera's storage area. For how to save camera parameters into the storage area with the Percipio PercipioViewer software, see the link: Saving camera parameters.
Note: before saving parameters, stop streaming on all of the camera's components.
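Before the full listing, here is a minimal sketch of the storage-loading logic. It assumes the load_parameters_from_storage() helper from the sample's common.hpp, which the complete code below also uses: try the parameters saved in the camera first, and only fall back to configuring parameters in code when nothing usable is stored.

// Minimal sketch: prefer the parameters saved in the camera's storage area.
// Assumes the load_parameters_from_storage() helper from the sample's common.hpp.
std::string js_data;
int ret = load_parameters_from_storage(hDevice, js_data);
if (ret != TY_STATUS_OK) {
    LOGD("No usable parameters in storage (error %d), configuring in code instead", ret);
    setParameters = true; // the listing below then applies resolution/exposure settings itself
}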
The complete code is shown below:
#include <limits>
#include <cassert>
#include <cmath>
#include "../common/common.hpp"
#include <TYImageProc.h>
#include <chrono>

// Switch for setting camera parameters in code. By default the parameters saved
// inside the camera are used (the saved parameters can still be overridden).
// Different camera models support different features and value ranges; use the
// PercipioViewer tool to check which features a camera supports.
bool setParameters = false;

// Switch for mapping the depth image to the color image: set to 1 to map the depth
// image into the color coordinate frame, 0 to leave it unmapped.
// Mapping color to depth loses color information where depth is missing,
// so mapping depth to color is the default approach.
#define MAP_DEPTH_TO_COLOR 0

// Enabling the depth renderers below will reduce the frame rate.
DepthViewer depthViewer0("OrgDepth");           // rendered original depth image
DepthViewer depthViewer1("FillHoleDepth");      // rendered depth image after hole filling
DepthViewer depthViewer2("SpeckleFilterDepth"); // rendered depth image after speckle filtering
DepthViewer depthViewer3("EnhenceFilterDepth"); // rendered depth image after temporal filtering
DepthViewer depthViewer4("MappedDepth");        // rendered depth image mapped to the color coordinate frame

// Event callback
void eventCallback(TY_EVENT_INFO *event_info, void *userdata)
{
    if (event_info->eventId == TY_EVENT_DEVICE_OFFLINE) {
        LOGD("=== Event Callback: Device Offline!");
        *(bool*)userdata = true;
        // Note: set the TY_BOOL_KEEP_ALIVE_ONOFF feature to false if you need to debug with breakpoints!
    } else if (event_info->eventId == TY_EVENT_LICENSE_ERROR) {
        LOGD("=== Event Callback: License Error!");
    }
}

// Data format conversion
//cv pixel format to TY_PIXEL_FORMAT
static int cvpf2typf(int cvpf)
{
    switch (cvpf) {
    case CV_8U:    return TY_PIXEL_FORMAT_MONO;
    case CV_8UC3:  return TY_PIXEL_FORMAT_RGB;
    case CV_16UC1: return TY_PIXEL_FORMAT_DEPTH16;
    default:       return TY_PIXEL_FORMAT_UNDEFINED;
    }
}

// Data format conversion
//mat to TY_IMAGE_DATA
static void mat2TY_IMAGE_DATA(int comp, const cv::Mat& mat, TY_IMAGE_DATA& data)
{
    data.status = 0;
    data.componentID = comp;
    data.size = mat.total() * mat.elemSize();
    data.buffer = mat.data;
    data.width = mat.cols;
    data.height = mat.rows;
    data.pixelFormat = cvpf2typf(mat.type());
}

// Callback data
struct CallbackData {
    int index;
    TY_DEV_HANDLE hDevice;
    TY_CAMERA_INTRINSIC* intri_depth;
    TY_CAMERA_INTRINSIC* intri_color;
    TY_CAMERA_CALIB_INFO depth_calib;
    TY_CAMERA_CALIB_INFO color_calib;
    float scale_unit;
    bool saveOneFramePoint3d;
    int fileIndex;
    bool isTof;
};
CallbackData cb_data;

// Convert a depth image to a point cloud using the intrinsics; provided for reference
//depth to pointcloud
cv::Mat depthToWorld(float* intr, const cv::Mat& depth, float scale_unit = 1.0)
{
    cv::Mat world(depth.rows, depth.cols, CV_32FC3);
    float cx = intr[2];
    float cy = intr[5];
    float inv_fx = 1.0f / intr[0];
    float inv_fy = 1.0f / intr[4];
    for (int r = 0; r < depth.rows; r++) {
        uint16_t* pSrc = (uint16_t*)depth.data + r * depth.cols;
        cv::Vec3f* pDst = (cv::Vec3f*)world.data + r * depth.cols;
        for (int c = 0; c < depth.cols; c++) {
            uint16_t z = pSrc[c] * scale_unit;
            if (z == 0) {
                pDst[c][0] = NAN;
                pDst[c][1] = NAN;
                pDst[c][2] = NAN;
            } else {
                pDst[c][0] = (c - cx) * z * inv_fx;
                pDst[c][1] = (r - cy) * z * inv_fy;
                pDst[c][2] = z;
            }
        }
    }
    return world;
}

// Undistort the color image and map the depth image into the color coordinate frame
static void doRegister(const TY_CAMERA_CALIB_INFO& depth_calib,
                       const TY_CAMERA_CALIB_INFO& color_calib,
                       const cv::Mat& depth,
                       const float f_scale_unit,
                       const cv::Mat& color,
                       cv::Mat& undistort_color,
                       cv::Mat& out,
                       bool map_depth_to_color)
{
    // Undistort the color image
    TY_IMAGE_DATA src;
    src.width = color.cols;
    src.height = color.rows;
    src.size = color.size().area() * 3;
    src.pixelFormat = TY_PIXEL_FORMAT_RGB;
    src.buffer = color.data;

    undistort_color = cv::Mat(color.size(), CV_8UC3);
    TY_IMAGE_DATA dst;
    dst.width = color.cols;
    dst.height = color.rows;
    dst.size = undistort_color.size().area() * 3;
    dst.buffer = undistort_color.data;
    dst.pixelFormat = TY_PIXEL_FORMAT_RGB;
    ASSERT_OK(TYUndistortImage(&color_calib, &src, NULL, &dst));

    // Map the depth image into the color coordinate frame
    if (map_depth_to_color) {
        out = cv::Mat::zeros(undistort_color.size(), CV_16U);
        ASSERT_OK(TYMapDepthImageToColorCoordinate(
            &depth_calib, depth.cols, depth.rows, depth.ptr<uint16_t>(),
            &color_calib, out.cols, out.rows, out.ptr<uint16_t>(), f_scale_unit));
    } else {
        out = depth;
    }
}

// Frame handler
void frameHandler(TY_FRAME_DATA* frame, void* userdata)
{
    CallbackData* pData = (CallbackData*) userdata;
    LOGD("=== Get frame %d", ++pData->index);
    std::vector<TY_VECT_3F> P3dtoColor; // point cloud mapped to the color coordinate frame
    cv::Mat depth, color, p3d, newP3d;

    //auto StartParseFrame = std::chrono::steady_clock::now();
    // Parse the frame
    parseFrame(*frame, &depth, 0, 0, &color); // fetch both the depth and the color image
    //parseFrame(*frame, &depth, 0, 0, 0);    // fetch the depth image only
    //auto ParseFrameFinished = std::chrono::steady_clock::now();
    //auto duration2 = std::chrono::duration_cast<std::chrono::microseconds>(ParseFrameFinished - StartParseFrame);
    //LOGI("*******ParseFrame spend Time : %lld", (long long)duration2.count());

    // Hole-filling switch; enabling it reduces the frame rate
    bool FillHole = 0;
    // Speckle filter switch; removes isolated noise points in the depth image
    bool SpeckleFilter = 1;
    // Temporal filter; reduces per-pixel jitter and improves point cloud flatness
    bool EnhenceFilter = 0;

    // Depth image processing
    if (!depth.empty()) {
        if (pData->isTof) {
            // For TOF cameras (e.g. TM26X and TM421) the depth image must be undistorted;
            // stereo cameras do not need this step.
            TY_IMAGE_DATA src;
            src.width = depth.cols;
            src.height = depth.rows;
            src.size = depth.size().area() * 2;
            src.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
            src.buffer = depth.data;
            cv::Mat undistort_depth = cv::Mat(depth.size(), CV_16U);
            TY_IMAGE_DATA dst;
            dst.width = depth.cols;
            dst.height = depth.rows;
            dst.size = undistort_depth.size().area() * 2;
            dst.buffer = undistort_depth.data;
            dst.pixelFormat = TY_PIXEL_FORMAT_DEPTH16;
            ASSERT_OK(TYUndistortImage(&pData->depth_calib, &src, NULL, &dst));
            depth = undistort_depth.clone();
        }
        if (FillHole) {
            // Hole filling on the depth image
            DepthInpainter inpainter;
            inpainter._kernelSize = 10;
            inpainter._maxInternalHoleToBeFilled = 1800;
            inpainter._fillAll = false;
            inpainter.inpaint(depth, depth, cv::Mat());
            depthViewer1.show(depth);
        }
        if (SpeckleFilter) {
            // Speckle filtering
            TY_IMAGE_DATA sfFilteredDepth;
            cv::Mat filteredDepth(depth.size(), depth.type());
            filteredDepth = depth.clone();
            mat2TY_IMAGE_DATA(TY_COMPONENT_DEPTH_CAM, filteredDepth, sfFilteredDepth);
            struct DepthSpeckleFilterParameters sfparam = DepthSpeckleFilterParameters_Initializer;
            sfparam.max_speckle_size = 300; // blobs smaller than this area are filtered out
            sfparam.max_speckle_diff = 64;  // neighboring pixels whose disparity differs by more than this are treated as noise
            TYDepthSpeckleFilter(&sfFilteredDepth, &sfparam);
            // Show the rendered depth image after speckle filtering
            depthViewer2.show(filteredDepth);
            // Point cloud in CV_32FC3 format
            newP3d = depthToWorld(pData->intri_depth->data, filteredDepth, pData->scale_unit);
            depth = filteredDepth.clone();
            // Save the filtered depth image (optional)
            //char file[32];
            //sprintf(file, "depth-%d.png", pData->fileIndex++);
            //cv::imwrite(file, filteredDepth);
        }
        if (EnhenceFilter) {
            // Temporal filtering
            TY_IMAGE_DATA efFilteredDepthin, efFilteredDepthout;
            cv::Mat filteredDepth1(depth.size(), depth.type());
            cv::Mat filteredDepth2(depth.size(), depth.type());
            filteredDepth1 = depth.clone();
            mat2TY_IMAGE_DATA(TY_COMPONENT_DEPTH_CAM, filteredDepth1, efFilteredDepthin);
            mat2TY_IMAGE_DATA(TY_COMPONENT_DEPTH_CAM, filteredDepth2, efFilteredDepthout);
            struct DepthEnhenceParameters efparam = DepthEnhenceParameters_Initializer;
            efparam.sigma_s = 0;        // spatial filter coefficient
            efparam.sigma_r = 0;        // depth filter coefficient
            efparam.outlier_win_sz = 0; // filter window in pixels
            efparam.outlier_rate = 0.f; // noise filtering coefficient
            TY_IMAGE_DATA *guide = nullptr;
            TYDepthEnhenceFilter(&efFilteredDepthin, 3, guide, &efFilteredDepthout, &efparam);
            // Show the rendered depth image after temporal filtering
            depthViewer3.show(filteredDepth2);
            // Point cloud in CV_32FC3 format
            newP3d = depthToWorld(pData->intri_depth->data, filteredDepth2, pData->scale_unit);
            depth = filteredDepth2.clone();
            // Save the filtered depth image (optional)
            //char file[32];
            //sprintf(file, "depth-%d.png", pData->fileIndex++);
            //cv::imwrite(file, filteredDepth);
        } else if (!FillHole && !SpeckleFilter && !EnhenceFilter) {
            // Show the rendered original depth image
            depthViewer0.show(depth);
            // Original point cloud
            p3d = depthToWorld(pData->intri_depth->data, depth, pData->scale_unit);
        }
    }

    // Color image processing
    cv::Mat color_data_mat, p3dtocolorMat;
    if (!color.empty()) {
        // Show the original color image
        //imshow("orgColor", color);
        cv::Mat undistort_color, MappedDepth;
        if (MAP_DEPTH_TO_COLOR) {
            auto BeforedoRegister = std::chrono::steady_clock::now();
            // Undistort the color image and map the depth image into the color coordinate frame
            doRegister(pData->depth_calib, pData->color_calib, depth, pData->scale_unit, color,
                       undistort_color, MappedDepth, MAP_DEPTH_TO_COLOR);
            // Data format conversion
            cv::cvtColor(undistort_color, color_data_mat, CV_BGR2RGB);
            // Generate the point cloud in the color coordinate frame; two methods:
            // Method 1: store the point cloud in TY_VECT_3F --- P3dtoColor
            P3dtoColor.resize(MappedDepth.size().area());
            ASSERT_OK(TYMapDepthImageToPoint3d(&pData->color_calib, MappedDepth.cols, MappedDepth.rows,
                                               (uint16_t*)MappedDepth.data, &P3dtoColor[0], pData->scale_unit));
            // Method 2: store the point cloud in a CV_32FC3 Mat --- p3dtocolorMat
            //p3dtocolorMat = depthToWorld(pData->intri_color->data, MappedDepth);
            auto AfterdoRegister = std::chrono::steady_clock::now();
            auto duration3 = std::chrono::duration_cast<std::chrono::microseconds>(AfterdoRegister - BeforedoRegister);
            LOGI("*******do Rgb Undistortion--MapDepthToColor--P3D spend Time : %lld", (long long)duration3.count());
            // Show the undistorted color image
            imshow("undistort_color", undistort_color);
            // Show the depth image mapped to the color coordinate frame
            depthViewer4.show(MappedDepth);
        } else {
            // Undistort the color image; the depth image is left unmapped
            doRegister(pData->depth_calib, pData->color_calib, depth, pData->scale_unit, color,
                       undistort_color, MappedDepth, MAP_DEPTH_TO_COLOR);
            // Show the undistorted color image
            imshow("undistort_color", undistort_color);
        }
    }

    // Save the point cloud
    if (pData->saveOneFramePoint3d) {
        char file[32];
        if (MAP_DEPTH_TO_COLOR) {
            LOGD("Save p3dtocolor now!!!");
            // Save the colored point cloud (XYZRGB) in the color coordinate frame
            // (requires Method 2 above so that p3dtocolorMat is filled)
            sprintf(file, "p3dtocolor-%d.xyz", pData->fileIndex++);
            writePointCloud((cv::Point3f*)p3dtocolorMat.data, (const cv::Vec3b*)color_data_mat.data,
                            p3dtocolorMat.total(), file, PC_FILE_FORMAT_XYZ);
        } else {
            LOGD("Save point3d now!!!");
            // Save the point cloud in XYZ format
            sprintf(file, "points-%d.xyz", pData->fileIndex++);
            writePointCloud((cv::Point3f*)newP3d.data, 0, newP3d.total(), file, PC_FILE_FORMAT_XYZ);
        }
        pData->saveOneFramePoint3d = false;
    }

    // Return the buffer to the queue
    LOGD("=== Re-enqueue buffer(%p, %d)", frame->userBuffer, frame->bufferSize);
    ASSERT_OK( TYEnqueueBuffer(pData->hDevice, frame->userBuffer, frame->bufferSize) );
}

int main(int argc, char* argv[])
{
    std::string ID, IP;
    TY_INTERFACE_HANDLE hIface = NULL;
    TY_DEV_HANDLE hDevice = NULL;
    TY_CAMERA_INTRINSIC intri_depth;
    TY_CAMERA_INTRINSIC intri_color;
    int32_t resend = 1;
    bool isTof = 0;

    for (int i = 1; i < argc; i++) {
        if (strcmp(argv[i], "-id") == 0) {
            ID = argv[++i];
        } else if (strcmp(argv[i], "-ip") == 0) {
            IP = argv[++i];
        } else if (strcmp(argv[i], "-h") == 0) {
            LOGI("Usage: SimpleView_Callback [-h] [-id <ID>]");
            return 0;
        }
    }

    LOGD("=== Init lib");
    ASSERT_OK(TYInitLib());
    TY_VERSION_INFO ver;
    ASSERT_OK(TYLibVersion(&ver));
    LOGD(" - lib version: %d.%d.%d", ver.major, ver.minor, ver.patch);

    std::vector<TY_DEVICE_BASE_INFO> selected;
    // Select a camera
    ASSERT_OK(selectDevice(TY_INTERFACE_ALL, ID, IP, 1, selected));
    ASSERT(selected.size() > 0);
    // Use the first camera by default
    TY_DEVICE_BASE_INFO& selectedDev = selected[0];
    //const std::string actualBrand = selected[0].modelName;

    // Open the interface and the device
    ASSERT_OK(TYOpenInterface(selectedDev.iface.id, &hIface));
    ASSERT_OK(TYOpenDevice(hIface, selectedDev.id, &hDevice));

    // Time synchronization setup
    LOGD("Set type of time sync mechanism");
    ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEVICE, TY_ENUM_TIME_SYNC_TYPE, TY_TIME_SYNC_TYPE_HOST));
    LOGD("Wait for time sync ready");
    while (1) {
        bool sync_ready;
        ASSERT_OK(TYGetBool(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_TIME_SYNC_READY, &sync_ready));
        if (sync_ready) {
            break;
        }
        MSLEEP(10);
    }

    // Set the SDK log level: VERBOSE > DEBUG > INFO > WARNING > ERROR
    //ASSERT_OK(TYSetLogLevel(TY_LOG_LEVEL_ERROR));
    //ASSERT_OK(TYAppendLogToFile("test_log.txt", TY_LOG_LEVEL_DEBUG)); // write camera logs to a file; use TYRemoveLogFile to stop

    // Use the parameters saved inside the camera
    if (!setParameters) {
        std::string js_data;
        int ret;
        ret = load_parameters_from_storage(hDevice, js_data); // load the parameters saved inside the camera
        if (ret == TY_STATUS_ERROR) {
            LOGD("no saved parameters in the camera");
            setParameters = true;
        } else if (ret != TY_STATUS_OK) {
            LOGD("Failed: error %d(%s)", ret, TYErrorString(ret));
            setParameters = true;
        }
    }

    // Try to enable the color camera
    LOGD("Has RGB camera, open RGB cam");
    ASSERT_OK(TYEnableComponents(hDevice, TY_COMPONENT_RGB_CAM));

    // Set the color camera pixel format and resolution
    if (setParameters) {
        LOGD("=== Configure feature, set RGB resolution");
        // Method 1: set the pixel format and resolution directly
        //ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_BAYER8GB_640x480));
        //ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_YUYV_1920x1080));
        // Different Percipio camera models support different color pixel formats and resolutions; see the camera datasheet.
        // Method 2: enumerate the image modes the camera supports and pick a resolution by image width, ignoring the pixel format
        TY_STATUS status = TY_STATUS_OK;
        if (TY_COMPONENT_RGB_CAM) {
            std::vector<TY_ENUM_ENTRY> image_mode_list;
            status = get_feature_enum_list(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, image_mode_list);
            for (int idx = 0; idx < image_mode_list.size(); idx++) {
                TY_ENUM_ENTRY& entry = image_mode_list[idx];
                // try to select a resolution
                if (TYImageWidth(entry.value) == 640) {
                    LOGD("Select RGB Image Mode: %s", entry.description);
                    int err = TYSetEnum(hDevice, TY_COMPONENT_RGB_CAM, TY_ENUM_IMAGE_MODE, entry.value);
                    ASSERT(err == TY_STATUS_OK || err == TY_STATUS_NOT_PERMITTED);
                    break;
                }
            }
        }
    }

    // Read the color camera calibration data.
    // TY_STRUCT_CAM_CALIB_DATA holds the intrinsics at the camera's maximum resolution;
    // TY_STRUCT_CAM_INTRINSIC holds the intrinsics at the current resolution.
    LOGD("=== Get color intrinsic");
    ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_INTRINSIC, &intri_color, sizeof(intri_color)));
    LOGD("=== Read color calib data");
    ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_RGB_CAM, TY_STRUCT_CAM_CALIB_DATA, &cb_data.color_calib, sizeof(cb_data.color_calib)));

    // Hardware ISP: only some cameras' RGB modules support it, e.g. FM854-E1, FM855-E1, TM265 and TM421.
    // Query whether the RGB camera supports auto exposure, auto white balance, auto gain, etc.
    // These features cannot be saved to storage, but they are enabled by default.
    //********************* These features are enabled by default and need not be set **************************
    bool hasAUTOEXPOSURE, hasAUTOGAIN, hasAUTOAWB;
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_EXPOSURE, &hasAUTOEXPOSURE));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_AWB, &hasAUTOAWB));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_GAIN, &hasAUTOGAIN));
    if (hasAUTOEXPOSURE && setParameters) {
        ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_EXPOSURE, true)); // turn on AEC (auto exposure)
    }
    if (hasAUTOAWB && setParameters) {
        ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_AWB, true)); // turn on AWB (auto white balance)
    }
    if (hasAUTOGAIN && setParameters) {
        ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_RGB_CAM, TY_BOOL_AUTO_GAIN, true)); // turn on AGC (auto gain); only the TM265 supports this feature
    }
    //********************* These features are enabled by default and need not be set **************************

    // Query which RGB features are supported
    //********************* These features can be saved to storage and need not be set here **************************
    bool hasRGB_ANALOG_GAIN, hasRGB_R_GAIN, hasRGB_G_GAIN, hasRGB_B_GAIN, hasRGB_EXPOSURE_TIME, hasRGB_AE_TARGET_V;
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, &hasRGB_ANALOG_GAIN));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_R_GAIN, &hasRGB_R_GAIN));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_G_GAIN, &hasRGB_G_GAIN));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_B_GAIN, &hasRGB_B_GAIN));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_EXPOSURE_TIME, &hasRGB_EXPOSURE_TIME));
    if (hasRGB_ANALOG_GAIN && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_ANALOG_GAIN, 2)); // RGB analog gain; only stereo cameras such as FM854 and FM855 support it
    }
    if (hasRGB_R_GAIN && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_R_GAIN, 130)); // RGB digital gain, R channel
    }
    if (hasRGB_G_GAIN && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_G_GAIN, 80)); // RGB digital gain, G channel
    }
    if (hasRGB_B_GAIN && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_B_GAIN, 150)); // RGB digital gain, B channel
    }
    if (hasRGB_EXPOSURE_TIME && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_EXPOSURE_TIME, 300)); // RGB exposure time; every camera with an RGB module supports this, only the range differs
    }
    //********************* These features can be saved to storage and need not be set here **************************
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_AE_TARGET_V, &hasRGB_AE_TARGET_V));
    if (hasRGB_AE_TARGET_V && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_RGB_CAM, TY_INT_AE_TARGET_V, 3500)); // RGB auto-exposure target brightness, range (0, 4000)
    }

    // Try to enable the depth camera
    LOGD("=== Configure components, open depth cam");
    int32_t componentIDs = TY_COMPONENT_DEPTH_CAM;
    ASSERT_OK( TYEnableComponents(hDevice, componentIDs) );

    // Set the depth image resolution
    if (setParameters) {
        LOGD("=== Configure feature, set depth resolution");
        // Method 1: set the resolution directly
        //ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, TY_IMAGE_MODE_DEPTH16_640x480));
        // Method 2: enumerate the image modes the camera supports and pick a resolution by image width
        if (TY_COMPONENT_DEPTH_CAM) {
            std::vector<TY_ENUM_ENTRY> image_mode_list;
            TY_STATUS status = TY_STATUS_OK;
            status = get_feature_enum_list(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, image_mode_list);
            for (int idx = 0; idx < image_mode_list.size(); idx++) {
                TY_ENUM_ENTRY &entry = image_mode_list[idx];
                // try to select a resolution
                if (TYImageWidth(entry.value) == 640) {
                    LOGD("Select Depth Image Mode: %s", entry.description);
                    int err = TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_IMAGE_MODE, entry.value);
                    ASSERT(err == TY_STATUS_OK || err == TY_STATUS_NOT_PERMITTED);
                    status = TYEnableComponents(hDevice, TY_COMPONENT_DEPTH_CAM);
                    break;
                }
            }
        }
    }

    // Read the depth camera intrinsics and calibration data.
    // TY_STRUCT_CAM_CALIB_DATA holds the intrinsics at the camera's maximum resolution;
    // TY_STRUCT_CAM_INTRINSIC holds the intrinsics at the current resolution.
    LOGD("=== Get depth intrinsic");
    ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_INTRINSIC, &intri_depth, sizeof(intri_depth)));
    LOGD("=== Read depth calib data");
    ASSERT_OK(TYGetStruct(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_CALIB_DATA, &cb_data.depth_calib, sizeof(cb_data.depth_calib)));

    // You can set TOF camera features here; the settings below apply to TOF cameras
    //*************************************************
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_STRUCT_CAM_DISTORTION, &isTof)); // determine whether this is a TOF camera
    if (isTof && setParameters) {
        //********************* These features can be saved to storage ****************************
        // Set the channel; in multi-camera setups different cameras can use different channels
        int channel = 0; // channel 0, 1, 2, 3, ...
        LOGD("Set TOF_CHANNEL %d", channel);
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_CHANNEL, channel));

        // Set the image quality mode
        int quality = 2; // quality modes: 1, 2, 4; TM26X only has Basic and Medium, while TL430/TM421 have Medium and High
        LOGD("Set DEPTH_QUALITY %d", quality);
        ASSERT_OK(TYSetEnum(hDevice, TY_COMPONENT_DEPTH_CAM, TY_ENUM_DEPTH_QUALITY, quality));

        // Set the intensity confidence threshold
        int modulation = 640; // pixels below this threshold are excluded from depth computation, i.e. their depth is set to 0; range (0, 65535)
        LOGD("Set TOF_MODULATION_THRESHOLD %d", modulation);
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_MODULATION_THRESHOLD, modulation));

        // Set the flying-pixel filter threshold
        int filter = 0; // the smaller the threshold, the more flying pixels are filtered out
        LOGD("Set FILTER_THRESHOLD %d", filter);
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_FILTER_THRESHOLD, filter));

        // Set the jitter filter threshold
        bool hasJITTER_THRESHOLD = true;
        ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_JITTER_THRESHOLD, &hasJITTER_THRESHOLD));
        if (hasJITTER_THRESHOLD && setParameters) {
            int jitter = 6; // range (1, 10); larger values filter less of the jittery depth data at edges; increase it for black or other low-reflectivity materials
            LOGD("Set TOF_JITTER_THRESHOLD %d", jitter);
            ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_TOF_JITTER_THRESHOLD, jitter));
        }

        // Filter settings
        // set TY_INT_MAX_SPECKLE_SIZE
        bool hasMAX_SPECKLE_SIZE = true;
        ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_SIZE, &hasMAX_SPECKLE_SIZE));
        if (hasMAX_SPECKLE_SIZE && setParameters) {
            int speckle_size = 50; // range (0, 200); blobs smaller than this area are filtered out
            LOGD("Set MAX_SPECKLE_SIZE %d", speckle_size);
            ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_SIZE, speckle_size));
        }
        // set TY_INT_MAX_SPECKLE_DIFF
        bool hasMAX_SPECKLE_DIFF = true;
        ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_DIFF, &hasMAX_SPECKLE_DIFF));
        if (hasMAX_SPECKLE_DIFF && setParameters) {
            int speckle_diff = 200; // range (100, 500); neighboring pixels whose disparity differs by more than this are treated as noise
            LOGD("Set MAX_SPECKLE_DIFF %d", speckle_diff);
            ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_DEPTH_CAM, TY_INT_MAX_SPECKLE_DIFF, speckle_diff));
        }
        //********************* These features can be saved to storage ****************************

        // Set TY_BOOL_TOF_ANTI_INTERFERENCE, e.g. to suppress interference from objects beyond 3 m; adjust anti-interference first; only the TM26X has this feature
        bool hasANTI_INTERFERENCE = true;
        ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEPTH_CAM, TY_BOOL_TOF_ANTI_INTERFERENCE, &hasANTI_INTERFERENCE));
        if (hasANTI_INTERFERENCE && setParameters) {
            LOGD("Set TY_BOOL_TOF_ANTI_INTERFERENCE ");
            ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_DEPTH_CAM, TY_BOOL_TOF_ANTI_INTERFERENCE, true));
        }
    }

    //******************** The features below exist only on stereo cameras ******************************
    // Set the analog gain, digital gain and exposure of the left/right IR cameras
    // Query which left/right IR features are supported
    bool hasIR_ANALOG_GAIN, hasIR_GAIN, hasIR_EXPOSURE_TIME, hasIR_HDR;
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_ANALOG_GAIN, &hasIR_ANALOG_GAIN));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_GAIN, &hasIR_GAIN));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_EXPOSURE_TIME, &hasIR_EXPOSURE_TIME));
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_BOOL_HDR, &hasIR_HDR));
    if (hasIR_ANALOG_GAIN && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_ANALOG_GAIN, 2)); // left/right IR analog gain
        if (!isTof) {
            ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_ANALOG_GAIN, 2));
        }
    }
    if (hasIR_GAIN && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_GAIN, 32)); // left/right IR digital gain
        if (!isTof) {
            ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_GAIN, 32));
        }
    }
    if (hasIR_EXPOSURE_TIME && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_INT_EXPOSURE_TIME, 500)); // IR exposure time
        if (!isTof) {
            ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_INT_EXPOSURE_TIME, 500));
        }
    }
    if (hasIR_HDR && setParameters) {
        ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_IR_CAM_LEFT, TY_BOOL_HDR, true));
        ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_IR_CAM_RIGHT, TY_BOOL_HDR, true)); // enable HDR
    }

    // Set the laser power; normally it does not need to be changed unless the depth image is overexposed
    bool hasLASER_POWER;
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_INT_LASER_POWER, &hasLASER_POWER));
    if (hasLASER_POWER && setParameters) {
        ASSERT_OK(TYSetInt(hDevice, TY_COMPONENT_LASER, TY_INT_LASER_POWER, 100)); // range (0, 100)
    }
    //******************** The features above exist only on stereo cameras ******************************

    // Query the required buffer size
    LOGD("=== Prepare image buffer");
    uint32_t frameSize;
    ASSERT_OK( TYGetFrameBufferSize(hDevice, &frameSize) );
    LOGD("- Get size of framebuffer, %d", frameSize);

    // Allocate two buffers and enqueue them
    LOGD(" - Allocate & enqueue buffers");
    char* frameBuffer[2];
    frameBuffer[0] = new char[frameSize];
    frameBuffer[1] = new char[frameSize];
    LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[0], frameSize);
    ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[0], frameSize) );
    LOGD(" - Enqueue buffer (%p, %d)", frameBuffer[1], frameSize);
    ASSERT_OK( TYEnqueueBuffer(hDevice, frameBuffer[1], frameSize) );

    // Register the event callback to catch device-offline events
    bool device_offline = false;
    LOGD("Register event callback");
    ASSERT_OK(TYRegisterEventCallback(hDevice, eventCallback, &device_offline));

    // Trigger mode setup
    bool hasTrigger;
    ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &hasTrigger));
    if (hasTrigger) {
        TY_TRIGGER_PARAM trigger;
        //LOGD("Disable trigger mode");
        //trigger.mode = TY_TRIGGER_MODE_OFF; // continuous capture mode
        LOGD("=== enable trigger mode");
        trigger.mode = TY_TRIGGER_MODE_SLAVE; // soft-trigger / hardware-trigger mode
        ASSERT_OK(TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_TRIGGER_PARAM, &trigger, sizeof(trigger)));
        bool hasDI0_WORKMODE;
        ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_DI0_WORKMODE, &hasDI0_WORKMODE));
        //if (hasDI0_WORKMODE)
        //{
        //    // Debounce for hardware trigger mode
        //    TY_DI_WORKMODE di_wm;
        //    di_wm.mode = TY_DI_PE_INT;
        //    di_wm.int_act = TY_DI_INT_TRIG_CAP;
        //    uint32_t time_hw = 10;  // ms; hardware filtering, level signals shorter than this are filtered out
        //    uint32_t time_sw = 200; // ms; software filtering, for continuous high-frequency triggering the later trigger within this period is filtered out
        //    di_wm.reserved[0] = time_hw | (time_sw << 16);
        //    ASSERT_OK(TYSetStruct(hDevice, TY_COMPONENT_DEVICE, TY_STRUCT_DI0_WORKMODE, &di_wm, sizeof(di_wm)));
        //}
    }

    // For network cameras, enable packet resend
    LOGD("=== resend: %d", resend);
    if (resend) {
        bool hasResend;
        ASSERT_OK(TYHasFeature(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_GVSP_RESEND, &hasResend));
        if (hasResend) {
            LOGD("=== Open resend");
            ASSERT_OK(TYSetBool(hDevice, TY_COMPONENT_DEVICE, TY_BOOL_GVSP_RESEND, true));
        } else {
            LOGD("=== Not support feature TY_BOOL_GVSP_RESEND");
        }
    }

    // Start capturing
    LOGD("=== Start capture");
    ASSERT_OK( TYStartCapture(hDevice) );

    // Initialize the callback data
    cb_data.index = 0;
    cb_data.hDevice = hDevice;
    cb_data.saveOneFramePoint3d = false;
    cb_data.fileIndex = 0;
    cb_data.intri_depth = &intri_depth;
    cb_data.intri_color = &intri_color;
    float scale_unit = 1.;
    TYGetFloat(hDevice, TY_COMPONENT_DEPTH_CAM, TY_FLOAT_SCALE_UNIT, &scale_unit);
    cb_data.scale_unit = scale_unit;
    cb_data.isTof = isTof;

    // Fetch frames in a loop
    LOGD("=== While loop to fetch frame");
    TY_FRAME_DATA frame;
    bool exit_main = false;
    int index = 0;
    while (!exit_main) {
        int key = cv::waitKey(1);
        switch (key & 0xff) {
        case 0xff:
            break;
        case 'q':
            exit_main = true;
            break;
        case 's':
            cb_data.saveOneFramePoint3d = true; // press 's' in an image window to save one point cloud frame
            break;
        default:
            LOGD("Pressed key %d", key);
        }
        auto timeTrigger = std::chrono::steady_clock::now();
        // Send one soft trigger
        while (TY_STATUS_BUSY == TYSendSoftTrigger(hDevice));
        // Fetch a frame; the timeout here is 10 s
        int err = TYFetchFrame(hDevice, &frame, 10000);
        // Read the image timestamp
        LOGD("=== Time Stamp (%" PRIu64 ")", frame.image[0].timestamp);
        time_t tick = (time_t)(frame.image[0].timestamp / 1000000);
        struct tm tm;
        char s[100];
        tm = *localtime(&tick);
        strftime(s, sizeof(s), "%Y-%m-%d %H:%M:%S", &tm);
        int milliseconds = (int)((frame.image[0].timestamp % 1000000) / 1000);
        char ms_str[5];
        sprintf(ms_str, ".%d", milliseconds);
        strcat(s, ms_str);
        LOGD("===Time Stamp %d:%s\n", (int)tick, s);
        if (device_offline) {
            LOGI("Found device offline");
            break;
        }
        if (err != TY_STATUS_OK) {
            LOGD("... Drop one frame");
            continue;
        }
        if (err == TY_STATUS_OK) {
            LOGD("Get frame %d", ++index);
            int fps = get_fps();
            if (fps > 0) {
                LOGI("***************************fps: %d", fps);
            }
        }
        frameHandler(&frame, &cb_data);
        auto timeGetFrame = std::chrono::steady_clock::now();
        auto duration = std::chrono::duration_cast<std::chrono::microseconds>(timeGetFrame - timeTrigger);
        LOGI("*******FetchFrame spend Time : %lld", (long long)duration.count());
    }

    ASSERT_OK( TYStopCapture(hDevice) );
    ASSERT_OK( TYCloseDevice(hDevice) );
    ASSERT_OK( TYCloseInterface(hIface) );
    ASSERT_OK( TYDeinitLib() );
    delete[] frameBuffer[0];
    delete[] frameBuffer[1];
    LOGD("=== Main done!");
    return 0;
}
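Assuming the sample is built under the name it prints in its own usage string (SimpleView_Callback), a typical run might look like:

SimpleView_Callback -id <camera-ID>

While one of the image windows has focus, press 's' to save one point cloud frame (points-N.xyz, or p3dtocolor-N.xyz when MAP_DEPTH_TO_COLOR is 1) and 'q' to quit. The device is configured in TY_TRIGGER_MODE_SLAVE, so each loop iteration sends a soft trigger before fetching a frame.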