組件關系
整體流程:
camxhal3.cpp:704 open() → camxhal3.cpp:1423 configure_streams() → chxextensionmodule.cpp:2810 InitializeOverrideSession → chxusecaseutils.cpp:850 GetMatchingUsecase() → chxadvancedcamerausecase.cpp:4729 Initialize() → chxadvancedcamerausecase.cpp:5757 SelectFeatures()
usecase ID匹配邏輯
GetMatchingUsecase 函數根據輸入的相機信息(pCamInfo)和流配置(pStreamConfig)返回最適合的用例ID(UsecaseId)。
– UsecaseSelector::GetMatchingUsecase
代碼邏輯總結
- **優先級最高的用例**:
  - QuadCFA用例(特定傳感器和流配置)
  - 超級慢動作模式
- **多攝像頭用例**:
  - 如果啟用VR模式,選擇MultiCameraVR
  - 否則選擇MultiCamera
- **單攝像頭用例**:
  - 根據流數量(2/3/4)選擇不同的用例
  - 考慮多種功能標志:ZSL、GPU處理、MFNR、EIS等
- **特殊用例**:最后檢查是否為手電筒小部件用例
- **默認用例**:如果沒有匹配其他條件,使用Default用例
關鍵判斷條件
流數量(num_streams)
攝像頭類型和數量
各種功能模塊的啟用狀態(通過ExtensionModule獲取)
特定的流配置檢查(通過IsXXXStreamConfig函數)
/// @brief  Pick the usecase ID that best matches the incoming stream configuration.
///
/// Priority order (as implemented below):
///   1. QuadCFA (specific sensor + 2-stream config)
///   2. SuperSlowMotionFRC (selected purely by operation_mode)
///   3. MultiCameraVR / MultiCamera (more than one physical camera)
///   4. Single-camera selection keyed off num_streams (2/3/4)
///   5. Torch widget override, then Default.
///
/// @param  pCamInfo       Logical camera information (may be NULL for some checks)
/// @param  pStreamConfig  Framework stream configuration
/// @return The selected UsecaseId (UsecaseId::Default when nothing matches)
UsecaseId UsecaseSelector::GetMatchingUsecase(
    const LogicalCameraInfo*        pCamInfo,
    camera3_stream_configuration_t* pStreamConfig)
{
    UsecaseId usecaseId  = UsecaseId::Default;
    UINT32    VRDCEnable = ExtensionModule::GetInstance()->GetDCVRMode();

    // QuadCFA sensor with a 2-stream config on a default logical camera.
    // QuadCFAMatchingUsecase() only accepts when the snapshot size exceeds the
    // sensor binning size; otherwise we fall through to the regular ZSL paths.
    if ((pStreamConfig->num_streams == 2) && IsQuadCFASensor(pCamInfo, NULL) &&
        (LogicalCameraType_Default == pCamInfo->logicalCameraType))
    {
        if (TRUE == QuadCFAMatchingUsecase(pCamInfo, pStreamConfig))
        {
            usecaseId = UsecaseId::QuadCFA;
            CHX_LOG_CONFIG("Quad CFA usecase selected");
            return usecaseId;
        }
    }

    // Super-slow-motion is chosen from the operation mode alone.
    if (pStreamConfig->operation_mode == StreamConfigModeSuperSlowMotionFRC)
    {
        usecaseId = UsecaseId::SuperSlowMotionFRC;
        CHX_LOG_CONFIG("SuperSlowMotionFRC usecase selected");
        return usecaseId;
    }

    // Reset the per-configure selection flags before re-evaluating.
    VideoEISV2Usecase   = 0;
    VideoEISV3Usecase   = 0;
    GPURotationUsecase  = FALSE;
    GPUDownscaleUsecase = FALSE;

    if ((NULL != pCamInfo) && (pCamInfo->numPhysicalCameras > 1) && VRDCEnable)
    {
        // More than one physical camera with VR dual-camera mode enabled.
        CHX_LOG_CONFIG("MultiCameraVR usecase selected");
        usecaseId = UsecaseId::MultiCameraVR;
    }
    else if ((NULL != pCamInfo) && (pCamInfo->numPhysicalCameras > 1) && (pStreamConfig->num_streams > 1))
    {
        // More than one physical camera, plain multi-camera usecase.
        CHX_LOG_CONFIG("MultiCamera usecase selected");
        usecaseId = UsecaseId::MultiCamera;
    }
    else
    {
        SnapshotStreamConfig snapshotStreamConfig;
        CHISTREAM**          ppChiStreams = reinterpret_cast<CHISTREAM**>(pStreamConfig->streams);

        // Single-camera path: selection is keyed off the stream count.
        switch (pStreamConfig->num_streams)
        {
            case 2:
                if (TRUE == IsRawJPEGStreamConfig(pStreamConfig))
                {
                    CHX_LOG_CONFIG("Raw + JPEG usecase selected");
                    usecaseId = UsecaseId::RawJPEG;
                    break;
                }
                // ZSL preview when not disabled by override settings.
                if (FALSE == m_pExtModule->DisableZSL())
                {
                    if (TRUE == IsPreviewZSLStreamConfig(pStreamConfig))
                    {
                        usecaseId = UsecaseId::PreviewZSL;
                        CHX_LOG_CONFIG("ZSL usecase selected");
                    }
                }
                // The GPU flags do not change usecaseId; they steer later pipeline setup.
                if (TRUE == m_pExtModule->UseGPURotationUsecase())
                {
                    CHX_LOG_CONFIG("GPU Rotation usecase flag set");
                    GPURotationUsecase = TRUE;
                }
                if (TRUE == m_pExtModule->UseGPUDownscaleUsecase())
                {
                    CHX_LOG_CONFIG("GPU Downscale usecase flag set");
                    GPUDownscaleUsecase = TRUE;
                }
                // Multi-frame noise reduction, when enabled and the config qualifies.
                if (TRUE == m_pExtModule->EnableMFNRUsecase())
                {
                    if (TRUE == MFNRMatchingUsecase(pStreamConfig))
                    {
                        usecaseId = UsecaseId::MFNR;
                        CHX_LOG_CONFIG("MFNR usecase selected");
                    }
                }
                // High frame rate without 3A (note: API name spelling is the vendor's).
                if (TRUE == m_pExtModule->EnableHFRNo3AUsecas())
                {
                    CHX_LOG_CONFIG("HFR without 3A usecase flag set");
                    HFRNo3AUsecase = TRUE;
                }
                break;

            case 3:
                // Latch the EIS (electronic image stabilization) override flags.
                VideoEISV2Usecase = m_pExtModule->EnableEISV2Usecase();
                VideoEISV3Usecase = m_pExtModule->EnableEISV3Usecase();

                if (FALSE == m_pExtModule->DisableZSL() && (TRUE == IsPreviewZSLStreamConfig(pStreamConfig)))
                {
                    usecaseId = UsecaseId::PreviewZSL;
                    CHX_LOG_CONFIG("ZSL usecase selected");
                }
                else if (TRUE == IsRawJPEGStreamConfig(pStreamConfig) && FALSE == m_pExtModule->DisableZSL())
                {
                    CHX_LOG_CONFIG("Raw + JPEG usecase selected");
                    usecaseId = UsecaseId::RawJPEG;
                }
                else if ((FALSE == IsVideoEISV2Enabled(pStreamConfig)) && (FALSE == IsVideoEISV3Enabled(pStreamConfig)) &&
                         (TRUE == IsVideoLiveShotConfig(pStreamConfig)) && (FALSE == m_pExtModule->DisableZSL()))
                {
                    CHX_LOG_CONFIG("Video With Liveshot, ZSL usecase selected");
                    usecaseId = UsecaseId::VideoLiveShot;
                }

                // BPS realtime engine with either EIS version enabled: the (pseudo)
                // PreviewZSL ID routes selection into the feature2 selector.
                if ((NULL != pCamInfo) &&
                    (RealtimeEngineType_BPS == pCamInfo->ppDeviceInfo[0]->pDeviceConfig->realtimeEngine))
                {
                    if ((TRUE == IsVideoEISV2Enabled(pStreamConfig)) || (TRUE == IsVideoEISV3Enabled(pStreamConfig)))
                    {
                        CHX_LOG_CONFIG("BPS Camera EIS V2 = %d, EIS V3 = %d",
                                       IsVideoEISV2Enabled(pStreamConfig),
                                       IsVideoEISV3Enabled(pStreamConfig));
                        usecaseId = UsecaseId::PreviewZSL;
                    }
                }
                break;

            case 4:
                // HEIC snapshot plus a raw stream is treated like Raw + JPEG.
                GetSnapshotStreamConfiguration(pStreamConfig->num_streams, ppChiStreams, snapshotStreamConfig);
                if ((SnapshotStreamType::HEIC == snapshotStreamConfig.type) &&
                    (NULL != snapshotStreamConfig.pRawStream))
                {
                    CHX_LOG_CONFIG("Raw + HEIC usecase selected");
                    usecaseId = UsecaseId::RawJPEG;
                }
                break;

            default:
                CHX_LOG_CONFIG("Default usecase selected");
                break;
        }
    }

    // Torch widget overrides whatever was selected above.
    if (TRUE == ExtensionModule::GetInstance()->IsTorchWidgetUsecase())
    {
        CHX_LOG_CONFIG("Torch widget usecase selected");
        usecaseId = UsecaseId::Torch;
    }

    CHX_LOG_INFO("usecase ID:%d",usecaseId);
    return usecaseId;
}
/// @brief Usecase identifying enums
/// Returned by UsecaseSelector::GetMatchingUsecase() and checked throughout
/// AdvancedCameraUsecase to steer feature selection and pipeline building.
enum class UsecaseId
{
    NoMatch            = 0,   ///< No usecase matched
    Default            = 1,   ///< Fallback when no specific usecase applies
    Preview            = 2,   ///< Preview
    PreviewZSL         = 3,   ///< Preview with zero-shutter-lag snapshot
    MFNR               = 4,   ///< Multi-frame noise reduction
    MFSR               = 5,   ///< MFSR
    MultiCamera        = 6,   ///< Multiple physical cameras
    QuadCFA            = 7,   ///< Quad color-filter-array sensor
    RawJPEG            = 8,   ///< Raw + JPEG (also used for Raw + HEIC)
    MultiCameraVR      = 9,   ///< Multi-camera VR mode
    Torch              = 10,  ///< Torch widget
    YUVInBlobOut       = 11,  ///< YUV in, blob out
    VideoLiveShot      = 12,  ///< Snapshot while recording video
    SuperSlowMotionFRC = 13,  ///< Super-slow-motion frame rate conversion
    MaxUsecases        = 14,  ///< Count of usecase IDs
};
Feature 匹配
feature的定義
/// @brief Advance feature types
/// Bit-mask values that are OR-ed together into the advance-feature mask
/// (see ExtensionModule::GetAdvanceFeatureMask()); SelectFeatures() tests
/// individual bits of this mask to decide which features to instantiate.
enum AdvanceFeatureType
{
AdvanceFeatureNone = 0x0, ///< mask for none features
AdvanceFeatureZSL = 0x1, ///< mask for feature ZSL (zero shutter lag)
AdvanceFeatureMFNR = 0x2, ///< mask for feature MFNR (multi-frame noise reduction)
AdvanceFeatureHDR = 0x4, ///< mask for feature HDR(AE_Bracket)
AdvanceFeatureSWMF = 0x8, ///< mask for feature SWMF (software multi-frame)
AdvanceFeatureMFSR = 0x10, ///< mask for feature MFSR
AdvanceFeatureQCFA = 0x20, ///< mask for feature QuadCFA
AdvanceFeature2Wrapper = 0x40, ///< mask for feature2 wrapper
AdvanceFeatureCountMax = AdvanceFeature2Wrapper ///< Max of advance feature mask (highest single-bit value)
};
/// @brief  Run feature selection for the usecases that AdvancedCameraUsecase
///         builds itself; all other usecase IDs are left untouched.
/// @param  pStreamConfig  Framework stream configuration
/// @return CDKResultSuccess (selection itself reports no failure here)
CDKResult AdvancedCameraUsecase::FeatureSetup(camera3_stream_configuration_t* pStreamConfig)
{
    CDKResult result = CDKResultSuccess;

    // Every qualifying usecase funnels into the same feature selection.
    switch (m_usecaseId)
    {
        case UsecaseId::PreviewZSL:
        case UsecaseId::YUVInBlobOut:
        case UsecaseId::VideoLiveShot:
        case UsecaseId::QuadCFA:
        case UsecaseId::RawJPEG:
        case UsecaseId::MultiCamera:
            SelectFeatures(pStreamConfig);
            break;
        default:
            break;
    }

    return result;
}
// START of OEM to change section
/// @brief  Decide which advance features to run for the current stream config.
///         (OEM to change.) Reads the advance-feature mask from the
///         ExtensionModule, then, per physical camera, instantiates the
///         Feature2Wrapper when SWMF/HDR/feature2-wrapper bits are set.
/// @param  pStreamConfig  Framework stream configuration
VOID AdvancedCameraUsecase::SelectFeatures(camera3_stream_configuration_t* pStreamConfig)
{
    // OEM to change
    INT32  index                  = 0;
    UINT32 enabledAdvanceFeatures = 0;

    // Enabled advance-feature bit mask from the override settings.
    enabledAdvanceFeatures = ExtensionModule::GetInstance()->GetAdvanceFeatureMask();
    CHX_LOG("SelectFeatures(), enabled feature mask:%x", enabledAdvanceFeatures);

    // FastShutter mode forces SWMF + MFNR regardless of the settings mask.
    if (StreamConfigModeFastShutter == ExtensionModule::GetInstance()->GetOpMode(m_cameraId))
    {
        enabledAdvanceFeatures = AdvanceFeatureSWMF|AdvanceFeatureMFNR;
    }
    CHX_LOG("SelectFeatures(), enabled feature mask:%x", enabledAdvanceFeatures);

    for (UINT32 physicalCameraIndex = 0 ; physicalCameraIndex < m_numOfPhysicalDevices ; physicalCameraIndex++)
    {
        index = 0;

        // Feature selection only applies to these usecase types.
        if ((UsecaseId::PreviewZSL    == m_usecaseId) ||
            (UsecaseId::MultiCamera   == m_usecaseId) ||
            (UsecaseId::QuadCFA       == m_usecaseId) ||
            (UsecaseId::VideoLiveShot == m_usecaseId) ||
            (UsecaseId::RawJPEG       == m_usecaseId))
        {
            // MFNR: turn on offline noise reprocessing and FD stream buffers.
            if (AdvanceFeatureMFNR == (enabledAdvanceFeatures & AdvanceFeatureMFNR))
            {
                m_isOfflineNoiseReprocessEnabled =
                    ExtensionModule::GetInstance()->EnableOfflineNoiseReprocessing();
                m_isFDstreamBuffersNeeded = TRUE;
            }

            // SWMF, HDR or the feature2 wrapper all route through Feature2Wrapper.
            if ((AdvanceFeatureSWMF == (enabledAdvanceFeatures & AdvanceFeatureSWMF)) ||
                (AdvanceFeatureHDR  == (enabledAdvanceFeatures & AdvanceFeatureHDR)) ||
                ((AdvanceFeature2Wrapper == (enabledAdvanceFeatures & AdvanceFeature2Wrapper))))
            {
                Feature2WrapperCreateInputInfo feature2WrapperCreateInputInfo;
                feature2WrapperCreateInputInfo.pUsecaseBase          = this;
                feature2WrapperCreateInputInfo.pMetadataManager      = m_pMetadataManager;
                feature2WrapperCreateInputInfo.pFrameworkStreamConfig =
                    reinterpret_cast<ChiStreamConfigInfo*>(pStreamConfig);

                // Clear every stream's pHalStream before handing the config over.
                for (UINT32 i = 0; i < feature2WrapperCreateInputInfo.pFrameworkStreamConfig->numStreams; i++)
                {
                    feature2WrapperCreateInputInfo.pFrameworkStreamConfig->pChiStreams[i]->pHalStream = NULL;
                }

                // The wrapper is created once and shared across physical cameras.
                if (NULL == m_pFeature2Wrapper)
                {
                    if (TRUE == IsMultiCameraUsecase())
                    {
                        // Without a fusion stream the wrapper outputs YUV.
                        if (FALSE == IsFusionStreamIncluded(pStreamConfig))
                        {
                            feature2WrapperCreateInputInfo.inputOutputType =
                                static_cast<UINT32>(InputOutputType::YUV_OUT);
                        }

                        // Register the internal RDI and FD input streams per device.
                        for (UINT8 streamIndex = 0; streamIndex < m_numOfPhysicalDevices; streamIndex++)
                        {
                            feature2WrapperCreateInputInfo.internalInputStreams.push_back(m_pRdiStream[streamIndex]);
                            feature2WrapperCreateInputInfo.internalInputStreams.push_back(m_pFdStream[streamIndex]);
                        }
                        m_isFDstreamBuffersNeeded = TRUE;
                    }

                    m_pFeature2Wrapper = Feature2Wrapper::Create(&feature2WrapperCreateInputInfo,
                                                                 physicalCameraIndex);
                }

                m_enabledFeatures[physicalCameraIndex][index] = m_pFeature2Wrapper;
                index++;
            }
        }

        m_enabledFeaturesCount[physicalCameraIndex] = index;
    }

    if (m_enabledFeaturesCount[0] > 0)
    {
        // Default the active feature to the first one enabled on camera 0.
        if (NULL == m_pActiveFeature)
        {
            m_pActiveFeature = m_enabledFeatures[0][0];
        }
        CHX_LOG_INFO("num features selected:%d, FeatureType for preview:%d",
                     m_enabledFeaturesCount[0], m_pActiveFeature->GetFeatureType());
    }
    else
    {
        CHX_LOG_INFO("No features selected");
    }

    // The active feature also becomes the last-snapshot feature.
    m_pLastSnapshotFeature = m_pActiveFeature;
}
Usecase 匹配邏輯
- 如果是PreviewZSL、MultiCamera、QuadCFA等模式,直接ConfigureStream和BuildUsecase
- 其他模式-通過DefaultMatchingUsecase重新匹配
/// @brief  Configure the usecase for the selected usecase ID: usecases owned
///         by AdvancedCameraUsecase get their streams configured and the
///         usecase built directly; anything else is re-matched through the
///         default usecase selector.
/// @param  pCameraInfo    Camera info
/// @param  pStreamConfig  Stream configuration
/// @return CDKResultSuccess (stream configuration reports no failure here)
CDKResult AdvancedCameraUsecase::SelectUsecaseConfig(
    LogicalCameraInfo*              pCameraInfo,
    camera3_stream_configuration_t* pStreamConfig)
{
    CDKResult result = CDKResultSuccess;

    if ((UsecaseId::PreviewZSL    == m_usecaseId) ||
        (UsecaseId::YUVInBlobOut  == m_usecaseId) ||
        (UsecaseId::VideoLiveShot == m_usecaseId) ||
        (UsecaseId::MultiCamera   == m_usecaseId) ||
        (UsecaseId::QuadCFA       == m_usecaseId) ||
        (UsecaseId::RawJPEG       == m_usecaseId))
    {
        ConfigureStream(pCameraInfo, pStreamConfig);
        BuildUsecase(pCameraInfo, pStreamConfig);
    }
    else
    {
        CHX_LOG("Initializing using default usecase matching");
        m_pChiUsecase = UsecaseSelector::DefaultMatchingUsecase(pStreamConfig);
    }

    // BUG FIX: the original declared a CDKResult return type but fell off the
    // end of the function without returning — undefined behavior in C++.
    return result;
}
PreviewZSL 模式下
ZSL模式下,pAdvancedUsecase 賦值為UsecaseZSL,
BuildUsecase函數調用CloneUsecase來根據配置選擇克隆ZSL調優用例或高級用例,
一般走pAdvancedUsecase
AdvancedCameraUsecase::Initialize {}//這里 ZSL_USECASE_NAME = "UsecaseZSL"pAdvancedUsecase = GetXMLUsecaseByName(ZSL_USECASE_NAME);}
- BuildUsecase
-
根據feature添加pipeline
-
根據feature克隆并配置用例模板
-
設置管道與會話、相機的映射關系(Map camera ID and session ID for the pipeline and prepare for pipeline/session creation)
-
為feature覆蓋流配置
-
AdvancedCameraUsecase::BuildUsecase(){// 根據配置選擇克隆ZSL調優用例或高級用例if (static_cast<UINT>(UsecaseZSLTuningId) == ExtensionModule::GetInstance()->OverrideUseCase()){m_pClonedUsecase = UsecaseSelector::CloneUsecase(pZslTuningUsecase, totalPipelineCount, pipelineIDMap);}else{m_pClonedUsecase = UsecaseSelector::CloneUsecase(pAdvancedUsecase, totalPipelineCount, pipelineIDMap);}
}
default模式下
調用流程
m_pChiUsecase = UsecaseSelector::DefaultMatchingUsecase(pStreamConfig);
--ChiUsecase* GetDefaultMatchingUsecase(camera3_stream_configuration_t* pStreamConfig)
---UsecaseSelector::DefaultMatchingUsecaseSelection(pStreamConfig)
----IsMatchingUsecase(pStreamConfig, pUsecase, &pruneSettings);
- DefaultMatchingUsecaseSelection
- 優先匹配Selected EISv3 usecase
- 其次匹配 Selected EISv2 usecase
- 根據pStreamConfig->num_streams匹配
extern "C" CAMX_VISIBILITY_PUBLIC ChiUsecase* UsecaseSelector::DefaultMatchingUsecaseSelection(camera3_stream_configuration_t* pStreamConfig)
{auto UsecaseMatches = [&pStreamConfig, &pruneSettings](const ChiUsecase* const pUsecase) -> BOOL{return IsMatchingUsecase(pStreamConfig, pUsecase, &pruneSettings);};if (pStreamConfig->num_streams <= ChiMaxNumTargets){else if (NULL == pSelectedUsecase){ //匹配EIS V3算法Usecaseif (TRUE == IsVideoEISV3Enabled(pStreamConfig)){
···if (TRUE == UsecaseMatches(&Usecases3Target[usecaseEIS3Id])){CHX_LOG("Selected EISv3 usecase");pSelectedUsecase = &Usecases3Target[usecaseEIS3Id];}}//匹配EIS V2算法Usecaseif ((TRUE == IsVideoEISV2Enabled(pStreamConfig)) && (NULL == pSelectedUsecase) &&(TRUE == UsecaseMatches(&Usecases3Target[UsecaseVideoEIS2PreviewEIS2Id]))){CHX_LOG("Selected EISv2 usecase");pSelectedUsecase = &Usecases3Target[UsecaseVideoEIS2PreviewEIS2Id];}// This if block is only for kamorta usecases where Preview & Video streams are presentif ((pStreamConfig->num_streams > 1) && (NULL == pSelectedUsecase)){// If both Preview and Video < 1080p then only Preview < Video and Preview >Video Scenario occursif ((numYUVStreams == 2) && (YUV0Height <= TFEMaxHeight && YUV0Width <= TFEMaxWidth) &&(YUV1Height <= TFEMaxHeight && YUV1Width <= TFEMaxWidth)){switch (pStreamConfig->num_streams){case 2:if (((YUV0Height * YUV0Width) < (YUV1Height * YUV1Width)) &&(TRUE == UsecaseMatches(&Usecases2Target[UsecaseVideo_PVLT1080p_PLTVId]))){pSelectedUsecase = &Usecases2Target[UsecaseVideo_PVLT1080p_PLTVId];}else if (TRUE == UsecaseMatches(&Usecases2Target[UsecaseVideo_PVLT1080p_PGTVId])){pSelectedUsecase = &Usecases2Target[UsecaseVideo_PVLT1080p_PGTVId];}break;case 3:if (TRUE == bJpegStreamExists){// JPEG is taking from RealTimeif (((YUV0Height * YUV0Width) < (YUV1Height * YUV1Width)) &&(TRUE == UsecaseMatches(&Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId]))){pSelectedUsecase = &Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId];}else if (TRUE == UsecaseMatches(&Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId])){pSelectedUsecase = &Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId];}}break;// For HEICcase 4:if (TRUE == bHEICStreamExists){if (((YUV0Height * YUV0Width) < (YUV1Height * YUV1Width)) &&(TRUE == UsecaseMatches(&Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId]))){pSelectedUsecase = 
&Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId];}else if (TRUE == UsecaseMatches(&Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId])){pSelectedUsecase = &Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId];}}break;default:break;}}}//如果上面匹配都失敗了,pStreamConfig->num_streams -1if (NULL == pSelectedUsecase){if ((StreamConfigModeQTIEISLookAhead == pStreamConfig->operation_mode) ||(StreamConfigModeQTIEISRealTime == pStreamConfig->operation_mode)){// EIS is disabled, ensure that operation_mode is also set accordinglypStreamConfig->operation_mode = 0;}numTargetsExpected = pStreamConfig->num_streams - 1;}for (/* Initialized outside*/; numTargetsExpected < ChiMaxNumTargets; numTargetsExpected++){pChiTargetUsecases = &PerNumTargetUsecases[numTargetsExpected];if (0 == pChiTargetUsecases->numUsecases){continue;}CHX_LOG_INFO("Considering %u usecases that have %u targets", pChiTargetUsecases->numUsecases, numTargetsExpected + 1);for (UINT i = 0; i < pChiTargetUsecases->numUsecases; i++){const ChiUsecase* pChiUsecaseDescriptor = &pChiTargetUsecases->pChiUsecases[i];CHX_LOG_INFO("pChiUsecaseDescriptor %s ", pChiUsecaseDescriptor->pUsecaseName);···if (TRUE == UsecaseMatches(pChiUsecaseDescriptor)){}
}
最后代碼匹配邏輯:
- pStreamConfig->num_streams -1
- 循環從PerNumTargetUsecases去找Usecase
pStreamConfig->num_streams - 1,這里減 1 是因為 PerNumTargetUsecases 數組是以「Target 數量 - 1」作為下標的:
比如 2 路流時,pStreamConfig->num_streams - 1 = 2 - 1 = 1,
PerNumTargetUsecases[1] 對應的是 {16, Usecases2Target},即含 2 個 Target 的用例集合(共 16 個)。
BOOL UsecaseSelector::IsMatchingUsecase
vendor/qcom/proprietary/chi-cdk/core/chiusecase/chxusecaseutils.cpp
主要匹配邏輯如下:
- IsMatchingVideo :比較是否是視頻模式
- IsMatchingFormat: 比較圖像格式
- IsMatchingDirection: 比較圖像流是輸入還是輸出
- IsMatchingDimension:比較圖片寬高是否符合要求
streamFormat 匹配格式
/// @brief Stream pixel formats used when matching CHI streams against targets.
typedef enum ChiStreamFormat
{
    ChiStreamFormatYCrCb420_SP  = 0x00000113, ///< YCrCb420_SP is mapped to ChiStreamFormatYCbCr420_888 with ZSL flags
    ChiStreamFormatRaw16        = 0x00000020, ///< Raw 16
    ChiStreamFormatBlob         = 0x00000021, ///< Carries data which does not have a standard image structure (e.g. JPEG)
    ChiStreamFormatImplDefined  = 0x00000022, ///< Format is up to the device-specific Gralloc implementation.
    ChiStreamFormatYCbCr420_888 = 0x00000023, ///< Efficient YCbCr/YCrCb 4:2:0 buffer layout, layout-independent
    ChiStreamFormatRawOpaque    = 0x00000024, ///< Raw Opaque
    ChiStreamFormatRaw10        = 0x00000025, ///< Raw 10
    ChiStreamFormatRaw12        = 0x00000026, ///< Raw 12
    ChiStreamFormatRaw64        = 0x00000027, ///< Raw 64
    ChiStreamFormatUBWCNV124R   = 0x00000028, ///< UBWCNV12-4R
    ChiStreamFormatNV12HEIF     = 0x00000116, ///< HEIF video YUV420 format
    ChiStreamFormatNV12YUVFLEX  = 0x00000125, ///< Flex NV12 YUV format with 1 batch
    ChiStreamFormatNV12UBWCFLEX = 0x00000126, ///< Flex NV12 UBWC format
    ChiStreamFormatY8           = 0x20203859, ///< Y 8
    ChiStreamFormatY16          = 0x20363159, ///< Y 16
    ChiStreamFormatP010         = 0x7FA30C0A, ///< P010
    ChiStreamFormatUBWCTP10     = 0x7FA30C09, ///< UBWCTP10
    ChiStreamFormatUBWCNV12     = 0x7FA30C06, ///< UBWCNV12
    ChiStreamFormatPD10         = 0x7FA30C08, ///< PD10
} CHISTREAMFORMAT; // BUG FIX: the excerpt ended with a bare '}' — a typedef'd enum
                   // requires its alias name and terminating ';' (same pattern as CHIBUFFERFORMAT).
BufferFormat匹配格式
/// @brief Buffer Format
/// Internal buffer formats a target may advertise; IsMatchingFormat() compares
/// a framework stream's format against a target's list of these values.
typedef enum ChiBufferFormat
{
    ChiFormatJpeg               = 0,                    ///< JPEG format.
    ChiFormatY8                 = 1,                    ///< Luma only, 8 bits per pixel.
    ChiFormatY16                = 2,                    ///< Luma only, 16 bits per pixel.
    ChiFormatYUV420NV12         = 3,                    ///< YUV 420 format as described by the NV12 fourcc.
    ChiFormatYUV420NV21         = 4,                    ///< YUV 420 format as described by the NV21 fourcc.
    ChiFormatYUV420NV16         = 5,                    ///< YUV 422 format as described by the NV16 fourcc
    ChiFormatBlob               = 6,                    ///< Any non image data
    ChiFormatRawYUV8BIT         = 7,                    ///< Packed YUV/YVU raw format. 16 bpp: 8 bits Y and 8 bits UV.
                                                        ///  U and V are interleaved as YUYV or YVYV.
    ChiFormatRawPrivate         = 8,                    ///< Private RAW formats where data is packed into a 64-bit word.
                                                        ///  8BPP: word holds 8 pixels p0-p7, p0 at LSB.
                                                        ///  10BPP: word holds 6 pixels p0-p5, top 4 bits zero, p0 at LSB.
                                                        ///  12BPP: word holds 5 pixels p0-p4, top 4 bits zero, p0 at LSB.
                                                        ///  14BPP: word holds 4 pixels p0-p3, top 8 bits zero, p0 at LSB.
    ChiFormatRawMIPI            = 9,                    ///< MIPI RAW formats based on the MIPI CSI-2 specification.
                                                        ///  8BPP: one byte per pixel, starting at LSB; width unrestricted.
                                                        ///  10BPP: 4 pixels in every 5 bytes; width must be a multiple of 4.
    ChiFormatRawMIPI10          = ChiFormatRawMIPI,     ///< By default 10-bit, ChiFormatRawMIPI10
    ChiFormatRawPlain16         = 10,                   ///< Plain16 RAW format: one pixel packed into two bytes,
                                                        ///  little endian. Not all bits may be used (RAW data is
                                                        ///  generally 8 or 10 bits per pixel); lower-order bits fill first.
    ChiFormatRawPlain16LSB10bit = ChiFormatRawPlain16,  ///< By default 10-bit, ChiFormatRawPlain16LSB10bit
    ChiFormatRawMeta8BIT        = 11,                   ///< Generic 8-bit raw meta data for internal camera usage.
    ChiFormatUBWCTP10           = 12,                   ///< UBWC TP10 format (as per UBWC2.0 design specification)
    ChiFormatUBWCNV12           = 13,                   ///< UBWC NV12 format (as per UBWC2.0 design specification)
    ChiFormatUBWCNV124R         = 14,                   ///< UBWC NV12-4R format (as per UBWC2.0 design specification)
    ChiFormatYUV420NV12TP10     = 15,                   ///< YUV 420 format, 10 bits per comp, tight packed.
    ChiFormatYUV420NV21TP10     = 16,                   ///< YUV 420 format, 10 bits per comp, tight packed.
    ChiFormatYUV422NV16TP10     = 17,                   ///< YUV 422 format, 10 bits per comp, tight packed.
    ChiFormatPD10               = 18,                   ///< PD10 format
    ChiFormatRawMIPI8           = 19,                   ///< 8BPP: one byte per pixel, starting at LSB;
                                                        ///  output width unrestricted.
    ChiFormatP010               = 22,                   ///< P010 format.
    ChiFormatRawPlain64         = 23,                   ///< Raw Plain 64
    ChiFormatUBWCP010           = 24,                   ///< UBWC P010 format.
    ChiFormatRawDepth           = 25,                   ///< 16 bit depth
    ChiFormatRawMIPI12          = 26,                   ///< 12BPP: 2 pixels in every 3 bytes;
                                                        ///  output width must be a multiple of 2 pixels.
    ChiFormatRawPlain16LSB12bit = 27,                   ///< Plain16 RAW format. 12 bits per pixel.
    ChiFormatRawMIPI14          = 28,                   ///< 14BPP: 4 pixels in every 7 bytes;
                                                        ///  output width must be a multiple of 4 pixels.
    ChiFormatRawPlain16LSB14bit = 29,                   ///< Plain16 RAW format. 14 bits per pixel.
} CHIBUFFERFORMAT;
基于log解析代碼邏輯
源碼實現
/// @brief  Check whether a stream configuration matches a usecase descriptor.
///
/// Every stream must find a (distinct) target with a matching format,
/// direction and dimension range; targets may be pruned via pPruneSettings.
/// A video stream may only pair with the "TARGET_BUFFER_VIDEO" target (and
/// vice versa) when both sides are present.
///
/// @param  pStreamConfig   Framework stream configuration
/// @param  pUsecase        Usecase descriptor to test against
/// @param  pPruneSettings  Prune settings used to exclude some targets
/// @return TRUE when every stream was matched to a target, FALSE otherwise
BOOL UsecaseSelector::IsMatchingUsecase(
    const camera3_stream_configuration_t* pStreamConfig,
    const ChiUsecase*                     pUsecase,
    const PruneSettings*                  pPruneSettings)
{
    CHX_ASSERT(NULL != pStreamConfig);
    CHX_ASSERT(NULL != pUsecase);

    UINT numStreams       = pStreamConfig->num_streams;
    BOOL isMatching       = FALSE;
    UINT streamConfigMode = pUsecase->streamConfigMode;

    BOOL bTargetVideoCheck = FALSE; // both a video stream and a video target exist
    BOOL bHasVideoTarget   = FALSE;
    BOOL bHasVideoStream   = FALSE;
    UINT videoStreamIdx    = 0;

    // One bit per target / per stream still awaiting a match.
    UINT compareTargetIndexMask = ((1 << pUsecase->numTargets) - 1);
    UINT compareStreamIndexMask = ((1 << numStreams) - 1);

    // Locate a video stream in the configuration, if any.
    for (UINT streamIdx = 0; streamIdx < numStreams; streamIdx++)
    {
        if(IsVideoStream(pStreamConfig->streams[streamIdx]))
        {
            bHasVideoStream = TRUE;
            videoStreamIdx  = streamIdx;
            break;
        }
    }

    // Does the usecase declare a dedicated video target?
    for (UINT targetIdx = 0; targetIdx < pUsecase->numTargets; targetIdx++)
    {
        ChiTarget* pTargetInfo = pUsecase->ppChiTargets[targetIdx];
        if (!CdkUtils::StrCmp(pTargetInfo->pTargetName, "TARGET_BUFFER_VIDEO"))
        {
            bHasVideoTarget = TRUE;
            break;
        }
    }

    // The video pairing rule only applies when both sides are present.
    bTargetVideoCheck = bHasVideoStream && bHasVideoTarget;

    // The operation mode must match before anything else is compared.
    if (streamConfigMode == static_cast<UINT>(pStreamConfig->operation_mode))
    {
        for (UINT targetInfoIdx = 0; targetInfoIdx < pUsecase->numTargets; targetInfoIdx++)
        {
            ChiTarget* pTargetInfo = pUsecase->ppChiTargets[targetInfoIdx];

            // Pruned targets are dropped from the mask and skipped entirely.
            if ((NULL != pUsecase->pTargetPruneSettings) &&
                (TRUE == ShouldPrune(pPruneSettings, &pUsecase->pTargetPruneSettings[targetInfoIdx])))
            {
                CHX_LOG_INFO("Ignoring Target Info because of prune settings: "
                             "format[0]: %u targetType = %d streamWidth = %d streamHeight = %d",
                             pTargetInfo->pBufferFormats[0],
                             pTargetInfo->direction,
                             pTargetInfo->dimension.maxWidth,
                             pTargetInfo->dimension.maxHeight);
                compareTargetIndexMask = ChxUtils::BitReset(compareTargetIndexMask, targetInfoIdx);
                continue;
            }

            isMatching = FALSE;

            BOOL bIsVideoTarget = !CdkUtils::StrCmp(pTargetInfo->pTargetName, "TARGET_BUFFER_VIDEO");

            // Try to pair this target with one of the still-unmatched streams.
            for (UINT streamId = 0; streamId < numStreams; streamId++)
            {
                if (FALSE == ChxUtils::IsBitSet(compareStreamIndexMask, streamId))
                {
                    continue; // stream already consumed by an earlier target
                }

                ChiStream* pStream = reinterpret_cast<ChiStream*>(pStreamConfig->streams[streamId]);
                CHX_ASSERT(pStream != NULL);

                if (NULL != pStream)
                {
                    INT    streamFormat = pStream->format;
                    UINT   streamType   = pStream->streamType;
                    UINT32 streamWidth  = pStream->width;
                    UINT32 streamHeight = pStream->height;

                    CHX_LOG("streamType = %d streamFormat = %d streamWidth = %d streamHeight = %d",
                            streamType, streamFormat, streamWidth, streamHeight);

                    // (1) format must be one the target accepts
                    isMatching = IsMatchingFormat(reinterpret_cast<ChiStream*>(pStream),
                                                  pTargetInfo->numFormats,
                                                  pTargetInfo->pBufferFormats);

                    // (2) stream direction must equal the target direction
                    if (TRUE == isMatching)
                    {
                        isMatching = ((streamType == static_cast<UINT>(pTargetInfo->direction)) ? TRUE : FALSE);
                    }

                    // (3) resolution must fall inside the target's dimension range
                    if (TRUE == isMatching)
                    {
                        BufferDimension* pRange = &pTargetInfo->dimension;
                        if ((streamWidth  >= pRange->minWidth)  && (streamWidth  <= pRange->maxWidth) &&
                            (streamHeight >= pRange->minHeight) && (streamHeight <= pRange->maxHeight))
                        {
                            isMatching = TRUE;
                        }
                        else
                        {
                            isMatching = FALSE;
                        }
                    }

                    // (4) a video stream may only pair with the video target
                    if (bTargetVideoCheck)
                    {
                        BOOL bIsVideoStream = (videoStreamIdx == streamId);
                        if(bIsVideoTarget ^ bIsVideoStream) // exactly one of the two is video
                        {
                            isMatching = FALSE;
                        }
                    }

                    if (TRUE == isMatching)
                    {
                        // Bind the stream to this target and retire both masks.
                        pTargetInfo->pChiStream = pStream;
                        compareTargetIndexMask  = ChxUtils::BitReset(compareTargetIndexMask, targetInfoIdx);
                        compareStreamIndexMask  = ChxUtils::BitReset(compareStreamIndexMask, streamId);
                        break;
                    }
                }
            }

            // A target that matched nothing means the whole usecase fails.
            if (FALSE == isMatching)
            {
                break;
            }
        }
    }

    // Final check: every stream must have been consumed by some target.
    if (TRUE == isMatching)
    {
        isMatching = (0 == compareStreamIndexMask) ? TRUE : FALSE;
    }

    CHX_LOG_VERBOSE("Target Mask: %x Stream Mask: %x - %s",
                    compareTargetIndexMask,
                    compareStreamIndexMask,
                    pUsecase->pUsecaseName);

    return isMatching;
}