CamX Usecase ID and Pipeline Matching: Source Code Walkthrough

Published on 2025-08-02

Component relationships


Overall flow:

  camxhal3.cpp:704 open()
  	camxhal3.cpp:1423 configure_streams()
  		chxextensionmodule.cpp:2810 InitializeOverrideSession
  			chxusecaseutils.cpp:850 GetMatchingUsecase()
  				chxadvancedcamerausecase.cpp:4729 Initialize()
  					chxadvancedcamerausecase.cpp:5757 SelectFeatures()
  					

Usecase ID matching logic

The GetMatchingUsecase function returns the best-suited usecase ID (UsecaseId) for the given camera information (pCamInfo) and stream configuration (pStreamConfig).
– UsecaseSelector::GetMatchingUsecase

Code logic summary

  • Highest-priority usecases:
    QuadCFA usecase (specific sensor and stream configuration)
    Super slow motion (SuperSlowMotionFRC)
  • Multi-camera usecases:
    MultiCameraVR if VR mode is enabled
    MultiCamera otherwise
  • Single-camera usecases:
    Selected by stream count (2/3/4)
    Several feature flags are taken into account: ZSL, GPU processing, MFNR, EIS, etc.
  • Special usecase:
    Finally checks whether this is the torch widget usecase
  • Default usecase:
    Default is used when nothing else matches

Key decision criteria

Number of streams (num_streams)
Camera type and number of physical cameras
Enable state of the various feature modules (queried through ExtensionModule)
Specific stream configuration checks (via the IsXXXStreamConfig functions)

UsecaseId UsecaseSelector::GetMatchingUsecase(
    const LogicalCameraInfo*        pCamInfo,          // Logical camera info
    camera3_stream_configuration_t* pStreamConfig)     // Stream configuration
{
    UsecaseId usecaseId = UsecaseId::Default;          // Default usecase
    UINT32 VRDCEnable = ExtensionModule::GetInstance()->GetDCVRMode(); // Query the dual-camera VR mode setting

    // Check for a QuadCFA sensor with a matching stream configuration
    if ((pStreamConfig->num_streams == 2) && IsQuadCFASensor(pCamInfo, NULL) &&
        (LogicalCameraType_Default == pCamInfo->logicalCameraType))
    {
        // If the snapshot size is smaller than the sensor binning size, fall back to the default ZSL usecase;
        // the QuadCFA usecase is chosen only when the snapshot size is larger than the sensor binning size
        if (TRUE == QuadCFAMatchingUsecase(pCamInfo, pStreamConfig))
        {
            usecaseId = UsecaseId::QuadCFA;
            CHX_LOG_CONFIG("Quad CFA usecase selected");
            return usecaseId;
        }
    }

    // Check for super slow motion mode
    if (pStreamConfig->operation_mode == StreamConfigModeSuperSlowMotionFRC)
    {
        usecaseId = UsecaseId::SuperSlowMotionFRC;
        CHX_LOG_CONFIG("SuperSlowMotionFRC usecase selected");
        return usecaseId;
    }

    // Reset the usecase flags
    VideoEISV2Usecase   = 0;
    VideoEISV3Usecase   = 0;
    GPURotationUsecase  = FALSE;
    GPUDownscaleUsecase = FALSE;

    // Multi-camera VR usecase check
    if ((NULL != pCamInfo) && (pCamInfo->numPhysicalCameras > 1) && VRDCEnable)
    {
        CHX_LOG_CONFIG("MultiCameraVR usecase selected");
        usecaseId = UsecaseId::MultiCameraVR;
    }
    // Multi-camera usecase check
    else if ((NULL != pCamInfo) && (pCamInfo->numPhysicalCameras > 1) && (pStreamConfig->num_streams > 1))
    {
        CHX_LOG_CONFIG("MultiCamera usecase selected");
        usecaseId = UsecaseId::MultiCamera;
    }
    else
    {
        SnapshotStreamConfig snapshotStreamConfig;
        CHISTREAM** ppChiStreams = reinterpret_cast<CHISTREAM**>(pStreamConfig->streams);
        
        // Select the usecase based on the number of streams
        switch (pStreamConfig->num_streams)
        {
            case 2:  // Two streams
                if (TRUE == IsRawJPEGStreamConfig(pStreamConfig))
                {
                    CHX_LOG_CONFIG("Raw + JPEG usecase selected");
                    usecaseId = UsecaseId::RawJPEG;
                    break;
                }

                // Check whether ZSL is enabled
                if (FALSE == m_pExtModule->DisableZSL())
                {
                    if (TRUE == IsPreviewZSLStreamConfig(pStreamConfig))
                    {
                        usecaseId = UsecaseId::PreviewZSL;
                        CHX_LOG_CONFIG("ZSL usecase selected");
                    }
                }

                // Check whether GPU rotation is enabled
                if(TRUE == m_pExtModule->UseGPURotationUsecase())
                {
                    CHX_LOG_CONFIG("GPU Rotation usecase flag set");
                    GPURotationUsecase = TRUE;
                }

                // Check whether GPU downscaling is enabled
                if (TRUE == m_pExtModule->UseGPUDownscaleUsecase())
                {
                    CHX_LOG_CONFIG("GPU Downscale usecase flag set");
                    GPUDownscaleUsecase = TRUE;
                }

                // Check whether MFNR (multi-frame noise reduction) is enabled
                if (TRUE == m_pExtModule->EnableMFNRUsecase())
                {
                    if (TRUE == MFNRMatchingUsecase(pStreamConfig))
                    {
                        usecaseId = UsecaseId::MFNR;
                        CHX_LOG_CONFIG("MFNR usecase selected");
                    }
                }

                // Check whether HFR (high frame rate) without 3A is enabled
                if (TRUE == m_pExtModule->EnableHFRNo3AUsecas())
                {
                    CHX_LOG_CONFIG("HFR without 3A usecase flag set");
                    HFRNo3AUsecase = TRUE;
                }

                break;

            case 3:  // Three streams
                // Set the EIS (electronic image stabilization) flags
                VideoEISV2Usecase = m_pExtModule->EnableEISV2Usecase();
                VideoEISV3Usecase = m_pExtModule->EnableEISV3Usecase();
                
                // Check for ZSL preview
                if (FALSE == m_pExtModule->DisableZSL() && (TRUE == IsPreviewZSLStreamConfig(pStreamConfig)))
                {
                    usecaseId = UsecaseId::PreviewZSL;
                    CHX_LOG_CONFIG("ZSL usecase selected");
                }
                // Check for a Raw + JPEG configuration
                else if(TRUE == IsRawJPEGStreamConfig(pStreamConfig) && FALSE == m_pExtModule->DisableZSL())
                {
                    CHX_LOG_CONFIG("Raw + JPEG usecase selected");
                    usecaseId = UsecaseId::RawJPEG;
                }
                // Check for a video live-shot configuration
                else if((FALSE == IsVideoEISV2Enabled(pStreamConfig)) && (FALSE == IsVideoEISV3Enabled(pStreamConfig)) &&
                    (TRUE == IsVideoLiveShotConfig(pStreamConfig)) && (FALSE == m_pExtModule->DisableZSL()))
                {
                    CHX_LOG_CONFIG("Video With Liveshot, ZSL usecase selected");
                    usecaseId = UsecaseId::VideoLiveShot;
                }

                // Handle BPS realtime-engine cameras with EIS enabled
                if ((NULL != pCamInfo) && (RealtimeEngineType_BPS == pCamInfo->ppDeviceInfo[0]->pDeviceConfig->realtimeEngine))
                {
                    if((TRUE == IsVideoEISV2Enabled(pStreamConfig)) || (TRUE == IsVideoEISV3Enabled(pStreamConfig)))
                    {
                        CHX_LOG_CONFIG("BPS Camera EIS V2 = %d, EIS V3 = %d",
                                       IsVideoEISV2Enabled(pStreamConfig),
                                       IsVideoEISV3Enabled(pStreamConfig));
                        // For a BPS camera with at least one EIS version enabled,
                        // set a (pseudo) usecase that routes selection to the feature2 selector
                        usecaseId = UsecaseId::PreviewZSL;
                    }
                }

                break;

            case 4:  // Four streams
                GetSnapshotStreamConfiguration(pStreamConfig->num_streams, ppChiStreams, snapshotStreamConfig);
                // Check for an HEIC snapshot together with a Raw stream
                if ((SnapshotStreamType::HEIC == snapshotStreamConfig.type) && (NULL != snapshotStreamConfig.pRawStream))
                {
                    CHX_LOG_CONFIG("Raw + HEIC usecase selected");
                    usecaseId = UsecaseId::RawJPEG;
                }
                break;

            default:  // Everything else
                CHX_LOG_CONFIG("Default usecase selected");
                break;
        }
    }

    // Check for the torch widget usecase
    if (TRUE == ExtensionModule::GetInstance()->IsTorchWidgetUsecase())
    {
        CHX_LOG_CONFIG("Torch widget usecase selected");
        usecaseId = UsecaseId::Torch;
    }

    CHX_LOG_INFO("usecase ID:%d",usecaseId);
    return usecaseId;
}
/// @brief Usecase identifying enums
enum class UsecaseId
{
    NoMatch             = 0,
    Default             = 1,
    Preview             = 2,
    PreviewZSL          = 3,
    MFNR                = 4,
    MFSR                = 5,
    MultiCamera         = 6,
    QuadCFA             = 7,
    RawJPEG             = 8,
    MultiCameraVR       = 9,
    Torch               = 10,
    YUVInBlobOut        = 11,
    VideoLiveShot       = 12,
    SuperSlowMotionFRC  = 13,
    MaxUsecases         = 14,
};
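
GetMatchingUsecase logs the selection result only as a number (CHX_LOG_INFO("usecase ID:%d", usecaseId)). A small helper along the following lines, which is purely illustrative and not part of the CHI source, can make such traces easier to read; it assumes the UsecaseId enum above is in scope:

// Hypothetical helper: map a UsecaseId value to a printable name for readable logs
static const char* UsecaseIdToString(UsecaseId id)
{
    switch (id)
    {
        case UsecaseId::Default:            return "Default";
        case UsecaseId::PreviewZSL:         return "PreviewZSL";
        case UsecaseId::MFNR:               return "MFNR";
        case UsecaseId::MultiCamera:        return "MultiCamera";
        case UsecaseId::QuadCFA:            return "QuadCFA";
        case UsecaseId::RawJPEG:            return "RawJPEG";
        case UsecaseId::MultiCameraVR:      return "MultiCameraVR";
        case UsecaseId::Torch:              return "Torch";
        case UsecaseId::VideoLiveShot:      return "VideoLiveShot";
        case UsecaseId::SuperSlowMotionFRC: return "SuperSlowMotionFRC";
        default:                            return "Other";
    }
}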

Feature matching

Feature definition
/// @brief Advance feature types
enum AdvanceFeatureType
{
    AdvanceFeatureNone     = 0x0,                    ///< mask for none features
    AdvanceFeatureZSL      = 0x1,                    ///< mask for feature ZSL
    AdvanceFeatureMFNR     = 0x2,                    ///< mask for feature MFNR
    AdvanceFeatureHDR      = 0x4,                    ///< mask for feature HDR(AE_Bracket)
    AdvanceFeatureSWMF     = 0x8,                    ///< mask for feature SWMF
    AdvanceFeatureMFSR     = 0x10,                   ///< mask for feature MFSR
    AdvanceFeatureQCFA     = 0x20,                   ///< mask for feature QuadCFA
    AdvanceFeature2Wrapper = 0x40,                   ///< mask for feature2 wrapper
    AdvanceFeatureCountMax = AdvanceFeature2Wrapper  ///< Max of advance feature mask
};
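
The feature types are bit flags, so several features can be enabled in one mask and tested independently. A minimal sketch of the test idiom used in SelectFeatures below; it assumes the AdvanceFeatureType enum above is in scope, and the mask value is only an example:

#include <cstdint>
#include <cstdio>

int main()
{
    // Example only: pretend the override settings enabled ZSL and MFNR together
    uint32_t enabledAdvanceFeatures = AdvanceFeatureZSL | AdvanceFeatureMFNR;

    // Same pattern as SelectFeatures(): isolate one flag with a bitwise AND and compare
    if (AdvanceFeatureMFNR == (enabledAdvanceFeatures & AdvanceFeatureMFNR))
    {
        printf("MFNR path would be taken\n");
    }
    if (AdvanceFeatureSWMF == (enabledAdvanceFeatures & AdvanceFeatureSWMF))
    {
        printf("SWMF path would be taken\n");   // not printed for this example mask
    }
    return 0;
}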

CDKResult AdvancedCameraUsecase::FeatureSetup(
    camera3_stream_configuration_t* pStreamConfig)
{
    CDKResult result = CDKResultSuccess;

    if ((UsecaseId::PreviewZSL    == m_usecaseId) ||
        (UsecaseId::YUVInBlobOut  == m_usecaseId) ||
        (UsecaseId::VideoLiveShot == m_usecaseId) ||
        (UsecaseId::QuadCFA       == m_usecaseId) ||
        (UsecaseId::RawJPEG       == m_usecaseId))
    {
        SelectFeatures(pStreamConfig);
    }
    else if (UsecaseId::MultiCamera == m_usecaseId)
    {
        SelectFeatures(pStreamConfig);
    }
    return result;
}

// START of OEM to change section
VOID AdvancedCameraUsecase::SelectFeatures(camera3_stream_configuration_t* pStreamConfig)
{
    // OEM to change
    // This function decides which features to run based on the current pStreamConfig and static settings
    INT32  index                  = 0;
    UINT32 enabledAdvanceFeatures = 0;

    // Get the enabled advance feature mask from ExtensionModule
    enabledAdvanceFeatures = ExtensionModule::GetInstance()->GetAdvanceFeatureMask();
    CHX_LOG("SelectFeatures(), enabled feature mask:%x", enabledAdvanceFeatures);

    // If the current operation mode is FastShutter, force-enable the SWMF and MFNR features
    if (StreamConfigModeFastShutter == ExtensionModule::GetInstance()->GetOpMode(m_cameraId))
    {
        enabledAdvanceFeatures = AdvanceFeatureSWMF|AdvanceFeatureMFNR;
    }
    CHX_LOG("SelectFeatures(), enabled feature mask:%x", enabledAdvanceFeatures);

    // Iterate over all physical camera devices
    for (UINT32 physicalCameraIndex = 0 ; physicalCameraIndex < m_numOfPhysicalDevices ; physicalCameraIndex++)
    {
        index = 0;
        // Check whether the current usecase is one of the following types
        if ((UsecaseId::PreviewZSL      == m_usecaseId)   ||
            (UsecaseId::MultiCamera     == m_usecaseId)   ||
            (UsecaseId::QuadCFA         == m_usecaseId)   ||
            (UsecaseId::VideoLiveShot   == m_usecaseId)   ||
            (UsecaseId::RawJPEG         == m_usecaseId))
        {
            // If the MFNR (multi-frame noise reduction) feature is enabled
            if (AdvanceFeatureMFNR == (enabledAdvanceFeatures & AdvanceFeatureMFNR))
            {
                // Enable offline noise reprocessing
                m_isOfflineNoiseReprocessEnabled = ExtensionModule::GetInstance()->EnableOfflineNoiseReprocessing();
                // FD (face detection) stream buffers are needed
                m_isFDstreamBuffersNeeded = TRUE;
            }

            // If the SWMF (software multi-frame), HDR, or Feature2Wrapper feature is enabled
            if ((AdvanceFeatureSWMF         == (enabledAdvanceFeatures & AdvanceFeatureSWMF))   ||
                (AdvanceFeatureHDR          == (enabledAdvanceFeatures & AdvanceFeatureHDR))    ||
                ((AdvanceFeature2Wrapper    == (enabledAdvanceFeatures & AdvanceFeature2Wrapper))))
            {
                // Build the Feature2Wrapper creation input info
                Feature2WrapperCreateInputInfo feature2WrapperCreateInputInfo;
                feature2WrapperCreateInputInfo.pUsecaseBase             = this;
                feature2WrapperCreateInputInfo.pMetadataManager         = m_pMetadataManager;
                feature2WrapperCreateInputInfo.pFrameworkStreamConfig   =
                    reinterpret_cast<ChiStreamConfigInfo*>(pStreamConfig);

                // Clear the pHalStream pointer of every stream
                for (UINT32 i = 0; i < feature2WrapperCreateInputInfo.pFrameworkStreamConfig->numStreams; i++)
                {
                    feature2WrapperCreateInputInfo.pFrameworkStreamConfig->pChiStreams[i]->pHalStream = NULL;
                }

                // If the Feature2Wrapper has not been created yet
                if (NULL == m_pFeature2Wrapper)
                {
                    // For a multi-camera usecase
                    if (TRUE == IsMultiCameraUsecase())
                    {
                        // If the stream config does not include a fusion stream, set the input/output type to YUV_OUT
                        if (FALSE == IsFusionStreamIncluded(pStreamConfig))
                        {
                            feature2WrapperCreateInputInfo.inputOutputType =
                                static_cast<UINT32>(InputOutputType::YUV_OUT);
                        }

                        // Add the internal input streams (RDI and FD streams)
                        for (UINT8 streamIndex = 0; streamIndex < m_numOfPhysicalDevices; streamIndex++)
                        {
                            feature2WrapperCreateInputInfo.internalInputStreams.push_back(m_pRdiStream[streamIndex]);
                            feature2WrapperCreateInputInfo.internalInputStreams.push_back(m_pFdStream[streamIndex]);
                        }

                        // FD stream buffers are needed
                        m_isFDstreamBuffersNeeded = TRUE;
                    }

                    // Create the Feature2Wrapper instance
                    m_pFeature2Wrapper = Feature2Wrapper::Create(&feature2WrapperCreateInputInfo, physicalCameraIndex);
                }

                // Add the created Feature2Wrapper to the enabled feature list
                m_enabledFeatures[physicalCameraIndex][index] = m_pFeature2Wrapper;
                index++;
            }
        }

        // Record the number of features enabled for this physical camera
        m_enabledFeaturesCount[physicalCameraIndex] = index;
    }

    // If the first physical camera has at least one enabled feature
    if (m_enabledFeaturesCount[0] > 0)
    {
        // If there is no active feature yet, use the first enabled feature
        if (NULL == m_pActiveFeature)
        {
            m_pActiveFeature = m_enabledFeatures[0][0];
        }

        // Log the number of selected features and the feature type used for preview
        CHX_LOG_INFO("num features selected:%d, FeatureType for preview:%d",
            m_enabledFeaturesCount[0], m_pActiveFeature->GetFeatureType());
    }
    else
    {
        CHX_LOG_INFO("No features selected");
    }

    // Set the last snapshot feature to the current active feature
    m_pLastSnapshotFeature = m_pActiveFeature;
}

Pipeline matching logic

  • For PreviewZSL, MultiCamera, QuadCFA, and similar modes, ConfigureStream and BuildUsecase are called directly
  • Other modes are re-matched through DefaultMatchingUsecase
CDKResult AdvancedCameraUsecase::SelectUsecaseConfig(
    LogicalCameraInfo*              pCameraInfo,   ///< Camera info
    camera3_stream_configuration_t* pStreamConfig)  ///< Stream configuration
{
    if ((UsecaseId::PreviewZSL    == m_usecaseId) ||
        (UsecaseId::YUVInBlobOut  == m_usecaseId) ||
        (UsecaseId::VideoLiveShot == m_usecaseId) ||
        (UsecaseId::MultiCamera   == m_usecaseId) ||
        (UsecaseId::QuadCFA       == m_usecaseId) ||
        (UsecaseId::RawJPEG       == m_usecaseId))
    {
        ConfigureStream(pCameraInfo, pStreamConfig);
        BuildUsecase(pCameraInfo, pStreamConfig);
    }
    else
    {
        CHX_LOG("Initializing using default usecase matching");
        m_pChiUsecase = UsecaseSelector::DefaultMatchingUsecase(pStreamConfig);
    }
}
In PreviewZSL mode

In ZSL mode, pAdvancedUsecase is assigned the "UsecaseZSL" usecase descriptor.
BuildUsecase then calls CloneUsecase to clone either the ZSL tuning usecase or the advanced usecase, depending on the override setting;
in the common case the pAdvancedUsecase path is taken.

AdvancedCameraUsecase::Initialize()
{
    // Here ZSL_USECASE_NAME = "UsecaseZSL"
    pAdvancedUsecase = GetXMLUsecaseByName(ZSL_USECASE_NAME);
}
  • BuildUsecase
    • Adds pipelines according to the selected features
    • Clones and configures the usecase template according to the features
    • Maps camera IDs and session IDs for each pipeline and prepares for pipeline/session creation
    • Overrides the stream configuration for the features

AdvancedCameraUsecase::BuildUsecase(){
    // Clone either the ZSL tuning usecase or the advanced usecase, depending on the override setting
    if (static_cast<UINT>(UsecaseZSLTuningId) == ExtensionModule::GetInstance()->OverrideUseCase())
    {
        m_pClonedUsecase = UsecaseSelector::CloneUsecase(pZslTuningUsecase, totalPipelineCount, pipelineIDMap);
    }
    else
    {
        m_pClonedUsecase = UsecaseSelector::CloneUsecase(pAdvancedUsecase, totalPipelineCount, pipelineIDMap);
    }
}
In Default mode

Call flow

m_pChiUsecase = UsecaseSelector::DefaultMatchingUsecase(pStreamConfig);
--ChiUsecase* GetDefaultMatchingUsecase(camera3_stream_configuration_t* pStreamConfig)
---UsecaseSelector::DefaultMatchingUsecaseSelection(pStreamConfig)
----IsMatchingUsecase(pStreamConfig, pUsecase, &pruneSettings);
  • DefaultMatchingUsecaseSelection
    • First tries to match the EISv3 usecase
    • Then tries to match the EISv2 usecase
    • Otherwise matches by pStreamConfig->num_streams
extern "C" CAMX_VISIBILITY_PUBLIC ChiUsecase* UsecaseSelector::DefaultMatchingUsecaseSelection(
    camera3_stream_configuration_t* pStreamConfig)
{
    auto UsecaseMatches = [&pStreamConfig, &pruneSettings](const ChiUsecase* const pUsecase) -> BOOL
    {
        return IsMatchingUsecase(pStreamConfig, pUsecase, &pruneSettings);
    };
  
    if (pStreamConfig->num_streams <= ChiMaxNumTargets)
    {
···
        else if (NULL == pSelectedUsecase)
        {   // Match the EISv3 usecase
            if (TRUE == IsVideoEISV3Enabled(pStreamConfig))
            {
···
                if (TRUE == UsecaseMatches(&Usecases3Target[usecaseEIS3Id]))
                {
                    CHX_LOG("Selected EISv3 usecase");
                    pSelectedUsecase = &Usecases3Target[usecaseEIS3Id];
                }
            }
            // Match the EISv2 usecase
            if ((TRUE == IsVideoEISV2Enabled(pStreamConfig)) && (NULL == pSelectedUsecase) &&
                (TRUE == UsecaseMatches(&Usecases3Target[UsecaseVideoEIS2PreviewEIS2Id])))
            {
                CHX_LOG("Selected EISv2 usecase");
                pSelectedUsecase = &Usecases3Target[UsecaseVideoEIS2PreviewEIS2Id];
            }
            // This if block is only for kamorta usecases where Preview & Video streams are present
            if ((pStreamConfig->num_streams > 1) && (NULL == pSelectedUsecase))
            {
                // If both Preview and Video < 1080p then only Preview < Video and Preview >Video Scenario occurs
                if ((numYUVStreams == 2) && (YUV0Height <= TFEMaxHeight && YUV0Width <= TFEMaxWidth) &&
                    (YUV1Height <= TFEMaxHeight && YUV1Width <= TFEMaxWidth))
                {
                    switch (pStreamConfig->num_streams)
                    {
                        case 2:
                            if (((YUV0Height * YUV0Width) < (YUV1Height * YUV1Width)) &&
                                (TRUE == UsecaseMatches(&Usecases2Target[UsecaseVideo_PVLT1080p_PLTVId])))
                            {
                                pSelectedUsecase = &Usecases2Target[UsecaseVideo_PVLT1080p_PLTVId];
                            }
                            else if (TRUE == UsecaseMatches(&Usecases2Target[UsecaseVideo_PVLT1080p_PGTVId]))
                            {
                                pSelectedUsecase = &Usecases2Target[UsecaseVideo_PVLT1080p_PGTVId];
                            }

                            break;
                        case 3:
                            if (TRUE == bJpegStreamExists)
                            {
                                // JPEG is taking from RealTime
                                if (((YUV0Height * YUV0Width) < (YUV1Height * YUV1Width)) &&
                                    (TRUE == UsecaseMatches(
                                    &Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId])))
                                {
                                    pSelectedUsecase = &
                                        Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId];
                                }
                                else if (TRUE == UsecaseMatches(
                                    &Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId]))
                                {
                                    pSelectedUsecase = &
                                        Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId];
                                }
                            }
                            break;
                        // For HEIC
                        case 4:
                            if (TRUE == bHEICStreamExists)
                            {
                                if (((YUV0Height * YUV0Width) < (YUV1Height * YUV1Width)) &&
                                    (TRUE == UsecaseMatches(
                                    &Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId])))
                                {
                                    pSelectedUsecase = &
                                        Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PLTVId];
                                }
                                else if (TRUE == UsecaseMatches(
                                    &Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId]))
                                {
                                    pSelectedUsecase = &
                                        Usecases5Target[UsecaseJPEGEncodeLiveSnapshot_BothPVLT1080p_PGTVId];
                                }
                            }
                            break;
                        default:
                            break;
                    }
                }
            }
        }
    }
···
}

BOOL UsecaseSelector::IsMatchingUsecase
This function checks whether a given stream configuration matches a specific usecase definition.
It uses bitmasks (compareTargetIndexMask and compareStreamIndexMask) to track which targets and streams still have to be compared.
The matching conditions include:
the stream configuration mode matches
the format matches (via the IsMatchingFormat function)
the stream type/direction matches
the resolution lies within the target's supported range
video streams and video targets get special handling
Prune settings can exclude certain targets from consideration.
The usecase is considered a match only when every stream has found a matching target.
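
A minimal standalone sketch of the bitmask bookkeeping described above; the helpers below only mirror what ChxUtils::IsBitSet and ChxUtils::BitReset are used for here and are re-implemented purely for illustration:

#include <cstdint>
#include <cstdio>

// Illustrative stand-ins for the ChxUtils bit helpers used by IsMatchingUsecase
static uint32_t BitReset(uint32_t mask, uint32_t bit) { return mask & ~(1u << bit); }
static bool     IsBitSet(uint32_t mask, uint32_t bit) { return (mask & (1u << bit)) != 0; }

int main()
{
    const uint32_t numStreams = 3;
    // One bit per stream; a bit is cleared once that stream is matched to a target
    uint32_t compareStreamIndexMask = (1u << numStreams) - 1;   // 0b111

    // Suppose streams 0 and 2 found matching targets during the target loop
    compareStreamIndexMask = BitReset(compareStreamIndexMask, 0);
    compareStreamIndexMask = BitReset(compareStreamIndexMask, 2);

    printf("stream 1 still unmatched: %d\n", IsBitSet(compareStreamIndexMask, 1) ? 1 : 0); // 1
    // The usecase matches only when the mask reaches zero, i.e. every stream was matched
    printf("usecase matches: %d\n", (0 == compareStreamIndexMask) ? 1 : 0);                // 0
    return 0;
}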

// Function: determine whether the current stream configuration matches the given usecase
// Parameters:
//   pStreamConfig - stream configuration info
//   pUsecase - usecase info
//   pPruneSettings - prune settings (used to exclude certain targets)
// Return value: BOOL - TRUE if matching, FALSE otherwise
BOOL UsecaseSelector::IsMatchingUsecase(
    const camera3_stream_configuration_t* pStreamConfig,
    const ChiUsecase*                     pUsecase,
    const PruneSettings*                  pPruneSettings)
{
    // Assert that the input parameters are not NULL
    CHX_ASSERT(NULL != pStreamConfig);
    CHX_ASSERT(NULL != pUsecase);

    // Initialize local variables
    UINT       numStreams        = pStreamConfig->num_streams;  // Number of streams
    BOOL       isMatching        = FALSE;  // Match result flag
    UINT       streamConfigMode  = pUsecase->streamConfigMode;  // Stream configuration mode of the usecase
    BOOL       bTargetVideoCheck = FALSE;  // Whether video targets need a dedicated check
    BOOL       bHasVideoTarget   = FALSE;  // Whether the usecase has a video target
    BOOL       bHasVideoStream   = FALSE;  // Whether the stream config contains a video stream
    UINT       videoStreamIdx    = 0;      // Index of the video stream

    // Initialize the comparison masks:
    // compareTargetIndexMask - tracks the targets still to be compared (initially every target)
    // compareStreamIndexMask - tracks the streams still to be compared (initially every stream)
    UINT compareTargetIndexMask = ((1 << pUsecase->numTargets) - 1);
    UINT compareStreamIndexMask = ((1 << numStreams) - 1);

    // Check whether the stream configuration contains a video stream
    for (UINT streamIdx = 0; streamIdx < numStreams; streamIdx++)
    {
        if(IsVideoStream(pStreamConfig->streams[streamIdx]))
        {
            bHasVideoStream   = TRUE;
            videoStreamIdx    = streamIdx;
            break;
        }
    }

    // Check whether the usecase contains a video target
    for (UINT targetIdx = 0; targetIdx < pUsecase->numTargets; targetIdx++)
    {
        ChiTarget* pTargetInfo = pUsecase->ppChiTargets[targetIdx];
        if (!CdkUtils::StrCmp(pTargetInfo->pTargetName, "TARGET_BUFFER_VIDEO"))
        {
            bHasVideoTarget = TRUE;
            break;
        }
    }

    // Set the video check flag (a dedicated check is needed only when both a video stream and a video target exist)
    bTargetVideoCheck = bHasVideoStream && bHasVideoTarget;

    // Check whether the stream configuration mode matches
    if (streamConfigMode == static_cast<UINT>(pStreamConfig->operation_mode))
    {
        // Iterate over all targets in the usecase
        for (UINT targetInfoIdx = 0; targetInfoIdx < pUsecase->numTargets; targetInfoIdx++)
        {
            ChiTarget* pTargetInfo = pUsecase->ppChiTargets[targetInfoIdx];
            
            // Check whether the current target should be pruned (per the prune settings)
            if ((NULL != pUsecase->pTargetPruneSettings) &&
                (TRUE == ShouldPrune(pPruneSettings, &pUsecase->pTargetPruneSettings[targetInfoIdx])))
            {
                CHX_LOG_INFO("Ignoring Target Info because of prune settings: "
                             "format[0]: %u targetType = %d streamWidth = %d streamHeight = %d",
                             pTargetInfo->pBufferFormats[0],
                             pTargetInfo->direction,
                             pTargetInfo->dimension.maxWidth,
                             pTargetInfo->dimension.maxHeight);
                // Remove the current target from the comparison mask
                compareTargetIndexMask = ChxUtils::BitReset(compareTargetIndexMask, targetInfoIdx);
                continue; // Skip the pruned target
            }
            
            isMatching = FALSE;  // Reset the match flag

            // Check whether the current target is the video target
            BOOL bIsVideoTarget = !CdkUtils::StrCmp(pTargetInfo->pTargetName, "TARGET_BUFFER_VIDEO");

            // Iterate over all streams
            for (UINT streamId = 0; streamId < numStreams; streamId++)
            {
                // Skip this stream if it has already been matched
                if (FALSE == ChxUtils::IsBitSet(compareStreamIndexMask, streamId))
                {
                    continue;
                }
                
                ChiStream* pStream = reinterpret_cast<ChiStream*>(pStreamConfig->streams[streamId]);
                CHX_ASSERT(pStream != NULL);

                if (NULL != pStream)
                {
                    // Read the stream attributes
                    INT    streamFormat = pStream->format;
                    UINT   streamType   = pStream->streamType;
                    UINT32 streamWidth  = pStream->width;
                    UINT32 streamHeight = pStream->height;

                    CHX_LOG("streamType = %d streamFormat = %d streamWidth = %d streamHeight = %d",
                            streamType, streamFormat, streamWidth, streamHeight);

                    // Check whether the format matches
                    isMatching = IsMatchingFormat(reinterpret_cast<ChiStream*>(pStream),
                                                  pTargetInfo->numFormats,
                                                  pTargetInfo->pBufferFormats);

                    // Check whether the stream type (direction) matches
                    if (TRUE == isMatching)
                    {
                        isMatching = ((streamType == static_cast<UINT>(pTargetInfo->direction)) ? TRUE : FALSE);
                    }

                    // Check whether the resolution is within the target's range
                    if (TRUE == isMatching)
                    {
                        BufferDimension* pRange = &pTargetInfo->dimension;

                        if ((streamWidth  >= pRange->minWidth)  && (streamWidth  <= pRange->maxWidth) &&
                            (streamHeight >= pRange->minHeight) && (streamHeight <= pRange->maxHeight))
                        {
                            isMatching = TRUE;
                        }
                        else
                        {
                            isMatching = FALSE;
                        }
                    }

                    // Special handling for video streams and targets
                    if (bTargetVideoCheck)
                    {
                        BOOL  bIsVideoStream = (videoStreamIdx == streamId);
                        if(bIsVideoTarget ^ bIsVideoStream)  // XOR: the target and stream must both be video or both be non-video
                        {
                            isMatching = FALSE;
                        }
                    }

                    // On a successful match, update the masks and break out of the stream loop
                    if (TRUE == isMatching)
                    {
                        pTargetInfo->pChiStream = pStream;  // Associate the stream with the target
                        // Remove the matched target and stream from the comparison masks
                        compareTargetIndexMask = ChxUtils::BitReset(compareTargetIndexMask, targetInfoIdx);
                        compareStreamIndexMask = ChxUtils::BitReset(compareStreamIndexMask, streamId);
                        break; // Move on to the next target
                    }
                }
            }

            // If no stream matched the current target, the whole usecase does not match
            if (FALSE == isMatching)
            {
                break;
            }
        }
    }

    // Final check: every stream must have found a matching target
    if (TRUE == isMatching)
    {
        isMatching = (0 == compareStreamIndexMask) ? TRUE : FALSE;
    }
    
    // Log debug information
    CHX_LOG_VERBOSE("Target Mask: %x Stream Mask: %x - %s",
                    compareTargetIndexMask,
                    compareStreamIndexMask,
                    pUsecase->pUsecaseName);

    return isMatching;
}
