Camera Face Recognition Series, Part 17: FFD (Facial Feature Detection) Algorithm — the 296-Point Face Detection Flow of libhci_face_camera_api.so in Detail

Published: 2025-07-19

[Follow me for more upcoming posts in this series. Thank you!]

Following on from the previous post in this series, this one covers Part 17: the FFD facial feature detection algorithm and a detailed walkthrough of the 296-point face detection flow in libhci_face_camera_api.so.

Contents

1. Background

2. FFD algorithm: the face detection flow of libhci_face_camera_api.so in detail

    2.1 FFD initialization

    2.2 FFD detection: process

    2.3 setFdAlgoInfo

    2.4 FFD unloading


1. Background

There are many third-party FFD (facial feature detection) algorithms. Here we take libhci_face_camera_api.so as the example and walk through its face detection flow. The flow is similar to that of libcvface_api.so; the differences are:

  1. libcvface_api.so provides 137-point FFD.
  2. libhci_face_camera_api.so provides 296-point FFD, which is more fine-grained (the landmark indices this integration actually uses are sketched below).
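
For orientation, these are the indices into the 296-point landmark array that this integration reads (taken from setFdAlgoInfo in section 2.3 below). The semantic names are labels inferred from the destination fields they feed, not names from the SDK header:

namespace hci_ffd_296 {
// Indices into points_more[296], as used by setFdAlgoInfo below.
// The names are inferred from the MtkCameraFaceMetadata fields and are assumptions.
constexpr int kLeftEyeCenter   = 132;  // -> faces[i].left_eye
constexpr int kRightEyeCenter  = 160;  // -> faces[i].right_eye
constexpr int kMouthCenter     = 229;  // -> faces[i].mouth
constexpr int kLeftEyeCorner0  = 108;  // -> leyex0 / leyey0
constexpr int kLeftEyeCorner1  = 120;  // -> leyex1 / leyey1
constexpr int kLeftEyeUpper    = 133;  // -> leyeux / leyeuy
constexpr int kLeftEyeLower    = 134;  // -> leyedx / leyedy
constexpr int kRightEyeCorner0 = 136;  // -> reyex0 / reyey0
constexpr int kRightEyeCorner1 = 148;  // -> reyex1 / reyey1
constexpr int kRightEyeUpper   = 161;  // -> reyeux / reyeuy
constexpr int kRightEyeLower   = 162;  // -> reyedx / reyedy
constexpr int kNoseTip         = 167;  // -> nosex / nosey
constexpr int kMouthCorner0    = 191;  // -> mouthx0 / mouthy0
constexpr int kMouthCorner1    = 209;  // -> mouthx1 / mouthy1
}  // namespace hci_ffd_296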

2. FFD algorithm: the face detection flow of libhci_face_camera_api.so in detail

    2.1 FFD initialization

  1. First, vendorFDLoad() uses dlopen to load libhci_face_camera_api.so and resolves the algorithm library's API entry points.
  2. Then vendorFDInit() initializes the algorithm internals:
    1. get the SDK version
    2. set the debug log switch
    3. set the license
    4. create the tracker, passing CV_DETECT_ENABLE_ALIGN_296, the 296-point FFD flag.
void CustomHciFadApi::vendorFDLoad(senseTime_lib_struct* p_lib)
{
    if (p_lib->cv_face_lib_ptr != nullptr) {
        return;
    }

    memset(p_lib, 0x0, sizeof(senseTime_lib_struct));
    p_lib->cv_face_lib_ptr = dlopen(LIB_PATH_HCI_FD, RTLD_NOW | RTLD_NODELETE);
    if (p_lib->cv_face_lib_ptr == nullptr) {
        CAM_LOGE("Error opening libcvface_api.so lib");
        return;
    }

    CAM_LOGI("%s cv_face_lib_ptr is %p", __FUNCTION__, p_lib->cv_face_lib_ptr);
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_algorithm_info, "cv_face_algorithm_info");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_init_license_config, "cv_face_init_license_config");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_get_version, "cv_face_get_version");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_detect_get_threshold, "cv_face_detect_get_threshold");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_detect_set_threshold, "cv_face_detect_set_threshold");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_detector, "cv_face_destroy_detector");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_detect, "cv_face_detect");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_release_detector_result, "cv_face_release_detector_result");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_create_tracker, "cv_face_create_tracker");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_tracker, "cv_face_destroy_tracker");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track, "cv_face_track");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_reset_tracker, "cv_face_reset_tracker");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_release_tracker_result, "cv_face_release_tracker_result");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track_set_detect_face_cnt_limit, "cv_face_track_set_detect_face_cnt_limit");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track_set_detect_interval, "cv_face_track_set_detect_interval");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_track_set_alignment_threshold, "cv_face_track_set_alignment_threshold");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_create_attribute_detector, "cv_face_create_attribute_detector");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_attribute_detector_detect, "cv_face_attribute_detector_detect");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_attribute_detector, "cv_face_destroy_attribute_detector");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_open_log, "cv_face_open_log");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_create_matrix_attribute_detector, "cv_face_create_matrix_attribute_detector");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_matrix_attribute_detect, "cv_face_matrix_attribute_detect");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_matrix_attribute_detector, "cv_face_destroy_matrix_attribute_detector");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_attribute_classify_detect, "cv_face_attribute_classify_detect");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_attribute_classify_reset, "cv_face_attribute_classify_reset");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_create_attribute_handle, "cv_face_create_attribute_handle");
    IF_CTRL_GET_ALGO_INF(p_lib->cv_face_lib_ptr, p_lib->cv_face_destroy_attribute_handle, "cv_face_destroy_attribute_handle");
}
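
The IF_CTRL_GET_ALGO_INF macro itself is not shown above. A minimal sketch of what such a symbol-resolution helper usually looks like, assuming it is a thin dlsym wrapper (the macro body below is an assumption, not the vendor's actual definition):

#include <dlfcn.h>

// Hypothetical definition: resolve one exported symbol from the dlopen'ed handle into the
// matching function-pointer field, logging a failure. The real macro may differ.
#define IF_CTRL_GET_ALGO_INF(lib_handle, fn_ptr, sym_name)                              \
    do {                                                                                 \
        fn_ptr = reinterpret_cast<decltype(fn_ptr)>(dlsym((lib_handle), (sym_name)));    \
        if ((fn_ptr) == nullptr) {                                                       \
            CAM_LOGE("dlsym %s failed: %s", (sym_name), dlerror());                      \
        }                                                                                \
    } while (0)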


int32_t CustomHciFadApi::cusHalFDInit(int camId, thirdFaceInitData* faceData)
{
    pSenseTimelib = reinterpret_cast<senseTime_lib_struct*>(malloc(sizeof(senseTime_lib_struct)));
    pSenseTimelib->cv_face_lib_ptr = nullptr;
    pSenseTimelib->g_SenseTimeEngine.attr_Buff = nullptr;
    pSenseTimelib->g_engine_counter = 0;

    parent = std::make_shared<FdAlgoApi>();
    parent->mDisplaySize = faceData->previewImgSize;
    parent->mIsEisOn = faceData->isEISOn;

    vendorFDLoad(pSenseTimelib);
    vendorFDInit(pSenseTimelib, camId);
    return 0;
}

int CustomHciFadApi::vendorFDInit(senseTime_lib_struct* p_lib, int camId)
{
    int ret = -1;
    NSCam::IHalSensorList* const pIHalSensorList = NSCam::IHalSensorList::get();
    int isfacing = pIHalSensorList->queryFacingDirection(camId);
    if (isfacing) {
        m_ImageSensorFacing = ImageSensorFacingFront;
    } else {
        m_ImageSensorFacing = ImageSensorFacingBack;
    }

    if (p_lib->cv_face_lib_ptr == nullptr) {
        CAM_LOGE("Get NULL face lib pointer");
        return ret;
    }

    int init_flag = p_lib->cv_face_init_license_config((const char*)pLicense_Data);
    if (init_flag != CV_OK) {
        CAM_LOGE("cv_face_init_license_config error");
    }

    const char* version = p_lib->cv_face_get_version();
    int32_t value = property_get_int32("third.camera.pfdLog.enable", 0);
    p_lib->cv_face_open_log(value);
    CAM_LOGI("fad version is %s, setAlgo log is %d, isfacing is %d", version, value, isfacing);

    unsigned int flag = 0;
    unsigned int attr_flag = 0;

    if (p_lib->g_cv_tracker_handle_front == nullptr) {
        int cvDetectEnableAlign = CV_DETECT_ENABLE_ALIGN_296;
        flag = cvDetectEnableAlign | CV_FACE_RESIZE_IMG_320W;
        ret = p_lib->cv_face_create_tracker(&(p_lib->g_cv_tracker_handle_front), NULL, flag);
        if ((ret != CV_OK) || (p_lib->g_cv_tracker_handle_front == nullptr)) {
            CAM_LOGE("%s: FRONT: cv_face_create_tracker error: %d", __FUNCTION__, ret);
            return ret;
        }
    }
    p_lib->g_SenseTimeEngine.cv_tracker_handle = p_lib->g_cv_tracker_handle_front;

    if (p_lib->g_cv_attr_handle == nullptr) {
        int cvDetectAttrAlign = CV_DETECT_ENABLE_ALIGN_296;
        attr_flag = cvDetectAttrAlign | CV_FACE_RESIZE_IMG_640W;
        ret = p_lib->cv_face_create_attribute_handle(&(p_lib->g_cv_attr_handle), ATTR_MODE, attr_flag);
        if ((ret != CV_OK) || (p_lib->g_cv_attr_handle == nullptr)) {
            CAM_LOGE("%s: cv_face_create_attribute_handle error: %d", __FUNCTION__, ret);
            return ret;
        }
    }

    p_lib->g_SenseTimeEngine.cv_attr_handle = p_lib->g_cv_attr_handle;
    memset(&p_lib->attribute_result, 0, sizeof(cv_face_attribute_classify_t) * HCI_FD_MAX_FACE_NUM);
    p_lib->g_engine_counter++;
    return ret;
}
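
For context, a hedged sketch of how the HAL FD node might drive this init entry point. The call site and the concrete values are assumptions; the field names (previewImgSize, isEISOn) are the ones cusHalFDInit reads above:

// Hypothetical call site (not from the source): initialize the 296-point FFD for camera 0.
thirdFaceInitData initData = {};
initData.previewImgSize.w = 1920;   // forwarded to parent->mDisplaySize
initData.previewImgSize.h = 1080;
initData.isEISOn = false;           // forwarded to parent->mIsEisOn

CustomHciFadApi fdApi;
if (fdApi.cusHalFDInit(/*camId=*/0, &initData) != 0) {
    CAM_LOGE("cusHalFDInit failed");
}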

    2.2 FFD detection: process

The process entry point performs face detection mainly through cv_fad_process:

  1. cv_face_track: fast, real-time face tracking on consecutive video frames.
  2. cv_face_track_set_detect_interval: sets the frame interval between full face detections while tracking; here it is switched between the FREQ_* values depending on whether a face is present and whether the front or back camera is in use.
  3. setFdAlgoInfo: fills in the per-face information (see 2.3).
MINT32 CustomCvFadApi::process(struct FD_Frame_Parameters& param, MtkCameraFaceMetadata* p3AFaceResult)
{
    CustomFDImage* imgBuffer = reinterpret_cast<CustomFDImage*>(param.imgBuffer);
    cv_image_t frame;
    frame.width = imgBuffer->w;
    frame.height = imgBuffer->h;
    frame.stride = imgBuffer->w;
    cv_fad_process(&param, frame, p3AFaceResult);
    return 0;
}

void CustomCvFadApi::cv_fad_process(struct FD_Frame_Parameters* in, cv_image_t frame, MtkCameraFaceMetadata* p3AFaceResult)
{
    cv_face_orientation mFaceOrientation = CV_FACE_LEFT;
    cv_face_t* p_faces_array = nullptr;
    int faceCount = 0;
    int rotation = in->Rotation_Info;

    memset(&in->faceData, 0, sizeof(faceDataAppJoint));
    mFaceOrientation = setFaceRation(mFaceOrientation, rotation);

    int cv_result = pSenseTimelib->cv_face_track(
                pSenseTimelib->g_SenseTimeEngine.cv_tracker_handle,
                in->pImageBufferY,
                CV_PIX_FMT_NV12,
                frame.width, frame.height, frame.stride, mFaceOrientation,
                &p_faces_array, &faceCount);
    faceCount = std::min(faceCount, CV_FAD_MAX_FACE_NUM);
    p3AFaceResult->number_of_faces = faceCount;

    in->faceData.faceInfoOri.face_num = std::min(faceCount, MAX_ATTRI_FACE_NUM);
    in->faceData.faceInfoOri.versionId = Version_855;
    in->faceData.faceInfoOri.master_index = -1;
    in->faceData.faceInfoOri.points_count = MAX_FFD_NUM;
    in->faceData.faceInfoOri.fdDimensionW = parent->mDisplaySize.w;
    in->faceData.faceInfoOri.fdDimensionH = parent->mDisplaySize.h;
    in->faceData.fdProcessInfo.sensorSize.w = in->thirdCusSensorSize.w;
    in->faceData.fdProcessInfo.sensorSize.h = in->thirdCusSensorSize.h;
    in->faceData.fdProcessInfo.previewSize.w = parent->mDisplaySize.w;
    in->faceData.fdProcessInfo.previewSize.h = parent->mDisplaySize.h;
    in->faceData.fdProcessInfo.ImgSize.w = frame.width;
    in->faceData.fdProcessInfo.ImgSize.h = frame.height;
    in->faceData.fdProcessInfo.isEisOn = parent->mIsEisOn;

    int interval = 0;
    if (m_ImageSensorFacing == ImageSensorFacingBack) {
        interval = (faceCount > 0) ? FREQ_FACE_BACK : FREQ_NOFACE_BACK;
    } else {
        interval = (faceCount > 0) ? FREQ_FACE_FRONT : FREQ_NOFACE_FRONT;
    }

    int val = -1;
    if (g_currFreq != interval) {
        cv_result = pSenseTimelib->cv_face_track_set_detect_interval(
                    pSenseTimelib->g_SenseTimeEngine.cv_tracker_handle,
                    interval,
                    &val);
        if (cv_result != CV_OK) {
            CAM_LOGE("cv_face_track_set_detect_interval error");
        } else {
            g_currFreq = interval;
        }
    }
    if (faceCount > 0) {
        for (int i = 0; i < faceCount; i++) {
            setFdAlgoInfo(in, frame, p3AFaceResult, p_faces_array, i);
        }
    }
    fd_algo_face_attribute(in, frame, p3AFaceResult, faceCount, p_faces_array);
}
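
setFaceRation(), which maps the frame rotation into the tracker's cv_face_orientation, is not shown in the source. A minimal sketch, assuming the SDK uses the usual CV_FACE_UP / CV_FACE_LEFT / CV_FACE_DOWN / CV_FACE_RIGHT values and that the mapping follows the device rotation; the exact mapping depends on sensor mounting and is an assumption:

// Hypothetical implementation of setFaceRation(); the real mapping may differ per sensor orientation.
static cv_face_orientation setFaceRation(cv_face_orientation defaultOrientation, int rotation)
{
    switch (rotation) {
        case 0:   return CV_FACE_UP;      // upright frame
        case 90:  return CV_FACE_LEFT;    // rotated 90 degrees
        case 180: return CV_FACE_DOWN;    // upside down
        case 270: return CV_FACE_RIGHT;   // rotated 270 degrees
        default:  return defaultOrientation;  // keep the caller's default for unexpected values
    }
}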

    2.3 setFdAlgoInfo

setFdAlgoInfo mainly fills in p3AFaceResult, populating the ffd_data landmark arrays and the faceInfoOri structure; the app and the 3A algorithms consume this FFD data afterwards.

void CustomCvFadApi::setFdAlgoInfo(struct FD_Frame_Parameters* in, cv_image_t frame,
                                    MtkCameraFaceMetadata* p3AFaceResult, cv_face_t* p_faces_array, int i)
{
    p3AFaceResult->faces[i].id = (int32_t)p_faces_array[i].ID;
    p3AFaceResult->faces[i].score = FDFaceMinConfidence;
    p3AFaceResult->faces[i].rect[0] = (int32_t)p_faces_array[i].rect.left;
    p3AFaceResult->faces[i].rect[1] = (int32_t)p_faces_array[i].rect.top;
    p3AFaceResult->faces[i].rect[2] = (int32_t)p_faces_array[i].rect.right;
    p3AFaceResult->faces[i].rect[3] = (int32_t)p_faces_array[i].rect.bottom;

    coordinate center;
    center.x = (int32_t)(p_faces_array[i].rect.left + p_faces_array[i].rect.right) / 2;
    center.y = (int32_t)(p_faces_array[i].rect.top + p_faces_array[i].rect.bottom) / 2;
    mCcoordinate[0] = mCcoordinate[1];
    mCcoordinate[1] = center;
    if(center.x != 0 || center.y != 0) {
        p3AFaceResult->motion[i][0] = mCcoordinate[1].x - mCcoordinate[0].x;
        p3AFaceResult->motion[i][1] = mCcoordinate[1].y - mCcoordinate[0].y;
    }
    p3AFaceResult->faces[i].left_eye[0] = (int32_t)p_faces_array[i].points_more[132].x;
    p3AFaceResult->faces[i].left_eye[1] = (int32_t)p_faces_array[i].points_more[132].y;
    p3AFaceResult->faces[i].right_eye[0] = (int32_t)p_faces_array[i].points_more[160].x;
    p3AFaceResult->faces[i].right_eye[1] = (int32_t)p_faces_array[i].points_more[160].y;
    p3AFaceResult->faces[i].mouth[0] = (int32_t)p_faces_array[i].points_more[229].x;
    p3AFaceResult->faces[i].mouth[1] = (int32_t)p_faces_array[i].points_more[229].y;

    p3AFaceResult->leyex0[i] = (int32_t)p_faces_array[i].points_more[108].x;
    p3AFaceResult->leyey0[i] = (int32_t)p_faces_array[i].points_more[108].y;
    p3AFaceResult->leyex1[i] = (int32_t)p_faces_array[i].points_more[120].x;
    p3AFaceResult->leyey1[i] = (int32_t)p_faces_array[i].points_more[120].y;
    p3AFaceResult->reyex0[i] = (int32_t)p_faces_array[i].points_more[136].x;
    p3AFaceResult->reyey0[i] = (int32_t)p_faces_array[i].points_more[136].y;
    p3AFaceResult->reyex1[i] = (int32_t)p_faces_array[i].points_more[148].x;
    p3AFaceResult->reyey1[i] = (int32_t)p_faces_array[i].points_more[148].y;
    p3AFaceResult->nosex[i] = (int32_t)p_faces_array[i].points_more[167].x;
    p3AFaceResult->nosey[i] = (int32_t)p_faces_array[i].points_more[167].y;
    p3AFaceResult->mouthx0[i] = (int32_t)p_faces_array[i].points_more[191].x;
    p3AFaceResult->mouthy0[i] = (int32_t)p_faces_array[i].points_more[191].y;
    p3AFaceResult->mouthx1[i] = (int32_t)p_faces_array[i].points_more[209].x;
    p3AFaceResult->mouthy1[i] = (int32_t)p_faces_array[i].points_more[209].y;
    p3AFaceResult->leyeux[i] = (int32_t)p_faces_array[i].points_more[133].x;
    p3AFaceResult->leyeuy[i] = (int32_t)p_faces_array[i].points_more[133].y;
    p3AFaceResult->leyedx[i] = (int32_t)p_faces_array[i].points_more[134].x;
    p3AFaceResult->leyedy[i] = (int32_t)p_faces_array[i].points_more[134].y;
    p3AFaceResult->reyeux[i] = (int32_t)p_faces_array[i].points_more[161].x;
    p3AFaceResult->reyeuy[i] = (int32_t)p_faces_array[i].points_more[161].y;
    p3AFaceResult->reyedx[i] = (int32_t)p_faces_array[i].points_more[162].x;
    p3AFaceResult->reyedy[i] = (int32_t)p_faces_array[i].points_more[162].y;

    for (int k = 0; k < MAX_FFD_NUM; k++) {
        in->faceData.faceInfoOri.ffd_data[i].x[k] = (int32_t)(p_faces_array[i].points_more[k].x);
        in->faceData.faceInfoOri.ffd_data[i].y[k] = (int32_t)(p_faces_array[i].points_more[k].y);
        in->faceData.faceInfoOri.ffd_data[i].occlusion[k] = (int32_t)p_faces_array[i].landmarks.occlusion[k];
    }

    // Coarse out-of-plane direction (rop_dir) from yaw, thresholded by the ROTATE_5 constants (presumably +/-5 degrees).
    if (p_faces_array[i].yaw < NEGATIVE_ROTATE_5) {
        p3AFaceResult->posInfo[i].rop_dir = FACE_RIGHT;
    } else if (p_faces_array[i].yaw > ROTATE_5) {
        p3AFaceResult->posInfo[i].rop_dir = FACE_LEFT;
    } else {
        p3AFaceResult->posInfo[i].rop_dir = FACE_FRONT;
    }

    // In-plane rotation bin (rip_dir) from roll: 0 = upright, 1-3 for increasing positive roll,
    // 9-11 for negative roll, in roughly 30-degree steps bounded by the ROTATE_* constants.
    if (p_faces_array[i].roll >= NEGATIVE_ROTATE_15 && p_faces_array[i].roll < ROTATE_15) {
        p3AFaceResult->posInfo[i].rip_dir = 0;
    } else if (p_faces_array[i].roll >= NEGATIVE_ROTATE_90 && p_faces_array[i].roll < NEGATIVE_ROTATE_75) {
        p3AFaceResult->posInfo[i].rip_dir = 9;
    } else if (p_faces_array[i].roll >= NEGATIVE_ROTATE_75 && p_faces_array[i].roll < NEGATIVE_ROTATE_45) {
        p3AFaceResult->posInfo[i].rip_dir = 10;
    } else if (p_faces_array[i].roll >= NEGATIVE_ROTATE_45 && p_faces_array[i].roll < NEGATIVE_ROTATE_15) {
        p3AFaceResult->posInfo[i].rip_dir = 11;
    } else if (p_faces_array[i].roll >= ROTATE_15 && p_faces_array[i].roll < ROTATE_45) {
        p3AFaceResult->posInfo[i].rip_dir = 1;
    } else if (p_faces_array[i].roll >= ROTATE_45 && p_faces_array[i].roll < ROTATE_75) {
        p3AFaceResult->posInfo[i].rip_dir = 2;
    } else if (p_faces_array[i].roll >= ROTATE_75 && p_faces_array[i].roll < ROTATE_90) {
        p3AFaceResult->posInfo[i].rip_dir = 3;
    }

    switch (in->Rotation_Info) {
        case 0:
        case 180:
            p3AFaceResult->fld_rip[i] = in->Rotation_Info - (int)p_faces_array[i].roll;
            break;
        case 90:
        case 270:
            p3AFaceResult->fld_rip[i] = (in->Rotation_Info - 180) - (int)p_faces_array[i].roll;
            break;
        default:
            break;
    }

    if (p3AFaceResult->fld_rip[i] > 180) {
        p3AFaceResult->fld_rip[i] -= 360;
    }

    if (m_ImageSensorFacing == ImageSensorFacingFront) {
        p3AFaceResult->fld_rop[i] = -(int)p_faces_array[i].yaw;
    } else {
        p3AFaceResult->fld_rop[i] = (int)p_faces_array[i].yaw;
    }
    faceRect face;
    face.left = (float)p_faces_array[i].rect.left;
    face.top = (float)p_faces_array[i].rect.top;
    face.right = (float)p_faces_array[i].rect.right;
    face.bottom = (float)p_faces_array[i].rect.bottom;
    in->faceData.faceInfoOri.roll[i] = (int)(p_faces_array[i].roll + 0.5);
    in->faceData.faceInfoOri.yaw[i] = (int)(p_faces_array[i].yaw + 0.5);
    in->faceData.faceInfoOri.pitch[i] = (int)(p_faces_array[i].pitch + 0.5);
    in->faceData.faceInfoOri.face_roi[i].id = (int32_t)p_faces_array[i].ID;
    in->faceData.faceInfoOri.face_roi[i].confidence = FDFaceMinConfidence;
    in->faceData.faceInfoOri.face_roi[i].faceRect.left = (int32_t)face.left;
    in->faceData.faceInfoOri.face_roi[i].faceRect.top = (int32_t)face.top;
    in->faceData.faceInfoOri.face_roi[i].faceRect.width = (int32_t)(face.right - face.left);
    in->faceData.faceInfoOri.face_roi[i].faceRect.height = (int32_t)(face.bottom - face.top);
    in->faceData.faceInfoOri.faceid[i] = (int32_t)p_faces_array[i].ID;
    CAM_LOGD("faceInfo %d_%d, %d x %d x %d x %d", p3AFaceResult->faces[i].id, p3AFaceResult->faces[i].score, p3AFaceResult->faces[i].rect[0],
        p3AFaceResult->faces[i].rect[1], p3AFaceResult->faces[i].rect[2], p3AFaceResult->faces[i].rect[3]);
}
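
To illustrate why these fields matter downstream, here is a hypothetical consumer (not actual 3A code) that derives a rough left-eye ROI from the eye-corner points that setFdAlgoInfo just stored; the square-box heuristic is an assumption:

#include <algorithm>
#include <cstdint>

struct EyeRoi { int32_t left, top, right, bottom; };

// Hypothetical helper: build a rough left-eye ROI for face i from leyex0/leyey0 and leyex1/leyey1.
static EyeRoi makeLeftEyeRoi(const MtkCameraFaceMetadata* meta, int i)
{
    const int32_t x0 = std::min(meta->leyex0[i], meta->leyex1[i]);
    const int32_t x1 = std::max(meta->leyex0[i], meta->leyex1[i]);
    const int32_t cy = (meta->leyey0[i] + meta->leyey1[i]) / 2;
    const int32_t halfH = std::max<int32_t>((x1 - x0) / 2, 1);  // assume a roughly square eye box
    return EyeRoi{x0, cy - halfH, x1, cy + halfH};
}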

    2.4 FFD unloading

Unloading FFD is straightforward: vendorFDUnload releases all of the resources. Note that because the library was opened with RTLD_NODELETE, the final dlclose only drops the handle and does not actually unmap the library from the process.

MINT32 CustomCvFadApi::uninit()
{
    if (pSenseTimelib == nullptr) {
        CAM_LOGE("Error: pSenseTimelib is NULL");
        return -1;
    }
    vendorFDUnload(pSenseTimelib);
    return 0;
}

void CustomCvFadApi::vendorFDUnload(senseTime_lib_struct* p_lib)
{
    if (p_lib->g_engine_counter > 0) {
        CAM_LOGI("g_engine_counter is %d", p_lib->g_engine_counter);
        vendorFDDeinit(p_lib);
    }

    if (p_lib->cv_face_lib_ptr != nullptr) {
        CAM_LOGI("%s: close fd lib", __FUNCTION__);
        dlclose(p_lib->cv_face_lib_ptr);
        p_lib->cv_face_lib_ptr = nullptr;
    }
    free(p_lib);
}

void CustomCvFadApi::vendorFDDeinit(senseTime_lib_struct* p_lib)
{
    if ((p_lib->g_engine_counter > 0) && (p_lib->cv_face_lib_ptr != nullptr)) {
        p_lib->g_engine_counter--;
    }

    if(p_lib->g_cv_tracker_handle_front != nullptr) {
        p_lib->cv_face_destroy_tracker(p_lib->g_cv_tracker_handle_front);
        p_lib->g_cv_tracker_handle_front = nullptr;
    }

    if(p_lib->g_cv_attr_handle != nullptr) {
        p_lib->cv_face_destroy_attribute_handle(p_lib->g_cv_attr_handle);
        p_lib->g_cv_attr_handle = nullptr;
    }

    if (pSenseTimelib->g_SenseTimeEngine.attr_Buff != nullptr) {
        free(pSenseTimelib->g_SenseTimeEngine.attr_Buff);
        pSenseTimelib->g_SenseTimeEngine.attr_Buff = nullptr;
    }
}

[Follow me for more upcoming posts in this series. Thank you!]

The next post will continue this series.

