How to use Python to access the Hongsoft (ArcSoft) ArcFace SDK

Posted by leightons on Wed, 11 Sep 2019 11:23:14 +0200

Our company needs a face recognition SDK for a project with strict information-security requirements. After looking into several mainstream face recognition SDKs on the market, ArcSoft's (Hongsoft's) ArcFace SDK turned out to be the best overall fit: it offers a free version and can run completely offline, which meets our security requirements. The one drawback is that our project is written mainly in Python and ArcSoft does not provide an official Python SDK, so I wrapped the ArcFace C++ SDK with Python to make it easy to use in the project. This article walks through the main steps.

1. Environment description

a. Note that 64-bit Python on Windows must be used with the ArcFace C++ (Win64) SDK; a quick way to check the interpreter bitness is shown after this list. If the platforms do not match, loading the DLL fails with an error like:
OSError: [WinError 193] %1 is not a valid Win32 application
b. Since the SDK involves raw memory operations, this article uses the following helpers from the ctypes package and the msvcrt C runtime loaded through cdll:

from ctypes import *

c_ubyte_p = POINTER(c_ubyte)   # unsigned byte pointer type used for image data
memcpy = cdll.msvcrt.memcpy    # C runtime memcpy
malloc = cdll.msvcrt.malloc    # C runtime malloc
malloc.restype = c_void_p      # treat the returned allocation as a raw pointer
free = cdll.msvcrt.free        # C runtime free
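
To confirm which SDK build is needed (see item a above), the bitness of the Python interpreter can be checked with the standard library; a minimal sketch:

import platform
import struct

# A 64-bit interpreter prints 64 here and requires the Win64 SDK;
# a 32-bit interpreter prints 32 and requires the Win32 SDK.
print("Python bitness:", struct.calcsize("P") * 8)
print("Interpreter architecture:", platform.architecture()[0])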

2. ArcFace SDK Basic Data Structure Encapsulation

When encapsulating the data structures, pay close attention to each field type; a mismatch can cause hard-to-diagnose program errors. A sizeof sanity check is shown right after the structure definitions below.

class MRECT(Structure):  # Face frame
   _fields_ = [(u'left', c_int32),
               (u'top', c_int32),
               (u'right', c_int32),
               (u'bottom', c_int32)]


class ASFVersion(Structure):  # Version info: version number, build date, copyright
   _fields_ = [
       ('Version', c_char_p),
       ('BuildDate', c_char_p),
       ('CopyRight', c_char_p)]


class ASFSingleFaceInfo(Structure):  # Single face info: face rectangle, face orientation
   _fields_ = [
       ('faceRect', MRECT),
       ('faceOrient', c_int32)]


class ASFMultiFaceInfo(Structure):  # Multi-face info: face rectangle array, face orientation array, face count
   _fields_ = [
       (u'faceRect', POINTER(MRECT)),
       (u'faceOrient', POINTER(c_int32)),
       (u'faceNum', c_int32)]


class ASFFaceFeature(Structure):  # Face feature: feature data pointer, feature length
   _fields_ = [
       ('feature', c_void_p),
       ('featureSize', c_int32)]


class ASFFace3DAngle(Structure):  # Face Angle Information
   _fields_ = [
       ('roll', c_void_p),
       ('yaw', c_void_p),
       ('pitch', c_void_p),
       ('status', c_void_p),
       ('num', c_int32)]


class ASFAgeInfo(Structure):  # Age 
   _fields_ = [
       (u'ageArray', c_void_p),
       (u'num', c_int32)]


class ASFGenderInfo(Structure):  # Gender 
   _fields_ = [
       (u'genderArray', c_void_p),
       (u'num', c_int32)]


class ASFLivenessThreshold(Structure):  # Liveness thresholds (RGB and IR)
   _fields_ = [
       (u'thresholdmodel_BGR', c_float),
       (u'thresholdmodel_IR', c_float)]  # both thresholds are floats (MFloat) in the SDK header


class ASFLivenessInfo(Structure):  # Liveness info
   _fields_ = [
       (u'isLive', c_void_p),
       (u'num', c_int32)]
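
As a quick sanity check on the field types (mentioned above), the size of each wrapped structure can be printed with ctypes.sizeof and compared against expectations; a minimal sketch:

from ctypes import sizeof

# On Win64, MRECT should report 16 bytes (four 32-bit ints) and ASFFaceFeature
# 16 bytes (an 8-byte pointer plus a 4-byte int, padded to 8-byte alignment).
# An unexpected size usually points to a wrong field type.
for struct_type in (MRECT, ASFVersion, ASFSingleFaceInfo, ASFMultiFaceInfo,
                    ASFFaceFeature, ASFLivenessThreshold, ASFLivenessInfo):
    print(struct_type.__name__, sizeof(struct_type))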

3. ArcFace SDK Interface Encapsulation

a. The DLLs shipped with the ArcFace SDK must be loaded before the interfaces can be wrapped.
b. The image format used in this article is ASVL_PAF_RGB24_B8G8R8.
c. Each interface needs its return type and argument types declared; some of them depend on the data structures defined above.

from arcsoft_face_struct import *
from ctypes import *
from enum import Enum

face_dll = CDLL("libarcsoft_face.dll")
face_engine_dll = CDLL("libarcsoft_face_engine.dll")

ASF_DETECT_MODE_VIDEO = 0x00000000
ASF_DETECT_MODE_IMAGE = 0xFFFFFFFF

ASF_NONE = 0x00000000
ASF_FACE_DETECT = 0x00000001
ASF_FACE_RECOGNITION = 0x00000004
ASF_AGE = 0x00000008
ASF_GENDER = 0x00000010
ASF_FACE3DANGLE = 0x00000020
ASF_LIVENESS = 0x00000080
ASF_IR_LIVENESS = 0x00000400

ASVL_PAF_RGB24_B8G8R8 = 0x201


class ArcSoftFaceOrientPriority(Enum):
    ASF_OP_0_ONLY = 0x1
    ASF_OP_90_ONLY = 0x2
    ASF_OP_270_ONLY = 0x3
    ASF_OP_180_ONLY = 0x4
    ASF_OP_0_HIGHER_EXT = 0x5

activate = face_engine_dll.ASFActivation
activate.restype = c_int32
activate.argtypes = (c_char_p, c_char_p)


init_engine = face_engine_dll.ASFInitEngine
init_engine.restype = c_int32
init_engine.argtypes = (c_uint32, c_int32, c_int32, c_int32, c_int32, POINTER(c_void_p))  # first parameter is the detect mode (unsigned 32-bit)


detect_face = face_engine_dll.ASFDetectFaces
detect_face.restype = c_int32
detect_face.argtypes = (c_void_p, c_int32, c_int32, c_int32, POINTER(c_ubyte), POINTER(ASFMultiFaceInfo))


extract_feature = face_engine_dll.ASFFaceFeatureExtract
extract_feature.restype = c_int32
extract_feature.argtypes = (c_void_p, c_int32, c_int32, c_int32, POINTER(c_ubyte),
                            POINTER(ASFSingleFaceInfo), POINTER(ASFFaceFeature))


compare_feature = face_engine_dll.ASFFaceFeatureCompare
compare_feature.restype = c_int32
compare_feature.argtypes = (c_void_p, POINTER(ASFFaceFeature),
                            POINTER(ASFFaceFeature), POINTER(c_float))


set_liveness_param = face_engine_dll.ASFSetLivenessParam
set_liveness_param.restype = c_int32
set_liveness_param.argtypes = (c_void_p, POINTER(ASFLivenessThreshold))


process = face_engine_dll.ASFProcess
process.restype = c_int32
process.argtypes = (c_void_p, c_int32, c_int32, c_int32, POINTER(c_ubyte),
                    POINTER(ASFMultiFaceInfo), c_int32)


get_age = face_engine_dll.ASFGetAge
get_age.restype = c_int32
get_age.argtypes = (c_void_p, POINTER(ASFAgeInfo))


get_gender = face_engine_dll.ASFGetGender
get_gender.restype = c_int32
get_gender.argtypes = (c_void_p, POINTER(ASFGenderInfo))


get_3d_angle = face_engine_dll.ASFGetFace3DAngle
get_3d_angle.restype = c_int32
get_3d_angle.argtypes = (c_void_p, POINTER(ASFFace3DAngle))


get_liveness_info = face_engine_dll.ASFGetLivenessScore
get_liveness_info.restype = c_int32
get_liveness_info.argtypes = (c_void_p, POINTER(ASFLivenessInfo))
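
Every wrapped function above returns a 32-bit MRESULT, where 0 means success. As an optional convenience (a helper of my own, not part of the SDK), non-zero return codes can be turned into exceptions so calling code does not need an if/else after every call:

class ArcFaceError(Exception):
    """Raised when an ArcFace SDK call returns a non-zero MRESULT."""


def check(ret, operation):
    # ret: MRESULT returned by a wrapped SDK call; 0 means success.
    if ret != 0:
        raise ArcFaceError("%s failed with error code %d" % (operation, ret))
    return ret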

4. Calling the encapsulated interfaces

The interface calls are introduced below following this flow chart (drawn with Microsoft Visio 2016).

The picture below shows the result of running this flow; for space reasons it only shows the age, gender and liveness information.

a. activation

Note that app_id and sdk_key must be passed as bytes.

    app_id = b""
    sdk_key = b""
    ret = arcsoft_face_func.activate(app_id, sdk_key)  # activation
    if ret == 0 or ret == 90114:  # 90114 means the SDK has already been activated
        print("Successful activation")
    else:
        print("Activation failed:", ret)
b. initialization

Initialization requires all the needed capabilities to be passed in at once as a bitmask. This article enables face detection, feature extraction (face recognition), age, gender, 3D angle and liveness.

    mask = arcsoft_face_func.ASF_FACE_DETECT | \
            arcsoft_face_func.ASF_FACE_RECOGNITION | \
            arcsoft_face_func.ASF_AGE | \
            arcsoft_face_func.ASF_GENDER | \
            arcsoft_face_func.ASF_FACE3DANGLE |\
            arcsoft_face_func.ASF_LIVENESS

    engine = c_void_p()
    ret = arcsoft_face_func.init_engine(arcsoft_face_func.ASF_DETECT_MODE_IMAGE,
                                        arcsoft_face_func.ArcSoftFaceOrientPriority.ASF_OP_0_ONLY.value,
                                        30, 10, mask, byref(engine))
    if ret == 0:
        print("Successful initialization")
    else:
        print("initialization failed:", ret)
c. Face Detection

This article uses OpenCV to read images, which gives good compatibility, plus a small user-defined class to record the image information. cv2.imread returns pixels in BGR order, which matches ASVL_PAF_RGB24_B8G8R8. Note that the ArcFace C++ SDK requires the width of the incoming image to be a multiple of 4 (four-byte alignment).

import cv2


class Image:
    def __init__(self):
        self.width = 0
        self.height = 0
        self.imageData = None


def load_image(file_path):
    img = cv2.imread(file_path)
    sp = img.shape
    # Round the width down to a multiple of 4 (four-byte alignment required by the SDK).
    img = cv2.resize(img, (sp[1] // 4 * 4, sp[0]))

    image = Image()
    image.width = img.shape[1]
    image.height = img.shape[0]
    image.imageData = img
    return image

###################### Face detection ##################################

    image1 = load_image(r"1.jpg")
    image_bytes = bytes(image1.imageData)
    image_ubytes = cast(image_bytes, c_ubyte_p)

    detect_faces = ASFMultiFaceInfo()
    ret = arcsoft_face_func.detect_face(
        engine,
        image1.width,
        image1.height,
        arcsoft_face_func.ASVL_PAF_RGB24_B8G8R8,
        image_ubytes,
        byref(detect_faces)
    )

    if ret == 0:
        print("Successful Face Detection")
    else:
        print("Face Detection Failure:", ret)
d. Feature extraction

Feature extraction only supports a single face, so a single-face structure is filled in from the detection result. The extracted feature must be copied out immediately, because the SDK reuses its internal buffer and the data will be overwritten by the next extraction.

    single_face1 = ASFSingleFaceInfo()
    single_face1.faceRect = detect_faces.faceRect[0]
    single_face1.faceOrient = detect_faces.faceOrient[0]

    face_feature = ASFFaceFeature()
    ret = arcsoft_face_func.extract_feature(
        engine,
        image1.width,
        image1.height,
        arcsoft_face_func.ASVL_PAF_RGB24_B8G8R8,
        image_ubytes,
        single_face1,
        byref(face_feature)
    )

    if ret == 0:
        print("Successful feature extraction")
    else:
        print("Failure to extract feature 1:", ret)

    feature1 = ASFFaceFeature()
    feature1.featureSize = face_feature.featureSize
    feature1.feature = malloc(feature1.featureSize)
    memcpy(c_void_p(feature1.feature),
           c_void_p(face_feature.feature),
           feature1.featureSize)
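
If the copied feature needs to be stored (for example in a database) and reused later, it can be converted to and from Python bytes with ctypes helpers; a minimal sketch of my own, not an SDK interface:

    # Copy the feature into an immutable bytes object for storage.
    feature1_bytes = string_at(feature1.feature, feature1.featureSize)

    # Later, rebuild an ASFFaceFeature from the stored bytes for comparison.
    restored_buf = create_string_buffer(feature1_bytes, len(feature1_bytes))
    restored_feature = ASFFaceFeature()
    restored_feature.feature = cast(restored_buf, c_void_p)
    restored_feature.featureSize = len(feature1_bytes)
    # Keep restored_buf referenced for as long as restored_feature is in use.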
e. Feature comparison

With one face feature extracted as above, and a second feature (feature2) extracted from another image in the same way, the two features can be compared as follows.

    compare_threshold = c_float()
    ret = arcsoft_face_func.compare_feature(
        engine, feature1, feature2, compare_threshold
    )

    free(c_void_p(feature1.feature))
    free(c_void_p(feature2.feature))

    if ret == 0:
        print("Feature comparison succeeded, similarity:", compare_threshold.value)
    else:
        print("Feature comparison failed:", ret)
f. Age, gender, 3D Angle

The process interface currently provides age, gender, 3D angle and RGB liveness detection. Age, gender and 3D angle support multiple faces, while liveness only supports a single face, so they are handled separately below.

    process_mask = arcsoft_face_func.ASF_AGE | \
                   arcsoft_face_func.ASF_GENDER | \
                   arcsoft_face_func.ASF_FACE3DANGLE

    ret = arcsoft_face_func.process(
        engine,
        image1.width,
        image1.height,
        arcsoft_face_func.ASVL_PAF_RGB24_B8G8R8,
        image_ubytes,
        byref(detect_faces),
        c_int32(process_mask)
    )

    if ret == 0:
        print("process Success")
    else:
        print("process fail:", ret)

######################## Age ################################

    age_info = ASFAgeInfo()
    ret = arcsoft_face_func.get_age(engine, byref(age_info))

    if ret == 0:
        print("get_age Success")
        age_ptr = cast(age_info.ageArray, POINTER(c_int))
        for i in range(age_info.num):
            print("face", i, "age:", age_ptr[i])
    else:
        print("get_age fail:", ret)

####################### Gender #################################

    gender_info = ASFGenderInfo()
    ret = arcsoft_face_func.get_gender(engine, byref(gender_info))

    if ret == 0:
        print("get_gender Success")
        gender_ptr = cast(gender_info.genderArray, POINTER(c_int))
        for i in range(gender_info.num):
            print("face", i, "gender:",
                  "Female sex" if (gender_ptr[i] == 1) else (
                      "Male" if (gender_ptr[i] == 0) else "Unknown"
                  ))
    else:
        print("get_gender fail:", ret)

####################### 3D Angle #################################

    angle_info = ASFFace3DAngle()
    ret = arcsoft_face_func.get_3d_angle(engine, byref(angle_info))

    if ret == 0:
        print("get_3d_angle Success")
        roll_ptr = cast(angle_info.roll, POINTER(c_float))
        yaw_ptr = cast(angle_info.yaw, POINTER(c_float))
        pitch_ptr = cast(angle_info.pitch, POINTER(c_float))
        status_ptr = cast(angle_info.status, POINTER(c_int32))
        for i in range(angle_info.num):
            print("face", i,
                  "roll:", roll_ptr[i],
                  "yaw:", yaw_ptr[i],
                  "pitch:", pitch_ptr[i],
                  "status:", "normal" if status_ptr[i] == 0 else "error")

    else:
        print("get_3d_angle fail:", ret)
g. RGB liveness detection

Before liveness detection, it is recommended to set the RGB liveness threshold according to the actual scene rather than relying on the default; the default RGB liveness threshold is 0.75. Because liveness only supports a single face, each detected face is copied into a single-face structure and passed to the interface one at a time.

######################### Liveness threshold setting ###############################
    threshold_param = ASFLivenessThreshold()
    threshold_param.thresholdmodel_BGR = 0.75
    ret = arcsoft_face_func.set_liveness_param(engine, byref(threshold_param))

    if ret == 0:
        print("set_liveness_param Success")
    else:
        print("set_liveness_param fail:", ret)

    # Reusable single-face structure; liveness is evaluated one face at a time.
    temp_face_info = ASFMultiFaceInfo()
    temp_face_info.faceNum = 1
    temp_rect = MRECT()
    temp_orient = c_int32()
    temp_face_info.faceRect = pointer(temp_rect)
    temp_face_info.faceOrient = pointer(temp_orient)

    for i in range(detect_faces.faceNum):
        temp_face_info.faceRect[0] = detect_faces.faceRect[i]
        temp_face_info.faceOrient[0] = detect_faces.faceOrient[i]

        # RGB liveness detection
        ret = arcsoft_face_func.process(
            engine,
            image1.width,
            image1.height,
            arcsoft_face_func.ASVL_PAF_RGB24_B8G8R8,
            image_ubytes,
            byref(temp_face_info),
            c_int32(arcsoft_face_func.ASF_LIVENESS)
        )

        if ret == 0:
            print("process Success")
        else:
            print("process fail:", ret)

        liveness_info = ASFLivenessInfo()
        ret = arcsoft_face_func.get_liveness_info(engine, byref(liveness_info))

        if ret == 0:
            print("get_liveness_info Success")
            liveness_ptr = cast(liveness_info.isLive, POINTER(c_int))
            print("face", i, "liveness:",
                  "Non real person" if (liveness_ptr[0] == 0) else (
                      "Real person" if (liveness_ptr[0] == 1) else (
                          "Uncertain" if (liveness_ptr[0] == -1) else (
                              "Input Face Number>1" if (liveness_ptr[0] == -2) else
                              (liveness_ptr[0])
                          )
                      )
                  ))
        else:
            print("get_liveness_info fail:", ret)

Topics: Python SDK OpenCV