fix: 修复特征提取失败(错误码81929)

问题: extractFaceFeature 返回错误码 81929 (MERR_FSDK_FACEFEATURE_FACEDATA)

根本原因: detectFaces 返回的人脸信息缺少 faceData 字段,
而虹软 SDK 的 extractFaceFeature 必须要有这个字段才能提取特征

修复:
- FaceEngineManager.convertFaceInfoToList: 添加返回 faceData
- ArcPlugin.handleExtractFaceFeature: 接收并传递 faceData 参数
- Dart API: extractFaceFeature 添加 faceData 参数
- example: 传递 faceData 到特征提取调用

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-03-30 16:58:12 +08:00
parent c9d57d160c
commit 86e268ae1c
7 changed files with 1611 additions and 3 deletions

View File

@@ -0,0 +1,509 @@
package com.xiarui.arc;
import android.content.Context;
import androidx.annotation.NonNull;
import java.util.HashMap;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import io.flutter.embedding.engine.plugins.FlutterPlugin;
import io.flutter.plugin.common.MethodCall;
import io.flutter.plugin.common.MethodChannel;
import io.flutter.plugin.common.MethodChannel.MethodCallHandler;
import io.flutter.plugin.common.MethodChannel.Result;
import com.arcsoft.face.FaceInfo;
import com.arcsoft.face.LivenessInfo;
/**
* ArcPlugin - Flutter 插件入口
* 负责处理 Flutter 与原生 Android 之间的方法调用通信
*/
/**
 * ArcPlugin - Flutter plugin entry point.
 *
 * Routes method-channel calls from Dart (channel name "arc") to the native
 * ArcSoft face engine wrapped by {@link FaceEngineManager}, and marshals each
 * result back as a Map payload. Every handler returns at least
 * {success, errorCode, message}; some add handler-specific keys
 * (faceList, featureData, similarity, ...).
 */
public class ArcPlugin implements FlutterPlugin, MethodCallHandler {
    /** ArcSoft error code meaning the SDK is already activated on this device. */
    private static final int ALREADY_ACTIVATED = 90114;
    /** Default feature mask: face detect + face recognition + RGB liveness. */
    private static final int DEFAULT_COMBINED_MASK = 0x85;

    private MethodChannel channel;
    private FlutterPluginBinding flutterPluginBinding;

    @Override
    public void onAttachedToEngine(@NonNull FlutterPluginBinding flutterPluginBinding) {
        this.flutterPluginBinding = flutterPluginBinding;
        channel = new MethodChannel(flutterPluginBinding.getBinaryMessenger(), "arc");
        channel.setMethodCallHandler(this);
    }

    @Override
    public void onMethodCall(@NonNull MethodCall call, @NonNull Result result) {
        switch (call.method) {
            case "getPlatformVersion":          // platform version string
                handleGetPlatformVersion(result);
                break;
            case "activeOnline":                // online SDK activation
                handleActiveOnline(call, result);
                break;
            case "init":                        // engine initialization
                handleInit(call, result);
                break;
            case "detectFaces":                 // detection + RGB liveness
                handleDetectFaces(call, result);
                break;
            case "extractFaceFeature":          // feature extraction
                handleExtractFaceFeature(call, result);
                break;
            case "compareFaceFeature":          // 1:1 feature comparison
                handleCompareFaceFeature(call, result);
                break;
            case "registerFaceFeature":         // single 1:N registration
                handleRegisterFaceFeature(call, result);
                break;
            case "registerFaceFeatureBatch":    // batch 1:N registration
                handleRegisterFaceFeatureBatch(call, result);
                break;
            default:
                result.notImplemented();
                break;
        }
    }

    /**
     * Builds the common payload for an engine error code:
     * success (code == 0), errorCode, and its human-readable message.
     */
    private static Map<String, Object> engineResult(int errorCode) {
        Map<String, Object> map = new HashMap<>();
        map.put("success", errorCode == 0);
        map.put("errorCode", errorCode);
        map.put("message", FaceErrorCode.fromCode(errorCode).getMessage());
        return map;
    }

    /**
     * Builds a parameter-validation failure payload (errorCode -1).
     */
    private static Map<String, Object> paramError(String message) {
        Map<String, Object> map = new HashMap<>();
        map.put("success", false);
        map.put("errorCode", -1);
        map.put("message", message);
        return map;
    }

    /**
     * Returns the Android OS version string, e.g. "Android 13".
     */
    private void handleGetPlatformVersion(Result result) {
        result.success("Android " + android.os.Build.VERSION.RELEASE);
    }

    /**
     * Activates the ArcSoft SDK online. Requires appId and sdkKey; a missing
     * activeKey is tolerated (FaceEngineManager coalesces null to "").
     * "Already activated" (90114) is reported as success: the device is usable.
     */
    private void handleActiveOnline(MethodCall call, Result result) {
        String appId = call.argument("appId");
        String sdkKey = call.argument("sdkKey");
        String activeKey = call.argument("activeKey");
        if (appId == null || sdkKey == null) {
            result.error("INVALID_PARAMS", "appId 和 sdkKey 不能为空", null);
            return;
        }
        Context context = flutterPluginBinding.getApplicationContext();
        int errorCode = FaceEngineManager.getInstance()
                .activeOnline(context, appId, sdkKey, activeKey);
        Map<String, Object> resultMap = engineResult(errorCode);
        resultMap.put("success", errorCode == 0 || errorCode == ALREADY_ACTIVATED);
        result.success(resultMap);
    }

    /**
     * Initializes the face engine.
     * Arguments: detectMode (0=VIDEO, 1=IMAGE), orient (0/90/180/270/360),
     * maxFaceNum, combinedMask (defaults to detect + recognition + RGB liveness).
     * Returns {success, errorCode, message}.
     */
    private void handleInit(MethodCall call, Result result) {
        Integer detectMode = call.argument("detectMode");
        Integer orient = call.argument("orient");
        Integer maxFaceNum = call.argument("maxFaceNum");
        Integer combinedMask = call.argument("combinedMask");
        int mode = detectMode != null ? detectMode : 0;     // VIDEO mode
        int or = orient != null ? orient : 0;
        int maxFace = maxFaceNum != null ? maxFaceNum : 1;
        int mask = combinedMask != null ? combinedMask : DEFAULT_COMBINED_MASK;
        Context context = flutterPluginBinding.getApplicationContext();
        int errorCode = FaceEngineManager.getInstance().init(context, mode, or, maxFace, mask);
        result.success(engineResult(errorCode));
    }

    /**
     * Validation-failure payload for detectFaces, including the empty
     * face list and "unknown" liveness fields the Dart side expects.
     */
    private static Map<String, Object> detectError(String message) {
        Map<String, Object> map = paramError(message);
        map.put("faceList", new ArrayList<>());
        map.put("rgbLiveness", -1);
        map.put("isRgbAlive", false);
        return map;
    }

    /**
     * Detects faces in an image and runs RGB liveness on the result.
     * Arguments: data (NV21 bytes), width, height, format.
     * Returns {success, errorCode, message, faceList, rgbLiveness, isRgbAlive}.
     */
    private void handleDetectFaces(MethodCall call, Result result) {
        byte[] data = call.argument("data");
        Integer width = call.argument("width");
        Integer height = call.argument("height");
        Integer format = call.argument("format");
        if (data == null) {
            result.success(detectError("图像数据不能为空"));
            return;
        }
        if (width == null || height == null || format == null) {
            result.success(detectError("width, height, format 参数不能为空"));
            return;
        }
        List<FaceInfo> faceInfoList = new ArrayList<>();
        int errorCode = FaceEngineManager.getInstance()
                .detectFaces(data, width, height, format, faceInfoList);
        List<Map<String, Object>> faceListMap =
                FaceEngineManager.getInstance().convertFaceInfoToList(faceInfoList);
        // RGB liveness verdict captured during detectFaces: -1=unknown, 0=spoof, 1=live.
        int rgbLiveness = FaceEngineManager.getInstance().getLastRgbLiveness();
        Map<String, Object> resultMap = engineResult(errorCode);
        resultMap.put("faceList", faceListMap);
        resultMap.put("rgbLiveness", rgbLiveness);
        resultMap.put("isRgbAlive", rgbLiveness == LivenessInfo.ALIVE);
        result.success(resultMap);
    }

    /**
     * Failure payload for extractFaceFeature with an explicit error code
     * and a null featureData slot.
     */
    private static Map<String, Object> extractError(int errorCode, String message) {
        Map<String, Object> map = new HashMap<>();
        map.put("success", false);
        map.put("errorCode", errorCode);
        map.put("message", message);
        map.put("featureData", null);
        return map;
    }

    /**
     * Extracts a face feature vector from an image.
     *
     * Arguments: data, width, height, format, rectLeft/Top/Right/Bottom,
     * faceOrientation, faceId, faceData (REQUIRED — the opaque blob produced
     * by detectFaces; without it the SDK fails with 81929
     * MERR_FSDK_FACEFEATURE_FACEDATA), extractType (0=REGISTER, 1=RECOGNIZE),
     * mask (0=no mask, 1=mask worn).
     * Returns {success, errorCode, message, featureData}.
     */
    private void handleExtractFaceFeature(MethodCall call, Result result) {
        byte[] data = call.argument("data");
        Integer width = call.argument("width");
        Integer height = call.argument("height");
        Integer format = call.argument("format");
        Integer rectLeft = call.argument("rectLeft");
        Integer rectTop = call.argument("rectTop");
        Integer rectRight = call.argument("rectRight");
        Integer rectBottom = call.argument("rectBottom");
        Integer faceOrientation = call.argument("faceOrientation");
        Integer faceId = call.argument("faceId");
        byte[] faceData = call.argument("faceData");    // mandatory, from detectFaces
        Integer extractType = call.argument("extractType");
        Integer mask = call.argument("mask");
        // NOTE: the per-argument Log.d dump was removed — it logged raw image
        // metadata on every call; FaceEngineManager still logs SDK failures.
        if (data == null) {
            result.success(extractError(-1, "图像数据不能为空"));
            return;
        }
        if (width == null || height == null || format == null) {
            result.success(extractError(-1, "width, height, format 参数不能为空"));
            return;
        }
        if (rectLeft == null || rectTop == null || rectRight == null || rectBottom == null) {
            result.success(extractError(-1, "人脸框参数不能为空"));
            return;
        }
        if (faceData == null) {
            // Missing faceData is exactly the condition behind SDK error 81929.
            result.success(extractError(
                    FaceErrorCode.MERR_FSDK_FACEFEATURE_FACEDATA.getCode(),
                    "faceData 参数不能为空,必须从 detectFaces 结果中获取"));
            return;
        }
        // Rebuild the FaceInfo the SDK handed out during detection.
        android.graphics.Rect rect =
                new android.graphics.Rect(rectLeft, rectTop, rectRight, rectBottom);
        FaceInfo faceInfo = new FaceInfo();
        faceInfo.setRect(rect);
        faceInfo.setOrient(faceOrientation != null ? faceOrientation : 0);
        faceInfo.setFaceId(faceId != null ? faceId : -1);
        faceInfo.setFaceData(faceData);     // required by extractFaceFeature
        int extType = extractType != null ? extractType : 1;    // default RECOGNIZE
        int maskValue = mask != null ? mask : 0;                // default: no mask
        // format was validated non-null above; no fallback literal needed here.
        Map<String, Object> extractResult = FaceEngineManager.getInstance()
                .extractFaceFeature(data, width, height, format, faceInfo, extType, maskValue);
        int errorCode = (int) extractResult.get("errorCode");
        Map<String, Object> resultMap = engineResult(errorCode);
        resultMap.put("featureData", extractResult.get("featureData"));
        result.success(resultMap);
    }

    /**
     * Compares two face feature vectors.
     * Arguments: featureData1, featureData2, compareModel (0=LIFE_PHOTO, 1=ID_CARD).
     * Returns {success, errorCode, message, similarity} with similarity in [0, 1].
     */
    private void handleCompareFaceFeature(MethodCall call, Result result) {
        byte[] featureData1 = call.argument("featureData1");
        byte[] featureData2 = call.argument("featureData2");
        Integer compareModel = call.argument("compareModel");
        if (featureData1 == null || featureData2 == null) {
            Map<String, Object> errorMap = paramError("特征数据不能为空");
            errorMap.put("similarity", 0.0);
            result.success(errorMap);
            return;
        }
        int model = compareModel != null ? compareModel : 0;    // default LIFE_PHOTO
        Map<String, Object> compareResult = FaceEngineManager.getInstance()
                .compareFaceFeature(featureData1, featureData2, model);
        int errorCode = (int) compareResult.get("errorCode");
        Map<String, Object> resultMap = engineResult(errorCode);
        resultMap.put("similarity", compareResult.get("similarity"));
        result.success(resultMap);
    }

    /**
     * Registers a single face feature into the 1:N search library.
     * Arguments: searchId (unique key for later searches), featureData,
     * faceTag (optional metadata). Returns {success, errorCode, message}.
     */
    private void handleRegisterFaceFeature(MethodCall call, Result result) {
        Integer searchId = call.argument("searchId");
        byte[] featureData = call.argument("featureData");
        String faceTag = call.argument("faceTag");
        if (searchId == null) {
            result.success(paramError("searchId 不能为空"));
            return;
        }
        if (featureData == null) {
            result.success(paramError("特征数据不能为空"));
            return;
        }
        int errorCode = FaceEngineManager.getInstance()
                .registerFaceFeature(searchId, featureData, faceTag);
        result.success(engineResult(errorCode));
    }

    /**
     * Registers a batch of face features into the 1:N search library.
     * Argument: faceList — list of maps with searchId, featureData and an
     * optional faceTag. Entries with the wrong shape are skipped, matching
     * FaceEngineManager's own per-item validation.
     * Returns {success, errorCode, message}.
     */
    private void handleRegisterFaceFeatureBatch(MethodCall call, Result result) {
        List<?> faceList = call.argument("faceList");
        if (faceList == null || faceList.isEmpty()) {
            result.success(paramError("人脸列表不能为空"));
            return;
        }
        // Copy only well-typed entries into the shape FaceEngineManager expects.
        List<Map<String, Object>> convertedList = new ArrayList<>();
        for (Object item : faceList) {
            if (!(item instanceof Map)) {
                continue;
            }
            Map<?, ?> rawMap = (Map<?, ?>) item;
            Map<String, Object> itemMap = new HashMap<>();
            Object searchIdObj = rawMap.get("searchId");
            if (searchIdObj instanceof Integer) {
                itemMap.put("searchId", searchIdObj);
            }
            Object featureDataObj = rawMap.get("featureData");
            if (featureDataObj instanceof byte[]) {
                itemMap.put("featureData", featureDataObj);
            }
            Object faceTagObj = rawMap.get("faceTag");
            if (faceTagObj instanceof String) {
                itemMap.put("faceTag", faceTagObj);
            }
            convertedList.add(itemMap);
        }
        int errorCode = FaceEngineManager.getInstance().registerFaceFeatureBatch(convertedList);
        result.success(engineResult(errorCode));
    }

    @Override
    public void onDetachedFromEngine(@NonNull FlutterPluginBinding binding) {
        channel.setMethodCallHandler(null);
        // Drop references so the detached engine and its binding can be GC'd.
        channel = null;
        flutterPluginBinding = null;
    }
}

View File

@@ -0,0 +1,536 @@
package com.xiarui.arc;
import android.content.Context;
import com.arcsoft.face.FaceInfo;
import com.arcsoft.face.FaceEngine;
import com.arcsoft.face.enums.DetectMode;
import com.arcsoft.face.enums.DetectFaceOrientPriority;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import com.arcsoft.face.LivenessInfo;
import com.arcsoft.face.FaceFeature;
import com.arcsoft.face.FaceSimilar;
import com.arcsoft.face.FaceFeatureInfo;
import com.arcsoft.face.enums.ExtractType;
import com.arcsoft.face.enums.CompareModel;
/**
* 虹软人脸识别引擎管理器单例类
* 负责管理 FaceEngine 实例的生命周期,提供 SDK 激活和引擎初始化能力
*/
/**
 * Singleton that owns the ArcSoft {@link FaceEngine} instance.
 *
 * Responsibilities: online SDK activation, engine init/uninit, face detection
 * with RGB liveness, feature extraction and 1:1 comparison, and 1:N feature
 * registration. Only getInstance() is synchronized; the engine operations
 * themselves are expected to be driven from a single thread (the platform
 * channel thread).
 */
public class FaceEngineManager {
    private static final String TAG = "FaceEngineManager";
    /** FaceEngine.CP_PAF_NV21 — NV21 image format code used by the SDK. */
    private static final int FORMAT_NV21 = 2050;

    /** Lazily created singleton instance. */
    private static FaceEngineManager instance;
    /** Wrapped ArcSoft engine; created on first init(). */
    private FaceEngine faceEngine;
    /** True once init() succeeded and until unInit(). */
    private boolean isInitialized = false;
    /** Feature mask the engine was successfully initialized with (0 otherwise). */
    private int enabledMask = 0;
    /** Last RGB liveness verdict: -1 = unknown, 0 = spoof, 1 = live. */
    private int lastRgbLiveness = -1;

    /** Private constructor: use {@link #getInstance()}. */
    private FaceEngineManager() {
    }

    /**
     * Returns the process-wide singleton (thread-safe lazy creation).
     * @return the FaceEngineManager singleton
     */
    public static synchronized FaceEngineManager getInstance() {
        if (instance == null) {
            instance = new FaceEngineManager();
        }
        return instance;
    }

    /**
     * Activates the SDK online.
     * @param context Android context
     * @param appId app ID issued by the ArcSoft console
     * @param sdkKey SDK key issued by the ArcSoft console
     * @param activeKey activation key; null is coalesced to ""
     * @return 0 on success, otherwise an ArcSoft error code
     */
    public int activeOnline(Context context, String appId, String sdkKey, String activeKey) {
        String key = activeKey != null ? activeKey : "";
        return FaceEngine.activeOnline(context, key, appId, sdkKey);
    }

    /**
     * Maps an integer to the DetectMode enum.
     * @param value 0=VIDEO (default), 1=IMAGE
     */
    private DetectMode getDetectMode(int value) {
        switch (value) {
            case 1:
                return DetectMode.ASF_DETECT_MODE_IMAGE;
            case 0:
            default:
                return DetectMode.ASF_DETECT_MODE_VIDEO;
        }
    }

    /**
     * Maps an integer angle to the DetectFaceOrientPriority enum.
     * @param value 0 (default) / 90 / 180 / 270 / 360 (= all orientations)
     */
    private DetectFaceOrientPriority getDetectFaceOrientPriority(int value) {
        switch (value) {
            case 90:
                return DetectFaceOrientPriority.ASF_OP_90_ONLY;
            case 180:
                return DetectFaceOrientPriority.ASF_OP_180_ONLY;
            case 270:
                return DetectFaceOrientPriority.ASF_OP_270_ONLY;
            case 360:
                return DetectFaceOrientPriority.ASF_OP_ALL_OUT;
            case 0:
            default:
                return DetectFaceOrientPriority.ASF_OP_0_ONLY;
        }
    }

    /**
     * Initializes the face engine.
     * @param context Android context
     * @param detectMode 0=VIDEO stream mode, 1=IMAGE mode
     * @param orient detection angle: 0/90/180/270/360
     * @param maxFaceNum maximum number of detectable faces
     * @param combinedMask feature bit mask; known bits:
     *        ASF_FACE_DETECT=0x01, ASF_FACE_RECOGNITION=0x04, ASF_AGE=0x08,
     *        ASF_GENDER=0x10, ASF_LIVENESS=0x80, ASF_IMAGEQUALITY=0x200,
     *        ASF_MASK_DETECT=0x1000, ASF_UPDATE_FACEDATA=0x2000.
     *        A mask of 0 selects detect + recognition + RGB liveness.
     * @return 0 on success, otherwise an ArcSoft error code
     */
    public int init(Context context, int detectMode, int orient, int maxFaceNum, int combinedMask) {
        int finalMask = combinedMask;
        if (combinedMask == 0) {
            // Default configuration: detection + recognition + RGB liveness.
            finalMask = FaceEngine.ASF_FACE_DETECT | FaceEngine.ASF_FACE_RECOGNITION |
                    FaceEngine.ASF_LIVENESS;
        }
        // Face detection is the prerequisite for every other feature.
        if ((finalMask & FaceEngine.ASF_FACE_DETECT) == 0) {
            finalMask |= FaceEngine.ASF_FACE_DETECT;
        }
        if (faceEngine == null) {
            faceEngine = new FaceEngine();
        }
        DetectMode mode = getDetectMode(detectMode);
        DetectFaceOrientPriority orientPriority = getDetectFaceOrientPriority(orient);
        android.util.Log.i(TAG, "init with combinedMask: " + finalMask +
                " (RGB_LIVENESS=" + ((finalMask & FaceEngine.ASF_LIVENESS) != 0) + ")");
        int result = faceEngine.init(context, mode, orientPriority, maxFaceNum, finalMask);
        android.util.Log.i(TAG, "init result: " + result);
        if (result == 0) {
            isInitialized = true;
            // Remember which features are active so detectFaces can skip
            // the liveness pass when it was not requested.
            enabledMask = finalMask;
            android.util.Log.i(TAG, "引擎初始化成功!");
        } else {
            android.util.Log.e(TAG, "引擎初始化失败,错误码: " + result);
        }
        return result;
    }

    /**
     * Detects faces in an image and, when the engine was initialized with
     * ASF_LIVENESS, runs RGB liveness on the detected faces. The verdict is
     * cached and exposed via {@link #getLastRgbLiveness()}.
     * @param data NV21 image bytes
     * @param width image width (must be a multiple of 4)
     * @param height image height (NV21: must be a multiple of 2)
     * @param format image format code (FaceEngine.CP_PAF_NV21 = 2050)
     * @param faceInfoList output list that receives the detected faces
     * @return 0 on success, otherwise an error code
     */
    public int detectFaces(byte[] data, int width, int height, int format, List<FaceInfo> faceInfoList) {
        if (faceEngine == null || !isInitialized) {
            android.util.Log.e(TAG, "detectFaces: 引擎未初始化");
            return FaceErrorCode.ENGINE_NOT_INITIALIZED.getCode();
        }
        // SDK image constraints: width multiple of 4; NV21 height multiple of 2.
        if (width % 4 != 0) {
            android.util.Log.e(TAG, "detectFaces: 宽度不是 4 的倍数 - width=" + width);
            return FaceErrorCode.MERR_FSDK_FR_INVALID_IMAGE_INFO.getCode();
        }
        if (format == FORMAT_NV21 && height % 2 != 0) {
            android.util.Log.e(TAG, "detectFaces: 高度不是 2 的倍数 - height=" + height);
            return FaceErrorCode.MERR_FSDK_FR_INVALID_IMAGE_INFO.getCode();
        }
        List<FaceInfo> sdkFaceInfoList = new ArrayList<>();
        int resultCode = faceEngine.detectFaces(data, width, height, format, sdkFaceInfoList);
        if (resultCode != 0 || sdkFaceInfoList.isEmpty()) {
            // No faces: there is nothing to run liveness on.
            lastRgbLiveness = -1;
            return resultCode;
        }
        lastRgbLiveness = -1;
        if ((enabledMask & FaceEngine.ASF_LIVENESS) == 0) {
            // Engine was initialized without RGB liveness; skip process()
            // instead of asking the SDK for a feature it cannot provide.
            android.util.Log.i(TAG, "detectFaces: ASF_LIVENESS not enabled, skipping liveness");
        } else {
            int processCode = faceEngine.process(data, width, height, format,
                    sdkFaceInfoList, FaceEngine.ASF_LIVENESS);
            if (processCode == 0) {
                List<LivenessInfo> livenessInfoList = new ArrayList<>();
                int livenessCode = faceEngine.getLiveness(livenessInfoList);
                android.util.Log.i(TAG, "getLiveness result: " + livenessCode +
                        ", size: " + livenessInfoList.size());
                if (livenessCode == 0 && !livenessInfoList.isEmpty()) {
                    lastRgbLiveness = livenessInfoList.get(0).getLiveness();
                    android.util.Log.i(TAG, "RGB活体检测结果: " +
                            getLivenessStatusDescription(lastRgbLiveness));
                }
            }
        }
        // Hand the detected faces back through the caller-supplied list.
        if (faceInfoList != null) {
            faceInfoList.clear();
            faceInfoList.addAll(sdkFaceInfoList);
        }
        return resultCode;
    }

    /**
     * Returns the RGB liveness verdict from the most recent detectFaces call.
     * @return -1 = unknown, 0 = spoof, 1 = live
     */
    public int getLastRgbLiveness() {
        return lastRgbLiveness;
    }

    /**
     * Converts a FaceInfo list into channel-friendly maps. Each map carries
     * the face rectangle, orientation, faceId, and — crucially — the opaque
     * faceData blob that extractFaceFeature requires (without it the SDK
     * fails with MERR_FSDK_FACEFEATURE_FACEDATA / 81929).
     * @param faceInfoList faces from detectFaces (may be null or empty)
     * @return a list of maps, never null
     */
    public List<Map<String, Object>> convertFaceInfoToList(List<FaceInfo> faceInfoList) {
        List<Map<String, Object>> result = new ArrayList<>();
        if (faceInfoList == null || faceInfoList.isEmpty()) {
            return result;
        }
        for (FaceInfo faceInfo : faceInfoList) {
            Map<String, Object> map = new HashMap<>();
            map.put("rectLeft", faceInfo.getRect().left);
            map.put("rectTop", faceInfo.getRect().top);
            map.put("rectRight", faceInfo.getRect().right);
            map.put("rectBottom", faceInfo.getRect().bottom);
            map.put("faceOrientation", faceInfo.getOrient());
            map.put("faceId", faceInfo.getFaceId());
            // Internal SDK data generated during detection; required later
            // by extractFaceFeature.
            map.put("faceData", faceInfo.getFaceData());
            result.add(map);
        }
        return result;
    }

    /**
     * Releases engine resources. Also clears the cached liveness verdict and
     * the enabled-feature mask so a later re-init starts from a clean state.
     * @return 0 on success (also when there is nothing to release)
     */
    public int unInit() {
        if (faceEngine != null && isInitialized) {
            int result = faceEngine.unInit();
            if (result == 0) {
                isInitialized = false;
                enabledMask = 0;
                lastRgbLiveness = -1;   // do not leak a verdict across sessions
                android.util.Log.i(TAG, "引擎已释放");
            }
            return result;
        }
        return 0;
    }

    /**
     * @return whether init() has succeeded and unInit() has not been called since
     */
    public boolean isInitialized() {
        return isInitialized;
    }

    /**
     * Translates a liveness status value into a human-readable description.
     * @param liveness a LivenessInfo status value
     */
    private String getLivenessStatusDescription(int liveness) {
        switch (liveness) {
            case LivenessInfo.NOT_ALIVE:
                return "非真人";
            case LivenessInfo.ALIVE:
                return "真人";
            case LivenessInfo.UNKNOWN:
                return "不确定";
            case LivenessInfo.FACE_NUM_MORE_THAN_ONE:
                return "传入人脸数>1";
            case LivenessInfo.FACE_TOO_SMALL:
                return "人脸过小";
            case LivenessInfo.FACE_ANGLE_TOO_LARGE:
                return "角度过大";
            case LivenessInfo.FACE_BEYOND_BOUNDARY:
                return "人脸超出边界";
            default:
                return "未知状态(" + liveness + ")";
        }
    }

    /**
     * Extracts a face feature vector.
     * @param data image bytes (NV21/BGR24/GRAY)
     * @param width image width (multiple of 4)
     * @param height image height (NV21: multiple of 2)
     * @param format image format code (FaceEngine.CP_PAF_NV21 = 2050)
     * @param faceInfo face info from detectFaces; must carry faceData
     * @param extractType 0=REGISTER, any other value=RECOGNIZE
     * @param mask 0=no mask worn, 1=mask worn
     * @return map with keys "featureData" (byte[] or null) and "errorCode"
     */
    public Map<String, Object> extractFaceFeature(byte[] data, int width, int height, int format,
                                                  FaceInfo faceInfo, int extractType, int mask) {
        Map<String, Object> resultMap = new HashMap<>();
        if (faceEngine == null || !isInitialized) {
            android.util.Log.e(TAG, "extractFaceFeature: 引擎未初始化");
            resultMap.put("featureData", null);
            resultMap.put("errorCode", FaceErrorCode.ENGINE_NOT_INITIALIZED.getCode());
            return resultMap;
        }
        if (data == null || faceInfo == null) {
            android.util.Log.e(TAG, "extractFaceFeature: 参数无效");
            resultMap.put("featureData", null);
            resultMap.put("errorCode", FaceErrorCode.MERR_INVALID_PARAM.getCode());
            return resultMap;
        }
        if (width % 4 != 0) {
            android.util.Log.e(TAG, "extractFaceFeature: 宽度不是4的倍数");
            resultMap.put("featureData", null);
            resultMap.put("errorCode", FaceErrorCode.MERR_FSDK_FR_INVALID_IMAGE_INFO.getCode());
            return resultMap;
        }
        if (format == FORMAT_NV21 && height % 2 != 0) {
            android.util.Log.e(TAG, "extractFaceFeature: 高度不是2的倍数");
            resultMap.put("featureData", null);
            resultMap.put("errorCode", FaceErrorCode.MERR_FSDK_FR_INVALID_IMAGE_INFO.getCode());
            return resultMap;
        }
        ExtractType type = extractType == 0 ? ExtractType.REGISTER : ExtractType.RECOGNIZE;
        FaceFeature faceFeature = new FaceFeature();
        int errorCode = faceEngine.extractFaceFeature(
                data, width, height, format, faceInfo, type, mask, faceFeature);
        if (errorCode == 0) {
            byte[] featureData = faceFeature.getFeatureData();
            resultMap.put("featureData", featureData);
            android.util.Log.i(TAG, "特征提取成功,特征长度: " +
                    (featureData != null ? featureData.length : 0));
        } else {
            resultMap.put("featureData", null);
            android.util.Log.e(TAG, "特征提取失败,错误码: " + errorCode);
        }
        resultMap.put("errorCode", errorCode);
        return resultMap;
    }

    /**
     * Compares two face feature vectors.
     * @param featureData1 first feature vector
     * @param featureData2 second feature vector
     * @param compareModel 1=ID_CARD, any other value=LIFE_PHOTO
     * @return map with keys "similarity" (float, 0 on failure) and "errorCode"
     */
    public Map<String, Object> compareFaceFeature(byte[] featureData1, byte[] featureData2, int compareModel) {
        Map<String, Object> resultMap = new HashMap<>();
        if (faceEngine == null || !isInitialized) {
            android.util.Log.e(TAG, "compareFaceFeature: 引擎未初始化");
            resultMap.put("similarity", 0.0f);
            resultMap.put("errorCode", FaceErrorCode.ENGINE_NOT_INITIALIZED.getCode());
            return resultMap;
        }
        if (featureData1 == null || featureData2 == null) {
            android.util.Log.e(TAG, "compareFaceFeature: 特征数据为空");
            resultMap.put("similarity", 0.0f);
            resultMap.put("errorCode", FaceErrorCode.MERR_INVALID_PARAM.getCode());
            return resultMap;
        }
        FaceFeature feature1 = new FaceFeature();
        feature1.setFeatureData(featureData1);
        FaceFeature feature2 = new FaceFeature();
        feature2.setFeatureData(featureData2);
        CompareModel model = compareModel == 1 ? CompareModel.ID_CARD : CompareModel.LIFE_PHOTO;
        FaceSimilar faceSimilar = new FaceSimilar();
        int errorCode = faceEngine.compareFaceFeature(feature1, feature2, model, faceSimilar);
        if (errorCode == 0) {
            float similarity = faceSimilar.getScore();
            resultMap.put("similarity", similarity);
            android.util.Log.i(TAG, "特征比对成功,相似度: " + similarity);
        } else {
            resultMap.put("similarity", 0.0f);
            android.util.Log.e(TAG, "特征比对失败,错误码: " + errorCode);
        }
        resultMap.put("errorCode", errorCode);
        return resultMap;
    }

    /**
     * Registers a single face feature into the 1:N search library.
     * @param searchId unique key for later searches (e.g. a user id)
     * @param featureData feature vector from extractFaceFeature
     *        (preferably extracted with extractType=0 / REGISTER)
     * @param faceTag optional metadata (user name, employee id, ...)
     * @return 0 on success, otherwise an error code
     */
    public int registerFaceFeature(int searchId, byte[] featureData, String faceTag) {
        if (faceEngine == null || !isInitialized) {
            android.util.Log.e(TAG, "registerFaceFeature: 引擎未初始化");
            return FaceErrorCode.ENGINE_NOT_INITIALIZED.getCode();
        }
        if (featureData == null || featureData.length == 0) {
            android.util.Log.e(TAG, "registerFaceFeature: 特征数据为空");
            return FaceErrorCode.MERR_INVALID_PARAM.getCode();
        }
        FaceFeatureInfo faceFeatureInfo;
        if (faceTag != null && !faceTag.isEmpty()) {
            faceFeatureInfo = new FaceFeatureInfo(searchId, featureData, faceTag);
        } else {
            faceFeatureInfo = new FaceFeatureInfo(searchId, featureData);
        }
        int errorCode = faceEngine.registerFaceFeature(faceFeatureInfo);
        if (errorCode == 0) {
            android.util.Log.i(TAG, "人脸注册成功searchId: " + searchId);
        } else {
            android.util.Log.e(TAG, "人脸注册失败searchId: " + searchId + ",错误码: " + errorCode);
        }
        return errorCode;
    }

    /**
     * Registers a batch of face features into the 1:N search library.
     * Invalid entries (missing searchId or featureData) are skipped; if no
     * valid entry remains the call fails with MERR_INVALID_PARAM.
     * @param faceInfoList entries with keys searchId, featureData, faceTag (optional)
     * @return 0 on success, otherwise an error code
     */
    public int registerFaceFeatureBatch(List<Map<String, Object>> faceInfoList) {
        if (faceEngine == null || !isInitialized) {
            android.util.Log.e(TAG, "registerFaceFeatureBatch: 引擎未初始化");
            return FaceErrorCode.ENGINE_NOT_INITIALIZED.getCode();
        }
        if (faceInfoList == null || faceInfoList.isEmpty()) {
            android.util.Log.e(TAG, "registerFaceFeatureBatch: 人脸信息列表为空");
            return FaceErrorCode.MERR_INVALID_PARAM.getCode();
        }
        List<FaceFeatureInfo> featureInfoList = new ArrayList<>();
        for (Map<String, Object> faceMap : faceInfoList) {
            Integer searchId = (Integer) faceMap.get("searchId");
            byte[] featureData = (byte[]) faceMap.get("featureData");
            String faceTag = (String) faceMap.get("faceTag");
            if (searchId == null || featureData == null) {
                android.util.Log.w(TAG, "registerFaceFeatureBatch: 跳过无效数据项");
                continue;
            }
            FaceFeatureInfo faceFeatureInfo;
            if (faceTag != null && !faceTag.isEmpty()) {
                faceFeatureInfo = new FaceFeatureInfo(searchId, featureData, faceTag);
            } else {
                faceFeatureInfo = new FaceFeatureInfo(searchId, featureData);
            }
            featureInfoList.add(faceFeatureInfo);
        }
        if (featureInfoList.isEmpty()) {
            android.util.Log.e(TAG, "registerFaceFeatureBatch: 无有效人脸数据");
            return FaceErrorCode.MERR_INVALID_PARAM.getCode();
        }
        int errorCode = faceEngine.registerFaceFeature(featureInfoList);
        if (errorCode == 0) {
            android.util.Log.i(TAG, "批量人脸注册成功,数量: " + featureInfoList.size());
        } else {
            android.util.Log.e(TAG, "批量人脸注册失败,错误码: " + errorCode);
        }
        return errorCode;
    }
}

View File

@@ -525,6 +525,13 @@ class _FaceRecognitionScreenState extends State<FaceRecognitionScreen> {
return;
}
// 检查 faceData 是否存在
final faceData = _currentFaceInfo!['faceData'] as Uint8List?;
if (faceData == null) {
_showMessage('人脸数据缺失,请重新检测');
return;
}
setState(() {
_statusMessage = '正在提取特征...';
});
@@ -539,8 +546,9 @@ class _FaceRecognitionScreenState extends State<FaceRecognitionScreen> {
rectTop: _currentFaceInfo!['rectTop'] as int,
rectRight: _currentFaceInfo!['rectRight'] as int,
rectBottom: _currentFaceInfo!['rectBottom'] as int,
faceOrientation: _currentFaceInfo!['orient'] as int? ?? 0,
faceOrientation: _currentFaceInfo!['faceOrientation'] as int? ?? 0,
faceId: _currentFaceInfo!['faceId'] as int? ?? -1,
faceData: faceData, // 关键:传递 faceData这是虹软 SDK 特征提取必需的数据
extractType: 0, // REGISTER 模式
mask: 0,
);
@@ -557,9 +565,11 @@ class _FaceRecognitionScreenState extends State<FaceRecognitionScreen> {
});
_showMessage('人脸注册成功!');
} else {
final errorCode = result?['errorCode'] ?? -1;
final errorMsg = result?['message'] ?? '未知错误';
debugPrint('特征提取失败: errorCode=$errorCode, message=$errorMsg');
setState(() {
_statusMessage = '特征提取失败: $errorMsg';
_statusMessage = '特征提取失败: [$errorCode] $errorMsg';
});
}
} catch (e) {
@@ -581,6 +591,13 @@ class _FaceRecognitionScreenState extends State<FaceRecognitionScreen> {
return;
}
// 检查 faceData 是否存在
final faceData = _currentFaceInfo!['faceData'] as Uint8List?;
if (faceData == null) {
_showMessage('人脸数据缺失,请重新检测');
return;
}
// 读取存储的特征
final prefs = await SharedPreferences.getInstance();
final storedFeatureBase64 = prefs.getString(_storedFeatureKey);
@@ -604,8 +621,9 @@ class _FaceRecognitionScreenState extends State<FaceRecognitionScreen> {
rectTop: _currentFaceInfo!['rectTop'] as int,
rectRight: _currentFaceInfo!['rectRight'] as int,
rectBottom: _currentFaceInfo!['rectBottom'] as int,
faceOrientation: _currentFaceInfo!['orient'] as int? ?? 0,
faceOrientation: _currentFaceInfo!['faceOrientation'] as int? ?? 0,
faceId: _currentFaceInfo!['faceId'] as int? ?? -1,
faceData: faceData, // 关键:传递 faceData这是虹软 SDK 特征提取必需的数据
extractType: 1, // RECOGNIZE 模式
mask: 0,
);

174
lib/arc.dart Normal file
View File

@@ -0,0 +1,174 @@
import 'dart:typed_data';
import 'arc_platform_interface.dart';
class Arc {
  /// Returns the host platform's version string.
  Future<String?> getPlatformVersion() =>
      ArcPlatform.instance.getPlatformVersion();

  /// Activates the ArcSoft SDK online.
  ///
  /// [appId] and [sdkKey] come from the ArcSoft developer console;
  /// [activeKey] is the activation key.
  /// Resolves to a map containing `success`, `errorCode` and `message`.
  Future<Map<String, dynamic>?> activeOnline({
    required String appId,
    required String sdkKey,
    required String activeKey,
  }) =>
      ArcPlatform.instance.activeOnline(
        appId: appId,
        sdkKey: sdkKey,
        activeKey: activeKey,
      );

  /// Initializes the face-recognition engine.
  ///
  /// [detectMode]: 0 = VIDEO stream mode, 1 = IMAGE mode.
  /// [orient]: detection angle (0/90/180/270/360).
  /// [maxFaceNum]: maximum number of faces that can be detected.
  /// [combinedMask]: feature combination bit mask.
  /// Resolves to a map containing `success`, `errorCode` and `message`.
  Future<Map<String, dynamic>?> init({
    int? detectMode,
    int? orient,
    int? maxFaceNum,
    int? combinedMask,
  }) =>
      ArcPlatform.instance.init(
        detectMode: detectMode,
        orient: orient,
        maxFaceNum: maxFaceNum,
        combinedMask: combinedMask,
      );

  /// Detects faces and performs RGB liveness detection in one pass.
  ///
  /// [data] is an NV21 image buffer of [width] x [height]; [format]
  /// defaults to 2050 (NV21).
  /// Resolves to a map containing `success`, `errorCode`, `message`,
  /// `faceList`, `rgbLiveness` and `isRgbAlive`.
  /// `rgbLiveness`: -1 = unknown, 0 = not a live person, 1 = live person.
  Future<Map<String, dynamic>?> detectFaces({
    required Uint8List data,
    required int width,
    required int height,
    int format = 2050,
  }) =>
      ArcPlatform.instance.detectFaces(
        data: data,
        width: width,
        height: height,
        format: format,
      );

  /// Extracts a face feature from [data] (NV21, [width] x [height]).
  ///
  /// The face rectangle ([rectLeft]/[rectTop]/[rectRight]/[rectBottom]),
  /// [faceOrientation], [faceId] and [faceData] should all come from a
  /// previous [detectFaces] result. [faceData] is the raw face blob the
  /// ArcSoft SDK requires for feature extraction — per this fix, leaving
  /// it out makes the SDK fail with error 81929.
  /// [extractType]: 0 = register, 1 = recognize (default).
  /// [mask]: 0 = no mask worn (default), 1 = mask worn.
  /// Resolves to a map containing `success`, `errorCode`, `message` and
  /// `featureData`.
  Future<Map<String, dynamic>?> extractFaceFeature({
    required Uint8List data,
    required int width,
    required int height,
    required int rectLeft,
    required int rectTop,
    required int rectRight,
    required int rectBottom,
    int format = 2050,
    int faceOrientation = 0,
    int faceId = -1,
    Uint8List? faceData,
    int extractType = 1,
    int mask = 0,
  }) =>
      ArcPlatform.instance.extractFaceFeature(
        data: data,
        width: width,
        height: height,
        rectLeft: rectLeft,
        rectTop: rectTop,
        rectRight: rectRight,
        rectBottom: rectBottom,
        format: format,
        faceOrientation: faceOrientation,
        faceId: faceId,
        faceData: faceData,
        extractType: extractType,
        mask: mask,
      );

  /// Compares two face features.
  ///
  /// [featureData1] and [featureData2] come from [extractFaceFeature].
  /// [compareModel]: 0 = life photo (default), 1 = ID photo.
  /// Resolves to a map containing `success`, `errorCode`, `message` and
  /// `similarity` (0-1; recommended thresholds: 0.8 life photo,
  /// 0.82 ID photo).
  Future<Map<String, dynamic>?> compareFaceFeature({
    required Uint8List featureData1,
    required Uint8List featureData2,
    int compareModel = 0,
  }) =>
      ArcPlatform.instance.compareFaceFeature(
        featureData1: featureData1,
        featureData2: featureData2,
        compareModel: compareModel,
      );

  /// Registers a single face feature into the face library (1:N search).
  ///
  /// [searchId] is a unique identifier used for later search matching
  /// (a user id is recommended). [featureData] comes from
  /// [extractFaceFeature] (extractType = 0, register mode, is advised).
  /// [faceTag] is optional attached info such as a user name.
  /// Resolves to a map containing `success`, `errorCode` and `message`.
  /// Note: if the library already contains the same searchId, the SDK
  /// ignores the registration.
  Future<Map<String, dynamic>?> registerFaceFeature({
    required int searchId,
    required Uint8List featureData,
    String? faceTag,
  }) =>
      ArcPlatform.instance.registerFaceFeature(
        searchId: searchId,
        featureData: featureData,
        faceTag: faceTag,
      );

  /// Registers face features in batch into the face library (1:N search).
  ///
  /// Each entry of [faceList] must contain:
  /// - searchId: unique identifier (int, required)
  /// - featureData: face feature data (Uint8List, required)
  /// - faceTag: attached info (String, optional)
  /// Resolves to a map containing `success`, `errorCode` and `message`.
  ///
  /// Example:
  /// ```dart
  /// await arc.registerFaceFeatureBatch(
  ///   faceList: [
  ///     {'searchId': 1, 'featureData': feature1, 'faceTag': 'Alice'},
  ///     {'searchId': 2, 'featureData': feature2, 'faceTag': 'Bob'},
  ///   ],
  /// );
  /// ```
  Future<Map<String, dynamic>?> registerFaceFeatureBatch({
    required List<Map<String, dynamic>> faceList,
  }) =>
      ArcPlatform.instance.registerFaceFeatureBatch(
        faceList: faceList,
      );
}

135
lib/arc_method_channel.dart Normal file
View File

@@ -0,0 +1,135 @@
import 'package:flutter/foundation.dart';
import 'package:flutter/services.dart';
import 'arc_platform_interface.dart';
/// An implementation of [ArcPlatform] that uses method channels.
/// An implementation of [ArcPlatform] that uses method channels.
class MethodChannelArc extends ArcPlatform {
  /// The method channel used to interact with the native platform.
  @visibleForTesting
  final methodChannel = const MethodChannel('arc');

  /// Invokes [method] with [arguments] and normalizes the platform's
  /// `Map<dynamic, dynamic>` reply into a `Map<String, dynamic>`.
  ///
  /// Every map-returning API below funnels through this helper so the
  /// invoke/cast boilerplate lives in exactly one place.
  Future<Map<String, dynamic>?> _invokeForMap(
      String method, Map<String, dynamic> arguments) async {
    final result =
        await methodChannel.invokeMethod<Map<dynamic, dynamic>>(method, arguments);
    return result?.cast<String, dynamic>();
  }

  @override
  Future<String?> getPlatformVersion() async {
    final version = await methodChannel.invokeMethod<String>('getPlatformVersion');
    return version;
  }

  @override
  Future<Map<String, dynamic>?> activeOnline({
    required String appId,
    required String sdkKey,
    required String activeKey,
  }) {
    return _invokeForMap('activeOnline', {
      'appId': appId,
      'sdkKey': sdkKey,
      'activeKey': activeKey,
    });
  }

  @override
  Future<Map<String, dynamic>?> init({
    int? detectMode,
    int? orient,
    int? maxFaceNum,
    int? combinedMask,
  }) {
    return _invokeForMap('init', {
      'detectMode': detectMode,
      'orient': orient,
      'maxFaceNum': maxFaceNum,
      'combinedMask': combinedMask,
    });
  }

  @override
  Future<Map<String, dynamic>?> detectFaces({
    required Uint8List data,
    required int width,
    required int height,
    int format = 2050,
  }) {
    return _invokeForMap('detectFaces', {
      'data': data,
      'width': width,
      'height': height,
      'format': format,
    });
  }

  @override
  Future<Map<String, dynamic>?> extractFaceFeature({
    required Uint8List data,
    required int width,
    required int height,
    required int rectLeft,
    required int rectTop,
    required int rectRight,
    required int rectBottom,
    int format = 2050,
    int faceOrientation = 0,
    int faceId = -1,
    Uint8List? faceData,
    int extractType = 1,
    int mask = 0,
  }) {
    return _invokeForMap('extractFaceFeature', {
      'data': data,
      'width': width,
      'height': height,
      'format': format,
      'rectLeft': rectLeft,
      'rectTop': rectTop,
      'rectRight': rectRight,
      'rectBottom': rectBottom,
      'faceOrientation': faceOrientation,
      'faceId': faceId,
      // Key part of this fix: forward faceData — the ArcSoft SDK needs it
      // for feature extraction.
      'faceData': faceData,
      'extractType': extractType,
      'mask': mask,
    });
  }

  @override
  Future<Map<String, dynamic>?> compareFaceFeature({
    required Uint8List featureData1,
    required Uint8List featureData2,
    int compareModel = 0,
  }) {
    return _invokeForMap('compareFaceFeature', {
      'featureData1': featureData1,
      'featureData2': featureData2,
      'compareModel': compareModel,
    });
  }

  @override
  Future<Map<String, dynamic>?> registerFaceFeature({
    required int searchId,
    required Uint8List featureData,
    String? faceTag,
  }) {
    return _invokeForMap('registerFaceFeature', {
      'searchId': searchId,
      'featureData': featureData,
      'faceTag': faceTag,
    });
  }

  @override
  Future<Map<String, dynamic>?> registerFaceFeatureBatch({
    required List<Map<String, dynamic>> faceList,
  }) {
    return _invokeForMap('registerFaceFeatureBatch', {
      'faceList': faceList,
    });
  }
}

View File

@@ -0,0 +1,147 @@
import 'dart:typed_data';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';
import 'arc_method_channel.dart';
abstract class ArcPlatform extends PlatformInterface {
  /// Constructs a ArcPlatform.
  ArcPlatform() : super(token: _token);

  static final Object _token = Object();

  static ArcPlatform _instance = MethodChannelArc();

  /// The default instance of [ArcPlatform] to use.
  ///
  /// Defaults to [MethodChannelArc].
  static ArcPlatform get instance => _instance;

  /// Platform-specific implementations should set this with their own
  /// platform-specific class that extends [ArcPlatform] when
  /// they register themselves.
  static set instance(ArcPlatform instance) {
    PlatformInterface.verifyToken(instance, _token);
    _instance = instance;
  }

  /// Returns the host platform's version string.
  Future<String?> getPlatformVersion() =>
      throw UnimplementedError('platformVersion() has not been implemented.');

  /// Activates the ArcSoft SDK online.
  ///
  /// [appId] / [sdkKey] come from the ArcSoft console; [activeKey] is the
  /// activation key.
  /// Resolves to a map with `success`, `errorCode` and `message`.
  Future<Map<String, dynamic>?> activeOnline({
    required String appId,
    required String sdkKey,
    required String activeKey,
  }) =>
      throw UnimplementedError('activeOnline() has not been implemented.');

  /// Initializes the face-recognition engine.
  ///
  /// [detectMode]: 0 = VIDEO stream mode, 1 = IMAGE mode.
  /// [orient]: detection angle (0/90/180/270/360).
  /// [maxFaceNum]: maximum number of detectable faces.
  /// [combinedMask]: feature combination bit mask.
  /// Resolves to a map with `success`, `errorCode` and `message`.
  Future<Map<String, dynamic>?> init({
    int? detectMode,
    int? orient,
    int? maxFaceNum,
    int? combinedMask,
  }) =>
      throw UnimplementedError('init() has not been implemented.');

  /// Detects faces (with RGB liveness detection).
  ///
  /// [data] is an NV21 buffer of [width] x [height]; [format] 2050 = NV21.
  /// Resolves to a map with `success`, `errorCode`, `message`, `faceList`,
  /// `rgbLiveness` and `isRgbAlive`.
  /// `rgbLiveness`: -1 = unknown, 0 = not live, 1 = live.
  Future<Map<String, dynamic>?> detectFaces({
    required Uint8List data,
    required int width,
    required int height,
    int format = 2050,
  }) =>
      throw UnimplementedError('detectFaces() has not been implemented.');

  /// Extracts a face feature.
  ///
  /// [data] is an NV21 buffer of [width] x [height]; [format] 2050 = NV21.
  /// The face rectangle, [faceOrientation], [faceId] and [faceData] come
  /// from [detectFaces]; [faceData] is required by the ArcSoft SDK for
  /// feature extraction.
  /// [extractType]: 0 = register, 1 = recognize.
  /// [mask]: 0 = no mask worn, 1 = mask worn.
  /// Resolves to a map with `success`, `errorCode`, `message`, `featureData`.
  Future<Map<String, dynamic>?> extractFaceFeature({
    required Uint8List data,
    required int width,
    required int height,
    required int rectLeft,
    required int rectTop,
    required int rectRight,
    required int rectBottom,
    int format = 2050,
    int faceOrientation = 0,
    int faceId = -1,
    Uint8List? faceData,
    int extractType = 1,
    int mask = 0,
  }) =>
      throw UnimplementedError('extractFaceFeature() has not been implemented.');

  /// Compares two face features.
  ///
  /// [compareModel]: 0 = life photo, 1 = ID photo.
  /// Resolves to a map with `success`, `errorCode`, `message`, `similarity`.
  Future<Map<String, dynamic>?> compareFaceFeature({
    required Uint8List featureData1,
    required Uint8List featureData2,
    int compareModel = 0,
  }) =>
      throw UnimplementedError('compareFaceFeature() has not been implemented.');

  /// Registers one face feature into the face library (1:N search).
  ///
  /// [searchId] is the unique identifier used for later matching
  /// (a user id is recommended). [featureData] comes from
  /// [extractFaceFeature] (extractType = 0 recommended). [faceTag] is
  /// optional attached info such as a user name or employee id.
  /// Resolves to a map with `success`, `errorCode` and `message`.
  Future<Map<String, dynamic>?> registerFaceFeature({
    required int searchId,
    required Uint8List featureData,
    String? faceTag,
  }) =>
      throw UnimplementedError('registerFaceFeature() has not been implemented.');

  /// Registers face features in batch into the face library (1:N search).
  ///
  /// Each [faceList] entry needs:
  /// - searchId: unique identifier
  /// - featureData: face feature data
  /// - faceTag: attached info (optional)
  /// Resolves to a map with `success`, `errorCode` and `message`.
  Future<Map<String, dynamic>?> registerFaceFeatureBatch({
    required List<Map<String, dynamic>> faceList,
  }) =>
      throw UnimplementedError('registerFaceFeatureBatch() has not been implemented.');
}

89
test/arc_test.dart Normal file
View File

@@ -0,0 +1,89 @@
import 'package:flutter_test/flutter_test.dart';
import 'package:arc/arc.dart';
import 'package:arc/arc_platform_interface.dart';
import 'package:arc/arc_method_channel.dart';
import 'package:plugin_platform_interface/plugin_platform_interface.dart';
import 'dart:typed_data';
class MockArcPlatform
    with MockPlatformInterfaceMixin
    implements ArcPlatform {
  // Canned reply shared by every API that only reports success/failure.
  static Future<Map<String, dynamic>?> _okResult() =>
      Future.value({'success': true, 'errorCode': 0, 'message': 'success'});

  @override
  Future<String?> getPlatformVersion() => Future.value('42');

  @override
  Future<Map<String, dynamic>?> activeOnline({
    required String appId,
    required String sdkKey,
    required String activeKey,
  }) =>
      _okResult();

  @override
  Future<Map<String, dynamic>?> init({
    int? detectMode,
    int? orient,
    int? maxFaceNum,
    int? combinedMask,
  }) =>
      _okResult();

  @override
  Future<Map<String, dynamic>?> detectFaces({
    required Uint8List data,
    required int width,
    required int height,
    int format = 2050,
  }) =>
      Future.value({
        'success': true,
        'errorCode': 0,
        'faceList': [],
        'rgbLiveness': 1,
        'isRgbAlive': true,
      });

  @override
  Future<Map<String, dynamic>?> extractFaceFeature({
    required Uint8List data,
    required int width,
    required int height,
    required int rectLeft,
    required int rectTop,
    required int rectRight,
    required int rectBottom,
    int format = 2050,
    int faceOrientation = 0,
    int faceId = -1,
    Uint8List? faceData,
    int extractType = 1,
    int mask = 0,
  }) =>
      Future.value({
        'success': true,
        'errorCode': 0,
        'featureData': Uint8List(512),
      });

  @override
  Future<Map<String, dynamic>?> compareFaceFeature({
    required Uint8List featureData1,
    required Uint8List featureData2,
    int compareModel = 0,
  }) =>
      Future.value({'success': true, 'errorCode': 0, 'similarity': 0.95});

  @override
  Future<Map<String, dynamic>?> registerFaceFeature({
    required int searchId,
    required Uint8List featureData,
    String? faceTag,
  }) =>
      _okResult();

  @override
  Future<Map<String, dynamic>?> registerFaceFeatureBatch({
    required List<Map<String, dynamic>> faceList,
  }) =>
      _okResult();
}
void main() {
  final ArcPlatform initialPlatform = ArcPlatform.instance;

  test('$MethodChannelArc is the default instance', () {
    expect(initialPlatform, isInstanceOf<MethodChannelArc>());
  });

  test('getPlatformVersion', () async {
    Arc arcPlugin = Arc();
    MockArcPlatform fakePlatform = MockArcPlatform();
    ArcPlatform.instance = fakePlatform;
    expect(await arcPlugin.getPlatformVersion(), '42');
  });

  // Regression test for the faceData plumbing: extractFaceFeature must
  // accept the faceData blob obtained from detectFaces and still resolve
  // to a feature result.
  test('extractFaceFeature accepts faceData and returns a feature', () async {
    Arc arcPlugin = Arc();
    ArcPlatform.instance = MockArcPlatform();
    final result = await arcPlugin.extractFaceFeature(
      data: Uint8List(16),
      width: 640,
      height: 480,
      rectLeft: 0,
      rectTop: 0,
      rectRight: 100,
      rectBottom: 100,
      faceData: Uint8List(8),
      extractType: 0,
    );
    expect(result?['success'], true);
    expect((result?['featureData'] as Uint8List).length, 512);
  });

  test('detectFaces reports liveness fields', () async {
    Arc arcPlugin = Arc();
    ArcPlatform.instance = MockArcPlatform();
    final result = await arcPlugin.detectFaces(
      data: Uint8List(16),
      width: 640,
      height: 480,
    );
    expect(result?['isRgbAlive'], true);
    expect(result?['rgbLiveness'], 1);
  });
}