Cordova
The Cordova plugin has been completely reworked in release 7.1, resulting in changes to the public interfaces. This section provides a side-by-side comparison of the basic features between releases 6.4 and 7.1.
Plugin installation
With this release, the plugin has been renamed.
6.4:
cordova plugin add @regulaforensics/cordova-plugin-face-api
cordova plugin add @regulaforensics/cordova-plugin-face-core-basic
7.1:
cordova plugin add @regulaforensics/face-sdk
cordova plugin add @regulaforensics/face-core-basic
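When migrating an existing project, the old packages presumably need to be removed before the renamed ones are added; with the standard Cordova CLI that would look like:
cordova plugin remove @regulaforensics/cordova-plugin-face-api
cordova plugin remove @regulaforensics/cordova-plugin-face-core-basic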
Accessing FaceSDK
6.4:
var FaceSDK = FaceSDKPlugin.FaceSDK
FaceSDK.
7.1:
var FaceSDK = FaceSDKPlugin.FaceSDK
FaceSDK.instance.
// or keep a reference to the instance:
var faceSdk = FaceSDK.instance
faceSdk.
Initializing
6.4:
FaceSDK.initialize(null, (json) => {
    var response = InitResponse.fromJson(JSON.parse(json))
    if (!response.success) {
        console.log(response.error.code);
        console.log(response.error.message);
    } else {
        console.log("Init complete")
    }
}, e => { })
7.1:
var [success, error] = await faceSdk.initialize()
if (success) console.log("Init complete")
else console.log(error.code + ": " + error.message)
Accessing Fields
6.4:
FaceSDK.getServiceUrl((data) => {
    var serviceUrl = data
}, error => { console.log(error) });
FaceSDK.setServiceUrl("new url", (success) => { }, error => { console.log(error) });
7.1:
var serviceUrl = faceSdk.serviceUrl
faceSdk.serviceUrl = "new url"
Changing Configuration
6.4:
FaceSDK.setCustomization({
    colors: {
        [Enum.CustomizationColor.ONBOARDING_SCREEN_BACKGROUND]: 0xff808080,
    },
    fonts: {
        [Enum.CustomizationFont.ONBOARDING_SCREEN_MESSAGE_LABELS]: { name: "sans-serif", size: 14 }
    },
    images: {
        [Enum.CustomizationImage.ONBOARDING_SCREEN_ACCESSORIES]: custom_image_base64,
    }
}, function (m) { }, function (e) { })
7.1:
faceSdk.customization.colors.onboardingScreenBackground = 0xff808080
faceSdk.customization.fonts.onboardingScreenMessageLabels = new Font("sans-serif", { size: 14 })
faceSdk.customization.images.onboardingScreenIllumination = custom_image_base64
Face Capture
6.4:
FaceSDK.startFaceCapture(null, raw => {
    // handling events:
    var csEventId = "cameraSwitchEvent"
    if (raw.substring(0, csEventId.length) === csEventId) {
        raw = raw.substring(csEventId.length, raw.length)
        var cameraId = raw
        // handle new camera id
    } else {
        var response = FaceCaptureResponse.fromJson(JSON.parse(raw))
        // ... check response.image.image for capture result.
    }
}, e => { })
7.1:
var response = await faceSdk.startFaceCapture()
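The 6.4 callback above reads the captured picture from response.image.image; assuming the 7.1 FaceCaptureResponse keeps the same image field, reading the result could continue from the response above like this sketch:
if (response.image != null) {
    // assumption: the 6.4 field layout is unchanged, so this is the base64 of the captured face
    var capturedImage = response.image.image
}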
Liveness
6.4:
FaceSDK.startLiveness(null, raw => {
    // handling events:
    var lnEventId = "livenessNotificationEvent"
    var csEventId = "cameraSwitchEvent"
    if (raw.substring(0, lnEventId.length) === lnEventId) {
        raw = raw.substring(lnEventId.length, raw.length)
        var notification = LivenessNotification.fromJson(JSON.parse(raw))
        // handle liveness notification
    } else if (raw.substring(0, csEventId.length) === csEventId) {
        raw = raw.substring(csEventId.length, raw.length)
        var cameraId = raw
        // handle new camera id
    } else {
        var response = LivenessResponse.fromJson(JSON.parse(raw))
        // ... check response.liveness for detection result.
    }
}, e => { })
7.1:
var response = await faceSdk.startLiveness()
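The 6.4 comment checks response.liveness for the detection result; assuming the 7.1 LivenessResponse keeps that status field and that LivenessStatus.PASSED names the passed state (both are assumptions mirroring earlier releases), it could be inspected like this:
if (response.liveness === LivenessStatus.PASSED) {
    console.log("Liveness passed")
} else {
    console.log("Liveness not confirmed")
}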
Match Faces
6.4:
const firstImage = new FaceSDK.MatchFacesImage();
firstImage.imageType = FaceSDK.Enum.ImageType.IMAGE_TYPE_PRINTED;
firstImage.bitmap = firstImageBitmapAsBase64String;
const secondImage = new FaceSDK.MatchFacesImage();
secondImage.imageType = FaceSDK.Enum.ImageType.IMAGE_TYPE_PRINTED;
secondImage.bitmap = secondImageBitmapAsBase64String;
const request = new FaceSDK.MatchFacesRequest();
request.images = [firstImage, secondImage];
FaceSDK.matchFaces(JSON.stringify(request), matchFacesResponse => {
    const response = FaceSDK.MatchFacesResponse.fromJson(JSON.parse(matchFacesResponse));
}, e => { });
7.1:
var request = new MatchFacesRequest([
    new MatchFacesImage(firstBase64, ImageType.PRINTED),
    new MatchFacesImage(secondBase64, ImageType.PRINTED)
])
var response = await faceSdk.matchFaces(request)
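Neither snippet shows how the comparison outcome is read; assuming MatchFacesResponse still carries a results array of compared-face pairs with a similarity score, as in earlier releases, a sketch continuing from the response above could be:
// assumption: results / similarity keep the names used in earlier SDK releases
response.results.forEach(pair => {
    console.log("similarity: " + pair.similarity)
})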
Detect Faces
6.4:
var config = new DetectFacesConfig()
config.onlyCentralFace = true
var request = new DetectFacesRequest()
request.image = base64
request.configuration = config
FaceSDK.detectFaces(request, function(json) {
    var response = DetectFacesResponse.fromJson(JSON.parse(json))
}, function (e) { })
7.1:
var config = new DetectFacesConfig({
    onlyCentralFace: true
})
var request = new DetectFacesRequest(imageData, config)
var response = await faceSdk.detectFaces(request)
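Since the request is configured with onlyCentralFace, the response should describe a single face; the detection field below is an assumption about the DetectFacesResponse shape, mirroring earlier releases:
// assumption: the single (central) face found is returned in response.detection
if (response.detection != null) {
    console.log(response.detection)
}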
Face Identification
6.4:
var imageUpload = new ImageUpload();
imageUpload.imageData = base64;
var searchPersonRequest = new SearchPersonRequest();
searchPersonRequest.groupIdsForSearch = ["id1", "id2", "id3"];
searchPersonRequest.imageUpload = imageUpload;
searchPersonRequest.detectAll = true;
FaceSDK.searchPerson(searchPersonRequest, function (response) {
}, function (e) { });
7.1:
var request = new SearchPersonRequest(
    ImageUpload.withImageData(base64), {
        groupIdsForSearch: ["id1", "id2", "id3"],
        detectAll: true,
    })
var [searchPerson, error] = await faceSdk.personDatabase.searchPerson(request)
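As with initialize, the call resolves to a value/error pair, so errors can be checked the same way; what searchPerson contains beyond that is an assumption, so the sketch below only logs it:
if (error != null) {
    console.log(error.code + ": " + error.message)
} else {
    // assumption: searchPerson holds the persons matched in the listed groups
    console.log(searchPerson)
}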