Ionic

The Ionic plugin has been completely reworked in release 7.1, resulting in changes to the public interfaces. This section provides a side-by-side comparison of the basic features between releases 6.4 and 7.1.

Plugin installation

With this release we changed the name of the plugin.

6.4:

npm install --save @regulaforensics/ionic-native-face-api
ionic cordova plugin add @regulaforensics/cordova-plugin-face-api
ionic cordova plugin add @regulaforensics/cordova-plugin-face-core-basic

7.1:

ionic cordova plugin add @regulaforensics/face-sdk
ionic cordova plugin add @regulaforensics/face-core-basic
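Once installed, the renamed package is the one to reference in application code. A minimal sketch, assuming the plugin exposes a FaceSDK export under the new package name (verify the exact entry point against the package's typings):

// Assumed import path, mirroring the renamed npm package.
import { FaceSDK } from '@regulaforensics/face-sdk'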

Accessing FaceSDK

In 6.4, all features are called as static members of FaceSDK; in 7.1, they are accessed through the FaceSDK.instance singleton, which can be stored in a variable.

6.4:

FaceSDK.

7.1:

FaceSDK.instance.

var faceSdk = FaceSDK.instance
faceSdk.

Initializing

6.4:

FaceSDK.initialize(null).then((json) => {
  var response = InitResponse.fromJson(JSON.parse(json))
  if (!response.success) {
    console.log(response.error.code);
    console.log(response.error.message);
  } else {
    console.log("Init complete")
  }
}, e => { })

7.1:

var [success, error] = await faceSdk.initialize()

if (success) console.log("Init complete")
else console.log(error.code + ": " + error.message)

Accessing Fields

6.4:

FaceSDK.getServiceUrl().then((data) => {
  var serviceUrl = data
})

FaceSDK.setServiceUrl("new url")

7.1:

var serviceUrl = faceSdk.serviceUrl
faceSdk.serviceUrl = "new url"

Changing Configuration

6.4:

FaceSDK.setCustomization({
  colors: {
    [Enum.CustomizationColor.ONBOARDING_SCREEN_BACKGROUND]: 0xff808080,
  },
  fonts: {
    [Enum.CustomizationFont.ONBOARDING_SCREEN_MESSAGE_LABELS]: { name: "sans-serif", size: 14 }
  },
  images: {
    [Enum.CustomizationImage.ONBOARDING_SCREEN_ACCESSORIES]: custom_image_base64,
  }
})
7.1:

faceSdk.customization.colors.onboardingScreenBackground = 0xff808080
faceSdk.customization.fonts.onboardingScreenMessageLabels = new Font("sans-serif", { size: 14 })
faceSdk.customization.images.onboardingScreenAccessories = custom_image_base64

Face Capture

6.4:

FaceSDK.startFaceCapture(null).then(raw => {
    // handling events:
    var csEventId = "cameraSwitchEvent"
    if (raw.substring(0, csEventId.length) === csEventId) {
        raw = raw.substring(csEventId.length, raw.length)
        var cameraId = raw
        // handle new camera id
    } else {
        var response = FaceCaptureResponse.fromJson(JSON.parse(raw))
        // ... check response.image.image for capture result.
    }
})

7.1:

var response = await faceSdk.startFaceCapture()
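For reference, a sketch of reading the 7.1 capture result; the image property is taken from the 6.4 comment above ("check response.image.image"), so treat the exact names as assumptions to verify against the 7.1 typings.

var response = await faceSdk.startFaceCapture()
// response.image is expected to hold the captured face; a missing image is treated here as a cancelled capture (assumption).
if (response.image) {
  var capturedBase64 = response.image.image
}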

Liveness

6.4:

FaceSDK.startLiveness(null).then(raw => {
    // handling events:
    var lnEventId = "livenessNotificationEvent"
    var csEventId = "cameraSwitchEvent"
    if (raw.substring(0, lnEventId.length) === lnEventId) {
        raw = raw.substring(lnEventId.length, raw.length)
        var notification = LivenessNotification.fromJson(JSON.parse(raw))
        // handle liveness notification
    } else if (raw.substring(0, csEventId.length) === csEventId) {
        raw = raw.substring(csEventId.length, raw.length)
        var cameraId = raw
        // handle new camera id
    } else {
        var response = LivenessResponse.fromJson(JSON.parse(raw))
        // ... check response.liveness for detection result.
    }
})

7.1:

var response = await faceSdk.startLiveness()
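A sketch of checking the liveness verdict in 7.1; the liveness field comes from the 6.4 comment above ("check response.liveness"), and its possible values are not shown in this guide, so the code only logs the raw value.

var response = await faceSdk.startLiveness()
// response.liveness holds the detection verdict (per the 6.4 comment above);
// compare it against the liveness status values exposed by the 7.1 plugin.
console.log("Liveness: " + response.liveness)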

Match Faces

6.4:

const firstImage = new FaceSDK.MatchFacesImage();
firstImage.imageType = FaceSDK.Enum.ImageType.IMAGE_TYPE_PRINTED;
firstImage.bitmap = firstImageBitmapAsBase64String;

const secondImage = new FaceSDK.MatchFacesImage();
secondImage.imageType = FaceSDK.Enum.ImageType.IMAGE_TYPE_PRINTED;
secondImage.bitmap = secondImageBitmapAsBase64String;

const request = new FaceSDK.MatchFacesRequest();
request.images = [firstImage, secondImage];

FaceSDK.matchFaces(JSON.stringify(request)).then(matchFacesResponse => {
    const response = FaceSDK.MatchFacesResponse.fromJson(JSON.parse(matchFacesResponse));
});

7.1:

var request = new MatchFacesRequest([
  new MatchFacesImage(firstBase64, ImageType.PRINTED),
  new MatchFacesImage(secondBase64, ImageType.PRINTED)
])
var response = await faceSdk.matchFaces(request)
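To read the comparison outcome, a sketch assuming the 7.1 response exposes a list of compared-face pairs with a 0..1 similarity score, as in other Regula Face SDK bindings; verify the property names against the plugin's typings.

var response = await faceSdk.matchFaces(request)
// "results" and "similarity" are assumed property names for the compared pairs and their scores.
response.results.forEach(pair => {
  console.log("similarity: " + pair.similarity)
})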

Detect Faces

6.4:

var config = new DetectFacesConfig()
config.onlyCentralFace = true

var request = new DetectFacesRequest()
request.image = base64
request.configuration = config

FaceSDK.detectFaces(request).then(json => {
  var response = DetectFacesResponse.fromJson(JSON.parse(json))
})

7.1:

var config = new DetectFacesConfig({
  onlyCentralFace: true
})
var request = new DetectFacesRequest(base64, config)
var response = await faceSdk.detectFaces(request)
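A sketch of inspecting the detection result; "detection" and "error" are assumed property names based on other Regula Face SDK bindings and should be checked against the 7.1 typings.

var response = await faceSdk.detectFaces(request)
if (response.detection) {
  // onlyCentralFace is enabled above, so a single primary face is expected here.
  console.log("Face detected")
} else if (response.error) {
  console.log(response.error.code + ": " + response.error.message)
}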

Face Identification

6.4:

var imageUpload = new ImageUpload();
imageUpload.imageData = base64;
var searchPersonRequest = new SearchPersonRequest();
searchPersonRequest.groupIdsForSearch = ["id1", "id2", "id3"];
searchPersonRequest.imageUpload = imageUpload;
searchPersonRequest.detectAll = true;

FaceSDK.searchPerson(searchPersonRequest).then(response => {

});

7.1:

var request = new SearchPersonRequest(
  ImageUpload.withImageData(base64), {
  groupIdsForSearch: ["id1", "id2", "id3"],
  detectAll: true,
})

var [searchPerson, error] = await faceSdk.personDatabase.searchPerson(request)
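Handling the identification result follows the same [result, error] tuple pattern as initialize(). The sketch below assumes the first element is a list of matched persons and uses a hypothetical name property for logging; both are assumptions to verify against the 7.1 typings.

var [searchPerson, error] = await faceSdk.personDatabase.searchPerson(request)
if (error) {
  console.log(error.code + ": " + error.message)
} else {
  // "name" is an assumed property of the matched person records.
  searchPerson.forEach(person => console.log("Matched: " + person.name))
}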