Face Image Quality Assessment

Use the face image quality assessment feature to check whether a portrait meets certain standards. For detailed information on how the assessment works, see the Web Service section.

Request

let configuration = DetectFacesConfiguration()

// with default values
configuration.customQuality = ImageQualityGroup.FaceImage.allRecommended()

// or with custom values
configuration.customQuality = [
  ImageQualityGroup.FaceImage.blurLevel().withCustomRange([0.1, 0.4]),
  ImageQualityGroup.Background.otherFaces().withCustomValue(2),
  ImageQualityGroup.HeadOcclusion.headphones().withCustomRange([0.0, 0.1]),
]

let detectFacesRequest = DetectFacesRequest(image: image,
                                    configuration: configuration)
RFSDetectFacesConfiguration *configuration = [[RFSDetectFacesConfiguration alloc] init];

// with default values
[configuration setCustomQuality:[RFSFaceImageQuality allRecommended]];

// or with custom values
[configuration setCustomQuality:@[
  [[RFSFaceImageQuality blurLevel] withCustomRange:@[@0.1, @0.4]],
  [[RFSQualityBackground otherFaces] withCustomValue: @2],
  [[RFSHeadOcclusion headphones] withCustomRange:@[@0, @0.1]],
]];

RFSDetectFacesRequest *detectFacesRequest = [[RFSDetectFacesRequest alloc] initWithImage:image
                                                                           configuration:configuration];
val configuration = DetectFacesConfiguration()

// with default values
configuration.customQuality = ImageQualityGroup.FaceImageQuality.allRecommended()  

// or with custom values
configuration.customQuality = listOf(
    ImageQualityGroup.FaceImageQuality.blurLevel().withCustomRange(0.1, 0.4),
    ImageQualityGroup.QualityBackground.otherFaces().withCustomValue(2.0),
    ImageQualityGroup.HeadOcclusion.headphones().withCustomRange(0.0, 0.1)
)

val detectFacesRequest = DetectFacesRequest(bitmap, configuration)
DetectFacesConfiguration configuration = new DetectFacesConfiguration();

// with default values
configuration.setCustomQuality(ImageQualityGroup.FaceImageQuality.allRecommended());

// or with custom values
configuration.setCustomQuality(Arrays.asList(
    ImageQualityGroup.FaceImageQuality.blurLevel().withCustomRange(0.1, 0.4),
    ImageQualityGroup.QualityBackground.otherFaces().withCustomValue(2),
    ImageQualityGroup.HeadOcclusion.headphones().withCustomRange(0.0, 0.1)));

DetectFacesRequest detectFacesRequest = new DetectFacesRequest(bitmap, configuration);
var config = new DetectFacesConfig()

// with default values
var q1 = new ImageQualityCharacteristic()
q1.characteristicName = Enum.ImageQualityCharacteristicName.BLUR_LEVEL

var q2 = new ImageQualityCharacteristic()
q2.characteristicName = Enum.ImageQualityCharacteristicName.NOISE_LEVEL

var q3 = new ImageQualityCharacteristic()
q3.characteristicName = Enum.ImageQualityCharacteristicName.UNNATURAL_SKIN_TONE

var q4 = new ImageQualityCharacteristic()
q4.characteristicName = Enum.ImageQualityCharacteristicName.FACE_DYNAMIC_RANGE

var customQuality = [q1, q2, q3, q4]
config.customQuality = customQuality

// with custom values
var q1 = new ImageQualityCharacteristic()
q1.characteristicName = Enum.ImageQualityCharacteristicName.BLUR_LEVEL
var r1 = new ImageQualityRange()
r1.min = 0.1
r1.max = 0.4
q1.customRange = r1

var q2 = new ImageQualityCharacteristic()
q2.characteristicName = Enum.ImageQualityCharacteristicName.OTHER_FACES
var r2 = new ImageQualityRange()
r2.min = 2
r2.max = 2
q2.customRange = r2

var q3 = new ImageQualityCharacteristic()
q3.characteristicName = Enum.ImageQualityCharacteristicName.HEAD_PHONES
var r3 = new ImageQualityRange()
r3.min = 0
r3.max = 0.1
q3.customRange = r3

var customQuality = [q1, q2, q3]
config.customQuality = customQuality

var request = new DetectFacesRequest()
request.image = "base64"
request.configuration = config
var config = DetectFacesConfig();

// with default values
config.customQuality = ImageQualityGroup.faceImage.allRecommended();

// or with custom values
config.customQuality = [
  ImageQualityGroup.faceImage.blurLevel().withCustomRange(0.1, 0.4),
  ImageQualityGroup.background.otherFaces().withCustomValue(2.0),
  ImageQualityGroup.headOcclusion.headphones().withCustomRange(0.0, 0.1),
];

var request = DetectFacesRequest(imageData, config);
// Android
DetectFacesConfiguration configuration = new DetectFacesConfiguration();
// with default values
configuration.CustomQuality = ImageQualityGroup.FaceImageQuality.AllRecommended();
// or with custom values
configuration.CustomQuality = [
    ImageQualityGroup.FaceImageQuality.BlurLevel().WithCustomRange(0.1, 0.4),
    ImageQualityGroup.QualityBackground.OtherFaces().WithCustomValue(2),
    ImageQualityGroup.HeadOcclusion.Headphones().WithCustomRange(0.0, 0.1)];
DetectFacesRequest detectFacesRequest = new DetectFacesRequest(bitmap, configuration);

// iOS
RFSDetectFacesConfiguration configuration = new();
// with default values
configuration.CustomQuality = RFSFaceImageQuality.AllRecommended;
// or with custom values
configuration.CustomQuality = [
    RFSFaceImageQuality.BlurLevel.WithCustomRange([0.1, 0.4]),
    RFSQualityBackground.OtherFaces.WithCustomValue(2),
    RFSHeadOcclusion.Headphones.WithCustomRange([0.0, 0.1])];
RFSDetectFacesRequest detectFacesRequest = new(image, configuration);
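
The recommended set and custom characteristics can also be combined: start from allRecommended() and append or override only the checks you care about. Below is a minimal Kotlin sketch of this approach; it assumes allRecommended() returns a list that can be copied into a mutable one, so verify the return type in your SDK version.

val configuration = DetectFacesConfiguration()

// Start from the recommended characteristics...
val quality = ImageQualityGroup.FaceImageQuality.allRecommended().toMutableList()

// ...and tighten or add individual checks on top of them.
quality.add(ImageQualityGroup.HeadOcclusion.headphones().withCustomRange(0.0, 0.1))

configuration.customQuality = quality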

Response

detectFaceResponse.detection?.quality?.forEach { imageQualityResult in
  let status = imageQualityResult.status
  let group = imageQualityResult.group
  let name = imageQualityResult.name
  let range = imageQualityResult.range
  let value = imageQualityResult.value
}
for (RFSImageQualityResult *imageQualityResult in detectFaceResponse.detection.quality) {
  RFSImageQualityResultStatus status = imageQualityResult.status;
  RFSImageQualityGroup group = imageQualityResult.group;
  RFSImageQualityCharacteristicName name = imageQualityResult.name;
  RFSImageQualityRange *range = imageQualityResult.range;
  NSNumber *value = imageQualityResult.value;
}
for (imageQuality in detectFacesResponse.detection!!.quality!!) {
    val status = imageQuality.status
    val group = imageQuality.group
    val name = imageQuality.name
    val range = imageQuality.range
    val value = imageQuality.value
}
for (ImageQualityResult imageQuality : detectFacesResponse.getDetection().getQuality()) {
    ImageQualityResultStatus status = imageQuality.getStatus();
    ImageQualityGroupName group = imageQuality.getGroup();
    ImageQualityCharacteristicName name = imageQuality.getName();
    ImageQualityRange range = imageQuality.getRange();
    double value = imageQuality.getValue();
}
detectFacesResponse.detection?.quality?.forEach(quality => {
  var status = quality.status
  var group = quality.group
  var name = quality.name
  var range = quality.range
  var value = quality.value
})
detectFacesResponse.detection?.quality?.forEach((quality) {
  var status = quality.status;
  var group = quality.group;
  var name = quality.name;
  var range = quality.range;
  var value = quality.value;
});
detectFacesResponse.detection.quality.forEach(quality => {
  var status = quality.status
  var group = quality.group
  var name = quality.name
  var range = quality.range
  var value = quality.value
})
// Android
foreach (ImageQualityResult imageQuality in detectFacesResponse.Detection.Quality) {
    ImageQualityResultStatus status = imageQuality.Status;
    ImageQualityGroupName group = imageQuality.Group;
    ImageQualityCharacteristicName name = imageQuality.Name;
    ImageQualityRange range = imageQuality.Range;
    double value = imageQuality.Value;
}

// iOS
foreach (RFSImageQualityResult imageQuality in detectFacesResponse.Detection.Quality) {
    RFSImageQualityResultStatus status = imageQuality.Status;
    RFSImageQualityGroup group = imageQuality.Group;
    string name = imageQuality.Name;
    RFSImageQualityRange range = imageQuality.Range;
    NSNumber value = imageQuality.Value;
}
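
To turn the per-characteristic results into a single decision, you can treat the portrait as compliant only when no characteristic has failed. The Kotlin sketch below relies on the response fields shown above (status, name); the enum constant ImageQualityResultStatus.TRUE used in the comparison is an assumption, so check the status values your SDK version actually exposes.

// Collect the names of the checks that did not pass.
// NOTE: ImageQualityResultStatus.TRUE is an assumed constant name.
val failedChecks = detectFacesResponse.detection
    ?.quality
    ?.filter { it.status != ImageQualityResultStatus.TRUE }
    ?.map { it.name }
    .orEmpty()

// The portrait meets the configured standards only if nothing failed.
val portraitIsCompliant = failedChecks.isEmpty()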