Attributes Evaluation
To determine the status of certain parameters — for example, whether the person in the photo is wearing a medical mask, glasses, a head covering, etc. — you can evaluate attributes. Find the exhaustive information on the Attributes Evaluation page in the Web Service section.
Request
// Swift: build an attribute-evaluation request.
let configuration = DetectFacesConfiguration()
// Attributes to evaluate on the detected face.
configuration.attributes = [
.emotion,
.age,
.glasses
]
// Pair the source image with the configuration.
let detectFacesRequest = DetectFacesRequest(image: image,
configuration: configuration)
// Objective-C: build an attribute-evaluation request.
RFSDetectFacesConfiguration *configuration = [[RFSDetectFacesConfiguration alloc] init];
// Attributes to evaluate on the detected face.
[configuration setAttributes:@[
RFSDetectFacesAttributeEmotion,
RFSDetectFacesAttributeAge,
RFSDetectFacesAttributeGlasses,
]];
// Pair the source image with the configuration.
RFSDetectFacesRequest *detectFacesRequest = [[RFSDetectFacesRequest alloc] initWithImage:image
configuration:configuration];
// Kotlin: build an attribute-evaluation request.
val configuration = DetectFacesConfiguration()
// Attributes to evaluate on the detected face.
configuration.setAttributes(
listOf(
DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES
)
)
// Pair the source bitmap with the configuration.
val detectFacesRequest = DetectFacesRequest(bitmap, configuration)
// Java: build an attribute-evaluation request.
DetectFacesConfiguration configuration = new DetectFacesConfiguration();
// Attributes to evaluate on the detected face.
configuration.setAttributes(Arrays.asList(DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES));
// Pair the source bitmap with the configuration.
DetectFacesRequest detectFacesRequest = new DetectFacesRequest(bitmap, configuration);
// JavaScript: build an attribute-evaluation request.
var config = new DetectFacesConfig()
// Attributes to evaluate on the detected face.
config.attributes = [
Enum.DetectFacesAttribute.EMOTION,
Enum.DetectFacesAttribute.AGE,
Enum.DetectFacesAttribute.GLASSES,
]
var request = new DetectFacesRequest()
// The source image is passed as a Base64-encoded string.
request.image = "base64"
request.configuration = config
// Dart (Flutter): build an attribute-evaluation request.
var config = DetectFacesConfig(
// Attributes to evaluate on the detected face.
attributes: [
DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES,
],
);
// Pair the source image bytes with the configuration.
var request = new DetectFacesRequest(imageData, config);
// JavaScript: build an attribute-evaluation request.
var config = new DetectFacesConfig()
// Attributes to evaluate on the detected face.
config.attributes = [
Enum.DetectFacesAttribute.EMOTION,
Enum.DetectFacesAttribute.AGE,
Enum.DetectFacesAttribute.GLASSES,
]
var request = new DetectFacesRequest()
// The source image is passed as a Base64-encoded string.
request.image = "base64"
request.configuration = config
// JavaScript: build an attribute-evaluation request.
var config = new DetectFacesConfig()
// Attributes to evaluate on the detected face.
config.attributes = [
Enum.DetectFacesAttribute.EMOTION,
Enum.DetectFacesAttribute.AGE,
Enum.DetectFacesAttribute.GLASSES,
]
var request = new DetectFacesRequest()
// The source image is passed as a Base64-encoded string.
request.image = "base64"
request.configuration = config
// Android
// C# (Android binding): build an attribute-evaluation request.
DetectFacesConfiguration configuration = new()
{
// Attributes to evaluate on the detected face.
Attributes = [
DetectFacesAttribute.Emotion,
DetectFacesAttribute.Age,
DetectFacesAttribute.Glasses
]
};
// Pair the source bitmap with the configuration.
DetectFacesRequest detectFacesRequest = new(bitmap, configuration);
// iOS
// C# (iOS binding): same request via the RFS-prefixed native types.
RFSDetectFacesConfiguration configuration = new()
{
// Attributes to evaluate on the detected face.
Attributes = [
Constants.RFSDetectFacesAttributeEmotion,
Constants.RFSDetectFacesAttributeAge,
Constants.RFSDetectFacesAttributeGlasses
]
};
// Pair the source image with the configuration.
RFSDetectFacesRequest detectFacesRequest = new(image, configuration);
Response
// Swift: read each evaluated attribute from the detection result.
// Safe calls skip the loop when detection or attributes are absent.
detectFacesResponse.detection?.attributes?.forEach { attributeResult in
let value = attributeResult.value           // detected value
let attribute = attributeResult.attribute   // which attribute was evaluated
let confidence = attributeResult.confidence // detection confidence
let range = attributeResult.range           // value range, where applicable
}
// Objective-C: read each evaluated attribute from the detection result.
// NOTE: the response variable is named `detectFacesResponse` to match the
// naming used in every other snippet (was `detectFaceResponse`, a typo).
for (RFSDetectFacesAttributeResult *attributeResult in detectFacesResponse.detection.attributes) {
    RFSDetectFacesAttribute attribute = attributeResult.attribute; // which attribute was evaluated
    NSString *value = attributeResult.value;                       // detected value
    RFSImageQualityRange *range = attributeResult.range;           // value range, where applicable
    NSNumber *confidence = attributeResult.confidence;             // detection confidence
}
// Kotlin: read each evaluated attribute from the detection result.
// Uses safe calls throughout instead of `?.attributes!!`, which would throw a
// NullPointerException whenever detection or attributes are absent — the
// null-safe form matches the Swift/Dart/JS snippets.
detectFacesResponse.detection?.attributes?.forEach { facesAttribute ->
    val value = facesAttribute.value           // detected value
    val attribute = facesAttribute.attribute   // which attribute was evaluated
    val confidence = facesAttribute.confidence // detection confidence
    val range = facesAttribute.range           // value range, where applicable
}
// Java: read each evaluated attribute from the detection result.
for (DetectFacesAttributeResult facesAttribute : detectFacesResponse.getDetection().getAttributes()) {
String value = facesAttribute.getValue();                   // detected value
DetectFacesAttribute attribute = facesAttribute.getAttribute(); // which attribute was evaluated
Double confidence = facesAttribute.getConfidence();         // detection confidence
ImageQualityRange range = facesAttribute.getRange();        // value range, where applicable
}
// JavaScript (callback API): parse the JSON result and read each attribute.
FaceSDK.detectFaces(detectFacesRequest, json => {
var response = DetectFacesResponse.fromJson(JSON.parse(json))!
// Optional chaining skips the loop when detection or attributes are absent.
response.detection?.attributes?.forEach((attributeResult) => {
var value = attributeResult.value           // detected value
var attribute = attributeResult.attribute   // which attribute was evaluated
var confidence = attributeResult.confidence // detection confidence
var range = attributeResult.range           // value range, where applicable
})
}, _ => { })
// Dart (Flutter): await the detection result and read each attribute.
var response = await FaceSDK.instance.detectFaces(detectFacesRequest);
// Null-aware access skips the loop when detection or attributes are absent.
response.detection?.attributes?.forEach((attributeResult) {
var value = attributeResult.value;           // detected value
var attribute = attributeResult.attribute;   // which attribute was evaluated
var confidence = attributeResult.confidence; // detection confidence
var range = attributeResult.range;           // value range, where applicable
});
// JavaScript (callback API): parse the JSON result and read each attribute.
FaceSDK.detectFaces(detectFacesRequest, function(json) {
    var response = DetectFacesResponse.fromJson(JSON.parse(json))
    // Optional chaining added for consistency with the other snippets:
    // detection/attributes may be absent, and bare member access would throw.
    response.detection?.attributes?.forEach((attributeResult) => {
        var value = attributeResult.value           // detected value
        var attribute = attributeResult.attribute   // which attribute was evaluated
        var confidence = attributeResult.confidence // detection confidence
        var range = attributeResult.range           // value range, where applicable
    })
}, function (e) { })
// JavaScript (promise API): await the detection result and read each attribute.
FaceSDK.detectFaces(detectFacesRequest).then(json => {
var response = DetectFacesResponse.fromJson(JSON.parse(json))!
// Optional chaining skips the loop when detection or attributes are absent.
response.detection?.attributes?.forEach((attributeResult) => {
var value = attributeResult.value           // detected value
var attribute = attributeResult.attribute   // which attribute was evaluated
var confidence = attributeResult.confidence // detection confidence
var range = attributeResult.range           // value range, where applicable
})
})
// Android
// C# (Android binding): read each evaluated attribute from the detection result.
foreach (DetectFacesAttributeResult facesAttribute in response.Detection.Attributes)
{
String value = facesAttribute.Value;                 // detected value
DetectFacesAttribute attribute = facesAttribute.Attribute; // which attribute was evaluated
Java.Lang.Double confidence = facesAttribute.Confidence;   // detection confidence (boxed Java double)
ImageQualityRange range = facesAttribute.Range;      // value range, where applicable
}
// iOS
// C# (iOS binding): same iteration via the RFS-prefixed native types.
foreach (RFSDetectFacesAttributeResult attributeResult in response.Detection.Attributes)
{
string attribute = attributeResult.Attribute;        // which attribute was evaluated
string value = attributeResult.Value;                // detected value
RFSImageQualityRange range = attributeResult.Range;  // value range, where applicable
NSNumber confidence = attributeResult.Confidence;    // detection confidence
}