Attributes Evaluation
To identify the status of certain attributes — for example, whether the person in the photo is wearing a medical mask, glasses, or a head covering — you can evaluate attributes. For the exhaustive list of attributes and their details, see the Attributes Evaluation page in the Web Service section.
Request
// Configure which attributes to evaluate for the detected face.
let configuration = DetectFacesConfiguration()
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
configuration.attributes = [
.emotion,
.age,
.glasses
]
// Build the detection request from the source image and the configuration.
let detectFacesRequest = DetectFacesRequest(image: image,
configuration: configuration)
// Configure which attributes to evaluate for the detected face.
RFSDetectFacesConfiguration *configuration = [[RFSDetectFacesConfiguration alloc] init];
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
[configuration setAttributes:@[
RFSDetectFacesAttributeEmotion,
RFSDetectFacesAttributeAge,
RFSDetectFacesAttributeGlasses,
]];
// Build the detection request from the source image and the configuration.
RFSDetectFacesRequest *detectFacesRequest = [[RFSDetectFacesRequest alloc] initWithImage:image
configuration:configuration];
// Configure which attributes to evaluate for the detected face.
val configuration = DetectFacesConfiguration()
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
configuration.setAttributes(
listOf(
DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES
)
)
// Build the detection request from the source bitmap and the configuration.
val detectFacesRequest = DetectFacesRequest(bitmap, configuration)
// Configure which attributes to evaluate for the detected face.
DetectFacesConfiguration configuration = new DetectFacesConfiguration();
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
configuration.setAttributes(Arrays.asList(DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES));
// Build the detection request from the source bitmap and the configuration.
DetectFacesRequest detectFacesRequest = new DetectFacesRequest(bitmap, configuration);
// Configure which attributes to evaluate for the detected face.
let configuration = new DetectFacesConfiguration()
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
configuration.attributes = [
DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES,
]
// Build the detection request and attach the image and the configuration.
let detectFacesRequest = new DetectFacesRequest()
detectFacesRequest.image = bitmap
detectFacesRequest.configuration = configuration
// Configure which attributes to evaluate for the detected face.
var configuration = new DetectFacesConfiguration();
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
configuration.attributes = [
DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES,
];
// Build the detection request and attach the image and the configuration.
var detectFacesRequest = new DetectFacesRequest();
detectFacesRequest.image = bitmap;
detectFacesRequest.configuration = configuration;
// Configure which attributes to evaluate for the detected face.
// NOTE(review): `let` declarations added — the original assigned to
// undeclared names, which creates implicit globals (and throws a
// ReferenceError in strict mode / inside ES modules).
let configuration = new DetectFacesConfiguration()
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
configuration.attributes = [
    Enum.DetectFacesAttribute.EMOTION,
    Enum.DetectFacesAttribute.AGE,
    Enum.DetectFacesAttribute.GLASSES,
]
// Build the detection request and attach the image and the configuration.
let detectFacesRequest = new DetectFacesRequest()
detectFacesRequest.image = bitmap
detectFacesRequest.configuration = configuration
// Configure which attributes to evaluate for the detected face.
let configuration = new DetectFacesConfiguration()
// Each attribute listed here is reported back in the response's
// attribute results (see the Response section below).
configuration.attributes = [
DetectFacesAttribute.EMOTION,
DetectFacesAttribute.AGE,
DetectFacesAttribute.GLASSES,
]
// Build the detection request and attach the image and the configuration.
let detectFacesRequest = new DetectFacesRequest()
detectFacesRequest.image = bitmap
detectFacesRequest.configuration = configuration
Response
// Iterate over the evaluated attribute results. The optional chain
// safely skips iteration when detection or attributes is nil
// (e.g. no face was detected).
detectFacesResponse.detection?.attributes?.forEach { attributeResult in
let value = attributeResult.value            // evaluated value of the attribute
let attribute = attributeResult.attribute    // which attribute this result is for
let confidence = attributeResult.confidence  // confidence of the evaluation — see SDK docs for the scale
let range = attributeResult.range            // value range (presumably for numeric attributes like age) — confirm in SDK docs
}
// Iterate over the evaluated attribute results of the detection.
// NOTE(review): fixed the response variable name — the original read
// `detectFaceResponse`, while every other snippet on this page
// (including the Swift one above) uses `detectFacesResponse`.
for (RFSDetectFacesAttributeResult *attributeResult in detectFacesResponse.detection.attributes) {
    RFSDetectFacesAttribute attribute = attributeResult.attribute; // which attribute this result is for
    NSString *value = attributeResult.value;                       // evaluated value of the attribute
    RFSImageQualityRange *range = attributeResult.range;           // value range (presumably for numeric attributes) — confirm in SDK docs
    NSNumber *confidence = attributeResult.confidence;             // confidence of the evaluation — see SDK docs for the scale
}
// Iterate over the evaluated attribute results. The safe-call chain
// simply skips iteration when detection or attributes is null
// (e.g. no face was detected).
// NOTE(review): replaced the original `detection?.attributes!!` — the
// non-null assertion would throw a NullPointerException exactly in the
// case the `?.` was guarding against.
detectFacesResponse.detection?.attributes?.forEach { facesAttribute ->
    val value = facesAttribute.value           // evaluated value of the attribute
    val attribute = facesAttribute.attribute   // which attribute this result is for
    val confidence = facesAttribute.confidence // confidence of the evaluation — see SDK docs for the scale
    val range = facesAttribute.range           // value range (presumably for numeric attributes like age) — confirm in SDK docs
}
// Iterate over the evaluated attribute results of the detection.
for (DetectFacesAttributeResult facesAttribute : detectFacesResponse.getDetection().getAttributes()) {
String value = facesAttribute.getValue();                 // evaluated value of the attribute
DetectFacesAttribute attribute = facesAttribute.getAttribute(); // which attribute this result is for
Double confidence = facesAttribute.getConfidence();       // confidence of the evaluation — see SDK docs for the scale
ImageQualityRange range = facesAttribute.getRange();      // value range (presumably for numeric attributes like age) — confirm in SDK docs
}
// Run detection; the result arrives as a raw JSON string in the callback.
FaceSDK.detectFaces(JSON.stringify(detectFacesRequest), raw => {
// Parse the raw payload into a typed response object.
let detectFacesResponse = DetectFacesResponse.fromJson(JSON.parse(raw))
// Optional chaining skips iteration when detection or attributes is
// absent (e.g. no face was detected).
detectFacesResponse.detection?.attributes?.forEach((attributeResult) => {
let value = attributeResult.value            // evaluated value of the attribute
let attribute = attributeResult.attribute    // which attribute this result is for
let confidence = attributeResult.confidence  // confidence of the evaluation — see SDK docs for the scale
let range = attributeResult.range            // value range (presumably for numeric attributes like age) — confirm in SDK docs
})
}, e => { })
// Run detection; the result arrives as a raw JSON string in the future.
FaceSDK.detectFaces(jsonEncode(detectFacesRequest)).then((raw) {
  // Parse the raw payload into a typed response object.
  var detectFacesResponse = DetectFacesResponse.fromJson(jsonDecode(raw));
  // NOTE(review): made the null-aware access consistent — the original
  // guarded `detectFacesResponse?` and `detection?` but then called
  // `.attributes.forEach` unguarded, and null-checked `attributeResult`
  // inside the callback where the element is never null.
  detectFacesResponse?.detection?.attributes?.forEach((attributeResult) {
    var value = attributeResult.value;           // evaluated value of the attribute
    var attribute = attributeResult.attribute;   // which attribute this result is for
    var confidence = attributeResult.confidence; // confidence of the evaluation — see SDK docs for the scale
    var range = attributeResult.range;           // value range (presumably for numeric attributes like age) — confirm in SDK docs
  });
});
// Run detection; the result arrives as a raw JSON string in the callback.
// NOTE(review): `const`/`let` declarations added — the original assigned
// to undeclared names, which creates implicit globals (and throws a
// ReferenceError in strict mode / inside ES modules).
FaceSDK.detectFaces(JSON.stringify(detectFacesRequest), raw => {
    // Parse the raw payload into a typed response object.
    const detectFacesResponse = DetectFacesResponse.fromJson(JSON.parse(raw))
    // Optional chaining skips iteration when detection or attributes is
    // absent (e.g. no face was detected).
    detectFacesResponse.detection?.attributes?.forEach((attributeResult) => {
        const value = attributeResult.value           // evaluated value of the attribute
        const attribute = attributeResult.attribute   // which attribute this result is for
        const confidence = attributeResult.confidence // confidence of the evaluation — see SDK docs for the scale
        const range = attributeResult.range           // value range (presumably for numeric attributes like age) — confirm in SDK docs
    })
}, e => { })
// Run detection; the result resolves as a raw JSON string.
FaceSDK.detectFaces(JSON.stringify(detectFacesRequest)).then(raw => {
// Parse the raw payload into a typed response object.
let detectFacesResponse = DetectFacesResponse.fromJson(JSON.parse(raw))
// Optional chaining skips iteration when detection or attributes is
// absent (e.g. no face was detected).
detectFacesResponse.detection?.attributes?.forEach((attributeResult) => {
let value = attributeResult.value            // evaluated value of the attribute
let attribute = attributeResult.attribute    // which attribute this result is for
let confidence = attributeResult.confidence  // confidence of the evaluation — see SDK docs for the scale
let range = attributeResult.range            // value range (presumably for numeric attributes like age) — confirm in SDK docs
})
})