Commit

Add samples
leareai committed May 13, 2024
1 parent 206f2ab commit 12dca01
Showing 40 changed files with 2,079 additions and 0 deletions.
CleanAllDynamicPersonGroupAndPersonDirectoryPersons.java
@@ -0,0 +1,76 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceAdministrationClient;
import com.azure.ai.vision.face.FaceAdministrationClientBuilder;
import com.azure.ai.vision.face.models.DynamicPersonGroup;
import com.azure.ai.vision.face.models.FaceOperationResult;
import com.azure.ai.vision.face.models.PersonDirectoryPerson;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.polling.SyncPoller;

import java.time.Duration;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;

import static com.azure.ai.vision.face.samples.utils.Utils.log;

public class CleanAllDynamicPersonGroupAndPersonDirectoryPersons {
public static void main(String[] args) {
// Create a FaceAdministrationClient to manage Person Directory persons and DynamicPersonGroups.
FaceAdministrationClient administrationClient = new FaceAdministrationClientBuilder()
.endpoint(ConfigurationHelper.getEndpoint())
.credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
.buildClient();


try {
createFakeData(administrationClient);
} finally {
deleteAllPersons(administrationClient);
deleteAllDynamicPersonGroups(administrationClient);
}
}

private static void deleteAllDynamicPersonGroups(FaceAdministrationClient administrationClient) {
List<DynamicPersonGroup> dynamicPersonGroups = administrationClient.getDynamicPersonGroups();
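// beginDeleteDynamicPersonGroup starts a long-running delete and returns a SyncPoller; collect every poller first, then wait for each one below.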
List<SyncPoller<FaceOperationResult, Void>> pollers = dynamicPersonGroups.stream().map(group -> {
log("Remove DynamicPersonGroup: " + group.getDynamicPersonGroupId());
return administrationClient.beginDeleteDynamicPersonGroup(group.getDynamicPersonGroupId());
}).collect(Collectors.toList());

log("Wait for all deletion of DynamicPersonGroup to complete");
pollers.forEach(poller -> poller
.setPollInterval(Duration.ofSeconds(1))
.waitForCompletion(Duration.ofSeconds(60)));
log("Done");
}

private static void deleteAllPersons(FaceAdministrationClient administrationClient) {
List<PersonDirectoryPerson> pdPersons = administrationClient.getPersons();
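// Start deleting every person; this sample does not wait for the returned delete pollers to complete.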
for (PersonDirectoryPerson person : pdPersons) {
log("Delete Person, name: " + person.getName() + ", data:" + person.getUserData() + ", ID:" + person.getPersonId());
administrationClient.beginDeletePerson(person.getPersonId());
}
}

private static void createFakeData(FaceAdministrationClient administrationClient) {
log("Create fake data ...");
List<SyncPoller<FaceOperationResult, PersonDirectoryPerson>> pollers = new ArrayList<>();
pollers.add(
administrationClient.beginCreatePerson("fake_person1", "Fake Person 1"));
pollers.add(
administrationClient.beginCreatePerson("fake_person2", "Fake Person 2"));
administrationClient.createDynamicPersonGroup("fake1", "Fake group 1");
administrationClient.createDynamicPersonGroup("fake2", "Fake group 2");

pollers.forEach(poller -> poller.setPollInterval(Duration.ofSeconds(1))
.waitForCompletion(Duration.ofSeconds(60)));

log("Done");
}
}
DetectFaces.java
@@ -0,0 +1,54 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceClient;
import com.azure.ai.vision.face.FaceClientBuilder;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Resources;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.ai.vision.face.models.DetectOptions;
import com.azure.ai.vision.face.models.FaceDetectionModel;
import com.azure.ai.vision.face.models.FaceDetectionResult;
import com.azure.ai.vision.face.models.FaceRecognitionModel;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.BinaryData;

import java.nio.file.FileSystems;
import java.util.Arrays;
import java.util.List;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection01;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection03;
import static com.azure.ai.vision.face.models.FaceAttributeType.Recognition04;

public class DetectFaces {
public static void main(String[] args) {
FaceClient client = new FaceClientBuilder()
.endpoint(ConfigurationHelper.getEndpoint())
.credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
.buildClient();

BinaryData imageBinary = BinaryData.fromFile(FileSystems.getDefault().getPath(Resources.TEST_IMAGE_PATH_DETECT_SAMPLE_IMAGE));
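// Detect faces in the local image, requesting head pose, mask and quality-for-recognition attributes.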
List<FaceDetectionResult> detectResult = client.detect(
imageBinary,
FaceDetectionModel.DETECTION_03,
FaceRecognitionModel.RECOGNITION_04,
true,
Arrays.asList(Detection03.HEAD_POSE, Detection03.MASK, Recognition04.QUALITY_FOR_RECOGNITION),
false,
true,
120);

detectResult.forEach(face -> log("Detected Face by file:" + Utils.toString(face) + "\n"));
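// Alternatively, bundle the detection parameters in a DetectOptions object and detect faces from a URL.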

DetectOptions options = new DetectOptions(FaceDetectionModel.DETECTION_01, FaceRecognitionModel.RECOGNITION_04, false)
.setReturnFaceAttributes(Arrays.asList(Detection01.ACCESSORIES, Detection01.GLASSES, Detection01.EXPOSURE, Detection01.NOISE))
.setReturnFaceLandmarks(true);

detectResult = client.detectFromUrl(Resources.TEST_IMAGE_URL_DETECT_SAMPLE, options);
detectResult.forEach(face -> log("Detected Faces from URL:" + Utils.toString(face) + "\n"));
}
}
DetectFacesAsync.java
@@ -0,0 +1,65 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceAsyncClient;
import com.azure.ai.vision.face.FaceClientBuilder;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Resources;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.ai.vision.face.models.DetectOptions;
import com.azure.ai.vision.face.models.FaceDetectionModel;
import com.azure.ai.vision.face.models.FaceDetectionResult;
import com.azure.ai.vision.face.models.FaceRecognitionModel;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.BinaryData;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection01;
import static com.azure.ai.vision.face.models.FaceAttributeType.Detection03;
import static com.azure.ai.vision.face.models.FaceAttributeType.Recognition04;

public class DetectFacesAsync {
public static void main(String[] args) {
FaceAsyncClient client = new FaceClientBuilder()
.endpoint(ConfigurationHelper.getEndpoint())
.credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
.buildAsyncClient();

BinaryData imageBinary = Utils.loadFromFile(Resources.TEST_IMAGE_PATH_DETECT_SAMPLE_IMAGE);
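// detect() returns a Mono wrapping the list of results; flatMapMany(Flux::fromIterable) flattens it into a Flux of individual faces.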
Flux<FaceDetectionResult> flux = client.detect(
imageBinary,
FaceDetectionModel.DETECTION_03,
FaceRecognitionModel.RECOGNITION_04,
true,
Arrays.asList(Detection03.HEAD_POSE, Detection03.MASK, Recognition04.QUALITY_FOR_RECOGNITION),
false,
true,
120)
.flatMapMany(Flux::fromIterable);

flux.subscribe(face -> log("Detected Face by file:" + Utils.toString(face) + "\n"));

DetectOptions options = new DetectOptions(FaceDetectionModel.DETECTION_01, FaceRecognitionModel.RECOGNITION_04, false)
.setReturnFaceAttributes(Arrays.asList(Detection01.ACCESSORIES, Detection01.GLASSES, Detection01.EXPOSURE, Detection01.NOISE))
.setReturnFaceLandmarks(true);

flux = client.detectFromUrl(Resources.TEST_IMAGE_URL_DETECT_SAMPLE, options)
.flatMapMany(Flux::fromIterable);

flux.subscribe(face -> log("Detected Face from URL:" + Utils.toString(face) + "\n"));


try {
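// The subscriptions above are non-blocking; sleep briefly so the asynchronous requests can complete before the JVM exits.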
Thread.sleep(3000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
DetectLiveness.java
@@ -0,0 +1,94 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceSessionClient;
import com.azure.ai.vision.face.FaceSessionClientBuilder;
import com.azure.ai.vision.face.models.CreateLivenessSessionContent;
import com.azure.ai.vision.face.models.CreateLivenessSessionResult;
import com.azure.ai.vision.face.models.LivenessOperationMode;
import com.azure.ai.vision.face.models.LivenessSession;
import com.azure.ai.vision.face.models.LivenessSessionAuditEntry;
import com.azure.ai.vision.face.models.LivenessSessionItem;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Resources;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.BinaryData;

import java.util.List;
import java.util.UUID;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.samples.utils.Utils.logObject;

public class DetectLiveness {
public static void main(String[] args) {
// This sample follows the tutorial at
// https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness
// and walks through the app-server steps described in
// https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness#orchestrate-the-liveness-solution

// 1. A client device sends us a request to start a liveness check.
waitingForLivenessRequest();

// 2. Send a request to the Face API to create a liveness session.
// Create a FaceSessionClient
FaceSessionClient faceSessionClient = new FaceSessionClientBuilder()
.endpoint(ConfigurationHelper.getEndpoint())
.credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
.buildClient();

// Create a liveness session
CreateLivenessSessionContent parameters = new CreateLivenessSessionContent(LivenessOperationMode.PASSIVE)
.setDeviceCorrelationId(UUID.randomUUID().toString())
.setSendResultsToClient(false)
.setAuthTokenTimeToLiveInSeconds(60);
BinaryData data = Utils.loadFromFile(Resources.TEST_IMAGE_PATH_DETECTLIVENESS_VERIFYIMAGE);
CreateLivenessSessionResult livenessSessionCreationResult = faceSessionClient.createLivenessSession(parameters);
String sessionId = livenessSessionCreationResult.getSessionId();
logObject("Create a liveness session: ", livenessSessionCreationResult, true);
String token = livenessSessionCreationResult.getAuthToken();

try {
// 3. Pass the auth token to the client device.
// The client device performs steps 4, 5 and 6 of 'Orchestrate the liveness solution'.
sendTokenToClientDevices(token);

// 7. Wait for the client device to notify us that the liveness session has completed.
waitingForLivenessSessionComplete();

// 8. After the client device completes the liveness check, retrieve the result from the following API.
LivenessSession sessionResult = faceSessionClient.getLivenessSessionResult(livenessSessionCreationResult.getSessionId());
logObject("Get liveness session result after client device complete liveness check: ", sessionResult);

// Get the request/response details of every liveness call in this session.
List<LivenessSessionAuditEntry> auditEntries = faceSessionClient.getLivenessSessionAuditEntries(
livenessSessionCreationResult.getSessionId());
logObject("Get audit entries: ", auditEntries);

// We can also list all the liveness sessions of this face account.
List<LivenessSessionItem> sessions = faceSessionClient.getLivenessSessions();
logObject("List all the liveness sessions: ", sessions, true);
}
finally {
// Delete this session
faceSessionClient.deleteLivenessSession(livenessSessionCreationResult.getSessionId());
}
}

private static void waitingForLivenessSessionComplete() {
log("Please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness and use the mobile client SDK to perform liveness detection on your mobile application.");
Utils.pressAnyKeyToContinue("Press any key to continue once you have completed these steps; the sample will then retrieve the session results ...");
}

private static void sendTokenToClientDevices(String token) {
// Logic to send token to client devices
}

private static void waitingForLivenessRequest() {
// Logic to wait for request from client device
}
}
DetectLivenessAsync.java
@@ -0,0 +1,99 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

package com.azure.ai.vision.face.samples;

import com.azure.ai.vision.face.FaceSessionAsyncClient;
import com.azure.ai.vision.face.FaceSessionClientBuilder;
import com.azure.ai.vision.face.models.CreateLivenessSessionContent;
import com.azure.ai.vision.face.models.CreateLivenessSessionResult;
import com.azure.ai.vision.face.models.LivenessOperationMode;
import com.azure.ai.vision.face.models.LivenessSession;
import com.azure.ai.vision.face.models.LivenessSessionAuditEntry;
import com.azure.ai.vision.face.models.LivenessSessionItem;
import com.azure.ai.vision.face.samples.utils.ConfigurationHelper;
import com.azure.ai.vision.face.samples.utils.Resources;
import com.azure.ai.vision.face.samples.utils.Utils;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.util.BinaryData;

import java.util.List;
import java.util.UUID;

import static com.azure.ai.vision.face.samples.utils.Utils.log;
import static com.azure.ai.vision.face.samples.utils.Utils.logObject;

public class DetectLivenessAsync {
public static void main(String[] args) {
// This sample follows the tutorial at
// https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness
// and walks through the app-server steps described in
// https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness#orchestrate-the-liveness-solution

// 1. A client device sends us a request to start a liveness check.
waitingForLivenessRequest();

// 2. Send a request to the Face API to create a liveness session.
// Create a FaceSessionAsyncClient.
FaceSessionAsyncClient faceSessionClient = new FaceSessionClientBuilder()
.endpoint(ConfigurationHelper.getEndpoint())
.credential(new AzureKeyCredential(ConfigurationHelper.getAccountKey()))
.buildAsyncClient();

// Create a liveness session
CreateLivenessSessionContent parameters = new CreateLivenessSessionContent(LivenessOperationMode.PASSIVE)
.setDeviceCorrelationId(UUID.randomUUID().toString())
.setSendResultsToClient(false)
.setAuthTokenTimeToLiveInSeconds(60);
BinaryData data = Utils.loadFromFile(Resources.TEST_IMAGE_PATH_DETECTLIVENESS_VERIFYIMAGE);
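// block() is used for brevity in this sample; a fully reactive application would compose these calls instead.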
CreateLivenessSessionResult livenessSessionCreationResult = faceSessionClient.createLivenessSession(parameters)
.block();
String sessionId = livenessSessionCreationResult.getSessionId();
logObject("Create a liveness session: ", livenessSessionCreationResult, true);
String token = livenessSessionCreationResult.getAuthToken();

try {
// 3. Pass the auth token to the client device.
// The client device performs steps 4, 5 and 6 of 'Orchestrate the liveness solution'.
sendTokenToClientDevices(token);

// 7. Wait for the client device to notify us that the liveness session has completed.
waitingForLivenessSessionComplete();

// 8. After the client device completes the liveness check, retrieve the result from the following API.
LivenessSession sessionResult = faceSessionClient.getLivenessSessionResult(livenessSessionCreationResult.getSessionId())
.block();
logObject("Get liveness session result after client device complete liveness check: ", sessionResult);

// Get the request/response details of every liveness call in this session.
List<LivenessSessionAuditEntry> auditEntries = faceSessionClient.getLivenessSessionAuditEntries(
livenessSessionCreationResult.getSessionId())
.block();
logObject("Get audit entries: ", auditEntries);

// We can also list all the liveness sessions of this face account.
List<LivenessSessionItem> sessions = faceSessionClient.getLivenessSessions()
.block();
logObject("List all the liveness sessions: ", sessions, true);
} finally {
logObject("Delete liveness sessions: ", sessionId);
// Delete this session
faceSessionClient.deleteLivenessSession(livenessSessionCreationResult.getSessionId())
.block();
}
}

private static void waitingForLivenessSessionComplete() {
log("Please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/tutorials/liveness to download client SDK to run session starts and detect liveness call.");
Utils.pressAnyKeyToContinue("Press any key to continue once you have completed these steps; the sample will then retrieve the session results ...");
}

private static void sendTokenToClientDevices(String token) {
// Logic to send token to client devices
}

private static void waitingForLivenessRequest() {
// Logic to wait for request from client device
}
}