From 241c4f0422d842f459baf7430c9f017b7c1531fd Mon Sep 17 00:00:00 2001 From: Han Chiang Date: Tue, 30 Apr 2024 13:15:31 +0800 Subject: [PATCH] Migrate existing Azure AI Face service to TypeSpec (#27576) * Add following operations: - Liveness Session - Person Directory - Person Group - Face List - Face Operations * Restructure and rename operations Omit unreachable types * Change PD to non-LRO * Update example, doc, data decorator and unnecessary sharedRoute * Use union to represent extensible enum Add suppressions Add default value for models Fix age type Rename model in sdk Refine examples and train headers Add uuid type, groupId type and name min length Use our own operation state Change liveness operation to enum Refine group id pattern Use array for csv fields Separate request and response model for collection Flatten interface Flatten parameter for more interface * Mark detect as internal for customization * Add liveness audit response schema * Update documents (add and fix description format and add examples) Co-authored-by: Shawn Lin * Move path to host template * Move suppression to config Fix format and suppress response code failure Fix breaking changes Switch endpoint to uri Refine liveness session model Move away from standard operation to use our own error code * Set liveness with verify session as internal * Remove 4xx examples for now with some document updates (#4) * Update detection document * Update 4xx examples * Update example parameter and response * Several doc fix for PD * Differenct recognition descriptions for different data structure * Fix bullet points in note and update some PD doc * Update liveness doc * Remove 4xx examples for now to pass validation --------- Co-authored-by: Shawn Lin Co-authored-by: Shawn Lin * Add verify image response in create session * Rename API operation name (#5) * Change document to new operation names * Rename * More renaming --------- Co-authored-by: Shawn Lin Co-authored-by: Shawn Lin * Refine verify image 
header description * Add partial update for java * Use shared route for DPG LRO Use original LRO for PD person and face creation * Use resource decorator instead of specify resource in LRO response * Change name for patch to optional * Correct operation status name Correct DPG example * Rename operation status model * Correct operation result example * Add gated attribute * Fix validation * Force public model for internal detect * Correct update DPG model * Disable detect from url convenientAPI for java * Rename dpg write * Fix comment * Rename id and remove customized uuid * Correct access level of landmark and attribute * Correct more models' access * Collect client customization into client.tsp * Correct verify pd parameter type * Correct type * Reorder detect parameters * Rename session model * Refine description for return reco model * Remove detection convenient API * Add link of error code and message (#6) Co-authored-by: Shawn Lin * Fix validation * Remove unnecessary model * Rename model * Remove redundant line --------- Co-authored-by: Han Chiang Co-authored-by: shaoli-msft <165001074+shaoli-msft@users.noreply.github.com> Co-authored-by: Shawn Lin Co-authored-by: Shawn Lin --- cSpell.json | 10 +- specification/ai/Face/client.tsp | 193 + .../Face/examples/v1.1-preview.1/Detect.json | 181 + .../v1.1-preview.1/DetectFromUrl.json | 183 + ...tOperations_AddFaceListFaceFromStream.json | 19 + ...ListOperations_AddFaceListFaceFromUrl.json | 21 + ...ations_AddLargeFaceListFaceFromStream.json | 19 + ...perations_AddLargeFaceListFaceFromUrl.json | 21 + .../FaceListOperations_CreateFaceList.json | 16 + ...aceListOperations_CreateLargeFaceList.json | 16 + .../FaceListOperations_DeleteFaceList.json | 11 + ...FaceListOperations_DeleteFaceListFace.json | 12 + ...aceListOperations_DeleteLargeFaceList.json | 11 + ...istOperations_DeleteLargeFaceListFace.json | 12 + .../FaceListOperations_GetFaceList.json | 19 + .../FaceListOperations_GetFaceLists.json | 20 + 
.../FaceListOperations_GetLargeFaceList.json | 19 + ...ceListOperations_GetLargeFaceListFace.json | 17 + ...eListOperations_GetLargeFaceListFaces.json | 20 + ...ations_GetLargeFaceListTrainingStatus.json | 19 + .../FaceListOperations_GetLargeFaceLists.json | 22 + ...FaceListOperations_TrainLargeFaceList.json | 15 + .../FaceListOperations_UpdateFaceList.json | 15 + ...aceListOperations_UpdateLargeFaceList.json | 15 + ...istOperations_UpdateLargeFaceListFace.json | 15 + ...FaceRecognitionOperations_FindSimilar.json | 26 + ...ionOperations_FindSimilarFromFaceList.json | 23 + ...erations_FindSimilarFromLargeFaceList.json | 23 + .../FaceRecognitionOperations_Group.json | 41 + ...ations_IdentifyFromDynamicPersonGroup.json | 30 + ...erations_IdentifyFromLargePersonGroup.json | 30 + ...perations_IdentifyFromPersonDirectory.json | 32 + ...ionOperations_IdentifyFromPersonGroup.json | 30 + ...ecognitionOperations_VerifyFaceToFace.json | 19 + ...Operations_VerifyFromLargePersonGroup.json | 20 + ...nOperations_VerifyFromPersonDirectory.json | 19 + ...itionOperations_VerifyFromPersonGroup.json | 20 + .../v1.1-preview.1/GetOperationResult.json | 20 + ...ssionOperations_CreateLivenessSession.json | 22 + ...tions_CreateLivenessWithVerifySession.json | 22 + ...enessWithVerifySessionWithVerifyImage.json | 26 + ...ssionOperations_DeleteLivenessSession.json | 11 + ...tions_DeleteLivenessWithVerifySession.json | 11 + ...ations_GetLivenessSessionAuditEntries.json | 36 + ...onOperations_GetLivenessSessionResult.json | 41 + ...SessionOperations_GetLivenessSessions.json | 23 + ...LivenessWithVerifySessionAuditEntries.json | 36 + ...ns_GetLivenessWithVerifySessionResult.json | 41 + ...rations_GetLivenessWithVerifySessions.json | 23 + ...oryOperations_AddPersonFaceFromStream.json | 24 + ...ectoryOperations_AddPersonFaceFromUrl.json | 26 + ...ryOperations_CreateDynamicPersonGroup.json | 15 + ...ns_CreateDynamicPersonGroupWithPerson.json | 22 + ...ersonDirectoryOperations_CreatePerson.json | 22 
+ ...ryOperations_DeleteDynamicPersonGroup.json | 16 + ...ersonDirectoryOperations_DeletePerson.json | 16 + ...nDirectoryOperations_DeletePersonFace.json | 18 + ...ctoryOperations_GetDynamicPersonGroup.json | 17 + ...erations_GetDynamicPersonGroupPersons.json | 20 + ...tions_GetDynamicPersonGroupReferences.json | 19 + ...toryOperations_GetDynamicPersonGroups.json | 20 + .../PersonDirectoryOperations_GetPerson.json | 17 + ...rsonDirectoryOperations_GetPersonFace.json | 18 + ...sonDirectoryOperations_GetPersonFaces.json | 19 + .../PersonDirectoryOperations_GetPersons.json | 20 + ...ryOperations_UpdateDynamicPersonGroup.json | 15 + ...teDynamicPersonGroupWithPersonChanges.json | 25 + ...ersonDirectoryOperations_UpdatePerson.json | 15 + ...nDirectoryOperations_UpdatePersonFace.json | 16 + ...dLargePersonGroupPersonFaceFromStream.json | 20 + ..._AddLargePersonGroupPersonFaceFromUrl.json | 22 + ...ns_AddPersonGroupPersonFaceFromStream.json | 20 + ...tions_AddPersonGroupPersonFaceFromUrl.json | 22 + ...roupOperations_CreateLargePersonGroup.json | 16 + ...erations_CreateLargePersonGroupPerson.json | 19 + ...rsonGroupOperations_CreatePersonGroup.json | 16 + ...oupOperations_CreatePersonGroupPerson.json | 19 + ...roupOperations_DeleteLargePersonGroup.json | 11 + ...erations_DeleteLargePersonGroupPerson.json | 12 + ...ions_DeleteLargePersonGroupPersonFace.json | 13 + ...rsonGroupOperations_DeletePersonGroup.json | 11 + ...oupOperations_DeletePersonGroupPerson.json | 12 + ...perations_DeletePersonGroupPersonFace.json | 13 + ...onGroupOperations_GetLargePersonGroup.json | 19 + ...pOperations_GetLargePersonGroupPerson.json | 21 + ...rations_GetLargePersonGroupPersonFace.json | 18 + ...Operations_GetLargePersonGroupPersons.json | 24 + ...ons_GetLargePersonGroupTrainingStatus.json | 19 + ...nGroupOperations_GetLargePersonGroups.json | 22 + .../PersonGroupOperations_GetPersonGroup.json | 19 + ...nGroupOperations_GetPersonGroupPerson.json | 21 + 
...upOperations_GetPersonGroupPersonFace.json | 18 + ...GroupOperations_GetPersonGroupPersons.json | 24 + ...erations_GetPersonGroupTrainingStatus.json | 19 + ...PersonGroupOperations_GetPersonGroups.json | 22 + ...GroupOperations_TrainLargePersonGroup.json | 15 + ...ersonGroupOperations_TrainPersonGroup.json | 15 + ...roupOperations_UpdateLargePersonGroup.json | 15 + ...erations_UpdateLargePersonGroupPerson.json | 16 + ...ions_UpdateLargePersonGroupPersonFace.json | 16 + ...rsonGroupOperations_UpdatePersonGroup.json | 15 + ...oupOperations_UpdatePersonGroupPerson.json | 16 + ...perations_UpdatePersonGroupPersonFace.json | 16 + specification/ai/Face/main.tsp | 58 + specification/ai/Face/models.common.tsp | 296 + specification/ai/Face/models.detect.tsp | 512 + specification/ai/Face/models.facelist.tsp | 45 + .../ai/Face/models.persondirectory.tsp | 110 + specification/ai/Face/models.persongroup.tsp | 58 + specification/ai/Face/models.session.tsp | 276 + specification/ai/Face/routes.common.tsp | 151 + specification/ai/Face/routes.detection.tsp | 63 + specification/ai/Face/routes.facelist.tsp | 193 + .../ai/Face/routes.persondirectory.tsp | 285 + specification/ai/Face/routes.persongroup.tsp | 290 + specification/ai/Face/routes.recognition.tsp | 283 + specification/ai/Face/routes.session.tsp | 152 + specification/ai/Face/tspconfig.yaml | 45 + .../Face/preview/v1.1-preview.1/Face.json | 9416 +++++++++++++++++ .../v1.1-preview.1/examples/Detect.json | 181 + .../examples/DetectFromUrl.json | 183 + ...tOperations_AddFaceListFaceFromStream.json | 19 + ...ListOperations_AddFaceListFaceFromUrl.json | 21 + ...ations_AddLargeFaceListFaceFromStream.json | 19 + ...perations_AddLargeFaceListFaceFromUrl.json | 21 + .../FaceListOperations_CreateFaceList.json | 16 + ...aceListOperations_CreateLargeFaceList.json | 16 + .../FaceListOperations_DeleteFaceList.json | 11 + ...FaceListOperations_DeleteFaceListFace.json | 12 + ...aceListOperations_DeleteLargeFaceList.json | 11 + 
...istOperations_DeleteLargeFaceListFace.json | 12 + .../FaceListOperations_GetFaceList.json | 19 + .../FaceListOperations_GetFaceLists.json | 20 + .../FaceListOperations_GetLargeFaceList.json | 19 + ...ceListOperations_GetLargeFaceListFace.json | 17 + ...eListOperations_GetLargeFaceListFaces.json | 20 + ...ations_GetLargeFaceListTrainingStatus.json | 19 + .../FaceListOperations_GetLargeFaceLists.json | 22 + ...FaceListOperations_TrainLargeFaceList.json | 15 + .../FaceListOperations_UpdateFaceList.json | 15 + ...aceListOperations_UpdateLargeFaceList.json | 15 + ...istOperations_UpdateLargeFaceListFace.json | 15 + ...FaceRecognitionOperations_FindSimilar.json | 26 + ...ionOperations_FindSimilarFromFaceList.json | 23 + ...erations_FindSimilarFromLargeFaceList.json | 23 + .../FaceRecognitionOperations_Group.json | 41 + ...ations_IdentifyFromDynamicPersonGroup.json | 30 + ...erations_IdentifyFromLargePersonGroup.json | 30 + ...perations_IdentifyFromPersonDirectory.json | 32 + ...ionOperations_IdentifyFromPersonGroup.json | 30 + ...ecognitionOperations_VerifyFaceToFace.json | 19 + ...Operations_VerifyFromLargePersonGroup.json | 20 + ...nOperations_VerifyFromPersonDirectory.json | 19 + ...itionOperations_VerifyFromPersonGroup.json | 20 + .../examples/GetOperationResult.json | 20 + ...ssionOperations_CreateLivenessSession.json | 22 + ...tions_CreateLivenessWithVerifySession.json | 22 + ...enessWithVerifySessionWithVerifyImage.json | 26 + ...ssionOperations_DeleteLivenessSession.json | 11 + ...tions_DeleteLivenessWithVerifySession.json | 11 + ...ations_GetLivenessSessionAuditEntries.json | 36 + ...onOperations_GetLivenessSessionResult.json | 41 + ...SessionOperations_GetLivenessSessions.json | 23 + ...LivenessWithVerifySessionAuditEntries.json | 36 + ...ns_GetLivenessWithVerifySessionResult.json | 41 + ...rations_GetLivenessWithVerifySessions.json | 23 + ...oryOperations_AddPersonFaceFromStream.json | 24 + ...ectoryOperations_AddPersonFaceFromUrl.json | 26 + 
...ryOperations_CreateDynamicPersonGroup.json | 15 + ...ns_CreateDynamicPersonGroupWithPerson.json | 22 + ...ersonDirectoryOperations_CreatePerson.json | 22 + ...ryOperations_DeleteDynamicPersonGroup.json | 16 + ...ersonDirectoryOperations_DeletePerson.json | 16 + ...nDirectoryOperations_DeletePersonFace.json | 18 + ...ctoryOperations_GetDynamicPersonGroup.json | 17 + ...erations_GetDynamicPersonGroupPersons.json | 20 + ...tions_GetDynamicPersonGroupReferences.json | 19 + ...toryOperations_GetDynamicPersonGroups.json | 20 + .../PersonDirectoryOperations_GetPerson.json | 17 + ...rsonDirectoryOperations_GetPersonFace.json | 18 + ...sonDirectoryOperations_GetPersonFaces.json | 19 + .../PersonDirectoryOperations_GetPersons.json | 20 + ...ryOperations_UpdateDynamicPersonGroup.json | 15 + ...teDynamicPersonGroupWithPersonChanges.json | 25 + ...ersonDirectoryOperations_UpdatePerson.json | 15 + ...nDirectoryOperations_UpdatePersonFace.json | 16 + ...dLargePersonGroupPersonFaceFromStream.json | 20 + ..._AddLargePersonGroupPersonFaceFromUrl.json | 22 + ...ns_AddPersonGroupPersonFaceFromStream.json | 20 + ...tions_AddPersonGroupPersonFaceFromUrl.json | 22 + ...roupOperations_CreateLargePersonGroup.json | 16 + ...erations_CreateLargePersonGroupPerson.json | 19 + ...rsonGroupOperations_CreatePersonGroup.json | 16 + ...oupOperations_CreatePersonGroupPerson.json | 19 + ...roupOperations_DeleteLargePersonGroup.json | 11 + ...erations_DeleteLargePersonGroupPerson.json | 12 + ...ions_DeleteLargePersonGroupPersonFace.json | 13 + ...rsonGroupOperations_DeletePersonGroup.json | 11 + ...oupOperations_DeletePersonGroupPerson.json | 12 + ...perations_DeletePersonGroupPersonFace.json | 13 + ...onGroupOperations_GetLargePersonGroup.json | 19 + ...pOperations_GetLargePersonGroupPerson.json | 21 + ...rations_GetLargePersonGroupPersonFace.json | 18 + ...Operations_GetLargePersonGroupPersons.json | 24 + ...ons_GetLargePersonGroupTrainingStatus.json | 19 + 
...nGroupOperations_GetLargePersonGroups.json | 22 + .../PersonGroupOperations_GetPersonGroup.json | 19 + ...nGroupOperations_GetPersonGroupPerson.json | 21 + ...upOperations_GetPersonGroupPersonFace.json | 18 + ...GroupOperations_GetPersonGroupPersons.json | 24 + ...erations_GetPersonGroupTrainingStatus.json | 19 + ...PersonGroupOperations_GetPersonGroups.json | 22 + ...GroupOperations_TrainLargePersonGroup.json | 15 + ...ersonGroupOperations_TrainPersonGroup.json | 15 + ...roupOperations_UpdateLargePersonGroup.json | 15 + ...erations_UpdateLargePersonGroupPerson.json | 16 + ...ions_UpdateLargePersonGroupPersonFace.json | 16 + ...rsonGroupOperations_UpdatePersonGroup.json | 15 + ...oupOperations_UpdatePersonGroupPerson.json | 16 + ...perations_UpdatePersonGroupPersonFace.json | 16 + specification/ai/data-plane/Face/readme.md | 21 +- 221 files changed, 17091 insertions(+), 4 deletions(-) create mode 100644 specification/ai/Face/client.tsp create mode 100644 specification/ai/Face/examples/v1.1-preview.1/Detect.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/DetectFromUrl.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromStream.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromUrl.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromStream.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromUrl.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateLargeFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceList.json create mode 100644 
specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceListFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceListFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceLists.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFaces.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListTrainingStatus.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceLists.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_TrainLargeFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceListFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilar.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_Group.json create mode 100644 
specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonDirectory.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFaceToFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromLargePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonDirectory.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/GetOperationResult.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessSession.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySession.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessSession.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessWithVerifySession.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionAuditEntries.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionResult.json create mode 100644 
specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessions.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessions.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromStream.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromUrl.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreatePerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeleteDynamicPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroups.json create mode 100644 
specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFaces.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersons.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroup.json create mode 
100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersons.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroups.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersons.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupTrainingStatus.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroups.json create mode 100644 
specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainLargePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainPersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroup.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPerson.json create mode 100644 specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPersonFace.json create mode 100644 specification/ai/Face/main.tsp create mode 100644 specification/ai/Face/models.common.tsp create mode 100644 specification/ai/Face/models.detect.tsp create mode 100644 specification/ai/Face/models.facelist.tsp create mode 100644 specification/ai/Face/models.persondirectory.tsp create mode 100644 specification/ai/Face/models.persongroup.tsp create mode 100644 specification/ai/Face/models.session.tsp create mode 100644 specification/ai/Face/routes.common.tsp create mode 100644 specification/ai/Face/routes.detection.tsp create mode 100644 specification/ai/Face/routes.facelist.tsp create mode 100644 specification/ai/Face/routes.persondirectory.tsp create mode 100644 specification/ai/Face/routes.persongroup.tsp create mode 100644 specification/ai/Face/routes.recognition.tsp create mode 100644 specification/ai/Face/routes.session.tsp create mode 100644 specification/ai/Face/tspconfig.yaml create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json create mode 100644 
specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/Detect.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/DetectFromUrl.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromStream.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromUrl.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromStream.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateLargeFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceListFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceListFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceLists.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFace.json create mode 100644 
specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFaces.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListTrainingStatus.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceLists.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_TrainLargeFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceListFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilar.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_Group.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json create 
mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFaceToFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/GetOperationResult.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessSession.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessSession.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionAuditEntries.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionResult.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessions.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json create mode 100644 
specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessions.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromStream.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromUrl.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreatePerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeleteDynamicPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroups.json create mode 100644 
specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFaces.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersons.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroupPerson.json create mode 100644 
specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroupPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersons.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroups.json create mode 100644 
specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersons.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroups.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainLargePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainPersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroup.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPerson.json create mode 100644 specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json diff --git a/cSpell.json b/cSpell.json index 1ce677e8154e..a0ff54c022f5 100644 --- a/cSpell.json +++ b/cSpell.json @@ -1241,7 +1241,7 @@ 
"WSUS" ] }, - { + { "filename": "**/specification/monitor/resource-manager/Microsoft.Insights/**/actionGroups_API.json", "words": [ "occurringlocation", @@ -1251,6 +1251,14 @@ "tsgid", "correlationid" ] + }, + { + "filename": "**/specification/ai/data-plane/Face/**/*.json", + "words": [ + "headwear", + "realface", + "spoofface" + ] } ], "enableFiletypes": [ diff --git a/specification/ai/Face/client.tsp b/specification/ai/Face/client.tsp new file mode 100644 index 000000000000..b80cb20941ae --- /dev/null +++ b/specification/ai/Face/client.tsp @@ -0,0 +1,193 @@ +import "@azure-tools/typespec-client-generator-core"; +import "@typespec/rest"; +import "@typespec/http"; +import "@typespec/versioning"; +import "./main.tsp"; + +using Azure.ClientGenerator.Core; +using Face; + +@TypeSpec.Versioning.useDependency(Azure.Core.Versions.v1_0_Preview_2) +@TypeSpec.Versioning.useDependency(Face.Versions.v1_1_preview_1) +namespace ClientCustomizations; + +@@clientName(RecognitionModel, "FaceRecognitionModel"); +@@clientName(DetectionModel, "FaceDetectionModel"); +@@clientName(OperationStatus, "FaceOperationStatus"); +@@clientName(TrainingResult, "FaceCollectionTrainingResult"); +@@clientName(IdentificationResult, "FaceIdentificationResult"); +@@clientName(IdentificationCandidate, "FaceIdentificationCandidate"); +@@clientName(VerificationResult, "FaceVerificationResult"); +@@clientName(FindSimilarResult, "FaceFindSimilarResult"); +@@clientName(GroupingResult, "FaceGroupingResult"); +@@clientName(OperationResult, "FaceOperationResult"); +@@clientName(LivenessDecision, "FaceLivenessDecision"); +@@clientName(ImageType, "FaceImageType"); + +@@access(FaceAttributeType, Access.public); +@@access(FaceDetectionResult, Access.public); +@@access(FaceLandmarks, Access.public); +@@access(LandmarkCoordinate, Access.public); +@@access(FaceAttributes, Access.public); +@@access(FacialHair, Access.public); +@@access(GlassesType, Access.public); +@@access(HeadPose, Access.public); 
+@@access(HairProperties, Access.public); +@@access(HairColor, Access.public); +@@access(HairColorType, Access.public); +@@access(OcclusionProperties, Access.public); +@@access(AccessoryItem, Access.public); +@@access(AccessoryType, Access.public); +@@access(BlurProperties, Access.public); +@@access(BlurLevel, Access.public); +@@access(ExposureProperties, Access.public); +@@access(ExposureLevel, Access.public); +@@access(NoiseProperties, Access.public); +@@access(NoiseLevel, Access.public); +@@access(MaskProperties, Access.public); +@@access(MaskType, Access.public); +@@access(QualityForRecognition, Access.public); +@@access(CreateLivenessWithVerifySessionResult, Access.public); + +@client({ + name: "FaceClient", + service: Face, +}) +interface FaceClient { + @access(Access.internal) + @convenientAPI(false, "java") + @convenientAPI(false, "csharp") + detectFromUrl is FaceDetectionOperations.detectFromUrl; + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." 
+ @access(Access.internal) + @convenientAPI(false, "java") + @convenientAPI(false, "csharp") + detect is FaceDetectionOperations.detect; + + findSimilar is FaceRecognitionOperations.findSimilar; + findSimilarFromFaceList is FaceRecognitionOperations.findSimilarFromFaceList; + findSimilarFromLargeFaceList is FaceRecognitionOperations.findSimilarFromLargeFaceList; + identifyFromPersonGroup is FaceRecognitionOperations.identifyFromPersonGroup; + identifyFromLargePersonGroup is FaceRecognitionOperations.identifyFromLargePersonGroup; + identifyFromPersonDirectory is FaceRecognitionOperations.identifyFromPersonDirectory; + identifyFromDynamicPersonGroup is FaceRecognitionOperations.identifyFromDynamicPersonGroup; + verifyFaceToFace is FaceRecognitionOperations.verifyFaceToFace; + verifyFromPersonGroup is FaceRecognitionOperations.verifyFromPersonGroup; + verifyFromLargePersonGroup is FaceRecognitionOperations.verifyFromLargePersonGroup; + verifyFromPersonDirectory is FaceRecognitionOperations.verifyFromPersonDirectory; + group is FaceRecognitionOperations.group; +} + +@client({ + name: "FaceAdministrationClient", + service: Face, +}) +interface FaceAdministrationClient { + createFaceList is FaceListOperations.createFaceList; + deleteFaceList is FaceListOperations.deleteFaceList; + getFaceList is FaceListOperations.getFaceList; + updateFaceList is FaceListOperations.updateFaceList; + getFaceLists is FaceListOperations.getFaceLists; + addFaceListFaceFromUrl is FaceListOperations.addFaceListFaceFromUrl; + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." 
+ addFaceListFace is FaceListOperations.addFaceListFace; + deleteFaceListFace is FaceListOperations.deleteFaceListFace; + + createLargeFaceList is FaceListOperations.createLargeFaceList; + deleteLargeFaceList is FaceListOperations.deleteLargeFaceList; + getLargeFaceList is FaceListOperations.getLargeFaceList; + updateLargeFaceList is FaceListOperations.updateLargeFaceList; + getLargeFaceLists is FaceListOperations.getLargeFaceLists; + getLargeFaceListTrainingStatus is FaceListOperations.getLargeFaceListTrainingStatus; + trainLargeFaceList is FaceListOperations.trainLargeFaceList; + addLargeFaceListFaceFromUrl is FaceListOperations.addLargeFaceListFaceFromUrl; + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." + addLargeFaceListFace is FaceListOperations.addLargeFaceListFace; + deleteLargeFaceListFace is FaceListOperations.deleteLargeFaceListFace; + getLargeFaceListFace is FaceListOperations.getLargeFaceListFace; + updateLargeFaceListFace is FaceListOperations.updateLargeFaceListFace; + getLargeFaceListFaces is FaceListOperations.getLargeFaceListFaces; + + createPersonGroup is PersonGroupOperations.createPersonGroup; + deletePersonGroup is PersonGroupOperations.deletePersonGroup; + getPersonGroup is PersonGroupOperations.getPersonGroup; + updatePersonGroup is PersonGroupOperations.updatePersonGroup; + getPersonGroups is PersonGroupOperations.getPersonGroups; + getPersonGroupTrainingStatus is PersonGroupOperations.getPersonGroupTrainingStatus; + trainPersonGroup is PersonGroupOperations.trainPersonGroup; + createPersonGroupPerson is PersonGroupOperations.createPersonGroupPerson; + deletePersonGroupPerson is PersonGroupOperations.deletePersonGroupPerson; + getPersonGroupPerson is PersonGroupOperations.getPersonGroupPerson; + updatePersonGroupPerson is PersonGroupOperations.updatePersonGroupPerson; + getPersonGroupPersons is 
PersonGroupOperations.getPersonGroupPersons; + addPersonGroupPersonFaceFromUrl is PersonGroupOperations.addPersonGroupPersonFaceFromUrl; + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." + addPersonGroupPersonFace is PersonGroupOperations.addPersonGroupPersonFace; + deletePersonGroupPersonFace is PersonGroupOperations.deletePersonGroupPersonFace; + getPersonGroupPersonFace is PersonGroupOperations.getPersonGroupPersonFace; + updatePersonGroupPersonFace is PersonGroupOperations.updatePersonGroupPersonFace; + + createLargePersonGroup is PersonGroupOperations.createLargePersonGroup; + deleteLargePersonGroup is PersonGroupOperations.deleteLargePersonGroup; + getLargePersonGroup is PersonGroupOperations.getLargePersonGroup; + updateLargePersonGroup is PersonGroupOperations.updateLargePersonGroup; + getLargePersonGroups is PersonGroupOperations.getLargePersonGroups; + getLargePersonGroupTrainingStatus is PersonGroupOperations.getLargePersonGroupTrainingStatus; + trainLargePersonGroup is PersonGroupOperations.trainLargePersonGroup; + createLargePersonGroupPerson is PersonGroupOperations.createLargePersonGroupPerson; + deleteLargePersonGroupPerson is PersonGroupOperations.deleteLargePersonGroupPerson; + getLargePersonGroupPerson is PersonGroupOperations.getLargePersonGroupPerson; + updateLargePersonGroupPerson is PersonGroupOperations.updateLargePersonGroupPerson; + getLargePersonGroupPersons is PersonGroupOperations.getLargePersonGroupPersons; + addLargePersonGroupPersonFaceFromUrl is PersonGroupOperations.addLargePersonGroupPersonFaceFromUrl; + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." 
+ addLargePersonGroupPersonFace is PersonGroupOperations.addLargePersonGroupPersonFace; + deleteLargePersonGroupPersonFace is PersonGroupOperations.deleteLargePersonGroupPersonFace; + getLargePersonGroupPersonFace is PersonGroupOperations.getLargePersonGroupPersonFace; + updateLargePersonGroupPersonFace is PersonGroupOperations.updateLargePersonGroupPersonFace; + + createPerson is PersonDirectoryOperations.createPerson; + deletePerson is PersonDirectoryOperations.deletePerson; + getPerson is PersonDirectoryOperations.getPerson; + updatePerson is PersonDirectoryOperations.updatePerson; + getPersons is PersonDirectoryOperations.getPersons; + getDynamicPersonGroupReferences is PersonDirectoryOperations.getDynamicPersonGroupReferences; + addPersonFaceFromUrl is PersonDirectoryOperations.addPersonFaceFromUrl; + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." 
+ addPersonFace is PersonDirectoryOperations.addPersonFace; + deletePersonFace is PersonDirectoryOperations.deletePersonFace; + getPersonFace is PersonDirectoryOperations.getPersonFace; + updatePersonFace is PersonDirectoryOperations.updatePersonFace; + getPersonFaces is PersonDirectoryOperations.getPersonFaces; + + createDynamicPersonGroupWithPerson is PersonDirectoryOperations.createDynamicPersonGroupWithPerson; + createDynamicPersonGroup is PersonDirectoryOperations.createDynamicPersonGroup; + deleteDynamicPersonGroup is PersonDirectoryOperations.deleteDynamicPersonGroup; + getDynamicPersonGroup is PersonDirectoryOperations.getDynamicPersonGroup; + updateDynamicPersonGroupWithPersonChanges is PersonDirectoryOperations.updateDynamicPersonGroupWithPersonChanges; + updateDynamicPersonGroup is PersonDirectoryOperations.updateDynamicPersonGroup; + getDynamicPersonGroups is PersonDirectoryOperations.getDynamicPersonGroups; + getDynamicPersonGroupPersons is PersonDirectoryOperations.getDynamicPersonGroupPersons; +} + +@client({ + name: "FaceSessionClient", + service: Face, +}) +interface FaceSessionClient { + createLivenessSession is LivenessSessionOperations.createLivenessSession; + deleteLivenessSession is LivenessSessionOperations.deleteLivenessSession; + getLivenessSessionResult is LivenessSessionOperations.getLivenessSessionResult; + getLivenessSessions is LivenessSessionOperations.getLivenessSessions; + getLivenessSessionAuditEntries is LivenessSessionOperations.getLivenessSessionAuditEntries; + + @access(Access.internal) + createLivenessWithVerifySession is LivenessSessionOperations.createLivenessWithVerifySession; + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." 
+ @access(Access.internal) + createLivenessWithVerifySessionWithVerifyImage is LivenessSessionOperations.createLivenessWithVerifySessionWithVerifyImage; + deleteLivenessWithVerifySession is LivenessSessionOperations.deleteLivenessWithVerifySession; + getLivenessWithVerifySessionResult is LivenessSessionOperations.getLivenessWithVerifySessionResult; + getLivenessWithVerifySessions is LivenessSessionOperations.getLivenessWithVerifySessions; + getLivenessWithVerifySessionAuditEntries is LivenessSessionOperations.getLivenessWithVerifySessionAuditEntries; +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/Detect.json b/specification/ai/Face/examples/v1.1-preview.1/Detect.json new file mode 100644 index 000000000000..06b9e7ecf543 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/Detect.json @@ -0,0 +1,181 @@ +{ + "title": "Detect with Image", + "operationId": "FaceDetectionOperations_Detect", + "parameters": { + "apiVersion": "v1.1-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "imageContent": "" + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, 
+ "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/DetectFromUrl.json b/specification/ai/Face/examples/v1.1-preview.1/DetectFromUrl.json new file mode 100644 index 000000000000..469992dd6154 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/DetectFromUrl.json @@ -0,0 +1,183 @@ +{ + "title": "Detect with Image URL", + "operationId": "FaceDetectionOperations_DetectFromUrl", + 
"parameters": { + "apiVersion": "v1.1-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 
111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromStream.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromStream.json new file mode 100644 index 000000000000..42601eb7f278 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to FaceList", + "operationId": "FaceListOperations_AddFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromUrl.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromUrl.json new file mode 100644 index 000000000000..f0da5f86413b --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to FaceList from Url", + "operationId": 
"FaceListOperations_AddFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromStream.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromStream.json new file mode 100644 index 000000000000..d35761dda3ff --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to LargeFaceList", + "operationId": "FaceListOperations_AddLargeFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromUrl.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromUrl.json new file mode 100644 index 000000000000..99d7ed8a9d2c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_AddLargeFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to LargeFaceList from Url", + "operationId": "FaceListOperations_AddLargeFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": 
{ + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateFaceList.json new file mode 100644 index 000000000000..94c1e9a6775e --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create FaceList", + "operationId": "FaceListOperations_CreateFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateLargeFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateLargeFaceList.json new file mode 100644 index 000000000000..f402a08149a4 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_CreateLargeFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargeFaceList", + "operationId": "FaceListOperations_CreateLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceList.json new file mode 100644 index 000000000000..0c47e068577d --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete 
FaceList", + "operationId": "FaceListOperations_DeleteFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceListFace.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceListFace.json new file mode 100644 index 000000000000..a6fdb812eeac --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face from FaceList", + "operationId": "FaceListOperations_DeleteFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceList.json new file mode 100644 index 000000000000..7dd5267be454 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceListFace.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceListFace.json new file mode 100644 index 000000000000..31a2b680969b --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_DeleteLargeFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face From LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceListFace", + "parameters": { + 
"apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceList.json new file mode 100644 index 000000000000..9821416e1b95 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get FaceList", + "operationId": "FaceListOperations_GetFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceLists.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceLists.json new file mode 100644 index 000000000000..53a5d318ae64 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetFaceLists.json @@ -0,0 +1,20 @@ +{ + "title": "Get FaceLists", + "operationId": "FaceListOperations_GetFaceLists", + "parameters": { + "apiVersion": "v1.1-preview.1", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceList.json new file mode 100644 index 000000000000..ea56dc176324 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFace.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFace.json new file mode 100644 index 000000000000..17e867ce7136 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFace.json @@ -0,0 +1,17 @@ +{ + "title": "Get Face from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFaces.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFaces.json new file mode 100644 index 000000000000..46f8bf8a6205 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListFaces.json @@ -0,0 +1,20 @@ +{ + "title": "Get Faces from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFaces", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + 
}, + "responses": { + "200": { + "body": [ + { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListTrainingStatus.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListTrainingStatus.json new file mode 100644 index 000000000000..cbc6d28a63ca --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceListTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListTrainingStatus", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceLists.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceLists.json new file mode 100644 index 000000000000..83578707c7e4 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_GetLargeFaceLists.json @@ -0,0 +1,22 @@ +{ + "title": "Get LargeFaceLists", + "operationId": "FaceListOperations_GetLargeFaceLists", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "my_list_id", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_TrainLargeFaceList.json 
b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_TrainLargeFaceList.json new file mode 100644 index 000000000000..a3525a208a5e --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_TrainLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargeFaceList", + "operationId": "FaceListOperations_TrainLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateFaceList.json new file mode 100644 index 000000000000..7619a581e7be --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update FaceList", + "operationId": "FaceListOperations_UpdateFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceList.json new file mode 100644 index 000000000000..d61a004eadb4 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceListFace.json b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceListFace.json new file mode 100644 index 000000000000..7798cb324a97 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceListOperations_UpdateLargeFaceListFace.json @@ -0,0 +1,15 @@ +{ + "title": "Update Face in LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilar.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilar.json new file mode 100644 index 000000000000..3a78056208ca --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilar.json @@ -0,0 +1,26 @@ +{ + "title": "Find Similar among Face IDs", + "operationId": "FaceRecognitionOperations_FindSimilar", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceIds": [ + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7" + ] + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.9, + "faceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromFaceList.json new file mode 100644 index 000000000000..7c761424783e --- /dev/null +++ 
b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar from FaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceListId": "your_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json new file mode 100644 index 000000000000..f83d2ed8e685 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar from LargeFaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "largeFaceListId": "your_large_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_Group.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_Group.json new file mode 100644 index 000000000000..659611be7f2c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_Group.json @@ -0,0 +1,41 @@ +{ + "title": "Group Face IDs", + "operationId": "FaceRecognitionOperations_Group", + "parameters": { + "apiVersion": "v1.1-preview.1", + 
"body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426", + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "65d083d4-9447-47d1-af30-b626144bf0fb", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ] + } + }, + "responses": { + "200": { + "body": { + "groups": [ + [ + "c5c24a82-6845-4031-9d5d-978df9175426", + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ], + [ + "65d083d4-9447-47d1-af30-b626144bf0fb", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315" + ] + ], + "messyGroup": [ + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json new file mode 100644 index 000000000000..f1bcf858243d --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from DynamicPersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json 
b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json new file mode 100644 index 000000000000..bb9d12973c50 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from LargePersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "largePersonGroupId": "your_large_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonDirectory.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonDirectory.json new file mode 100644 index 000000000000..68ea2773abf0 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonDirectory.json @@ -0,0 +1,32 @@ +{ + "title": "Identify from PersonDirectory", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ], + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git 
a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonGroup.json new file mode 100644 index 000000000000..aa429dd22a70 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_IdentifyFromPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from PersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personGroupId": "your_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFaceToFace.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFaceToFace.json new file mode 100644 index 000000000000..56f14664116a --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFaceToFace.json @@ -0,0 +1,19 @@ +{ + "title": "Verify Face to Face", + "operationId": "FaceRecognitionOperations_VerifyFaceToFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId1": "c5c24a82-6845-4031-9d5d-978df9175426", + "faceId2": "3aa87e30-b380-48eb-ad9e-1aa54fc52bd3" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromLargePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromLargePersonGroup.json new file mode 100644 index 
000000000000..1f179351b860 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromLargePersonGroup.json @@ -0,0 +1,20 @@ +{ + "title": "Verify from LargePersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "largePersonGroupId": "your_large_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonDirectory.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonDirectory.json new file mode 100644 index 000000000000..088f9560ae71 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonDirectory.json @@ -0,0 +1,19 @@ +{ + "title": "Verify from PersonDirectory", + "operationId": "FaceRecognitionOperations_VerifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonGroup.json new file mode 100644 index 000000000000..8ce21790ac88 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/FaceRecognitionOperations_VerifyFromPersonGroup.json @@ -0,0 +1,20 @@ +{ + "title": "Verify from PersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + 
"faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "personGroupId": "your_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/GetOperationResult.json b/specification/ai/Face/examples/v1.1-preview.1/GetOperationResult.json new file mode 100644 index 000000000000..9393fc18a96c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/GetOperationResult.json @@ -0,0 +1,20 @@ +{ + "title": "Get Face Operation Status", + "operationId": "GetOperationResult", + "parameters": { + "apiVersion": "v1.1-preview.1", + "operationId": "1b22ff44-c7dc-43ce-93be-67d3283ba86c" + }, + "responses": { + "200": { + "body": { + "operationId": "1b22ff44-c7dc-43ce-93be-67d3283ba86c", + "status": "notStarted", + "createdTime": "2024-03-05T11:08:20.193Z", + "finishedTime": "2024-03-05T11:08:20.193Z", + "lastActionTime": "2024-03-05T11:08:20.193Z", + "message": null + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessSession.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessSession.json new file mode 100644 index 000000000000..1c111f7fcd7a --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessSession.json @@ -0,0 +1,22 @@ +{ + "title": "Create Liveness Session", + "operationId": "LivenessSessionOperations_CreateLivenessSession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "livenessOperationMode": "Passive", + "sendResultsToClient": true, + "deviceCorrelationIdSetInClient": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm" + } + } + } +} diff 
--git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySession.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySession.json new file mode 100644 index 000000000000..72158007876e --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySession.json @@ -0,0 +1,22 @@ +{ + "title": "Create LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "livenessOperationMode": "Passive", + "sendResultsToClient": true, + "deviceCorrelationIdSetInClient": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json new file mode 100644 index 000000000000..f1a0cdad2bcb --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json @@ -0,0 +1,26 @@ +{ + "title": "Create LivenessWithVerify Session with VerifyImage", + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage", + "parameters": { + "apiVersion": "v1.1-preview.1", + "Parameters": "{\"livenessOperationMode\": \"Passive\", \"sendResultsToClient\": true, \"deviceCorrelationIdSetInClient\": true, \"deviceCorrelationId\": \"your_device_correlation_id\", \"authTokenTimeToLiveInSeconds\": 60}", + "VerifyImage": "" + }, + "responses": { + "200": { + "body": { + "sessionId": 
"b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "verifyImage": { + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "qualityForRecognition": "high" + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessSession.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessSession.json new file mode 100644 index 000000000000..c5a6e71328cc --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessSession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete Liveness Session", + "operationId": "LivenessSessionOperations_DeleteLivenessSession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessWithVerifySession.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessWithVerifySession.json new file mode 100644 index 000000000000..9c432670b62b --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_DeleteLivenessWithVerifySession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_DeleteLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionAuditEntries.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionAuditEntries.json new file mode 100644 index 000000000000..091839dfa977 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionAuditEntries.json @@ -0,0 +1,36 @@ +{ + "title": "Get LivenessSession Audit Entries", + "operationId": "LivenessSessionOperations_GetLivenessSessionAuditEntries", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "start": "0", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, + "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + "latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionResult.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionResult.json new file mode 100644 index 000000000000..ffabf3220489 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessionResult.json @@ -0,0 +1,41 @@ +{ + "title": "Get LivenessSession Result", + "operationId": "LivenessSessionOperations_GetLivenessSessionResult", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:29.698Z", + "sessionStartDateTime": "2024-03-05T11:07:29.698Z", + "sessionExpired": true, + 
"deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 600, + "status": "NotStarted", + "result": { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, + "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + "latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessions.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessions.json new file mode 100644 index 000000000000..38ca5ad52bd6 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessSessions.json @@ -0,0 +1,23 @@ +{ + "title": "Get LivenessSessions", + "operationId": "LivenessSessionOperations_GetLivenessSessions", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:28.540Z", + "sessionStartDateTime": "2024-03-05T11:07:28.540Z", + "sessionExpired": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json 
b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json new file mode 100644 index 000000000000..70befacd6ece --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json @@ -0,0 +1,36 @@ +{ + "title": "Get LivenessWithVerify Session Audit Entries", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "start": "0", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, + "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + "latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json new file mode 100644 index 000000000000..d4cdc31dc549 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json @@ -0,0 +1,41 @@ +{ + "title": "Get LivenessWithVerify Session Result", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionResult", + "parameters": { + "apiVersion": "v1.1-preview.1", + 
"sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:29.698Z", + "sessionStartDateTime": "2024-03-05T11:07:29.698Z", + "sessionExpired": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 600, + "status": "NotStarted", + "result": { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, + "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + "latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessions.json b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessions.json new file mode 100644 index 000000000000..36a4283123d6 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/LivenessSessionOperations_GetLivenessWithVerifySessions.json @@ -0,0 +1,23 @@ +{ + "title": "Get LivenessWithVerify Sessions", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessions", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:28.540Z", + "sessionStartDateTime": "2024-03-05T11:07:28.540Z", + "sessionExpired": true, + 
"deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromStream.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromStream.json new file mode 100644 index 000000000000..799b950ae8db --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromStream.json @@ -0,0 +1,24 @@ +{ + "title": "Add Face to a PersonDirectory Person", + "operationId": "PersonDirectoryOperations_AddPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + }, + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromUrl.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromUrl.json new file mode 100644 index 000000000000..7bb0e2ca61a2 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_AddPersonFaceFromUrl.json @@ -0,0 +1,26 @@ +{ + "title": "Add Face to PersonDirectory Person from Url", + "operationId": "PersonDirectoryOperations_AddPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + 
"responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + }, + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroup.json new file mode 100644 index 000000000000..75aaef75ebfe --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Create DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_CreateDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json new file mode 100644 index 000000000000..8aca52123292 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json @@ -0,0 +1,22 @@ +{ + "title": "Create DynamicPersonGroup with Person", + "operationId": "PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data", + "addPersonIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ] + } + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git 
a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreatePerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreatePerson.json new file mode 100644 index 000000000000..5a24293e2e57 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_CreatePerson.json @@ -0,0 +1,22 @@ +{ + "title": "Create Person in PersonDirectory", + "operationId": "PersonDirectoryOperations_CreatePerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "name": "your_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + }, + "body": { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeleteDynamicPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeleteDynamicPersonGroup.json new file mode 100644 index 000000000000..ce885996a49c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeleteDynamicPersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Delete DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_DeleteDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id" + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePerson.json new file mode 100644 index 000000000000..ec878a74a629 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePerson.json @@ -0,0 +1,16 @@ +{ + "title": "Delete Person", + "operationId": "PersonDirectoryOperations_DeletePerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePersonFace.json new file mode 100644 index 000000000000..b5888b351645 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_DeletePersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Delete Face from PersonDirectory Person", + "operationId": "PersonDirectoryOperations_DeletePersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroup.json new file mode 100644 index 000000000000..d9c77ab11b0c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroup.json @@ -0,0 +1,17 @@ +{ + "title": "Get DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id" + }, 
+ "responses": { + "200": { + "body": { + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json new file mode 100644 index 000000000000..ad56754e3966 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json @@ -0,0 +1,20 @@ +{ + "title": "Get DynamicPersonGroup Persons", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroupPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": { + "personIds": [ + "1d44651f-fadb-41f5-8918-c30609964489", + "c1d3b745-2548-4abf-b057-a386c9bd52f1" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json new file mode 100644 index 000000000000..9713e78774dd --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json @@ -0,0 +1,19 @@ +{ + "title": "Get DynamicPersonGroup References", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroupReferences", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": { + "dynamicPersonGroupIds": [ + "your_dynamic_person_group_id" + ] + } + } + } +} diff --git 
a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroups.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroups.json new file mode 100644 index 000000000000..193635dd48c6 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetDynamicPersonGroups.json @@ -0,0 +1,20 @@ +{ + "title": "Get DynamicPersonGroups", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroups", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "dynamic_person_group_id", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPerson.json new file mode 100644 index 000000000000..a44a64dade8e --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPerson.json @@ -0,0 +1,17 @@ +{ + "title": "Get Person from PersonDirectory", + "operationId": "PersonDirectoryOperations_GetPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + }, + "responses": { + "200": { + "body": { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "name": "your_person_name", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFace.json new file mode 100644 index 000000000000..c9936d951197 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from PersonDirectory Person", + 
"operationId": "PersonDirectoryOperations_GetPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFaces.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFaces.json new file mode 100644 index 000000000000..be30b43b5dc5 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersonFaces.json @@ -0,0 +1,19 @@ +{ + "title": "Get Faces from PersonDirectory Person", + "operationId": "PersonDirectoryOperations_GetPersonFaces", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01" + }, + "responses": { + "200": { + "body": { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersons.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersons.json new file mode 100644 index 000000000000..3375158f280c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_GetPersons.json @@ -0,0 +1,20 @@ +{ + "title": "Get Persons from PersonDirectory", + "operationId": "PersonDirectoryOperations_GetPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "name": "your_person_name", + "userData": "your_user_data" + 
} + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroup.json new file mode 100644 index 000000000000..1cbe9e6f2fa4 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_UpdateDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json new file mode 100644 index 000000000000..d329ed1854d0 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json @@ -0,0 +1,25 @@ +{ + "title": "Update DynamicPersonGroup with Person Changes", + "operationId": "PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data", + "addPersonIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ], + "removePersonIds": [ + "67f7e96d-823a-4318-9bf6-e9a2a2608899" + ] + } + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePerson.json 
b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePerson.json new file mode 100644 index 000000000000..c9b5dd6f09a6 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePerson.json @@ -0,0 +1,15 @@ +{ + "title": "Update Person in PersonDirectory", + "operationId": "PersonDirectoryOperations_UpdatePerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "body": { + "name": "your_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePersonFace.json new file mode 100644 index 000000000000..4820362ff86d --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonDirectoryOperations_UpdatePersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face of PersonDirectory Person", + "operationId": "PersonDirectoryOperations_UpdatePersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..141ddfa3f570 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face in LargePersonGroup Person", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFace", + "parameters": { + 
"apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..d508fe448032 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face in LargePersonGroup Person from Url", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..a5284dd3dd85 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face to PersonGroup Person", + "operationId": "PersonGroupOperations_AddPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + 
"personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..2ffd39e216cf --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face to PersonGroupPerson from Url", + "operationId": "PersonGroupOperations_AddPersonGroupPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroup.json new file mode 100644 index 000000000000..331f4fea89e2 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": 
"your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroupPerson.json new file mode 100644 index 000000000000..b20a03adf246 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreateLargePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroup.json new file mode 100644 index 000000000000..326e8074f4d5 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroupPerson.json new file mode 100644 index 000000000000..e9002c1d37a1 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_CreatePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroup.json new file mode 100644 index 000000000000..2eb1187dc33c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPerson.json new file mode 100644 index 000000000000..84b5b5abe4ef --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..2a8d88a8506d --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from LargePersonGroup Person", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroup.json new file mode 100644 index 000000000000..b5848fb96e01 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": "Delete PersonGroup", + "operationId": "PersonGroupOperations_DeletePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPerson.json new file mode 100644 index 000000000000..380bec45e4b9 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from PersonGroup", + "operationId": "PersonGroupOperations_DeletePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": 
"your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPersonFace.json new file mode 100644 index 000000000000..1a1bcd0103d6 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_DeletePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from PersonGroup Person", + "operationId": "PersonGroupOperations_DeletePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroup.json new file mode 100644 index 000000000000..7c292178d50c --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPerson.json new file mode 100644 index 
000000000000..5f17d932ce46 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..ec0074928330 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from LargePersonGroup Person", + "operationId": "PersonGroupOperations_GetLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersons.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersons.json new file mode 100644 index 000000000000..6301d513e1e5 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json new file mode 100644 index 000000000000..37bfa8a54448 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupTrainingStatus", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroups.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroups.json new file mode 100644 index 000000000000..41c1030f6917 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetLargePersonGroups.json @@ -0,0 +1,22 
@@ +{ + "title": "Get LargePersonGroups", + "operationId": "PersonGroupOperations_GetLargePersonGroups", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroup.json new file mode 100644 index 000000000000..d3dffc82041f --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": "your_person_group_id" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPerson.json new file mode 100644 index 000000000000..19e331d9a9d4 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + "personId": 
"25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersonFace.json new file mode 100644 index 000000000000..3159157cb95f --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from PersonGroup Person", + "operationId": "PersonGroupOperations_GetPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersons.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersons.json new file mode 100644 index 000000000000..9105ebc4274d --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } 
+} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupTrainingStatus.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupTrainingStatus.json new file mode 100644 index 000000000000..747ce9df7199 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupTrainingStatus", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroups.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroups.json new file mode 100644 index 000000000000..81b6bee61596 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_GetPersonGroups.json @@ -0,0 +1,22 @@ +{ + "title": "Get PersonGroups", + "operationId": "PersonGroupOperations_GetPersonGroups", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": "your_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainLargePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainLargePersonGroup.json new file mode 100644 index 000000000000..99a4c4ba537a --- 
/dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargePersonGroup", + "operationId": "PersonGroupOperations_TrainLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainPersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainPersonGroup.json new file mode 100644 index 000000000000..014adaab4dd6 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_TrainPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train PersonGroup", + "operationId": "PersonGroupOperations_TrainPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroup.json new file mode 100644 index 000000000000..a0811844aaf3 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPerson.json new file mode 100644 index 000000000000..65b8960f057e --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update Person in LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..7c615968cc63 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face in LargePersonGroup Person", + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroup.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroup.json new file mode 100644 index 000000000000..05451bf387f7 --- /dev/null +++ 
b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update PersonGroup", + "operationId": "PersonGroupOperations_UpdatePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPerson.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPerson.json new file mode 100644 index 000000000000..4a81657da4a6 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update PersonGroup Person", + "operationId": "PersonGroupOperations_UpdatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPersonFace.json b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPersonFace.json new file mode 100644 index 000000000000..9713f646dc88 --- /dev/null +++ b/specification/ai/Face/examples/v1.1-preview.1/PersonGroupOperations_UpdatePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face in PersonGroup Person", + "operationId": "PersonGroupOperations_UpdatePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + 
}, + "responses": { + "200": {} + } +} diff --git a/specification/ai/Face/main.tsp b/specification/ai/Face/main.tsp new file mode 100644 index 000000000000..069b23ed0899 --- /dev/null +++ b/specification/ai/Face/main.tsp @@ -0,0 +1,58 @@ +import "@typespec/rest"; +import "@typespec/versioning"; +import "@azure-tools/typespec-azure-core"; +import "./routes.common.tsp"; +import "./routes.detection.tsp"; +import "./routes.recognition.tsp"; +import "./routes.facelist.tsp"; +import "./routes.persondirectory.tsp"; +import "./routes.persongroup.tsp"; +import "./routes.session.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using TypeSpec.Versioning; + +@useAuth(KeyAuth | AADToken) +@service({ + title: "Azure AI Face API", +}) +@versioned(Versions) +@server( + "{endpoint}/face/{apiVersion}", + "Azure AI Face API", + { + @doc(""" +Supported Cognitive Services endpoints (protocol and hostname, for example: +https://{resource-name}.cognitiveservices.azure.com). +""") + endpoint: url, + + @doc("API Version") + @path + apiVersion: Versions, + } +) +namespace Face; + +@doc("The secret key for your Azure AI Face subscription.") +model KeyAuth is ApiKeyAuth; + +#suppress "@azure-tools/typespec-azure-core/casing-style" "Suppress casing style for AAD" +@doc("The Azure Active Directory OAuth2 Flow") +model AADToken + is OAuth2Auth<[ + { + type: OAuth2FlowType.authorizationCode; + authorizationUrl: "https://api.example.com/oauth2/authorize"; + tokenUrl: "https://api.example.com/oauth2/token"; + scopes: ["https://cognitiveservices.azure.com/.default"]; + } + ]>; + +@doc("API versions for Azure AI Face API.") +enum Versions { + @useDependency(Azure.Core.Versions.v1_0_Preview_2) + @doc("v1.1-preview.1") + v1_1_preview_1: "v1.1-preview.1", +} diff --git a/specification/ai/Face/models.common.tsp b/specification/ai/Face/models.common.tsp new file mode 100644 index 000000000000..41092c4b44f0 --- /dev/null +++ b/specification/ai/Face/models.common.tsp @@ -0,0 +1,296 @@ +import 
"@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; + +using Azure.Core; +using TypeSpec.Http; +using TypeSpec.Rest; + +namespace Face; + +@pattern("^[a-z0-9-_]+$") +@maxLength(64) +@minLength(1) +scalar collectionId extends string; + +@doc("The ApiVersion path parameter.") +model ApiVersionPathParameter { + @segment("face") + @path("apiVersion") + @doc("The API version to use for this operation.") + apiVersion: string; +} + +alias ListRequestOptions = { + @doc("List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.") + @query + start?: string; + + @doc("The number of items to list, ranging in [1, 1000]. Default is 1000.") + @query + @minValue(1) + @maxValue(1000) + top?: int32 = 1000; +}; + +@doc("The recognition model for the face.") +union RecognitionModel { + string, + + @doc("The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model.") + "recognition_01", + + @doc("Recognition model released in 2019 March.") + "recognition_02", + + @doc("Recognition model released in 2020 May.") + "recognition_03", + + @doc("Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy.") + "recognition_04", +} + +@doc("The detection model for the face.") +union DetectionModel { + string, + + @doc("The default detection model. Recommend for near frontal face detection. 
For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected.") + "detection_01", + + @doc("Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces.") + "detection_02", + + @doc("Detection model released in 2021 February with improved accuracy especially on small faces.") + "detection_03", +} + +alias UserDefinedFields = { + @doc("User defined name, maximum length is 128.") + @maxLength(128) + @minLength(1) + name: string; + + @doc("Optional user defined data. Length should not exceed 16K.") + @maxLength(16384) + userData?: string; +}; + +alias UserDefinedFieldsForUpdate = { + @doc("User defined name, maximum length is 128.") + @maxLength(128) + @minLength(1) + name?: string; + + @doc("Optional user defined data. Length should not exceed 16K.") + @maxLength(16384) + userData?: string; +}; + +@doc("Common model for face list and person group.") +model BaseCollection { + ...UserDefinedFields; + + @doc("Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds.") + recognitionModel?: RecognitionModel; +} + +alias CreateCollectionOptions = { + ...UserDefinedFields; + + @doc("The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.") + recognitionModel?: RecognitionModel = "recognition_01"; +}; + +alias FaceUserData = { + @doc("User-provided data attached to the face. 
The length limit is 1K.") + @maxLength(1024) + userData?: string; +}; + +@doc("Common model for persisted face.") +@resource("persistedfaces") +model BaseFace { + @key + @visibility("read") + @doc("Face ID of the face.") + persistedFaceId: uuid; + + ...FaceUserData; +} + +alias AddFaceOptions = { + @doc("A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.") + @query({ + format: "csv", + }) + @maxItems(4) + @minItems(4) + targetFace?: int32[]; + + @query + @doc("The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.") + detectionModel?: DetectionModel = "detection_01"; + + @query + @doc("User-provided data attached to the face. The size limit is 1K.") + @maxLength(1024) + userData?: string; +}; + +@doc("Response body for adding face.") +model AddFaceResult { + @doc("Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in \"Detect\" and will expire in 24 hours after the detection call.") + persistedFaceId: uuid; +} + +@doc("Response of create person.") +model CreatePersonResult { + @doc("Person ID of the person.") + personId: uuid; +} + +alias ReturnRecognitionModelOptions = { + @query + @doc("Return 'recognitionModel' or not. 
The default value is false.") + returnRecognitionModel?: boolean = false; +}; + +@doc("The status of long running operation.") +@lroStatus +union OperationStatus { + string, + + @doc("The operation is not started.") + "notStarted", + + @doc("The operation is still running.") + "running", + + @doc("The operation is succeeded.") + @lroSucceeded + "succeeded", + + @doc("The operation is failed.") + @lroFailed + "failed", +} + +@doc("Training result of a container") +model TrainingResult { + @doc("Training status of the container.") + status: OperationStatus; + + @doc("A combined UTC date and time string that describes the created time of the person group, large person group or large face list.") + createdDateTime: utcDateTime; + + @doc("A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained.") + lastActionDateTime: utcDateTime; + + @doc("A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list.") + lastSuccessfulTrainingDateTime: utcDateTime; + + @doc("Show failure message when training failed (omitted when training succeed).") + message?: string; +} + +@doc("Identify result.") +model IdentificationResult { + @doc("faceId of the query face.") + faceId: uuid; + + @doc("Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array.") + candidates: IdentificationCandidate[]; +} + +@doc("Candidate for identify call.") +model IdentificationCandidate { + @doc("personId of candidate person.") + personId: uuid; + + @doc("Confidence value of the candidate. The higher confidence, the more similar. 
Range between [0,1].") + @minValue(0) + @maxValue(1) + confidence: float32; +} + +@doc("Verify result.") +model VerificationResult { + @doc("True if the two faces belong to the same person or the face belongs to the person, otherwise false.") + isIdentical: boolean; + + @doc("A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data.") + @minValue(0) + @maxValue(1) + confidence: float32; +} + +@doc("Similar face searching mode.") +union FindSimilarMatchMode { + string, + + @doc("Match person.") + "matchPerson", + + @doc("Match face.") + "matchFace", +} + +@doc("Response body for find similar face operation.") +model FindSimilarResult { + @doc("Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1].") + @minValue(0) + @maxValue(1) + confidence: float32; + + @doc("faceId of candidate face when find by faceIds. faceId is created by \"Detect\" and will expire 24 hours after the detection call.") + faceId?: uuid; + + @doc("persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire.") + persistedFaceId?: uuid; +} + +@doc("Response body for group face operation.") +model GroupingResult { + @doc("A partition of the original faces based on face similarity. Groups are ranked by number of faces.") + groups: uuid[][]; + + @doc("Face ids array of faces that cannot find any similar faces from original faces.") + messyGroup: uuid[]; +} + +alias AddFaceDescriptionInList = """ +> +* + * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. 
+ * Each person entry can hold up to 248 faces. + * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + * "targetFace" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided "targetFace" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully. + * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures. + * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model +"""; + +alias ListRequestOptionsDescriptionInList = """ +> +* + * "start" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting "start" to an empty value indicates that entries should be returned starting from the first item. + * "top" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify "start" with the personId of the last entry returned in the current call. + +> [!TIP] +> +> * For example, there are total 5 items with their IDs: "itemId1", ..., "itemId5". +> * "start=&top=" will return all 5 items. +> * "start=&top=2" will return "itemId1", "itemId2". +> * "start=itemId2&top=3" will return "itemId3", "itemId4", "itemId5". +"""; + +@doc("The error object. 
For comprehensive details on error codes and messages returned by the Face Service, please refer to the following link: https://aka.ms/face-error-codes-and-messages.") +model FaceError { + @doc("One of a server-defined set of error codes.") + code: string; + + @doc("A human-readable representation of the error.") + message: string; +} + +model FaceErrorResponse is Azure.Core.Foundations.ErrorResponseBase; diff --git a/specification/ai/Face/models.detect.tsp b/specification/ai/Face/models.detect.tsp new file mode 100644 index 000000000000..04e49b4dd26a --- /dev/null +++ b/specification/ai/Face/models.detect.tsp @@ -0,0 +1,512 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.common.tsp"; + +using Azure.Core; +using TypeSpec.Http; +using TypeSpec.Rest; + +namespace Face; + +alias FaceDetectionOptions = { + @doc("The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.") + @query + detectionModel?: DetectionModel = "detection_01"; + + @doc("The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.") + @query + recognitionModel?: RecognitionModel = "recognition_01"; + + @doc("Return faceIds of the detected faces or not. The default value is true.") + @query + returnFaceId?: boolean = true; + + @doc("Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. 
Face attribute analysis has additional computational and time cost.") + @query({ + format: "csv", + }) + returnFaceAttributes?: FaceAttributeType[]; + + @doc("Return face landmarks of the detected faces or not. The default value is false.") + @query + returnFaceLandmarks?: boolean = false; + + @doc("Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.") + @query + returnRecognitionModel?: boolean = false; + + @doc("The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).") + @query + @minValue(60) + @maxValue(86400) + faceIdTimeToLive?: int32 = 86400; +}; + +#suppress "@azure-tools/typespec-azure-core/use-extensible-enum" "Use fixed enum to represent modelAsString = false" +@doc("Available options for detect face with attribute.") +union FaceAttributeType { + string, + + @doc("3-D roll/yaw/pitch angles for face direction.") + "headPose", + + @doc("Glasses type. Values include 'NoGlasses', 'ReadingGlasses', 'Sunglasses', 'SwimmingGoggles'.") + "glasses", + + @doc("Whether each facial area is occluded, including forehead, eyes and mouth.") + "occlusion", + + @doc("Accessories around face, including 'headwear', 'glasses' and 'mask'. Empty array means no accessories detected. Note this is after a face is detected. Large mask could result in no face to be detected.") + "accessories", + + @doc("Face is blurry or not. Level returns 'Low', 'Medium' or 'High'. Value returns a number between [0,1], the larger the blurrier.") + "blur", + + @doc("Face exposure level. Level returns 'GoodExposure', 'OverExposure' or 'UnderExposure'.") + "exposure", + + @doc("Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier") + "noise", + + @doc("Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. 
Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered.") + "mask", + + @doc("The overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. The value is an informal rating of low, medium, or high. Only 'high' quality images are recommended for person enrollment and quality at or above 'medium' is recommended for identification scenarios. The attribute is only available when using any combinations of detection models detection_01 or detection_03, and recognition models recognition_03 or recognition_04.") + "qualityForRecognition", + + @doc("Age in years.") + "age", + + @doc("Smile intensity, a number between [0,1].") + "smile", + + @doc("Properties describing facial hair attributes.") + "facialHair", + + @doc("Properties describing hair attributes.") + "hair", +} + +@doc("Response for detect API.") +model FaceDetectionResult { + @doc("Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true.") + faceId?: uuid; + + @doc("The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true.") + recognitionModel?: RecognitionModel; + + @doc("A rectangle area for the face location on image.") + faceRectangle: FaceRectangle; + + @doc("An array of 27-point face landmarks pointing to the important positions of face components. 
To return this, it requires 'returnFaceLandmarks' parameter to be true.") + faceLandmarks?: FaceLandmarks; + + @doc("Face attributes for detected face.") + faceAttributes?: FaceAttributes; +} + +@doc("A rectangle within which a face can be found.") +model FaceRectangle { + @doc("The distance from the top edge of the image to the top edge of the rectangle, in pixels.") + top: int32; + + @doc("The distance from the left edge of the image to the left edge of the rectangle, in pixels.") + left: int32; + + @doc("The width of the rectangle, in pixels.") + width: int32; + + @doc("The height of the rectangle, in pixels.") + height: int32; +} + +@doc("A collection of 27-point face landmarks pointing to the important positions of face components.") +model FaceLandmarks { + @doc("The coordinates of the left eye pupil.") + pupilLeft: LandmarkCoordinate; + + @doc("The coordinates of the right eye pupil.") + pupilRight: LandmarkCoordinate; + + @doc("The coordinates of the nose tip.") + noseTip: LandmarkCoordinate; + + @doc("The coordinates of the mouth left.") + mouthLeft: LandmarkCoordinate; + + @doc("The coordinates of the mouth right.") + mouthRight: LandmarkCoordinate; + + @doc("The coordinates of the left eyebrow outer.") + eyebrowLeftOuter: LandmarkCoordinate; + + @doc("The coordinates of the left eyebrow inner.") + eyebrowLeftInner: LandmarkCoordinate; + + @doc("The coordinates of the left eye outer.") + eyeLeftOuter: LandmarkCoordinate; + + @doc("The coordinates of the left eye top.") + eyeLeftTop: LandmarkCoordinate; + + @doc("The coordinates of the left eye bottom.") + eyeLeftBottom: LandmarkCoordinate; + + @doc("The coordinates of the left eye inner.") + eyeLeftInner: LandmarkCoordinate; + + @doc("The coordinates of the right eyebrow inner.") + eyebrowRightInner: LandmarkCoordinate; + + @doc("The coordinates of the right eyebrow outer.") + eyebrowRightOuter: LandmarkCoordinate; + + @doc("The coordinates of the right eye inner.") + eyeRightInner: LandmarkCoordinate; + 
+ @doc("The coordinates of the right eye top.") + eyeRightTop: LandmarkCoordinate; + + @doc("The coordinates of the right eye bottom.") + eyeRightBottom: LandmarkCoordinate; + + @doc("The coordinates of the right eye outer.") + eyeRightOuter: LandmarkCoordinate; + + @doc("The coordinates of the nose root left.") + noseRootLeft: LandmarkCoordinate; + + @doc("The coordinates of the nose root right.") + noseRootRight: LandmarkCoordinate; + + @doc("The coordinates of the nose left alar top.") + noseLeftAlarTop: LandmarkCoordinate; + + @doc("The coordinates of the nose right alar top.") + noseRightAlarTop: LandmarkCoordinate; + + @doc("The coordinates of the nose left alar out tip.") + noseLeftAlarOutTip: LandmarkCoordinate; + + @doc("The coordinates of the nose right alar out tip.") + noseRightAlarOutTip: LandmarkCoordinate; + + @doc("The coordinates of the upper lip top.") + upperLipTop: LandmarkCoordinate; + + @doc("The coordinates of the upper lip bottom.") + upperLipBottom: LandmarkCoordinate; + + @doc("The coordinates of the under lip top.") + underLipTop: LandmarkCoordinate; + + @doc("The coordinates of the under lip bottom.") + underLipBottom: LandmarkCoordinate; +} + +@doc("Landmark coordinates within an image.") +model LandmarkCoordinate { + @doc("The horizontal component, in pixels.") + x: float32; + + @doc("The vertical component, in pixels.") + y: float32; +} + +@doc("Face attributes for the detected face.") +model FaceAttributes { + @doc("Age in years.") + age?: float32; + + @doc("Smile intensity, a number between [0,1].") + @minValue(0) + @maxValue(1) + smile?: float32; + + @doc("Properties describing facial hair attributes.") + facialHair?: FacialHair; + + @doc("Glasses type if any of the face.") + glasses?: GlassesType; + + @doc("3-D roll/yaw/pitch angles for face direction.") + headPose?: HeadPose; + + @doc("Properties describing hair attributes.") + hair?: HairProperties; + + @doc("Properties describing occlusions on a given face.") + occlusion?: 
OcclusionProperties; + + @doc("Properties describing any accessories on a given face.") + accessories?: AccessoryItem[]; + + @doc("Properties describing any presence of blur within the image.") + blur?: BlurProperties; + + @doc("Properties describing exposure level of the image.") + exposure?: ExposureProperties; + + @doc("Properties describing noise level of the image.") + noise?: NoiseProperties; + + @doc("Properties describing the presence of a mask on a given face.") + mask?: MaskProperties; + + @doc("Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on.") + qualityForRecognition?: QualityForRecognition; +} + +@doc("Properties describing facial hair attributes.") +model FacialHair { + @doc("A number ranging from 0 to 1 indicating a level of confidence associated with a property.") + @minValue(0) + @maxValue(1) + moustache: float32; + + @doc("A number ranging from 0 to 1 indicating a level of confidence associated with a property.") + @minValue(0) + @maxValue(1) + beard: float32; + + @doc("A number ranging from 0 to 1 indicating a level of confidence associated with a property.") + @minValue(0) + @maxValue(1) + sideburns: float32; +} + +@doc("Glasses type of the face.") +union GlassesType { + string, + + @doc("No glasses on the face.") + "noGlasses", + + @doc("Normal glasses on the face.") + "readingGlasses", + + @doc("Sunglasses on the face.") + "sunglasses", + + @doc("Swimming goggles on the face.") + "swimmingGoggles", +} + +@doc("3-D roll/yaw/pitch angles for face direction.") +model HeadPose { + @doc("Value of angles.") + pitch: float32; + + @doc("Value of angles.") + roll: float32; + + @doc("Value of angles.") + yaw: float32; +} + +@doc("Properties describing hair attributes.") +model HairProperties { + @doc("A number describing confidence level of whether the person is bald.") + @minValue(0) + @maxValue(1) + bald: float32; + + @doc("A boolean value 
describing whether the hair is visible in the image.") + invisible: boolean; + + @doc("An array of candidate colors and confidence level in the presence of each.") + hairColor: HairColor[]; +} + +@doc("An array of candidate colors and confidence level in the presence of each.") +model HairColor { + @doc("Name of the hair color.") + color: HairColorType; + + @doc("Confidence level of the color. Range between [0,1].") + @minValue(0) + @maxValue(1) + confidence: float32; +} + +@doc("Name of the hair color.") +union HairColorType { + string, + + @doc("Unknown.") + unknownHairColor: "unknown", + + @doc("White.") + "white", + + @doc("Gray.") + "gray", + + @doc("Blond.") + "blond", + + @doc("Brown.") + "brown", + + @doc("Red.") + "red", + + @doc("Black.") + "black", + + @doc("Other.") + "other", +} + +@doc("Properties describing occlusions on a given face.") +model OcclusionProperties { + @doc("A boolean value indicating whether forehead is occluded.") + foreheadOccluded: boolean; + + @doc("A boolean value indicating whether eyes are occluded.") + eyeOccluded: boolean; + + @doc("A boolean value indicating whether the mouth is occluded.") + mouthOccluded: boolean; +} + +@doc("Accessory item and corresponding confidence level.") +model AccessoryItem { + @doc("Type of the accessory.") + type: AccessoryType; + + @doc("Confidence level of the accessory type. 
Range between [0,1].") + @minValue(0) + @maxValue(1) + confidence: float32; +} + +@doc("Type of the accessory.") +union AccessoryType { + string, + + @doc("Head wear.") + "headwear", + + @doc("Glasses.") + "glasses", + + @doc("Mask.") + "mask", +} + +@doc("Properties describing any presence of blur within the image.") +model BlurProperties { + @doc("An enum value indicating level of blurriness.") + blurLevel: BlurLevel; + + @doc("A number indicating level of blurriness ranging from 0 to 1.") + @minValue(0) + @maxValue(1) + value: float32; +} + +@doc("Indicates level of blurriness.") +union BlurLevel { + string, + + @doc("Low blur level.") + "low", + + @doc("Medium blur level.") + "medium", + + @doc("High blur level.") + "high", +} + +@doc("Properties describing exposure level of the image.") +model ExposureProperties { + @doc("An enum value indicating level of exposure.") + exposureLevel: ExposureLevel; + + @doc("A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure.") + @minValue(0) + @maxValue(1) + value: float32; +} + +@doc("Indicates level of exposure.") +union ExposureLevel { + string, + + @doc("Low exposure level.") + "underExposure", + + @doc("Good exposure level.") + "goodExposure", + + @doc("High exposure level.") + "overExposure", +} + +@doc("Properties describing noise level of the image.") +model NoiseProperties { + @doc("An enum value indicating level of noise.") + noiseLevel: NoiseLevel; + + @doc("A number indicating level of noise ranging from 0 to 1. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. 
[0.7, 1] is high noise level.") + @minValue(0) + @maxValue(1) + value: float32; +} + +@doc("Indicates level of noise.") +union NoiseLevel { + string, + + @doc("Low noise level.") + "low", + + @doc("Medium noise level.") + "medium", + + @doc("High noise level.") + "high", +} + +@doc("Properties describing the presence of a mask on a given face.") +model MaskProperties { + @doc("A boolean value indicating whether nose and mouth are covered.") + noseAndMouthCovered: boolean; + + @doc("Type of the mask.") + type: MaskType; +} + +@doc("Type of the mask.") +union MaskType { + string, + + @doc("Face mask.") + "faceMask", + + @doc("No mask.") + "noMask", + + @doc("Other types of mask or occlusion.") + "otherMaskOrOcclusion", + + @doc("Uncertain.") + "uncertain", +} + +@doc("Indicates quality of image for recognition.") +union QualityForRecognition { + string, + + @doc("Low quality.") + "low", + + @doc("Medium quality.") + "medium", + + @doc("High quality.") + "high", +} diff --git a/specification/ai/Face/models.facelist.tsp b/specification/ai/Face/models.facelist.tsp new file mode 100644 index 000000000000..f6ffdfb437d0 --- /dev/null +++ b/specification/ai/Face/models.facelist.tsp @@ -0,0 +1,45 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.common.tsp"; + +using Azure.Core; +using TypeSpec.Http; +using TypeSpec.Rest; + +namespace Face; + +@doc("Face list is a list of faces, up to 1,000 faces.") +@resource("facelists") +model FaceList is BaseCollection { + @key + @visibility("read") + @doc("Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.") + faceListId: collectionId; + + @doc("Face ids of registered faces in the face list.") + persistedFaces?: FaceListFace[]; +} + +@doc("Face list item for list face list.") +model FaceListItem is BaseCollection { + @doc("Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.") + faceListId: collectionId; 
+} + +@parentResource(FaceList) +@doc("Face resource for face list.") +model FaceListFace is BaseFace; + +@doc("Large face list is a list of faces, up to 1,000,000 faces.") +@resource("largefacelists") +model LargeFaceList is BaseCollection { + @key + @visibility("read") + @doc("Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.") + largeFaceListId: collectionId; +} + +@parentResource(LargeFaceList) +@doc("Face resource for large face list.") +model LargeFaceListFace is BaseFace; diff --git a/specification/ai/Face/models.persondirectory.tsp b/specification/ai/Face/models.persondirectory.tsp new file mode 100644 index 000000000000..c67f66ad3d02 --- /dev/null +++ b/specification/ai/Face/models.persondirectory.tsp @@ -0,0 +1,110 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.common.tsp"; + +using Azure.Core; +using TypeSpec.Http; +using TypeSpec.Rest; + +namespace Face; + +@resource("persons") +@doc("Person resource for person directory") +model PersonDirectoryPerson { + @key + @visibility("read") + @doc("Person ID of the person.") + personId: uuid; + + ...UserDefinedFields; +} + +@resource("operations") +@doc("Long running operation resource for person directory.") +model OperationResult { + @key + @visibility("read") + @doc("Operation ID of the operation.") + operationId: uuid; + + @doc("Current status of the operation.") + status: OperationStatus; + + @doc("Date and time the operation was created.") + createdTime: utcDateTime; + + @doc("Date and time the operation was last updated.") + lastActionTime?: utcDateTime; + + @doc("Date and time the operation was finished.") + finishedTime?: utcDateTime; + + @doc("Message for the operation.") + message?: string; +} + +@resource("recognitionModels") +@parentResource(PersonDirectoryPerson) +@doc("Middle tier resource for person directory.") +model PersonDirectoryRecognitionModel { + @key + @doc("The 'recognitionModel' 
associated with faces.") + @visibility("create", "read") + recognitionModel: RecognitionModel; +} + +@resource("persistedfaces") +@parentResource(PersonDirectoryRecognitionModel) +@doc("Face resource for person directory person.") +model PersonDirectoryFace is BaseFace; + +@doc("A container that references Person Directory \"Create Person\".") +@resource("dynamicpersongroups") +model DynamicPersonGroup { + @key + @visibility("read") + @doc("ID of the dynamic person group.") + dynamicPersonGroupId: collectionId; + + ...UserDefinedFields; +} + +alias CreateDynamicPersonGroupOptions = { + ...UserDefinedFields; + + @doc("Array of personIds created by Person Directory \"Create Person\" to be added.") + @minItems(1) + addPersonIds: uuid[]; +}; + +alias UpdateDynamicPersonGroupOptions = { + ...UserDefinedFieldsForUpdate; + + @doc("Array of personIds created by Person Directory \"Create Person\" to be added.") + addPersonIds?: uuid[]; + + @doc("Array of personIds created by Person Directory \"Create Person\" to be removed.") + removePersonIds?: uuid[]; +}; + +@doc("Response of list dynamic person group of person.") +model ListGroupReferenceResult { + @doc("Array of PersonDirectory DynamicPersonGroup ids.") + dynamicPersonGroupIds: collectionId[]; +} + +@doc("Response of list face of person.") +model ListFaceResult { + @doc("Id of person.") + personId: uuid; + + @doc("Array of persisted face ids.") + persistedFaceIds: uuid[]; +} + +@doc("Response of list dynamic person group person.") +model ListPersonResult { + @doc("Array of PersonDirectory Person ids.") + personIds: uuid[]; +} diff --git a/specification/ai/Face/models.persongroup.tsp b/specification/ai/Face/models.persongroup.tsp new file mode 100644 index 000000000000..606329053a57 --- /dev/null +++ b/specification/ai/Face/models.persongroup.tsp @@ -0,0 +1,58 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.common.tsp"; + +using Azure.Core; +using 
TypeSpec.Http; +using TypeSpec.Rest; + +namespace Face; + +@resource("persons") +@doc("Base model for person.") +model BasePersonGroupPerson { + @key + @visibility("read") + @doc("ID of the person.") + personId: uuid; + + ...UserDefinedFields; + + @doc("Face ids of registered faces in the person.") + persistedFaceIds?: uuid[]; +} + +@resource("persongroups") +@doc("The container of the uploaded person data, including face recognition feature, and up to 10,000 persons. To handle larger scale face identification problem, please consider using Large Person Group.") +model PersonGroup is BaseCollection { + @key + @visibility("read") + @doc("ID of the container.") + personGroupId: collectionId; +} + +@parentResource(PersonGroup) +@doc("The person in a specified person group. To add face to this person, please call \"Add Large Person Group Person Face\".") +model PersonGroupPerson is BasePersonGroupPerson; + +@parentResource(PersonGroupPerson) +@doc("Face resource for person group person.") +model PersonGroupPersonFace is BaseFace; + +@doc("The container of the uploaded person data, including face recognition feature, and up to 1,000,000 people.") +@resource("largepersongroups") +model LargePersonGroup is BaseCollection { + @key + @visibility("read") + @doc("ID of the container.") + largePersonGroupId: collectionId; +} + +@parentResource(LargePersonGroup) +@doc("The person in a specified large person group. 
To add face to this person, please call \"Add Large Person Group Person Face\".") +model LargePersonGroupPerson is BasePersonGroupPerson; + +@parentResource(LargePersonGroupPerson) +@doc("Face resource for large person group person.") +model LargePersonGroupPersonFace is BaseFace; diff --git a/specification/ai/Face/models.session.tsp b/specification/ai/Face/models.session.tsp new file mode 100644 index 000000000000..0768025bed8f --- /dev/null +++ b/specification/ai/Face/models.session.tsp @@ -0,0 +1,276 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; + +using Azure.Core; +using TypeSpec.Http; +using TypeSpec.Rest; + +namespace Face; + +@doc("The operation mode for the liveness modal.") +union LivenessOperationMode { + string, + + @doc("The operation mode for the liveness modal.") + "Passive", +} + +#suppress "@azure-tools/typespec-autorest/unsupported-multipart-type" "Provide convenient interface for multipart/form-data in SDK" +@doc("Request for creating liveness session.") +model CreateLivenessSessionContent { + @doc("Type of liveness mode the client should follow.") + livenessOperationMode: LivenessOperationMode; + + @doc("Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented.") + sendResultsToClient?: boolean; + + @doc("Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body.") + deviceCorrelationIdSetInClient?: boolean; + + ...LivenessSessionData; +} + +@doc("Data for liveness session.") +model LivenessSessionData { + @doc("Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. 
If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null.") + deviceCorrelationId?: string; + + @doc("Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.") + @minValue(60) + @maxValue(86400) + authTokenTimeToLiveInSeconds?: int32 = 600; +} + +@doc("Session data returned for enumeration.") +model LivenessSessionItem { + @doc("The unique ID to reference this session.") + @key("sessionId") + @visibility("read") + id: string; + + @doc("DateTime when this session was created.") + createdDateTime: utcDateTime; + + @doc("DateTime when this session was started by the client.") + sessionStartDateTime?: utcDateTime; + + @doc("Whether or not the session is expired.") + sessionExpired: boolean; + + ...LivenessSessionData; +} + +@doc("Session details data.") +model LivenessSessionDetails { + @doc("The current status of the session.") + status: FaceSessionStatus; + + @doc("The latest session audit result only populated if status == 'ResultAvailable'.") + result?: LivenessSessionAuditEntry; +} + +@doc("Session result of detect liveness.") +@resource("detectLiveness/singleModal/sessions") +model LivenessSession is LivenessSessionItem { + ...LivenessSessionDetails; +} + +@doc("Session result of detect liveness with verify.") +@resource("detectLivenessWithVerify/singleModal/sessions") +model LivenessWithVerifySession is LivenessSessionItem { + ...LivenessSessionDetails; +} + +@doc("Response of liveness session creation.") +model CreateLivenessSessionResult { + @doc("The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation.") + sessionId: string; + + @doc("Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. 
It is also auditable.") + authToken: string; +} + +@doc("Response of liveness session with verify creation with verify image provided.") +model CreateLivenessWithVerifySessionResult is CreateLivenessSessionResult { + @doc("The detail of face for verification.") + verifyImage?: LivenessWithVerifyImage; +} + +@doc("The current status of the session.") +union FaceSessionStatus { + string, + + @doc("Session has not started.") + "NotStarted", + + @doc("Session has started.") + "Started", + + @doc("Session has available result.") + "ResultAvailable", +} + +@doc("The outcome of the liveness classification.") +union LivenessDecision { + string, + + @doc("The algorithm could not classify the target face as either real or spoof.") + "uncertain", + + @doc("The algorithm has classified the target face as real.") + "realface", + + @doc("The algorithm has classified the target face as a spoof.") + "spoofface", +} + +@doc("The type of image.") +union ImageType { + string, + "Color", + "Infrared", + "Depth", +} + +@doc("The liveness classification for target face.") +model LivenessOutputsTarget { + @doc("The face region where the liveness classification was made on.") + faceRectangle: FaceRectangle; + + @doc("The file name which contains the face rectangle where the liveness classification was made on.") + fileName: string; + + @doc("The time offset within the file of the frame which contains the face rectangle where the liveness classification was made on.") + timeOffsetWithinFile: int32; + + @doc("The image type which contains the face rectangle where the liveness classification was made on.") + imageType: ImageType; +} + +@doc("The model version used for liveness classification.") +union LivenessModel { + string, + "2020-02-15-preview.01", + "2021-11-12-preview.03", + "2022-10-15-preview.04", + "2023-03-02-preview.05", +} + +@doc("The detail of face for verification.") +model LivenessWithVerifyImage { + @doc("The face region where the comparison image's classification was 
made.") + faceRectangle: FaceRectangle; + + @doc("Quality of face image for recognition.") + qualityForRecognition: QualityForRecognition; +} + +@doc("The face verification output.") +model LivenessWithVerifyOutputs { + @doc("The detail of face for verification.") + verifyImage: LivenessWithVerifyImage; + + @doc("The target face liveness face and comparison image face verification confidence.") + @minValue(0) + @maxValue(1) + matchConfidence: float32; + + @doc("Whether the target liveness face and comparison image face match.") + isIdentical: boolean; +} + +#suppress "@azure-tools/typespec-azure-core/bad-record-type" "Use Record for accepting failure response" +#suppress "@azure-tools/typespec-azure-core/composition-over-inheritance" "Extending Record for accepting failure response" +@doc("The response body of detect liveness API call.") +model LivenessResponseBody extends Record { + @doc("The liveness classification for the target face.") + livenessDecision?: LivenessDecision; + + @doc("Specific targets used for liveness classification.") + target?: LivenessOutputsTarget; + + @doc("The model version used for liveness classification.") + modelVersionUsed?: LivenessModel; + + @doc("The face verification output. Only available when the request is liveness with verify.") + verifyResult?: LivenessWithVerifyOutputs; +} + +@doc("Audit entry for a request in the session.") +model AuditRequestInfo { + @doc("The relative URL and query of the liveness request.") + url: string; + + @doc("The HTTP method of the request (i.e., GET, POST, DELETE).") + method: string; + + @doc("The length of the request body in bytes.") + contentLength?: int64; + + @doc("The content type of the request.") + contentType: string; + + @doc("The user agent used to submit the request.") + userAgent?: string; +} + +@doc("Audit entry for a response in the session.") +model AuditLivenessResponseInfo { + @doc("The response body. 
The schema of this field will depend on the request.url and request.method used by the client.") + body: LivenessResponseBody; + + @doc("The HTTP status code returned to the client.") + statusCode: int32; + + @doc("The server measured latency for this request in milliseconds.") + latencyInMilliseconds: int64; +} + +@doc("Audit entry for a request in session.") +model LivenessSessionAuditEntry { + @doc("The unique id to refer to this audit request. Use this id with the 'start' query parameter to continue on to the next page of audit results.") + id: int64; + + @doc("The unique sessionId of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding session DELETE operation.") + sessionId: string; + + @doc("The unique requestId that is returned by the service to the client in the 'apim-request-id' header.") + requestId: string; + + @doc("The unique clientRequestId that is sent by the client in the 'client-request-id' header.") + clientRequestId: string; + + @doc("The UTC DateTime that the request was received.") + receivedDateTime: utcDateTime; + + @doc("The request of this entry.") + request: AuditRequestInfo; + + @doc("The response of this entry.") + response: AuditLivenessResponseInfo; + + @doc("The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. 
For more information, see how to guides on how to leverage this value to secure your end-to-end solution.") + digest: string; +} + +#suppress "@azure-tools/typespec-autorest/unsupported-multipart-type" "Provide convenient interface for multipart/form-data in SDK" +@doc("Dedicated parameter model for multipart/form-data.") +model CreateLivenessSessionContentForMultipart { + ...CreateLivenessSessionContent; +} + +@doc("Request of liveness with verify session creation.") +model CreateLivenessWithVerifySessionContent { + @doc("The content type for the operation. Always multipart/form-data for this operation.") + @header("content-type") + contentType: "multipart/form-data"; + + #suppress "@azure-tools/typespec-azure-core/casing-style" + @doc("The parameters for creating session.") + Parameters: CreateLivenessSessionContentForMultipart; + + #suppress "@azure-tools/typespec-azure-core/casing-style" + @doc("The image stream for verify. Content-Disposition header field for this part must have filename.") + VerifyImage: bytes; +} diff --git a/specification/ai/Face/routes.common.tsp b/specification/ai/Face/routes.common.tsp new file mode 100644 index 000000000000..24a7f4b33c5e --- /dev/null +++ b/specification/ai/Face/routes.common.tsp @@ -0,0 +1,151 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.common.tsp"; +import "./models.detect.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using Azure.Core; +using Azure.Core.Traits; +using Foundations; + +namespace Face; + +alias ServiceTraits = NoClientRequestId & + NoRepeatableRequests & + NoConditionalRequests & + TraitOverride>; + +alias SuccessfulCallWithEmptyBody = "A successful call returns an empty response body."; + +@createsResource(TResource) +op FaceResourceCreateWithServiceProvidedName< + TResource extends TypeSpec.Reflection.Model, + TParams extends TypeSpec.Reflection.Model, + TResponse extends TypeSpec.Reflection.Model = TResource +> is 
Foundations.ResourceCollectionOperation< + TResource, + TParams, + TResponse, + ServiceTraits, + FaceErrorResponse +>; + +@createsOrReplacesResource(TResource) +op FaceCollectionResourceCreateOperation is Foundations.ResourceOperation< + TResource, + CreateCollectionOptions, + TypeSpec.Http.OkResponse, + ServiceTraits, + FaceErrorResponse +>; + +alias AddFaceSuccess = "A successful call returns a new persistedFaceId."; +#suppress "@azure-tools/typespec-azure-core/byos" "It's a template." +@sharedRoute +@createsResource(TFace) +op AddFace is Foundations.ResourceCollectionOperation< + TFace, + AddFaceOptions & { + @doc("The format of the HTTP payload.") + @header + contentType: "application/octet-stream"; + + @doc("The image to be analyzed") + @body + imageContent: bytes; + }, + AddFaceResult, + ServiceTraits, + FaceErrorResponse +>; + +@sharedRoute +@createsResource(TFace) +op AddFaceFromUrl is Foundations.ResourceCollectionOperation< + TFace, + AddFaceOptions & { + @doc("URL of input image.") + url: url; + }, + AddFaceResult, + ServiceTraits, + FaceErrorResponse +>; + +@updatesResource(TResource) +op FaceResourceUpdateOperation< + TResource extends TypeSpec.Reflection.Model, + TParams extends TypeSpec.Reflection.Model = UserDefinedFieldsForUpdate +> is Foundations.ResourceOperation< + TResource, + TParams, + OkResponse, + ServiceTraits, + FaceErrorResponse +>; + +op FaceResourceListOperation< + TResource extends TypeSpec.Reflection.Model, + TResourceItem extends TypeSpec.Reflection.Model = TResource, + TQuery extends TypeSpec.Reflection.Model = {} +> is Foundations.ResourceList< + TResource, + ListRequestOptions & TQuery, + Body, + ServiceTraits, + FaceErrorResponse +>; + +@deletesResource(TResource) +op FaceResourceDeleteOperation is Foundations.ResourceOperation< + TResource, + {}, + TypeSpec.Http.OkResponse, + ServiceTraits, + FaceErrorResponse +>; + +@readsResource(TResource) +op FaceResourceReadOperation is Foundations.ResourceOperation< + TResource, + {}, + 
TResource, + ServiceTraits, + FaceErrorResponse +>; + +op FaceResourceReadOperationWithReturnRecognitionModelOptions is Foundations.ResourceOperation< + TResource, + ReturnRecognitionModelOptions, + TResource, + ServiceTraits, + FaceErrorResponse +>; + +#suppress "@azure-tools/typespec-azure-core/long-running-polling-operation-required" "This is a template, polling operation should be defined in the operation instance." +@post +@action("train") +@actionSeparator("/") +op FaceResourceTrain is Foundations.ResourceOperation< + TResource, + {}, + TypeSpec.Http.AcceptedResponse & { + @header("operation-Location") + operationLocation: ResourceLocation; + }, + ServiceTraits, + FaceErrorResponse +>; + +@get +@action("training") +@actionSeparator("/") +op FaceResourceGetTrainingStatus is Foundations.ResourceOperation< + TResource, + {}, + TrainingResult, + ServiceTraits, + FaceErrorResponse +>; diff --git a/specification/ai/Face/routes.detection.tsp b/specification/ai/Face/routes.detection.tsp new file mode 100644 index 000000000000..8ae0c67cc74d --- /dev/null +++ b/specification/ai/Face/routes.detection.tsp @@ -0,0 +1,63 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.common.tsp"; +import "./models.detect.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using Azure.Core; +using Azure.Core.Traits; +using Foundations; + +namespace Face; + +@summary("Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.") +@doc(""" +> [!IMPORTANT] +> To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/en-us/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. + +* + * No image will be stored. 
Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in \"Identify\", \"Verify\", and \"Find Similar\". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. + * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. + * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. + * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. + * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. + * For optimal results when querying \"Identify\", \"Verify\", and \"Find Similar\" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). + * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model + * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. + * 'detection_03': Face attributes (mask and headPose only) and landmarks are supported if you choose this detection model. + * Different 'recognitionModel' values are provided. If follow-up operations like \"Verify\", \"Identify\", \"Find Similar\" are needed, please specify the recognition model with 'recognitionModel' parameter. 
The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-recognition-model. +""") +@returnsDoc("A successful call returns an array of face entries ranked by face rectangle size in descending order. An empty response indicates no faces detected.") +@post +@sharedRoute +@route("detect") +op DetectOperation is Azure.Core.RpcOperation< + T & FaceDetectionOptions, + Body, + ServiceTraits, + FaceErrorResponse +>; + +interface FaceDetectionOperations { + detectFromUrl is DetectOperation<{ + @doc("The format of the HTTP payload.") + @header + contentType: "application/json"; + + @doc("URL of input image.") + url: url; + }>; + + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." 
+ detect is DetectOperation<{ + @doc("The format of the HTTP payload.") + @header + contentType: "application/octet-stream"; + + @doc("The input image binary.") + @body + imageContent: bytes; + }>; +} diff --git a/specification/ai/Face/routes.facelist.tsp b/specification/ai/Face/routes.facelist.tsp new file mode 100644 index 000000000000..08ed2d4303e3 --- /dev/null +++ b/specification/ai/Face/routes.facelist.tsp @@ -0,0 +1,193 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.facelist.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using Azure.Core; +using Azure.Core.Traits; +using Foundations; + +namespace Face; + +alias AddFaceListFaceSummary = "Add a face to a specified Face List, up to 1,000 faces."; +alias AddFaceListFaceDescription = """ + To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Face List Face\" or \"Delete Face List\" is called. + + Note that persistedFaceId is different from faceId generated by \"Detect\". + ${AddFaceDescriptionInList} + """; +alias AddLargeFaceListFaceSummary = "Add a face to a specified Large Face List, up to 1,000,000 faces."; +alias AddLargeFaceListFaceDescription = """ + To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Face List Face\" or \"Delete Large Face List\" is called. + + Note that persistedFaceId is different from faceId generated by \"Detect\". + ${AddFaceDescriptionInList} + + > [!NOTE] + > + > * + > * Free-tier subscription quota: 1,000 faces per Large Face List. 
+ > * S0-tier subscription quota: 1,000,000 faces per Large Face List. + """; + +interface FaceListOperations { + @summary("Create an empty Face List with user-specified faceListId, name, an optional userData and recognitionModel.") + @doc(""" + Up to 64 Face Lists are allowed in one subscription. + + Face List is a list of faces, up to 1,000 faces, and used by \"Find Similar From Face List\". + + After creation, user should use \"Add Face List Face\" to import the faces. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Face List\" is called. + + \"Find Similar\" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and \"Identify\". + + Please consider Large Face List when the face number is large. It can support up to 1,000,000 faces. + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + createFaceList is FaceCollectionResourceCreateOperation; + + @doc("Delete a specified Face List.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deleteFaceList is FaceResourceDeleteOperation; + + @doc("Retrieve a Face List's faceListId, name, userData, recognitionModel and faces in the Face List.") + @returnsDoc("A successful call returns the Face List's information.") + getFaceList is FaceResourceReadOperationWithReturnRecognitionModelOptions; + + @doc("Update information of a Face List, including name and userData.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updateFaceList is FaceResourceUpdateOperation; + + @doc(""" + List Face Lists' faceListId, name, userData and recognitionModel. + + To get face information inside Face List use \"Get Face List\". 
+ """) + @returnsDoc("A successful call returns an array of Face Lists.") + getFaceLists is Foundations.ResourceList< + FaceList, + ReturnRecognitionModelOptions, + Body, + ServiceTraits, + FaceErrorResponse + >; + + @summary(AddFaceListFaceSummary) + @doc(AddFaceListFaceDescription) + @returnsDoc(AddFaceSuccess) + addFaceListFaceFromUrl is AddFaceFromUrl; + + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." + @summary(AddFaceListFaceSummary) + @doc(AddFaceListFaceDescription) + @returnsDoc(AddFaceSuccess) + addFaceListFace is AddFace; + + @summary("Delete a face from a Face List by specified faceListId and persistedFaceId.") + @doc("Adding/deleting faces to/from a same Face List are processed sequentially and to/from different Face Lists are in parallel.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deleteFaceListFace is FaceResourceDeleteOperation; + + @summary("Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel.") + @doc(""" + Large Face List is a list of faces, up to 1,000,000 faces, and used by \"Find Similar From Large Face List\". + + After creation, user should use Add Large Face List Face to import the faces and Train Large Face List to make it ready for \"Find Similar\". No image will be stored. Only the extracted face feature(s) will be stored on server until Delete Large Face List is called. + + \"Find Similar\" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and \"Identify\". + + > [!NOTE] + > + > * + > * Free-tier subscription quota: 64 Large Face Lists. + > * S0-tier subscription quota: 1,000,000 Large Face Lists. 
+ """) + @returnsDoc(SuccessfulCallWithEmptyBody) + createLargeFaceList is FaceCollectionResourceCreateOperation; + + @summary("Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId.") + @doc("Adding/deleting faces to/from a same Large Face List are processed sequentially and to/from different Large Face Lists are in parallel.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deleteLargeFaceList is FaceResourceDeleteOperation; + + @doc("Retrieve a Large Face List's largeFaceListId, name, userData and recognitionModel.") + @returnsDoc("A successful call returns the Large Face List's information.") + getLargeFaceList is FaceResourceReadOperationWithReturnRecognitionModelOptions; + + @doc("Update information of a Large Face List, including name and userData.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updateLargeFaceList is FaceResourceUpdateOperation; + + @summary("List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel.") + @doc( + """ + To get face information inside largeFaceList use \"Get Large Face List Face\". + + Large Face Lists are stored in alphabetical order of largeFaceListId. + ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of Large Face Lists and their information (largeFaceListId, name and userData).") + getLargeFaceLists is FaceResourceListOperation< + LargeFaceList, + LargeFaceList, + ReturnRecognitionModelOptions + >; + + @doc(""" + To check the Large Face List training status completed or still ongoing. Large Face List training is an asynchronous operation triggered by \"Train Large Face List\". + + Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces. 
+ """) + @returnsDoc("A successful call returns the Large Face List's training status.") + getLargeFaceListTrainingStatus is FaceResourceGetTrainingStatus; + + @summary("Submit a Large Face List training task.") + @doc(""" + + Training is a crucial step that only a trained Large Face List can be used by \"Find Similar From Large Face List\". + + The training task is an asynchronous task. Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces. To check training completion, please use \"Get Large Face List Training Status\". + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + @pollingOperation(FaceListOperations.getLargeFaceListTrainingStatus) + trainLargeFaceList is FaceResourceTrain; + + @summary(AddLargeFaceListFaceSummary) + @doc(AddLargeFaceListFaceDescription) + @returnsDoc(AddFaceSuccess) + addLargeFaceListFaceFromUrl is AddFaceFromUrl; + + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." 
+ @summary(AddLargeFaceListFaceSummary) + @doc(AddLargeFaceListFaceDescription) + @returnsDoc(AddFaceSuccess) + addLargeFaceListFace is AddFace; + + @doc("Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deleteLargeFaceListFace is FaceResourceDeleteOperation; + + @doc("Retrieve persisted face in Large Face List by largeFaceListId and persistedFaceId.") + @returnsDoc("A successful call returns target persisted face's information (persistedFaceId and userData).") + getLargeFaceListFace is FaceResourceReadOperation; + + @doc("Update a specified face's userData field in a Large Face List by its persistedFaceId.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updateLargeFaceListFace is FaceResourceUpdateOperation< + LargeFaceListFace, + FaceUserData + >; + + @summary("List faces' persistedFaceId and userData in a specified Large Face List.") + @doc( + """ + Faces are stored in alphabetical order of persistedFaceId created in \"Add Large Face List Face\". + ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of persisted faces and their information (persistedFaceId and userData).") + getLargeFaceListFaces is FaceResourceListOperation; +} diff --git a/specification/ai/Face/routes.persondirectory.tsp b/specification/ai/Face/routes.persondirectory.tsp new file mode 100644 index 000000000000..817a68fc8f72 --- /dev/null +++ b/specification/ai/Face/routes.persondirectory.tsp @@ -0,0 +1,285 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.persondirectory.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using Azure.Core; +using Azure.Core.Traits; +using Foundations; + +namespace Face; + +#suppress "@azure-tools/typespec-azure-core/long-running-polling-operation-required" "This is a template, polling operation should be defined in the operation instance." 
+@summary("Add a face to a person (see Person Directory \"Create Person\") for face identification or verification.") +@doc( + """ + To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until Person Directory \"Delete Person Face\" or \"Delete Person\" is called. + + Note that persistedFaceId is different from faceId generated by \"Detect\". + ${AddFaceDescriptionInList} + * + * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + * This is a long running operation. Use Response Header "Operation-Location" to determine when the AddFace operation has successfully propagated for future requests to \"Identify\". For further information about Operation-Locations see \"Get Face Operation Status\". 
+ """ +) +@returnsDoc(PersonModificationAccepted) +@pollingOperation(getOperationResult) +@sharedRoute +@createsResource(PersonDirectoryFace) +op AddPersonFaceOperation is Foundations.ResourceCollectionOperation< + PersonDirectoryFace, + AddFaceOptions & T, + AddFaceResult & + TypeSpec.Http.AcceptedResponse & { + @pollingLocation + @header("operation-Location") + operationLocation: ResourceLocation; + + @finalLocation + @header("Location") + location: ResourceLocation; + }, + ServiceTraits, + FaceErrorResponse +>; + +@pollingOperation(getOperationResult) +@deletesResource(TResource) +op FaceResourceDeleteLongRunningOperation is Foundations.ResourceOperation< + TResource, + {}, + TypeSpec.Http.AcceptedResponse & { + @pollingLocation + @header("operation-Location") + operationLocation: ResourceLocation; + }, + ServiceTraits, + FaceErrorResponse +>; + +@pollingOperation(getOperationResult) +@sharedRoute +op DynamicPersonGroupLongRunningOperation is Foundations.ResourceOperation< + DynamicPersonGroup, + TParams, + Foundations.AcceptedResponse<{ + @pollingLocation + @header("operation-Location") + operationLocation: ResourceLocation; + }>, + ServiceTraits, + FaceErrorResponse +>; + +@sharedRoute +op DynamicPersonGroupOperation is Foundations.ResourceOperation< + DynamicPersonGroup, + TParams, + TypeSpec.Http.OkResponse, + ServiceTraits, + FaceErrorResponse +>; + +@doc("Get status of a long running operation.") +@returnsDoc("A successful call returns the long running operation status.") +op getOperationResult is FaceResourceReadOperation; + +alias PersonModificationAccepted = "A successful call returns an empty response body. The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours."; +alias DynamicPersonGroupModificationAccepted = "A successful call returns an empty response body. 
The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours. The URL provides the status of when Person Directory \"Get Dynamic Person Group References\" will return the changes made in this request."; + +alias CreateDynamicPersonGroupSummary = "Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData."; +alias CreateDynamicPersonGroupDescription = """ +A Dynamic Person Group is a container that references Person Directory "Create Person". After creation, use Person Directory "Update Dynamic Person Group" to add/remove persons to/from the Dynamic Person Group. + +Dynamic Person Group and user data will be stored on server until Person Directory "Delete Dynamic Person Group" is called. Use "Identify From Dynamic Person Group" with the dynamicPersonGroupId parameter to identify against persons. + +No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until Person Directory "Delete Person" or "Delete Person Face" is called. + +'recognitionModel' does not need to be specified with Dynamic Person Groups. Dynamic Person Groups are references to Person Directory "Create Person" and therefore work with most all 'recognitionModels'. The faceId's provided during "Identify" determine the 'recognitionModel' used. +"""; + +alias UpdateDynamicPersonGroupSummary = "Update the name or userData of an existing Dynamic Person Group, and manage its members by adding or removing persons."; +alias UpdateDynamicPersonGroupDescription = "The properties keep unchanged if they are not in request body."; + +interface PersonDirectoryOperations { + #suppress "@azure-tools/typespec-azure-core/long-running-polling-operation-required" "Doesn't fit in generated long-running operation" + @doc("Creates a new person in a Person Directory. 
To add face to this person, please call Person Directory \"Add Person Face\".") + @returnsDoc(PersonModificationAccepted) + @pollingOperation(getOperationResult) + @createsResource(PersonDirectoryPerson) + createPerson is Foundations.ResourceCollectionOperation< + PersonDirectoryPerson, + UserDefinedFields, + CreatePersonResult & + TypeSpec.Http.AcceptedResponse & { + @pollingLocation + @header("operation-Location") + operationLocation: ResourceLocation; + + @finalLocation + @header("Location") + location: ResourceLocation; + }, + ServiceTraits, + FaceErrorResponse + >; + + @doc("Delete an existing person from Person Directory. The persistedFaceId(s), userData, person name and face feature(s) in the person entry will all be deleted.") + @returnsDoc(PersonModificationAccepted) + deletePerson is FaceResourceDeleteLongRunningOperation; + + @doc("Retrieve a person's name and userData from Person Directory.") + @returnsDoc("A successful call returns the person's information.") + getPerson is FaceResourceReadOperation; + + @doc("Update name or userData of a person.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updatePerson is FaceResourceUpdateOperation; + + @doc( + """ + Persons are stored in alphabetical order of personId created in Person Directory \"Create Person\". + ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of Person Directory Persons contained in the Dynamic Person Group.") + @summary("List all persons' information in Person Directory, including personId, name, and userData.") + getPersons is FaceResourceListOperation; + + @summary("List all Dynamic Person Groups a person has been referenced by in Person Directory.") + @doc( + """ + Dynamic Person Groups are stored in alphabetical order of Dynamic Person Group ID created in Person Directory \"Create Dynamic Person Group\". 
+ ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of dynamicPersonGroups information that reference the provided personId.") + @get + @action("dynamicPersonGroupReferences") + @actionSeparator("/") + getDynamicPersonGroupReferences is Foundations.ResourceOperation< + PersonDirectoryPerson, + ListRequestOptions, + ListGroupReferenceResult, + ServiceTraits, + FaceErrorResponse + >; + + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." + #suppress "@azure-tools/typespec-azure-core/long-running-polling-operation-required" "Doesn't fit in generated long-running operation" + addPersonFace is AddPersonFaceOperation<{ + @doc("The format of the HTTP payload.") + @header + contentType: "application/octet-stream"; + + @doc("The image to be analyzed") + @body + imageContent: bytes; + }>; + + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." + #suppress "@azure-tools/typespec-azure-core/long-running-polling-operation-required" "Doesn't fit in generated long-running operation" + addPersonFaceFromUrl is AddPersonFaceOperation<{ + @doc("URL of input image.") + url: url; + }>; + + @summary("Delete a face from a person in Person Directory by specified personId and persistedFaceId.") + @doc(""" + Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + """) + @returnsDoc(PersonModificationAccepted) + deletePersonFace is FaceResourceDeleteLongRunningOperation; + + @doc(""" + Retrieve person face information. The persisted person face is specified by its personId, recognitionModel, and persistedFaceId. 
+ """) + @returnsDoc("A successful call returns target persisted face's information (persistedFaceId and userData).") + getPersonFace is FaceResourceReadOperation; + + @doc("Update a persisted face's userData field of a person.") + @returnsDoc(SuccessfulCallWithEmptyBody) + @updatesResource(PersonDirectoryFace) + updatePersonFace is FaceResourceUpdateOperation< + PersonDirectoryFace, + FaceUserData + >; + + @doc("Retrieve a person's persistedFaceIds representing the registered person face feature(s).") + @returnsDoc("A successful call returns an array of persistedFaceIds and a person ID.") + getPersonFaces is Foundations.ResourceList< + PersonDirectoryFace, + {}, + ListFaceResult, + ServiceTraits, + FaceErrorResponse + >; + + @summary(CreateDynamicPersonGroupSummary) + @doc(CreateDynamicPersonGroupDescription) + @returnsDoc(DynamicPersonGroupModificationAccepted) + @createsOrReplacesResource(DynamicPersonGroup) + createDynamicPersonGroupWithPerson is DynamicPersonGroupLongRunningOperation; + + @summary(CreateDynamicPersonGroupSummary) + @doc(CreateDynamicPersonGroupDescription) + @createsOrReplacesResource(DynamicPersonGroup) + createDynamicPersonGroup is DynamicPersonGroupOperation; + + @summary("Deletes an existing Dynamic Person Group with specified dynamicPersonGroupId.") + @doc(""" + Deleting this Dynamic Person Group only deletes the references to persons data. To delete actual person see Person Directory \"Delete Person\". + """) + @returnsDoc(DynamicPersonGroupModificationAccepted) + deleteDynamicPersonGroup is FaceResourceDeleteLongRunningOperation; + + @doc(""" + This API returns Dynamic Person Group information only, use Person Directory \"Get Dynamic Person Group Persons\" instead to retrieve person information under the Dynamic Person Group. 
+ """) + @summary("Retrieve the information of a Dynamic Person Group, including its name and userData.") + @returnsDoc("A successful call returns the Dynamic Person Group's information.") + getDynamicPersonGroup is FaceResourceReadOperation; + + @summary(UpdateDynamicPersonGroupSummary) + @doc(UpdateDynamicPersonGroupDescription) + @returnsDoc(DynamicPersonGroupModificationAccepted) + @patch // updatesResource have codegen issue + updateDynamicPersonGroupWithPersonChanges is DynamicPersonGroupLongRunningOperation; + + @summary(UpdateDynamicPersonGroupSummary) + @doc(UpdateDynamicPersonGroupDescription) + @returnsDoc(DynamicPersonGroupModificationAccepted) + @updatesResource(DynamicPersonGroup) + updateDynamicPersonGroup is DynamicPersonGroupOperation; + + @summary("List all existing Dynamic Person Groups by dynamicPersonGroupId along with name and userData.") + @doc( + """ + Dynamic Person Groups are stored in alphabetical order of dynamicPersonGroupId. + ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of Dynamic Person Groups and their information (dynamicPersonGroupId, name and userData).") + getDynamicPersonGroups is FaceResourceListOperation; + + @summary("List all persons in the specified Dynamic Person Group.") + @doc( + """ + Persons are stored in alphabetical order of personId created in Person Directory \"Create Person\". 
+ ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of person information in the Person Directory.") + @get + @action("persons") + @actionSeparator("/") + getDynamicPersonGroupPersons is Foundations.ResourceOperation< + DynamicPersonGroup, + ListRequestOptions, + ListPersonResult, + ServiceTraits, + FaceErrorResponse + >; +} diff --git a/specification/ai/Face/routes.persongroup.tsp b/specification/ai/Face/routes.persongroup.tsp new file mode 100644 index 000000000000..b6614083944a --- /dev/null +++ b/specification/ai/Face/routes.persongroup.tsp @@ -0,0 +1,290 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.persongroup.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using Azure.Core; +using Azure.Core.Traits; +using Foundations; + +namespace Face; + +alias AddPersonGroupPersonFaceSummary = "Add a face to a person into a Person Group for face identification or verification."; +alias AddPersonGroupPersonFaceDescription = """ + To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Person Group Person Face\", \"Delete Person Group Person\" or \"Delete Person Group\" is called. + + Note that persistedFaceId is different from faceId generated by \"Detect\". + ${AddFaceDescriptionInList} + """; +alias AddLargePersonGroupPersonFaceSummary = "Add a face to a person into a Large Person Group for face identification or verification."; +alias AddLargePersonGroupPersonFaceDescription = """ + To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. 
Only the extracted face feature(s) will be stored on server until \"Delete Large Person Group Person Face\", \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called. + + Note that persistedFaceId is different from faceId generated by \"Detect\". + ${AddFaceDescriptionInList} + """; +alias CreatePersonSuccess = "A successful call returns a new personId created."; +alias GetPersonSuccess = "A successful call returns the person's information."; +alias GetPersonFaceSuccess = "A successful call returns target persisted face's information (persistedFaceId and userData)."; + +interface PersonGroupOperations { + @summary("Create a new Person Group with specified personGroupId, name, user-provided userData and recognitionModel.") + @doc(""" + A Person Group is a container holding the uploaded person data, including face recognition features. + + After creation, use \"Create Person Group Person\" to add persons into the group, and then call \"Train Person Group\" to get this group ready for \"Identify From Person Group\". + + No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until \"Delete Person Group Person\" or \"Delete Person Group\" is called. + + 'recognitionModel' should be specified to associate with this Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Person Group can't be updated to features extracted by another version of recognition model. + + > [!NOTE] + > + > * + > * Free-tier subscription quota: 1,000 Person Groups. Each holds up to 1,000 persons. + > * S0-tier subscription quota: 1,000,000 Person Groups. Each holds up to 10,000 persons. 
+ > * to handle larger scale face identification problem, please consider using Large Person Group. + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + createPersonGroup is FaceCollectionResourceCreateOperation; + + @doc("Delete an existing Person Group with specified personGroupId. Persisted data in this Person Group will be deleted.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deletePersonGroup is FaceResourceDeleteOperation; + + @doc("Retrieve Person Group name, userData and recognitionModel. To get person information under this personGroup, use \"Get Person Group Persons\".") + @returnsDoc("A successful call returns the Person Group's information.") + getPersonGroup is FaceResourceReadOperationWithReturnRecognitionModelOptions; + + @doc("Update an existing Person Group's name and userData. The properties keep unchanged if they are not in request body.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updatePersonGroup is FaceResourceUpdateOperation; + + @summary("List Person Groups' personGroupId, name, userData and recognitionModel.") + @doc( + """ + Person Groups are stored in alphabetical order of personGroupId. + ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of Person Groups and their information (personGroupId, name and userData).") + getPersonGroups is FaceResourceListOperation< + PersonGroup, + PersonGroup, + ReturnRecognitionModelOptions + >; + + @doc("To check Person Group training status completed or still ongoing. Person Group training is an asynchronous operation triggered by \"Train Person Group\" API.") + @returnsDoc("A successful call returns the Person Group's training status.") + getPersonGroupTrainingStatus is FaceResourceGetTrainingStatus; + + @pollingOperation(PersonGroupOperations.getPersonGroupTrainingStatus) + @summary("Submit a Person Group training task. 
Training is a crucial step that only a trained Person Group can be used by \"Identify From Person Group\".") + @doc(""" + The training task is an asynchronous task. Training time depends on the number of person entries, and their faces in a Person Group. It could be several seconds to minutes. To check training status, please use \"Get Person Group Training Status\". + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + trainPersonGroup is FaceResourceTrain; + + @summary("Create a new person in a specified Person Group. To add face to this person, please call \"Add Person Group Person Face\".") + @doc(""" + > [!NOTE] + > + > * + > * Free-tier subscription quota: + > * 1,000 persons in all Person Groups. + > * S0-tier subscription quota: + > * 10,000 persons per Person Group. + > * 1,000,000 Person Groups. + > * 100,000,000 persons in all Person Groups. + """) + @returnsDoc(CreatePersonSuccess) + createPersonGroupPerson is FaceResourceCreateWithServiceProvidedName< + PersonGroupPerson, + UserDefinedFields, + CreatePersonResult + >; + + @doc("Delete an existing person from a Person Group. The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deletePersonGroupPerson is FaceResourceDeleteOperation; + + @doc("Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s).") + @returnsDoc(GetPersonSuccess) + getPersonGroupPerson is FaceResourceReadOperation; + + @doc("Update name or userData of a person.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updatePersonGroupPerson is FaceResourceUpdateOperation; + + @summary("List all persons' information in the specified Person Group, including personId, name, userData and persistedFaceIds of registered person faces.") + @doc( + """ + Persons are stored in alphabetical order of personId created in \"Create Person Group Person\". 
+ ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of person information that belong to the Person Group.") + getPersonGroupPersons is FaceResourceListOperation; + + @summary(AddPersonGroupPersonFaceSummary) + @doc(AddPersonGroupPersonFaceDescription) + @returnsDoc(AddFaceSuccess) + addPersonGroupPersonFaceFromUrl is AddFaceFromUrl; + + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." + @summary(AddPersonGroupPersonFaceSummary) + @doc(AddPersonGroupPersonFaceDescription) + @returnsDoc(AddFaceSuccess) + addPersonGroupPersonFace is AddFace; + + @summary("Delete a face from a person in a Person Group by specified personGroupId, personId and persistedFaceId.") + @doc(""" + Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + deletePersonGroupPersonFace is FaceResourceDeleteOperation; + + @doc("Retrieve person face information. The persisted person face is specified by its personGroupId, personId and persistedFaceId.") + @returnsDoc(GetPersonFaceSuccess) + getPersonGroupPersonFace is FaceResourceReadOperation; + + @doc("Update a person persisted face's userData field.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updatePersonGroupPersonFace is FaceResourceUpdateOperation< + PersonGroupPersonFace, + FaceUserData + >; + + @summary("Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel.") + @doc(""" + A Large Person Group is a container holding the uploaded person data, including the face recognition features. It can hold up to 1,000,000 entities. 
+ + After creation, use \"Create Large Person Group Person\" to add person into the group, and call \"Train Large Person Group\" to get this group ready for \"Identify From Large Person Group\". + + No image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called. + + 'recognitionModel' should be specified to associate with this Large Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Large Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Large Person Group can't be updated to features extracted by another version of recognition model. + + > [!NOTE] + > + > * + > * Free-tier subscription quota: 1,000 Large Person Groups. + > * S0-tier subscription quota: 1,000,000 Large Person Groups. + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + createLargePersonGroup is FaceCollectionResourceCreateOperation; + + @doc("Delete an existing Large Person Group with specified personGroupId. Persisted data in this Large Person Group will be deleted.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deleteLargePersonGroup is FaceResourceDeleteOperation; + + @doc("Retrieve the information of a Large Person Group, including its name, userData and recognitionModel. This API returns Large Person Group information only, use \"Get Large Person Group Persons\" instead to retrieve person information under the Large Person Group.") + @returnsDoc("A successful call returns the Large Person Group's information.") + getLargePersonGroup is FaceResourceReadOperationWithReturnRecognitionModelOptions; + + @doc("Update an existing Large Person Group's name and userData. 
The properties keep unchanged if they are not in request body.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updateLargePersonGroup is FaceResourceUpdateOperation; + + @summary("List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel.") + @doc( + """ + Large Person Groups are stored in alphabetical order of largePersonGroupId. + ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of Large Person Groups and their information (largePersonGroupId, name and userData).") + getLargePersonGroups is FaceResourceListOperation< + LargePersonGroup, + LargePersonGroup, + ReturnRecognitionModelOptions + >; + + @summary("To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by \"Train Large Person Group\" API.") + @doc(""" + Training time depends on the number of person entries, and their faces in a Large Person Group. It could be in seconds, or up to half an hour for 1,000,000 persons. + """) + @returnsDoc("A successful call returns the Large Person Group's training status.") + getLargePersonGroupTrainingStatus is FaceResourceGetTrainingStatus; + + @pollingOperation(PersonGroupOperations.getLargePersonGroupTrainingStatus) + @summary("Submit a Large Person Group training task. Training is a crucial step that only a trained Large Person Group can be used by \"Identify From Large Person Group\".") + @doc(""" + The training task is an asynchronous task. Training time depends on the number of person entries, and their faces in a Large Person Group. It could be in several seconds, or up to half an hour for 1,000,000 persons. To check training status, please use \"Get Large Person Group Training Status\". + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + trainLargePersonGroup is FaceResourceTrain; + + @summary("Create a new person in a specified Large Person Group. 
To add face to this person, please call \"Add Large Person Group Person Face\".") + @doc(""" + > [!NOTE] + > + > * + > * Free-tier subscription quota: + > * 1,000 persons in all Large Person Groups. + > * S0-tier subscription quota: + > * 1,000,000 persons per Large Person Group. + > * 1,000,000 Large Person Groups. + > * 1,000,000,000 persons in all Large Person Groups. + """) + @returnsDoc(CreatePersonSuccess) + createLargePersonGroupPerson is FaceResourceCreateWithServiceProvidedName< + LargePersonGroupPerson, + UserDefinedFields, + CreatePersonResult + >; + + @doc("Delete an existing person from a Large Person Group. The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted.") + @returnsDoc(SuccessfulCallWithEmptyBody) + deleteLargePersonGroupPerson is FaceResourceDeleteOperation; + + @doc("Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s).") + @returnsDoc(GetPersonSuccess) + getLargePersonGroupPerson is FaceResourceReadOperation; + + @doc("Update name or userData of a person.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updateLargePersonGroupPerson is FaceResourceUpdateOperation; + + @summary("List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces.") + @doc( + """ + Persons are stored in alphabetical order of personId created in \"Create Large Person Group Person\". 
+ ${ListRequestOptionsDescriptionInList} + """ + ) + @returnsDoc("A successful call returns an array of person information that belong to the Large Person Group.") + getLargePersonGroupPersons is FaceResourceListOperation; + + @summary(AddLargePersonGroupPersonFaceSummary) + @doc(AddLargePersonGroupPersonFaceDescription) + @returnsDoc(AddFaceSuccess) + addLargePersonGroupPersonFaceFromUrl is AddFaceFromUrl; + + #suppress "@azure-tools/typespec-azure-core/byos" "It's an RPC call to detect face from an image, it doesn't store anything. There's a BYOS version of this call, but it's not this one." + @summary(AddLargePersonGroupPersonFaceSummary) + @doc(AddLargePersonGroupPersonFaceDescription) + @returnsDoc(AddFaceSuccess) + addLargePersonGroupPersonFace is AddFace; + + @summary("Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId.") + @doc(""" + Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel. + """) + @returnsDoc(SuccessfulCallWithEmptyBody) + deleteLargePersonGroupPersonFace is FaceResourceDeleteOperation; + + @doc("Retrieve person face information. 
The persisted person face is specified by its largePersonGroupId, personId and persistedFaceId.") + @returnsDoc(GetPersonFaceSuccess) + getLargePersonGroupPersonFace is FaceResourceReadOperation; + + @doc("Update a person persisted face's userData field.") + @returnsDoc(SuccessfulCallWithEmptyBody) + updateLargePersonGroupPersonFace is FaceResourceUpdateOperation< + LargePersonGroupPersonFace, + FaceUserData + >; +} diff --git a/specification/ai/Face/routes.recognition.tsp b/specification/ai/Face/routes.recognition.tsp new file mode 100644 index 000000000000..4035990deb9b --- /dev/null +++ b/specification/ai/Face/routes.recognition.tsp @@ -0,0 +1,283 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.common.tsp"; +import "./models.detect.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using Azure.Core; +using Azure.Core.Traits; +using Foundations; + +namespace Face; + +alias IdentifyDocumentNote = """ +> [!NOTE] +> +> * +> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces. +> * Each person could have more than one face, but no more than 248 faces. +> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. +> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array. +"""; + +alias VerifyDocumentNote = """ +> [!NOTE] +> +> * +> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. +> * For the scenarios that are sensitive to accuracy please make your own judgment. 
+"""; + +alias FindSimilarDocument = """ +Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. + +Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. + +"""; + +@returnsDoc("A successful call returns the identified candidate person(s) for each query face.") +@sharedRoute +@post +@route("identify") +op IdentifyOperation is Azure.Core.RpcOperation< + { + @doc("Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].") + @minItems(1) + @maxItems(10) + faceIds: uuid[]; + + ...T; + + @doc("The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.") + @minValue(1) + @maxValue(100) + maxNumOfCandidatesReturned?: int32 = 10; + + @doc("Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates.") + @minValue(0) + @maxValue(1) + confidenceThreshold?: float32; + }, + Body, + ServiceTraits, + FaceErrorResponse +>; + +@returnsDoc("A successful call returns the verification result.") +@sharedRoute +@post +@route("verify") +op VerifyOperation is Azure.Core.RpcOperation< + T, + VerificationResult, + ServiceTraits, + FaceErrorResponse +>; + +@returnsDoc("A successful call returns an array of the most similar faces represented in faceId if the input parameter is faceIds or persistedFaceId if the input parameter is faceListId or largeFaceListId.") +@sharedRoute +@post +@route("findsimilars") +op FindSimilarOperation is Azure.Core.RpcOperation< + { + @doc("faceId of the query face. User needs to call \"Detect\" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.") + faceId: uuid; + + @doc("The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.") + @minValue(1) + @maxValue(1000) + maxNumOfCandidatesReturned?: int32 = 20; + + @doc("Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.") + mode?: FindSimilarMatchMode = "matchPerson"; + + ...T; + }, + Body, + ServiceTraits, + FaceErrorResponse +>; + +interface FaceRecognitionOperations { + @summary("Given query face's faceId, to search the similar-looking faces from a faceId array. A faceId array contains the faces created by Detect.") + @doc( + """ + ${FindSimilarDocument} + The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array. + """ + ) + findSimilar is FindSimilarOperation<{ + @doc("An array of candidate faceIds. All of them are created by \"Detect\" and the faceIds will expire 24 hours after the detection call. 
The number of faceIds is limited to 1000.") + faceIds: uuid[]; + }>; + + @summary("Given query face's faceId, to search the similar-looking faces from a Face List. A 'faceListId' is created by Create Face List.") + @doc( + """ + ${FindSimilarDocument} + The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Face List. + """ + ) + findSimilarFromFaceList is FindSimilarOperation<{ + @doc("An existing user-specified unique candidate Face List, created in \"Create Face List\". Face List contains a set of persistedFaceIds which are persisted and will never expire.") + faceListId: string; + }>; + + @summary("Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List.") + @doc( + """ + ${FindSimilarDocument} + The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Large Face List. + """ + ) + findSimilarFromLargeFaceList is FindSimilarOperation<{ + @doc("An existing user-specified unique candidate Large Face List, created in \"Create Large Face List\". Large Face List contains a set of persistedFaceIds which are persisted and will never expire.") + largeFaceListId: string; + }>; + + @summary("1-to-many identification to find the closest matches of the specific query person face from a Person Group.") + @doc( + """ + For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Group (given by personGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Person Group should be trained to make it ready for identification. See more in \"Train Person Group\". + ${IdentifyDocumentNote} + > * Try \"Find Similar\" when you need to find similar faces from a Face List/Large Face List instead of a Person Group. 
+ > * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group. + """ + ) + identifyFromPersonGroup is IdentifyOperation<{ + @doc("personGroupId of the target Person Group, created by \"Create Person Group\". Parameter personGroupId and largePersonGroupId should not be provided at the same time.") + personGroupId: string; + }>; + + @summary("1-to-many identification to find the closest matches of the specific query person face from a Large Person Group.") + @doc( + """ + For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Large Person Group (given by largePersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Large Person Group should be trained to make it ready for identification. See more in \"Train Large Person Group\". + ${IdentifyDocumentNote} + > * Try \"Find Similar\" when you need to find similar faces from a Face List/Large Face List instead of a Person Group/Large Person Group. + > * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group or Large Person Group. + """ + ) + identifyFromLargePersonGroup is IdentifyOperation<{ + @doc("largePersonGroupId of the target Large Person Group, created by \"Create Large Person Group\". Parameter personGroupId and largePersonGroupId should not be provided at the same time.") + largePersonGroupId: string; + }>; + + @summary("1-to-many identification to find the closest matches of the specific query person face from a person directory personIds array.") + @doc( + """ + For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Directory Persons (given by personIds), and return candidate person(s) for that face ranked by similarity confidence. 
+ ${IdentifyDocumentNote} + > * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces. + """ + ) + identifyFromPersonDirectory is IdentifyOperation<{ + @doc("Array of personIds created in Person Directory \"Create Person\". The valid number of personIds is between [1,30].") + @minItems(1) + @maxItems(30) + personIds: uuid[]; + }>; + + @summary("1-to-many identification to find the closest matches of the specific query person face from a Dynamic Person Group.") + @doc( + """ + For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Dynamic Person Group (given by dynamicPersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. + ${IdentifyDocumentNote} + > * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces. + """ + ) + identifyFromDynamicPersonGroup is IdentifyOperation<{ + @doc("DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against.") + dynamicPersonGroupId: string; + }>; + + @summary("Verify whether two faces belong to a same person.") + @doc( + """ + ${VerifyDocumentNote} + > * The 'recognitionModel' associated with the both faces should be the same. + """ + ) + verifyFaceToFace is VerifyOperation<{ + @doc("The faceId of one face, come from \"Detect\".") + faceId1: uuid; + + @doc("The faceId of another face, come from \"Detect\".") + faceId2: uuid; + }>; + + @summary("Verify whether a face belongs to a person in a Person Group.") + @doc( + """ + ${VerifyDocumentNote} + > * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Person Group. 
+ """ + ) + verifyFromPersonGroup is VerifyOperation<{ + @doc("The faceId of the face, come from \"Detect\".") + faceId: uuid; + + @doc("Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in \"Create Person Group\".") + personGroupId: string; + + @doc("Specify a certain person in Person Group.") + personId: uuid; + }>; + + @summary("Verify whether a face belongs to a person in a Large Person Group.") + @doc( + """ + ${VerifyDocumentNote} + > * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Large Person Group. + """ + ) + verifyFromLargePersonGroup is VerifyOperation<{ + @doc("The faceId of the face, come from \"Detect\".") + faceId: uuid; + + @doc("Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in \"Create Large Person Group\".") + largePersonGroupId: string; + + @doc("Specify a certain person in Large Person Group.") + personId: uuid; + }>; + + @doc( + """ + ${VerifyDocumentNote} + > * The Verify operation can only match faces obtained with the same recognition model, that is associated with the query face. + """ + ) + @summary("Verify whether a face belongs to a person in Person Directory.") + verifyFromPersonDirectory is VerifyOperation<{ + @doc("The faceId of the face, come from \"Detect\".") + faceId: uuid; + + @doc("Specify a certain person in PersonDirectory Person.") + personId: uuid; + }>; + + @summary("Divide candidate faces into groups based on face similarity.") + @doc(""" + > + * + * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result. 
+ * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts. + * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try \"Verify Face To Face\" when you only have 2 candidate faces. + * The 'recognitionModel' associated with the query faces' faceIds should be the same. + """) + @returnsDoc("A successful call returns one or more groups of similar faces (rank by group size) and a messyGroup.") + @post + @route("group") + group is Azure.Core.RpcOperation< + { + @doc("Array of candidate faceIds created by \"Detect\". The maximum is 1000 faces.") + faceIds: uuid[]; + }, + GroupingResult, + ServiceTraits, + FaceErrorResponse + >; +} diff --git a/specification/ai/Face/routes.session.tsp b/specification/ai/Face/routes.session.tsp new file mode 100644 index 000000000000..eff89082a2e6 --- /dev/null +++ b/specification/ai/Face/routes.session.tsp @@ -0,0 +1,152 @@ +import "@typespec/http"; +import "@typespec/rest"; +import "@azure-tools/typespec-azure-core"; +import "./models.session.tsp"; + +using TypeSpec.Http; +using TypeSpec.Rest; +using Azure.Core; +using Azure.Core.Traits; +using Foundations; + +namespace Face; + +alias SessionCommonDescription = "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired."; +alias LivenessSessionWithVerifyDescription = """ +${SessionCommonDescription} + +Permissions includes... +> +* + * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. + * A token lifetime of 10 minutes. 
+ +> [!NOTE] +> +> * +> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. +> * To retrieve a result, use the Get Liveness With Verify Session. +> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. +"""; +alias SessionCreationSuccess = "A successful call creates a session for a client device and provides an authorization token for use by the client application for a limited purpose and time."; +alias DeleteSessionSummary = "Delete all session related information matching the specified session id."; +alias DeleteSessionDescription = """ +> [!NOTE] +> Deleting a session deactivates the Session Auth Token by blocking future API calls made with that Auth Token. While this can be used to remove any access for that token, those requests will still count towards overall resource rate limits. It's best to leverage TokenTTL to limit length of tokens in the case that it is misused. +"""; +alias ListSessionDescription = """ +List sessions from the last sessionId greater than the 'start'. + +The result should be ordered by sessionId in ascending order. +"""; +alias ListSessionAuditEntriesDescription = "Gets session requests and response body for the session."; + +@get +@action("audit") +@actionSeparator("/") +op FaceLivenessSessionListAuditEntries is Foundations.ResourceOperation< + TResource, + ListRequestOptions, + Body, + ServiceTraits, + FaceErrorResponse +>; + +interface LivenessSessionOperations { + @doc( + """ + ${SessionCommonDescription} + + Permissions include... + > + * + * Ability to call /detectLiveness/singleModal for up to 3 retries. + * A token lifetime of 10 minutes. + + > [!NOTE] + > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. 
To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries. + """ + ) + @summary("Create a new detect liveness session.") + @returnsDoc(SessionCreationSuccess) + createLivenessSession is FaceResourceCreateWithServiceProvidedName< + LivenessSession, + CreateLivenessSessionContent, + CreateLivenessSessionResult + >; + + @summary(DeleteSessionSummary) + @doc(DeleteSessionDescription) + @returnsDoc("Successfully deleted session and all correlated data.") + deleteLivenessSession is FaceResourceDeleteOperation; + + @doc("Get session result of detectLiveness/singleModal call.") + getLivenessSessionResult is FaceResourceReadOperation; + + @doc(ListSessionDescription) + @summary("Lists sessions for /detectLiveness/SingleModal.") + getLivenessSessions is FaceResourceListOperation< + LivenessSession, + LivenessSessionItem + >; + + @doc(ListSessionAuditEntriesDescription) + getLivenessSessionAuditEntries is FaceLivenessSessionListAuditEntries; + + #suppress "@azure-tools/typespec-azure-core/byos" "Representation of existing multipart/form-data operation" + @doc( + """ + ${LivenessSessionWithVerifyDescription} + + Recommended Option: VerifyImage is provided during session creation. + """ + ) + @summary("Create a new liveness session with verify. Provide the verify image during session creation.") + @returnsDoc(SessionCreationSuccess) + @sharedRoute + createLivenessWithVerifySessionWithVerifyImage is FaceResourceCreateWithServiceProvidedName< + LivenessWithVerifySession, + CreateLivenessWithVerifySessionContent, + CreateLivenessWithVerifySessionResult + >; + + @doc( + """ + ${LivenessSessionWithVerifyDescription} + + Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. + > [!NOTE] + > Extra measures should be taken to validate that the client is sending the expected VerifyImage. + """ + ) + @summary("Create a new liveness session with verify. 
Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.") + @returnsDoc(SessionCreationSuccess) + @sharedRoute + createLivenessWithVerifySession is FaceResourceCreateWithServiceProvidedName< + LivenessWithVerifySession, + CreateLivenessSessionContent, + CreateLivenessWithVerifySessionResult + >; + + @summary(DeleteSessionSummary) + @doc(DeleteSessionDescription) + @returnsDoc("Successfully deleted session and all correlated data.") + deleteLivenessWithVerifySession is FaceResourceDeleteOperation; + + @doc("Get session result of detectLivenessWithVerify/singleModal call.") + getLivenessWithVerifySessionResult is FaceResourceReadOperation; + + @summary("Lists sessions for /detectLivenessWithVerify/SingleModal.") + @doc(""" + List sessions from the last sessionId greater than the \"start\". + + The result should be ordered by sessionId in ascending order. + """) + getLivenessWithVerifySessions is FaceResourceListOperation< + LivenessWithVerifySession, + LivenessSessionItem + >; + + @doc(ListSessionAuditEntriesDescription) + getLivenessWithVerifySessionAuditEntries is FaceLivenessSessionListAuditEntries; +} diff --git a/specification/ai/Face/tspconfig.yaml b/specification/ai/Face/tspconfig.yaml new file mode 100644 index 000000000000..df46583b7702 --- /dev/null +++ b/specification/ai/Face/tspconfig.yaml @@ -0,0 +1,45 @@ +parameters: + "service-dir": + default: "sdk/face" + "dependencies": + "additionalDirectories": [] + default: "" +emit: + - "@azure-tools/typespec-autorest" +options: + "@azure-tools/typespec-autorest": + azure-resource-provider-folder: "data-plane" + emitter-output-dir: "{project-root}/.." 
+ examples-directory: "examples" + omit-unreachable-types: true + output-file: "{azure-resource-provider-folder}/{service-name}/{version-status}/{version}/Face.json" + "@azure-tools/typespec-python": + package-dir: "azure-ai-vision-face" + package-name: "{package-dir}" + package-mode: dataplane + flavor: azure + "@azure-tools/typespec-csharp": + package-dir: "Azure.AI.Vision.Face" + namespace: "{package-dir}" + clear-output-folder: true + model-namespace: false + flavor: azure + "@azure-tools/typespec-ts": + package-dir: "ai-vision-face-rest" + generateMetadata: true + generateTest: true + flavor: azure + packageDetails: + name: "@azure-rest/ai-vision-face" + description: "Face API REST Client" + "@azure-tools/typespec-java": + package-dir: "azure-ai-vision-face" + namespace: com.azure.ai.vision.face + partial-update: true + flavor: azure +linter: + extends: + - "@azure-tools/typespec-azure-core/all" + disable: + "@azure-tools/typespec-azure-core/operation-missing-api-version": "API version located in the host template" + "@azure-tools/typespec-azure-core/use-standard-operations": "Most of our operation doesn't fit standard ops" diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json new file mode 100644 index 000000000000..2fe7dfd5b228 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/Face.json @@ -0,0 +1,9416 @@ +{ + "swagger": "2.0", + "info": { + "title": "Azure AI Face API", + "version": "v1.1-preview.1", + "x-typespec-generated": [ + { + "emitter": "@azure-tools/typespec-autorest" + } + ] + }, + "schemes": [ + "https" + ], + "x-ms-parameterized-host": { + "hostTemplate": "{endpoint}/face/{apiVersion}", + "useSchemePrefix": false, + "parameters": [ + { + "name": "endpoint", + "in": "path", + "description": "Supported Cognitive Services endpoints (protocol and hostname, for example:\nhttps://{resource-name}.cognitiveservices.azure.com).", + 
"required": true, + "type": "string", + "format": "uri", + "x-ms-skip-url-encoding": true + }, + { + "name": "apiVersion", + "in": "path", + "description": "API Version", + "required": true, + "type": "string", + "enum": [ + "v1.1-preview.1" + ], + "x-ms-enum": { + "name": "Versions", + "modelAsString": true, + "values": [ + { + "name": "v1_1_preview_1", + "value": "v1.1-preview.1", + "description": "v1.1-preview.1" + } + ] + } + } + ] + }, + "produces": [ + "application/json" + ], + "consumes": [ + "application/json" + ], + "security": [ + { + "KeyAuth": [] + }, + { + "AADToken": [ + "https://cognitiveservices.azure.com/.default" + ] + } + ], + "securityDefinitions": { + "AADToken": { + "type": "oauth2", + "description": "The Azure Active Directory OAuth2 Flow", + "flow": "accessCode", + "authorizationUrl": "https://api.example.com/oauth2/authorize", + "scopes": { + "https://cognitiveservices.azure.com/.default": "" + }, + "tokenUrl": "https://api.example.com/oauth2/token" + }, + "KeyAuth": { + "type": "apiKey", + "description": "The secret key for your Azure AI Face subscription.", + "name": "Ocp-Apim-Subscription-Key", + "in": "header" + } + }, + "tags": [], + "paths": { + "/detect": { + "post": { + "operationId": "FaceDetectionOperations_DetectFromUrl", + "summary": "Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.", + "description": "> [!IMPORTANT]\n> To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/en-us/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/.\n\n*\n * No image will be stored. Only the extracted face feature(s) will be stored on server. 
The faceId is an identifier of the face feature and will be used in \"Identify\", \"Verify\", and \"Find Similar\". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.\n * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.\n * For optimal results when querying \"Identify\", \"Verify\", and \"Find Similar\" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model\n * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model.\n * 'detection_03': Face attributes (mask and headPose only) and landmarks are supported if you choose this detection model.\n * Different 'recognitionModel' values are provided. If follow-up operations like \"Verify\", \"Identify\", \"Find Similar\" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. 
Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-recognition-model.", + "parameters": [ + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "recognitionModel", + "in": "query", + "description": "The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 
'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "required": false, + "type": "string", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "returnFaceId", + "in": "query", + "description": "Return faceIds of the detected faces or not. The default value is true.", + "required": false, + "type": "boolean", + "default": true + }, + { + "name": "returnFaceAttributes", + "in": "query", + "description": "Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. 
Face attribute analysis has additional computational and time cost.", + "required": false, + "type": "array", + "items": { + "type": "string", + "enum": [ + "headPose", + "glasses", + "occlusion", + "accessories", + "blur", + "exposure", + "noise", + "mask", + "qualityForRecognition", + "age", + "smile", + "facialHair", + "hair" + ], + "x-ms-enum": { + "name": "FaceAttributeType", + "modelAsString": true, + "values": [ + { + "name": "headPose", + "value": "headPose", + "description": "3-D roll/yaw/pitch angles for face direction." + }, + { + "name": "glasses", + "value": "glasses", + "description": "Glasses type. Values include 'NoGlasses', 'ReadingGlasses', 'Sunglasses', 'SwimmingGoggles'." + }, + { + "name": "occlusion", + "value": "occlusion", + "description": "Whether each facial area is occluded, including forehead, eyes and mouth." + }, + { + "name": "accessories", + "value": "accessories", + "description": "Accessories around face, including 'headwear', 'glasses' and 'mask'. Empty array means no accessories detected. Note this is after a face is detected. Large mask could result in no face to be detected." + }, + { + "name": "blur", + "value": "blur", + "description": "Face is blurry or not. Level returns 'Low', 'Medium' or 'High'. Value returns a number between [0,1], the larger the blurrier." + }, + { + "name": "exposure", + "value": "exposure", + "description": "Face exposure level. Level returns 'GoodExposure', 'OverExposure' or 'UnderExposure'." + }, + { + "name": "noise", + "value": "noise", + "description": "Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier" + }, + { + "name": "mask", + "value": "mask", + "description": "Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered." 
+ }, + { + "name": "qualityForRecognition", + "value": "qualityForRecognition", + "description": "The overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. The value is an informal rating of low, medium, or high. Only 'high' quality images are recommended for person enrollment and quality at or above 'medium' is recommended for identification scenarios. The attribute is only available when using any combinations of detection models detection_01 or detection_03, and recognition models recognition_03 or recognition_04." + }, + { + "name": "age", + "value": "age", + "description": "Age in years." + }, + { + "name": "smile", + "value": "smile", + "description": "Smile intensity, a number between [0,1]." + }, + { + "name": "facialHair", + "value": "facialHair", + "description": "Properties describing facial hair attributes." + }, + { + "name": "hair", + "value": "hair", + "description": "Properties describing hair attributes." + } + ] + } + }, + "collectionFormat": "csv" + }, + { + "name": "returnFaceLandmarks", + "in": "query", + "description": "Return face landmarks of the detected faces or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "faceIdTimeToLive", + "in": "query", + "description": "The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. 
The default value is 86400 (24 hours).", + "required": false, + "type": "integer", + "format": "int32", + "default": 86400, + "minimum": 60, + "maximum": 86400 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." + } + }, + "required": [ + "url" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of face entries ranked by face rectangle size in descending order. An empty response indicates no faces detected.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FaceDetectionResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Detect with Image URL": { + "$ref": "./examples/DetectFromUrl.json" + } + } + } + }, + "/detectLiveness/singleModal/sessions": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessSessions", + "summary": "Lists sessions for /detectLiveness/SingleModal.", + "description": "List sessions from the last sessionId greater than the 'start'.\n\nThe result should be ordered by sessionId in ascending order.", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. 
Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LivenessSessionItem" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LivenessSessions": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessSessions.json" + } + } + }, + "post": { + "operationId": "LivenessSessionOperations_CreateLivenessSession", + "summary": "Create a new detect liveness session.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLiveness/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n> Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. 
To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CreateLivenessSessionContent" + } + } + ], + "responses": { + "200": { + "description": "A successful call creates a session for a client device and provides an authorization token for use by the client application for a limited purpose and time.", + "schema": { + "$ref": "#/definitions/CreateLivenessSessionResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create Liveness Session": { + "$ref": "./examples/LivenessSessionOperations_CreateLivenessSession.json" + } + } + } + }, + "/detectLiveness/singleModal/sessions/{sessionId}": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessSessionResult", + "description": "Get session result of detectLiveness/singleModal call.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/LivenessSession" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LivenessSession Result": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessSessionResult.json" + } + } + }, + "delete": { + "operationId": "LivenessSessionOperations_DeleteLivenessSession", + "summary": "Delete all session related information for matching the specified session id.", + "description": "> [!NOTE]\n> Deleting a session deactivates the Session Auth Token by blocking future API calls made with that Auth Token. While this can be used to remove any access for that token, those requests will still count towards overall resource rate limits. It's best to leverage TokenTTL to limit length of tokens in the case that it is misused.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Liveness Session": { + "$ref": "./examples/LivenessSessionOperations_DeleteLivenessSession.json" + } + } + } + }, + "/detectLiveness/singleModal/sessions/{sessionId}/audit": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessSessionAuditEntries", + "description": "Gets session requests and response body for the session.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. 
Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LivenessSessionAuditEntry" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LivenessSession Audit Entries": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessSessionAuditEntries.json" + } + } + } + }, + "/detectLivenessWithVerify/singleModal/sessions": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessions", + "summary": "Lists sessions for /detectLivenessWithVerify/SingleModal.", + "description": "List sessions from the last sessionId greater than the \"start\".\n\nThe result should be ordered by sessionId in ascending order.", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. 
Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LivenessSessionItem" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LivenessWithVerify Sessions": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessWithVerifySessions.json" + } + } + }, + "post": { + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage", + "summary": "Create a new liveness session with verify. Provide the verify image during session creation.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nRecommended Option: VerifyImage is provided during session creation.", + "consumes": [ + "multipart/form-data" + ], + "parameters": [ + { + "$ref": "#/parameters/CreateLivenessWithVerifySessionContent.Parameters" + }, + { + "$ref": "#/parameters/CreateLivenessWithVerifySessionContent.VerifyImage" + } + ], + "responses": { + "200": { + "description": "A successful call creates a session for a client device and provides an authorization token for use by the client application for a limited purpose and time.", + "schema": { + "$ref": "#/definitions/CreateLivenessWithVerifySessionResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Create LivenessWithVerify Session with VerifyImage": { + "$ref": "./examples/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json" + } + } + } + }, + "/detectLivenessWithVerify/singleModal/sessions/{sessionId}": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionResult", + "description": "Get session result of detectLivenessWithVerify/singleModal call.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "$ref": "#/definitions/LivenessWithVerifySession" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LivenessWithVerify Session Result": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json" + } + } + }, + "delete": { + "operationId": "LivenessSessionOperations_DeleteLivenessWithVerifySession", + "summary": "Delete all session related information for matching the specified session id.", + "description": "> [!NOTE]\n> Deleting a session deactivates the Session Auth Token by blocking future API calls made with that Auth Token. While this can be used to remove any access for that token, those requests will still count towards overall resource rate limits. 
It's best to leverage TokenTTL to limit length of tokens in the case that it is misused.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete LivenessWithVerify Session": { + "$ref": "./examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json" + } + } + } + }, + "/detectLivenessWithVerify/singleModal/sessions/{sessionId}/audit": { + "get": { + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries", + "description": "Gets session requests and response body for the session.", + "parameters": [ + { + "name": "sessionId", + "in": "path", + "description": "The unique ID to reference this session.", + "required": true, + "type": "string" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. 
Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "The request has succeeded.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LivenessSessionAuditEntry" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LivenessWithVerify Session Audit Entries": { + "$ref": "./examples/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json" + } + } + } + }, + "/dynamicpersongroups": { + "get": { + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroups", + "summary": "List all existing Dynamic Person Groups by dynamicPersonGroupId along with name and userData.", + "description": "Dynamic Person Groups are stored in alphabetical order of dynamicPersonGroupId.\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Dynamic Person Groups and their information (dynamicPersonGroupId, name and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/DynamicPersonGroup" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get DynamicPersonGroups": { + "$ref": "./examples/PersonDirectoryOperations_GetDynamicPersonGroups.json" + } + } + } + }, + "/dynamicpersongroups/{dynamicPersonGroupId}": { + "get": { + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroup", + "summary": "Retrieve the information of a Dynamic Person Group, including its name and userData.", + "description": "This API returns Dynamic Person Group information only, use Person Directory \"Get Dynamic Person Group Persons\" instead to retrieve person information under the Dynamic Person Group.", + "parameters": [ + { + "name": "dynamicPersonGroupId", + "in": "path", + "description": "ID of the dynamic person group.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "A successful call returns the Dynamic Person Group's information.", + "schema": { + "$ref": "#/definitions/DynamicPersonGroup" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get DynamicPersonGroup": { + "$ref": "./examples/PersonDirectoryOperations_GetDynamicPersonGroup.json" + } + } + }, + "put": { + "operationId": "PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson", + "summary": "Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData.", + "description": "A Dynamic Person Group is a container that references Person Directory \"Create Person\". 
After creation, use Person Directory \"Update Dynamic Person Group\" to add/remove persons to/from the Dynamic Person Group.\n\nDynamic Person Group and user data will be stored on server until Person Directory \"Delete Dynamic Person Group\" is called. Use \"Identify From Dynamic Person Group\" with the dynamicPersonGroupId parameter to identify against persons.\n\nNo image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until Person Directory \"Delete Person\" or \"Delete Person Face\" is called.\n\n'recognitionModel' does not need to be specified with Dynamic Person Groups. Dynamic Person Groups are references to Person Directory \"Create Person\" and therefore work with most all 'recognitionModels'. The faceId's provided during \"Identify\" determine the 'recognitionModel' used.", + "parameters": [ + { + "name": "dynamicPersonGroupId", + "in": "path", + "description": "ID of the dynamic person group.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "addPersonIds": { + "type": "array", + "description": "Array of personIds created by Person Directory \"Create Person\" to be added.", + "minItems": 1, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "name", + "addPersonIds" + ] + } + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. 
The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours. The URL provides the status of when Person Directory \"Get Dynamic Person Group References\" will return the changes made in this request.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create DynamicPersonGroup": { + "$ref": "./examples/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json" + } + }, + "x-ms-long-running-operation": true + }, + "patch": { + "operationId": "PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges", + "summary": "Update the name or userData of an existing Dynamic Person Group, and manage its members by adding or removing persons.", + "description": "The properties keep unchanged if they are not in request body.", + "parameters": [ + { + "name": "dynamicPersonGroupId", + "in": "path", + "description": "ID of the dynamic person group.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + }, + "addPersonIds": { + "type": "array", + "description": "Array of personIds created by Person Directory \"Create Person\" to be added.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "removePersonIds": { + "type": "array", + "description": "Array of personIds created by Person Directory \"Create Person\" to be removed.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + } + } + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours. The URL provides the status of when Person Directory \"Get Dynamic Person Group References\" will return the changes made in this request.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update DynamicPersonGroup": { + "$ref": "./examples/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json" + } + }, + "x-ms-long-running-operation": true + }, + "delete": { + "operationId": "PersonDirectoryOperations_DeleteDynamicPersonGroup", + "summary": "Deletes an existing Dynamic Person Group with specified dynamicPersonGroupId.", + "description": "Deleting this Dynamic Person Group only deletes the references to persons data. 
To delete actual person see Person Directory \"Delete Person\".", + "parameters": [ + { + "name": "dynamicPersonGroupId", + "in": "path", + "description": "ID of the dynamic person group.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours. The URL provides the status of when Person Directory \"Get Dynamic Person Group References\" will return the changes made in this request.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete DynamicPersonGroup": { + "$ref": "./examples/PersonDirectoryOperations_DeleteDynamicPersonGroup.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/dynamicpersongroups/{dynamicPersonGroupId}/persons": { + "get": { + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroupPersons", + "summary": "List all persons in the specified Dynamic Person Group.", + "description": "Persons are stored in alphabetical order of personId created in Person Directory \"Create Person\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. 
Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "dynamicPersonGroupId", + "in": "path", + "description": "ID of the dynamic person group.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of person information in the Person Directory.", + "schema": { + "$ref": "#/definitions/ListPersonResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get DynamicPersonGroup Persons": { + "$ref": "./examples/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json" + } + } + } + }, + "/facelists": { + "get": { + "operationId": "FaceListOperations_GetFaceLists", + "description": "List Face Lists' faceListId, name, userData and recognitionModel.\n\nTo get face information inside Face List use \"Get Face List\".", + "parameters": [ + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Face Lists.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FaceListItem" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get FaceLists": { + "$ref": "./examples/FaceListOperations_GetFaceLists.json" + } + } + } + }, + "/facelists/{faceListId}": { + "get": { + "operationId": "FaceListOperations_GetFaceList", + "description": "Retrieve a Face List's faceListId, name, userData, recognitionModel and faces in the Face List.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. 
The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Face List's information.", + "schema": { + "$ref": "#/definitions/FaceList" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get FaceList": { + "$ref": "./examples/FaceListOperations_GetFaceList.json" + } + } + }, + "put": { + "operationId": "FaceListOperations_CreateFaceList", + "summary": "Create an empty Face List with user-specified faceListId, name, an optional userData and recognitionModel.", + "description": "Up to 64 Face Lists are allowed in one subscription.\n\nFace List is a list of faces, up to 1,000 faces, and used by \"Find Similar From Face List\".\n\nAfter creation, user should use \"Add Face List Face\" to import the faces. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Face List\" is called.\n\n\"Find Similar\" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and \"Identify\".\n\nPlease consider Large Face List when the face number is large. 
It can support up to 1,000,000 faces.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "type": "string", + "description": "The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. 
It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create FaceList": { + "$ref": "./examples/FaceListOperations_CreateFaceList.json" + } + } + }, + "patch": { + "operationId": "FaceListOperations_UpdateFaceList", + "description": "Update information of a Face List, including name and userData.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Update FaceList": { + "$ref": "./examples/FaceListOperations_UpdateFaceList.json" + } + } + }, + "delete": { + "operationId": "FaceListOperations_DeleteFaceList", + "description": "Delete a specified Face List.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete FaceList": { + "$ref": "./examples/FaceListOperations_DeleteFaceList.json" + } + } + } + }, + "/facelists/{faceListId}/persistedfaces": { + "post": { + "operationId": "FaceListOperations_AddFaceListFaceFromUrl", + "summary": "Add a face to a specified Face List, up to 1,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Face List Face\" or \"Delete Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. 
The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. 
The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." + } + }, + "required": [ + "url" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face to FaceList from Url": { + "$ref": "./examples/FaceListOperations_AddFaceListFaceFromUrl.json" + } + } + } + }, + "/facelists/{faceListId}/persistedfaces/{persistedFaceId}": { + "delete": { + "operationId": "FaceListOperations_DeleteFaceListFace", + "summary": "Delete a face from a Face List by specified faceListId and persistedFaceId.", + "description": "Adding/deleting faces to/from a same Face List are processed sequentially and to/from different Face Lists are in parallel.", + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face from FaceList": { + "$ref": "./examples/FaceListOperations_DeleteFaceListFace.json" + } + } + } + }, + "/findsimilars": { + "post": { + "operationId": "FaceRecognitionOperations_FindSimilar", + "summary": "Given query face's faceId, to search the similar-looking faces from a faceId array. A faceId array contains the faces created by Detect.", + "description": "Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.\n\nFind similar has two working modes, \"matchPerson\" and \"matchFace\". 
\"matchPerson\" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. \"matchFace\" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces.\n\nThe 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face. User needs to call \"Detect\" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.", + "default": 20, + "minimum": 1, + "maximum": 1000 + }, + "mode": { + "type": "string", + "description": "Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.", + "default": "matchPerson", + "enum": [ + "matchPerson", + "matchFace" + ], + "x-ms-enum": { + "name": "FindSimilarMatchMode", + "modelAsString": true, + "values": [ + { + "name": "matchPerson", + "value": "matchPerson", + "description": "Match person." + }, + { + "name": "matchFace", + "value": "matchFace", + "description": "Match face." + } + ] + } + }, + "faceIds": { + "type": "array", + "description": "An array of candidate faceIds. All of them are created by \"Detect\" and the faceIds will expire 24 hours after the detection call. 
The number of faceIds is limited to 1000.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "faceId", + "faceIds" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of the most similar faces represented in faceId if the input parameter is faceIds or persistedFaceId if the input parameter is faceListId or largeFaceListId.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FindSimilarResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Find Similar among Face IDs": { + "$ref": "./examples/FaceRecognitionOperations_FindSimilar.json" + } + } + } + }, + "/group": { + "post": { + "operationId": "FaceRecognitionOperations_Group", + "summary": "Divide candidate faces into groups based on face similarity.", + "description": ">\n*\n * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result.\n * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts.\n * Group API needs at least 2 candidate faces and 1000 at most. 
We suggest to try \"Verify Face To Face\" when you only have 2 candidate faces.\n * The 'recognitionModel' associated with the query faces' faceIds should be the same.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of candidate faceIds created by \"Detect\". The maximum is 1000 faces.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "faceIds" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns one or more groups of similar faces (rank by group size) and a messyGroup.", + "schema": { + "$ref": "#/definitions/GroupingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Group Face IDs": { + "$ref": "./examples/FaceRecognitionOperations_Group.json" + } + } + } + }, + "/identify": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromPersonGroup", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a Person Group.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Group (given by personGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Person Group should be trained to make it ready for identification. 
See more in \"Train Person Group\".\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array.\n> * Try \"Find Similar\" when you need to find similar faces from a Face List/Large Face List instead of a Person Group.\n> * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "personGroupId": { + "type": "string", + "description": "personGroupId of the target Person Group, created by \"Create Person Group\". Parameter personGroupId and largePersonGroupId should not be provided at the same time." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. 
Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "personGroupId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from PersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json" + } + } + } + }, + "/largefacelists": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceLists", + "summary": "List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel.", + "description": "To get face information inside largeFaceList use \"Get Large Face List Face\".\n\nLarge Face Lists are stored in alphabetical order of largeFaceListId.\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the largeFaceListId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Large Face Lists and their information (largeFaceListId, name and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargeFaceList" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LargeFaceLists": { + "$ref": "./examples/FaceListOperations_GetLargeFaceLists.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceList", + "description": "Retrieve a Large Face List's largeFaceListId, name, userData and recognitionModel.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Face List's information.", + "schema": { + "$ref": "#/definitions/LargeFaceList" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceList.json" + } + } + }, + "put": { + "operationId": "FaceListOperations_CreateLargeFaceList", + "summary": "Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel.", + "description": "Large Face List is a list of faces, up to 1,000,000 faces, and used by \"Find Similar From Large Face List\".\n\nAfter creation, user should use Add Large Face List Face to import the faces and Train Large Face List to make it ready for \"Find Similar\". No image will be stored. 
Only the extracted face feature(s) will be stored on server until Delete Large Face List is called.\n\n\"Find Similar\" is used for scenario like finding celebrity-like faces, similar face filtering, or as a light way face identification. But if the actual use is to identify person, please use Person Group / Large Person Group and \"Identify\".\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 64 Large Face Lists.\n> * S0-tier subscription quota: 1,000,000 Large Face Lists.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "type": "string", + "description": "The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". 
All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create LargeFaceList": { + "$ref": "./examples/FaceListOperations_CreateLargeFaceList.json" + } + } + }, + "patch": { + "operationId": "FaceListOperations_UpdateLargeFaceList", + "description": "Update information of a Large Face List, including name and userData.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update LargeFaceList": { + "$ref": "./examples/FaceListOperations_UpdateLargeFaceList.json" + } + } + }, + "delete": { + "operationId": "FaceListOperations_DeleteLargeFaceList", + "summary": "Delete a specified Large Face List.", + "description": "Adding/deleting faces to/from a same Large Face List are processed sequentially and to/from different Large Face Lists are in parallel.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Delete LargeFaceList": { + "$ref": "./examples/FaceListOperations_DeleteLargeFaceList.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/persistedfaces": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceListFaces", + "summary": "List faces' persistedFaceId and userData in a specified Large Face List.", + "description": "Faces are stored in alphabetical order of persistedFaceId created in \"Add Large Face List Face\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. 
Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of persisted faces and their information (persistedFaceId and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargeFaceListFace" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Faces from LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceListFaces.json" + } + } + }, + "post": { + "operationId": "FaceListOperations_AddLargeFaceListFaceFromUrl", + "summary": "Add a face to a specified Large Face List, up to 1,000,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Face List Face\" or \"Delete Large Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. 
If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 faces per Large Face List.\n> * S0-tier subscription quota: 1,000,000 faces per Large Face List.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. 
The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." + } + }, + "required": [ + "url" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face to LargeFaceList from Url": { + "$ref": "./examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/persistedfaces/{persistedFaceId}": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceListFace", + "description": "Retrieve persisted face in Large Face List by largeFaceListId and persistedFaceId.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns target persisted face's information (persistedFaceId and userData).", + "schema": { + "$ref": "#/definitions/LargeFaceListFace" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face from LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceListFace.json" + } + } + }, + "patch": { + "operationId": "FaceListOperations_UpdateLargeFaceListFace", + "description": "Update a specified face's userData field in a Large Face List by its persistedFaceId.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Update Face in LargeFaceList": { + "$ref": "./examples/FaceListOperations_UpdateLargeFaceListFace.json" + } + } + }, + "delete": { + "operationId": "FaceListOperations_DeleteLargeFaceListFace", + "description": "Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face From LargeFaceList": { + "$ref": "./examples/FaceListOperations_DeleteLargeFaceListFace.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/train": { + "post": { + "operationId": "FaceListOperations_TrainLargeFaceList", + "summary": "Submit a Large Face List training task.", + "description": "\nTraining is a crucial step that only a trained Large Face List can be used by \"Find Similar From Large Face List\".\n\nThe training task is an asynchronous task. Training time depends on the number of face entries in a Large Face List. It could be in seconds, or up to half an hour for 1,000,000 faces. 
To check training completion, please use \"Get Large Face List Training Status\".", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of TrainingResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Train LargeFaceList": { + "$ref": "./examples/FaceListOperations_TrainLargeFaceList.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/largefacelists/{largeFaceListId}/training": { + "get": { + "operationId": "FaceListOperations_GetLargeFaceListTrainingStatus", + "description": "To check the Large Face List training status completed or still ongoing. Large Face List training is an asynchronous operation triggered by \"Train Large Face List\".\n\nTraining time depends on the number of face entries in a Large Face List. 
It could be in seconds, or up to half an hour for 1,000,000 faces.", + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Face List's training status.", + "schema": { + "$ref": "#/definitions/TrainingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Training Status of LargeFaceList": { + "$ref": "./examples/FaceListOperations_GetLargeFaceListTrainingStatus.json" + } + } + } + }, + "/largepersongroups": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroups", + "summary": "List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel.", + "description": "Large Person Groups are stored in alphabetical order of largePersonGroupId.\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Large Person Groups and their information (largePersonGroupId, name and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargePersonGroup" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get LargePersonGroups": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroups.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroup", + "description": "Retrieve the information of a Large Person Group, including its name, userData and recognitionModel. This API returns Large Person Group information only, use \"Get Large Person Group Persons\" instead to retrieve person information under the Large Person Group.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Person Group's information.", + "schema": { + "$ref": "#/definitions/LargePersonGroup" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroup.json" + } + } + }, + "put": { + "operationId": "PersonGroupOperations_CreateLargePersonGroup", + "summary": "Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel.", + "description": "A Large Person Group is a container holding the uploaded person data, including the face recognition features. 
It can hold up to 1,000,000 entities.\n\nAfter creation, use \"Create Large Person Group Person\" to add person into the group, and call \"Train Large Person Group\" to get this group ready for \"Identify From Large Person Group\".\n\nNo image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called.\n\n'recognitionModel' should be specified to associate with this Large Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Large Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Large Person Group can't be updated to features extracted by another version of recognition model.\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 Large Person Groups.\n> * S0-tier subscription quota: 1,000,000 Large Person Groups.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "type": "string", + "description": "The 'recognitionModel' associated with this Large Person Group. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 
'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreateLargePersonGroup.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdateLargePersonGroup", + "description": "Update an existing Large Person Group's name and userData. 
The properties keep unchanged if they are not in request body.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_UpdateLargePersonGroup.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeleteLargePersonGroup", + "description": "Delete an existing Large Person Group with specified largePersonGroupId. Persisted data in this Large Person Group will be deleted.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Delete LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeleteLargePersonGroup.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupPersons", + "summary": "List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces.", + "description": "Persons are stored in alphabetical order of personId created in \"Create Large Person Group Person\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. 
Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of person information that belong to the Large Person Group.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/LargePersonGroupPerson" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Persons from LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupPersons.json" + } + } + }, + "post": { + "operationId": "PersonGroupOperations_CreateLargePersonGroupPerson", + "summary": "Create a new person in a specified Large Person Group. To add face to this person, please call \"Add Large Person Group Person Face\".", + "description": "> [!NOTE]\n>\n> *\n> * Free-tier subscription quota:\n> * 1,000 persons in all Large Person Groups.\n> * S0-tier subscription quota:\n> * 1,000,000 persons per Large Person Group.\n> * 1,000,000 Large Person Groups.\n> * 1,000,000,000 persons in all Large Person Groups. ", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new personId created.", + "schema": { + "$ref": "#/definitions/CreatePersonResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create Person in LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreateLargePersonGroupPerson.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupPerson", + "description": "Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s).", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns the person's information.", + "schema": { + "$ref": "#/definitions/LargePersonGroupPerson" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Person from LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupPerson.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPerson", + "description": "Update name or userData of a person.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Person in LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPerson", + "description": "Delete an existing person from a Large Person Group. 
The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Person from LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces": { + "post": { + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl", + "summary": "Add a face to a person into a Large Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Person Group Person Face\", \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. 
Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." + } + }, + "required": [ + "url" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face in LargePersonGroup Person from Url": { + "$ref": "./examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupPersonFace", + "description": "Retrieve person face information. The persisted person face is specified by its largePersonGroupId, personId and persistedFaceId.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns target persisted face's information (persistedFaceId and userData).", + "schema": { + "$ref": "#/definitions/LargePersonGroupPersonFace" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face from LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPersonFace", + "description": "Update a person persisted face's userData field.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Face in LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPersonFace", + "summary": "Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId.", + "description": "Adding/deleting faces to/from a same person will be processed sequentially. 
Adding/deleting faces to/from different persons are processed in parallel.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face from LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/train": { + "post": { + "operationId": "PersonGroupOperations_TrainLargePersonGroup", + "summary": "Submit a Large Person Group training task. Training is a crucial step that only a trained Large Person Group can be used by \"Identify From Large Person Group\".", + "description": "The training task is an asynchronous task. Training time depends on the number of person entries, and their faces in a Large Person Group. It could be in several seconds, or up to half an hour for 1,000,000 persons. 
To check training status, please use \"Get Large Person Group Training Status\".", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of TrainingResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Train LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_TrainLargePersonGroup.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/largepersongroups/{largePersonGroupId}/training": { + "get": { + "operationId": "PersonGroupOperations_GetLargePersonGroupTrainingStatus", + "summary": "To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by \"Train Large Person Group\" API.", + "description": "Training time depends on the number of person entries, and their faces in a Large Person Group. 
It could be in seconds, or up to half an hour for 1,000,000 persons.", + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "A successful call returns the Large Person Group's training status.", + "schema": { + "$ref": "#/definitions/TrainingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Training Status of LargePersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json" + } + } + } + }, + "/operations/{operationId}": { + "get": { + "operationId": "GetOperationResult", + "description": "Get status of a long running operation.", + "parameters": [ + { + "name": "operationId", + "in": "path", + "description": "Operation ID of the operation.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns the long running operation status.", + "schema": { + "$ref": "#/definitions/OperationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face Operation Status": { + "$ref": "./examples/GetOperationResult.json" + } + } + } + }, + "/persongroups": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroups", + "summary": "List Person Groups' personGroupId, name, userData and recognitionModel.", + "description": "Person Groups are stored in alphabetical order of personGroupId.\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. 
The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Person Groups and their information (personGroupId, name and userData).", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/PersonGroup" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get PersonGroups": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroups.json" + } + } + } + }, + "/persongroups/{personGroupId}": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroup", + "description": "Retrieve Person Group name, userData and recognitionModel. To get person information under this personGroup, use \"Get Person Group Persons\".", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + } + ], + "responses": { + "200": { + "description": "A successful call returns the Person Group's information.", + "schema": { + "$ref": "#/definitions/PersonGroup" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroup.json" + } + } + }, + "put": { + "operationId": "PersonGroupOperations_CreatePersonGroup", + "summary": "Create a new Person Group with specified personGroupId, name, user-provided userData and recognitionModel.", + "description": "A Person Group is a container holding the uploaded person data, including face recognition features.\n\nAfter creation, use \"Create Person Group Person\" to add persons into the group, and then call \"Train Person Group\" to get this group ready for \"Identify From Person Group\".\n\nNo image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until \"Delete Person Group Person\" or \"Delete Person Group\" is called.\n\n'recognitionModel' should be specified to associate with this Person Group. The default value for 'recognitionModel' is 'recognition_01', if the latest model needed, please explicitly specify the model you need in this parameter. New faces that are added to an existing Person Group will use the recognition model that's already associated with the collection. Existing face feature(s) in a Person Group can't be updated to features extracted by another version of recognition model.\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 Person Groups. Each holds up to 1,000 persons.\n> * S0-tier subscription quota: 1,000,000 Person Groups. 
Each holds up to 10,000 persons.\n> * To handle larger scale face identification problem, please consider using Large Person Group.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "type": "string", + "description": "The 'recognitionModel' associated with this Person Group. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." 
+ }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create PersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreatePersonGroup.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdatePersonGroup", + "description": "Update an existing Person Group's name and userData. The properties keep unchanged if they are not in request body.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Update PersonGroup": { + "$ref": "./examples/PersonGroupOperations_UpdatePersonGroup.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeletePersonGroup", + "description": "Delete an existing Person Group with specified personGroupId. Persisted data in this Person Group will be deleted.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete PersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeletePersonGroup.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupPersons", + "summary": "List all persons' information in the specified Person Group, including personId, name, userData and persistedFaceIds of registered person faces.", + "description": "Persons are stored in alphabetical order of personId created in \"Create Person Group Person\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of person information that belong to the Person Group.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/PersonGroupPerson" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Persons from PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupPersons.json" + } + } + }, + "post": { + "operationId": "PersonGroupOperations_CreatePersonGroupPerson", + "summary": "Create a new person in a specified Person Group. 
To add face to this person, please call \"Add Person Group Person Face\".", + "description": "> [!NOTE]\n>\n> *\n> * Free-tier subscription quota:\n> * 1,000 persons in all Person Groups.\n> * S0-tier subscription quota:\n> * 10,000 persons per Person Group.\n> * 1,000,000 Person Groups.\n> * 100,000,000 persons in all Person Groups.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new personId created.", + "schema": { + "$ref": "#/definitions/CreatePersonResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Create Person in PersonGroup": { + "$ref": "./examples/PersonGroupOperations_CreatePersonGroupPerson.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupPerson", + "description": "Retrieve a person's name and userData, and the persisted faceIds representing the registered person face feature(s).", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns the person's information.", + "schema": { + "$ref": "#/definitions/PersonGroupPerson" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Person from PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupPerson.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdatePersonGroupPerson", + "description": "Update name or userData of a person.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_UpdatePersonGroupPerson.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeletePersonGroupPerson", + "description": "Delete an existing person from a Person Group. 
The persistedFaceId, userData, person name and face feature(s) in the person entry will all be deleted.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Person from PersonGroup": { + "$ref": "./examples/PersonGroupOperations_DeletePersonGroupPerson.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}/persistedfaces": { + "post": { + "operationId": "PersonGroupOperations_AddPersonGroupPersonFaceFromUrl", + "summary": "Add a face to a person into a Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Person Group Person Face\", \"Delete Person Group Person\" or \"Delete Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. 
Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." + } + }, + "required": [ + "url" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face to PersonGroupPerson from Url": { + "$ref": "./examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}/persistedfaces/{persistedFaceId}": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupPersonFace", + "description": "Retrieve person face information. The persisted person face is specified by its personGroupId, personId and persistedFaceId.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns target persisted face's information (persistedFaceId and userData).", + "schema": { + "$ref": "#/definitions/PersonGroupPersonFace" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face from PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupPersonFace.json" + } + } + }, + "patch": { + "operationId": "PersonGroupOperations_UpdatePersonGroupPersonFace", + "description": "Update a person persisted face's userData field.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Face in PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json" + } + } + }, + "delete": { + "operationId": "PersonGroupOperations_DeletePersonGroupPersonFace", + "summary": "Delete a face from a person in a Person Group by specified personGroupId, personId and persistedFaceId.", + "description": "Adding/deleting faces to/from a same person will be processed sequentially. 
Adding/deleting faces to/from different persons are processed in parallel.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face from PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_DeletePersonGroupPersonFace.json" + } + } + } + }, + "/persongroups/{personGroupId}/train": { + "post": { + "operationId": "PersonGroupOperations_TrainPersonGroup", + "summary": "Submit a Person Group training task. Training is a crucial step that only a trained Person Group can be used by \"Identify From Person Group\".", + "description": "The training task is an asynchronous task. Training time depends on the number of person entries, and their faces in a Person Group. It could be several seconds to minutes. 
To check training status, please use \"Get Person Group Training Status\".", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of TrainingResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Train PersonGroup": { + "$ref": "./examples/PersonGroupOperations_TrainPersonGroup.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/persongroups/{personGroupId}/training": { + "get": { + "operationId": "PersonGroupOperations_GetPersonGroupTrainingStatus", + "description": "To check Person Group training status completed or still ongoing. Person Group training is an asynchronous operation triggered by \"Train Person Group\" API.", + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + ], + "responses": { + "200": { + "description": "A successful call returns the Person Group's training status.", + "schema": { + "$ref": "#/definitions/TrainingResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Training Status of PersonGroup": { + "$ref": "./examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json" + } + } + } + }, + "/persons": { + "get": { + "operationId": "PersonDirectoryOperations_GetPersons", + "summary": "List all persons' information in Person Directory, including personId, name, and userData.", + "description": "Persons are stored in alphabetical order of personId created in Person Directory \"Create Person\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. 
Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of Person Directory Persons contained in the Dynamic Person Group.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/PersonDirectoryPerson" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Persons from PersonDirectory": { + "$ref": "./examples/PersonDirectoryOperations_GetPersons.json" + } + } + }, + "post": { + "operationId": "PersonDirectoryOperations_CreatePerson", + "description": "Creates a new person in a Person Directory. To add face to this person, please call Person Directory \"Add Person Face\".", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. 
The URL expires in 48 hours.", + "schema": { + "$ref": "#/definitions/CreatePersonResult" + }, + "headers": { + "Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of PersonDirectoryPerson" + }, + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create Person in PersonDirectory": { + "$ref": "./examples/PersonDirectoryOperations_CreatePerson.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/persons/{personId}": { + "get": { + "operationId": "PersonDirectoryOperations_GetPerson", + "description": "Retrieve a person's name and userData from Person Directory.", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns the person's information.", + "schema": { + "$ref": "#/definitions/PersonDirectoryPerson" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Person from PersonDirectory": { + "$ref": "./examples/PersonDirectoryOperations_GetPerson.json" + } + } + }, + "patch": { + "operationId": "PersonDirectoryOperations_UpdatePerson", + "description": "Update name or userData of a person.", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Person in PersonDirectory": { + "$ref": "./examples/PersonDirectoryOperations_UpdatePerson.json" + } + } + }, + "delete": { + "operationId": "PersonDirectoryOperations_DeletePerson", + "description": "Delete an existing person from Person Directory. The persistedFaceId(s), userData, person name and face feature(s) in the person entry will all be deleted.", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. 
The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Person": { + "$ref": "./examples/PersonDirectoryOperations_DeletePerson.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/persons/{personId}/dynamicPersonGroupReferences": { + "get": { + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroupReferences", + "summary": "List all Dynamic Person Groups a person has been referenced by in Person Directory.", + "description": "Dynamic Person Groups are stored in alphabetical order of Dynamic Person Group ID created in Person Directory \"Create Dynamic Person Group\".\n>\n*\n * \"start\" parameter (string, optional) specifies an ID value from which returned entries will have larger IDs based on string comparison. Setting \"start\" to an empty value indicates that entries should be returned starting from the first item.\n * \"top\" parameter (int, optional) determines the maximum number of entries to be returned, with a limit of up to 1000 entries per call. 
To retrieve additional entries beyond this limit, specify \"start\" with the personId of the last entry returned in the current call.\n\n> [!TIP]\n>\n> * For example, there are total 5 items with their IDs: \"itemId1\", ..., \"itemId5\".\n> * \"start=&top=\" will return all 5 items.\n> * \"start=&top=2\" will return \"itemId1\", \"itemId2\".\n> * \"start=itemId2&top=3\" will return \"itemId3\", \"itemId4\", \"itemId5\".", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "start", + "in": "query", + "description": "List resources greater than the \"start\". It contains no more than 64 characters. Default is empty.", + "required": false, + "type": "string" + }, + { + "name": "top", + "in": "query", + "description": "The number of items to list, ranging in [1, 1000]. Default is 1000.", + "required": false, + "type": "integer", + "format": "int32", + "default": 1000, + "minimum": 1, + "maximum": 1000 + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of dynamicPersonGroups information that reference the provided personId.", + "schema": { + "$ref": "#/definitions/ListGroupReferenceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get DynamicPersonGroup References": { + "$ref": "./examples/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json" + } + } + } + }, + "/persons/{personId}/recognitionModels/{recognitionModel}/persistedfaces": { + "get": { + "operationId": "PersonDirectoryOperations_GetPersonFaces", + "description": "Retrieve a person's persistedFaceIds representing the registered person face feature(s).", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "recognitionModel", + "in": "path", + "description": "The 'recognitionModel' associated with faces.", + "required": true, + "type": "string", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." 
+ } + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of persistedFaceIds and a person ID.", + "schema": { + "$ref": "#/definitions/ListFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Get Faces from PersonDirectory Person": { + "$ref": "./examples/PersonDirectoryOperations_GetPersonFaces.json" + } + } + }, + "post": { + "operationId": "PersonDirectoryOperations_AddPersonFace", + "summary": "Add a face to a person (see Person Directory \"Create Person\") for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until Person Directory \"Delete Person Face\" or \"Delete Person\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. 
If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model\n*\n * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel.\n * This is a long running operation. Use Response Header \"Operation-Location\" to determine when the AddFace operation has successfully propagated for future requests to \"Identify\". For further information about Operation-Locations see \"Get Face Operation Status\".", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "recognitionModel", + "in": "path", + "description": "The 'recognitionModel' associated with faces.", + "required": true, + "type": "string", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." 
+ }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. 
The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "imageContent", + "in": "body", + "description": "The image to be analyzed", + "required": true, + "schema": { + "type": "string", + "format": "binary" + } + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + }, + "headers": { + "Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of PersonDirectoryFace" + }, + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face to a PersonDirectory Person": { + "$ref": "./examples/PersonDirectoryOperations_AddPersonFaceFromStream.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/persons/{personId}/recognitionModels/{recognitionModel}/persistedfaces/{persistedFaceId}": { + "get": { + "operationId": "PersonDirectoryOperations_GetPersonFace", + "description": "Retrieve person face information. The persisted person face is specified by its personId, 
recognitionModel, and persistedFaceId.", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "recognitionModel", + "in": "path", + "description": "The 'recognitionModel' associated with faces.", + "required": true, + "type": "string", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "200": { + "description": "A successful call returns target persisted face's information (persistedFaceId and userData).", + "schema": { + "$ref": "#/definitions/PersonDirectoryFace" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Get Face from PersonDirectory Person": { + "$ref": "./examples/PersonDirectoryOperations_GetPersonFace.json" + } + } + }, + "patch": { + "operationId": "PersonDirectoryOperations_UpdatePersonFace", + "description": "Update a persisted face's userData field of a person.", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "recognitionModel", + "in": "path", + "description": "The 'recognitionModel' associated with faces.", + "required": true, + "type": "string", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "userData": { + "type": "string", + "description": "User-provided data attached to the face. 
The length limit is 1K.", + "maxLength": 1024 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Update Face of PersonDirectory Person": { + "$ref": "./examples/PersonDirectoryOperations_UpdatePersonFace.json" + } + } + }, + "delete": { + "operationId": "PersonDirectoryOperations_DeletePersonFace", + "summary": "Delete a face from a person in Person Directory by specified personId and persistedFaceId.", + "description": "Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel.", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "recognitionModel", + "in": "path", + "description": "The 'recognitionModel' associated with faces.", + "required": true, + "type": "string", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." 
+ }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "persistedFaceId", + "in": "path", + "description": "Face ID of the face.", + "required": true, + "type": "string", + "format": "uuid" + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours.", + "headers": { + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Delete Face from PersonDirectory Person": { + "$ref": "./examples/PersonDirectoryOperations_DeletePersonFace.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/verify": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFaceToFace", + "summary": "Verify whether two faces belong to a same person.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The 'recognitionModel' associated with the both faces should be the same.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId1": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of one face, come from \"Detect\"." + }, + "faceId2": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of another face, come from \"Detect\"." + } + }, + "required": [ + "faceId1", + "faceId2" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Verify Face to Face": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFaceToFace.json" + } + } + } + } + }, + "x-ms-paths": { + "/detect?_overload=detect": { + "post": { + "operationId": "FaceDetectionOperations_Detect", + "summary": "Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.", + "description": "> [!IMPORTANT]\n> To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/en-us/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/.\n\n*\n * No image will be stored. 
Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in \"Identify\", \"Verify\", and \"Find Similar\". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.\n * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.\n * For optimal results when querying \"Identify\", \"Verify\", and \"Find Similar\" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model\n * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model.\n * 'detection_03': Face attributes (mask and headPose only) and landmarks are supported if you choose this detection model.\n * Different 'recognitionModel' values are provided. If follow-up operations like \"Verify\", \"Identify\", \"Find Similar\" are needed, please specify the recognition model with 'recognitionModel' parameter. 
The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-recognition-model.", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "recognitionModel", + "in": "query", + "description": "The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 
'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.", + "required": false, + "type": "string", + "default": "recognition_01", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + { + "name": "returnFaceId", + "in": "query", + "description": "Return faceIds of the detected faces or not. The default value is true.", + "required": false, + "type": "boolean", + "default": true + }, + { + "name": "returnFaceAttributes", + "in": "query", + "description": "Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. 
Face attribute analysis has additional computational and time cost.", + "required": false, + "type": "array", + "items": { + "type": "string", + "enum": [ + "headPose", + "glasses", + "occlusion", + "accessories", + "blur", + "exposure", + "noise", + "mask", + "qualityForRecognition", + "age", + "smile", + "facialHair", + "hair" + ], + "x-ms-enum": { + "name": "FaceAttributeType", + "modelAsString": true, + "values": [ + { + "name": "headPose", + "value": "headPose", + "description": "3-D roll/yaw/pitch angles for face direction." + }, + { + "name": "glasses", + "value": "glasses", + "description": "Glasses type. Values include 'NoGlasses', 'ReadingGlasses', 'Sunglasses', 'SwimmingGoggles'." + }, + { + "name": "occlusion", + "value": "occlusion", + "description": "Whether each facial area is occluded, including forehead, eyes and mouth." + }, + { + "name": "accessories", + "value": "accessories", + "description": "Accessories around face, including 'headwear', 'glasses' and 'mask'. Empty array means no accessories detected. Note this is after a face is detected. Large mask could result in no face to be detected." + }, + { + "name": "blur", + "value": "blur", + "description": "Face is blurry or not. Level returns 'Low', 'Medium' or 'High'. Value returns a number between [0,1], the larger the blurrier." + }, + { + "name": "exposure", + "value": "exposure", + "description": "Face exposure level. Level returns 'GoodExposure', 'OverExposure' or 'UnderExposure'." + }, + { + "name": "noise", + "value": "noise", + "description": "Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier" + }, + { + "name": "mask", + "value": "mask", + "description": "Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered." 
+ }, + { + "name": "qualityForRecognition", + "value": "qualityForRecognition", + "description": "The overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on. The value is an informal rating of low, medium, or high. Only 'high' quality images are recommended for person enrollment and quality at or above 'medium' is recommended for identification scenarios. The attribute is only available when using any combinations of detection models detection_01 or detection_03, and recognition models recognition_03 or recognition_04." + }, + { + "name": "age", + "value": "age", + "description": "Age in years." + }, + { + "name": "smile", + "value": "smile", + "description": "Smile intensity, a number between [0,1]." + }, + { + "name": "facialHair", + "value": "facialHair", + "description": "Properties describing facial hair attributes." + }, + { + "name": "hair", + "value": "hair", + "description": "Properties describing hair attributes." + } + ] + } + }, + "collectionFormat": "csv" + }, + { + "name": "returnFaceLandmarks", + "in": "query", + "description": "Return face landmarks of the detected faces or not. The default value is false.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "returnRecognitionModel", + "in": "query", + "description": "Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true.", + "required": false, + "type": "boolean", + "default": false + }, + { + "name": "faceIdTimeToLive", + "in": "query", + "description": "The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. 
The default value is 86400 (24 hours).", + "required": false, + "type": "integer", + "format": "int32", + "default": 86400, + "minimum": 60, + "maximum": 86400 + }, + { + "name": "imageContent", + "in": "body", + "description": "The input image binary.", + "required": true, + "schema": { + "type": "string", + "format": "binary" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of face entries ranked by face rectangle size in descending order. An empty response indicates no faces detected.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FaceDetectionResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Detect with Image": { + "$ref": "./examples/Detect.json" + } + } + } + }, + "/detectLivenessWithVerify/singleModal/sessions?_overload=createLivenessWithVerifySession": { + "post": { + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", + "summary": "Create a new liveness session with verify. Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.", + "description": "A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. 
Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired.\n\nPermissions includes...\n>\n*\n * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.\n * A token lifetime of 10 minutes.\n\n> [!NOTE]\n>\n> *\n> * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.\n> * To retrieve a result, use the Get Liveness With Verify Session.\n> * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.\n\nAlternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.\n> [!NOTE]\n> Extra measures should be taken to validate that the client is sending the expected VerifyImage.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/CreateLivenessSessionContent" + } + } + ], + "responses": { + "200": { + "description": "A successful call create a session for a client device and provide an authorization token for use by the client application for a limited purpose and time.", + "schema": { + "$ref": "#/definitions/CreateLivenessWithVerifySessionResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Create LivenessWithVerify Session": { + "$ref": "./examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json" + } + } + } + }, + "/dynamicpersongroups/{dynamicPersonGroupId}?_overload=createDynamicPersonGroup": { + "put": { + "operationId": "PersonDirectoryOperations_CreateDynamicPersonGroup", + "summary": "Creates a new Dynamic Person Group with specified dynamicPersonGroupId, name, and user-provided userData.", + "description": "A Dynamic Person Group is a container that references Person Directory \"Create Person\". After creation, use Person Directory \"Update Dynamic Person Group\" to add/remove persons to/from the Dynamic Person Group.\n\nDynamic Person Group and user data will be stored on server until Person Directory \"Delete Dynamic Person Group\" is called. Use \"Identify From Dynamic Person Group\" with the dynamicPersonGroupId parameter to identify against persons.\n\nNo image will be stored. Only the person's extracted face feature(s) and userData will be stored on server until Person Directory \"Delete Person\" or \"Delete Person Face\" is called.\n\n'recognitionModel' does not need to be specified with Dynamic Person Groups. Dynamic Person Groups are references to Person Directory \"Create Person\" and therefore work with most all 'recognitionModels'. The faceId's provided during \"Identify\" determine the 'recognitionModel' used.", + "parameters": [ + { + "name": "dynamicPersonGroupId", + "in": "path", + "description": "ID of the dynamic person group.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + } + }, + "required": [ + "name" + ] + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Create DynamicPersonGroup": { + "$ref": "./examples/PersonDirectoryOperations_CreateDynamicPersonGroup.json" + } + } + } + }, + "/dynamicpersongroups/{dynamicPersonGroupId}?_overload=updateDynamicPersonGroup": { + "patch": { + "operationId": "PersonDirectoryOperations_UpdateDynamicPersonGroup", + "summary": "Update the name or userData of an existing Dynamic Person Group, and manage its members by adding or removing persons.", + "description": "The properties keep unchanged if they are not in request body.", + "parameters": [ + { + "name": "dynamicPersonGroupId", + "in": "path", + "description": "ID of the dynamic person group.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + } + } + } + } + ], + "responses": { + "200": { + "description": "The request has succeeded." + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Update DynamicPersonGroup": { + "$ref": "./examples/PersonDirectoryOperations_UpdateDynamicPersonGroup.json" + } + } + } + }, + "/facelists/{faceListId}/persistedfaces?_overload=addFaceListFace": { + "post": { + "operationId": "FaceListOperations_AddFaceListFace", + "summary": "Add a face to a specified Face List, up to 1,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Face List Face\" or \"Delete Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "faceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." 
+ } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "imageContent", + "in": "body", + "description": "The image to be analyzed", + "required": true, + "schema": { + "type": "string", + "format": "binary" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face to FaceList": { + "$ref": "./examples/FaceListOperations_AddFaceListFaceFromStream.json" + } + } + } + }, + "/findsimilars?_overload=findSimilarFromFaceList": { + "post": { + "operationId": "FaceRecognitionOperations_FindSimilarFromFaceList", + "summary": "Given query face's faceId, to search the similar-looking faces from a Face List. A 'faceListId' is created by Create Face List.", + "description": "Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.\n\nFind similar has two working modes, \"matchPerson\" and \"matchFace\". \"matchPerson\" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. \"matchFace\" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. 
It can be used in the cases like searching celebrity-looking faces.\n\nThe 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Face List.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face. User needs to call \"Detect\" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.", + "default": 20, + "minimum": 1, + "maximum": 1000 + }, + "mode": { + "type": "string", + "description": "Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.", + "default": "matchPerson", + "enum": [ + "matchPerson", + "matchFace" + ], + "x-ms-enum": { + "name": "FindSimilarMatchMode", + "modelAsString": true, + "values": [ + { + "name": "matchPerson", + "value": "matchPerson", + "description": "Match person." + }, + { + "name": "matchFace", + "value": "matchFace", + "description": "Match face." + } + ] + } + }, + "faceListId": { + "type": "string", + "description": "An existing user-specified unique candidate Face List, created in \"Create Face List\". Face List contains a set of persistedFaceIds which are persisted and will never expire." 
+ } + }, + "required": [ + "faceId", + "faceListId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of the most similar faces represented in faceId if the input parameter is faceIds or persistedFaceId if the input parameter is faceListId or largeFaceListId.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FindSimilarResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Find Similar from FaceList": { + "$ref": "./examples/FaceRecognitionOperations_FindSimilarFromFaceList.json" + } + } + } + }, + "/findsimilars?_overload=findSimilarFromLargeFaceList": { + "post": { + "operationId": "FaceRecognitionOperations_FindSimilarFromLargeFaceList", + "summary": "Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List.", + "description": "Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity.\n\nFind similar has two working modes, \"matchPerson\" and \"matchFace\". \"matchPerson\" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. \"matchFace\" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. 
It can be used in the cases like searching celebrity-looking faces.\n\nThe 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target Large Face List.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face. User needs to call \"Detect\" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.", + "default": 20, + "minimum": 1, + "maximum": 1000 + }, + "mode": { + "type": "string", + "description": "Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.", + "default": "matchPerson", + "enum": [ + "matchPerson", + "matchFace" + ], + "x-ms-enum": { + "name": "FindSimilarMatchMode", + "modelAsString": true, + "values": [ + { + "name": "matchPerson", + "value": "matchPerson", + "description": "Match person." + }, + { + "name": "matchFace", + "value": "matchFace", + "description": "Match face." + } + ] + } + }, + "largeFaceListId": { + "type": "string", + "description": "An existing user-specified unique candidate Large Face List, created in \"Create Large Face List\". Large Face List contains a set of persistedFaceIds which are persisted and will never expire." 
+ } + }, + "required": [ + "faceId", + "largeFaceListId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns an array of the most similar faces represented in faceId if the input parameter is faceIds or persistedFaceId if the input parameter is faceListId or largeFaceListId.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/FindSimilarResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Find Similar from LargeFaceList": { + "$ref": "./examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json" + } + } + } + }, + "/identify?_overload=identifyFromDynamicPersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromDynamicPersonGroup", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a Dynamic Person Group.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Dynamic Person Group (given by dynamicPersonGroupId), and return candidate person(s) for that face ranked by similarity confidence.\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. 
If no person is identified, the returned candidates will be an empty array.\n> * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "dynamicPersonGroupId": { + "type": "string", + "description": "DynamicPersonGroupId of the target PersonDirectory DynamicPersonGroup to match against." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "dynamicPersonGroupId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from DynamicPersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json" + } + } + } + }, + "/identify?_overload=identifyFromLargePersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromLargePersonGroup", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a Large Person Group.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Large Person Group (given by largePersonGroupId), and return candidate person(s) for that face ranked by similarity confidence. The Large Person Group should be trained to make it ready for identification. See more in \"Train Large Person Group\".\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is identified, the returned candidates will be an empty array.\n> * Try \"Find Similar\" when you need to find similar faces from a Face List/Large Face List instead of a Person Group/Large Person Group.\n> * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used by the target Person Group or Large Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "largePersonGroupId": { + "type": "string", + "description": "largePersonGroupId of the target Large Person Group, created by \"Create Large Person Group\". Parameter personGroupId and largePersonGroupId should not be provided at the same time." + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "largePersonGroupId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from LargePersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json" + } + } + } + }, + "/identify?_overload=identifyFromPersonDirectory": { + "post": { + "operationId": "FaceRecognitionOperations_IdentifyFromPersonDirectory", + "summary": "1-to-many identification to find the closest matches of the specific query person face from a person directory personIds array.", + "description": "For each face in the faceIds array, Face Identify will compute similarities between the query face and all the faces in the Person Directory Persons (given by personIds), and return candidate person(s) for that face ranked by similarity confidence.\n> [!NOTE]\n>\n> *\n> * The algorithm allows more than one face to be identified independently at the same request, but no more than 10 faces.\n> * Each person could have more than one face, but no more than 248 faces.\n> * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. 
If no person is identified, the returned candidates will be an empty array.\n> * The Identify operation can only match faces obtained with the same recognition model, that is associated with the query faces.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceIds": { + "type": "array", + "description": "Array of query faces faceIds, created by the \"Detect\". Each of the faces are identified independently. The valid number of faceIds is between [1, 10].", + "minItems": 1, + "maxItems": 10, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "personIds": { + "type": "array", + "description": "Array of personIds created in Person Directory \"Create Person\". The valid number of personIds is between [1,30].", + "minItems": 1, + "maxItems": 30, + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "maxNumOfCandidatesReturned": { + "type": "integer", + "format": "int32", + "description": "The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10.", + "default": 10, + "minimum": 1, + "maximum": 100 + }, + "confidenceThreshold": { + "type": "number", + "format": "float", + "description": "Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. 
Note there is no guarantee of this threshold value working on other data and after algorithm updates.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "faceIds", + "personIds" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the identified candidate person(s) for each query face.", + "schema": { + "type": "array", + "items": { + "$ref": "#/definitions/IdentificationResult" + }, + "x-ms-identifiers": [] + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Identify from PersonDirectory": { + "$ref": "./examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json" + } + } + } + }, + "/largefacelists/{largeFaceListId}/persistedfaces?_overload=addLargeFaceListFace": { + "post": { + "operationId": "FaceListOperations_AddLargeFaceListFace", + "summary": "Add a face to a specified Large Face List, up to 1,000,000 faces.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Face List Face\" or \"Delete Large Face List\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. 
Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model\n\n> [!NOTE]\n>\n> *\n> * Free-tier subscription quota: 1,000 faces per Large Face List.\n> * S0-tier subscription quota: 1,000,000 faces per Large Face List.", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "largeFaceListId", + "in": "path", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. 
The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "imageContent", + "in": "body", + "description": "The image to be analyzed", + "required": true, + "schema": { + "type": "string", + "format": "binary" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face to LargeFaceList": { + "$ref": "./examples/FaceListOperations_AddLargeFaceListFaceFromStream.json" + } + } + } + }, + "/largepersongroups/{largePersonGroupId}/persons/{personId}/persistedfaces?_overload=addLargePersonGroupPersonFace": { + "post": { + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFace", + "summary": "Add a face to a person into a Large Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Large Person Group Person Face\", \"Delete Large Person Group Person\" or \"Delete Large Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "largePersonGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." 
+ }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "imageContent", + "in": "body", + "description": "The image to be analyzed", + "required": true, + "schema": { + "type": "string", + "format": "binary" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face in LargePersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json" + } + } + } + }, + "/persongroups/{personGroupId}/persons/{personId}/persistedfaces?_overload=addPersonGroupPersonFace": { + "post": { + "operationId": "PersonGroupOperations_AddPersonGroupPersonFace", + "summary": "Add a face to a person into a Person Group for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until \"Delete Person Group Person Face\", \"Delete Person Group Person\" or \"Delete Person Group\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model", + "consumes": [ + "application/octet-stream" + ], + "parameters": [ + { + "name": "personGroupId", + "in": "path", + "description": "ID of the container.", + "required": true, + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + }, + { + "name": "personId", + "in": "path", + "description": "ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. 
Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "imageContent", + "in": "body", + "description": "The image to be analyzed", + "required": true, + "schema": { + "type": "string", + "format": "binary" + } + } + ], + "responses": { + "200": { + "description": "A successful call returns a new persistedFaceId.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." 
+ } + } + } + }, + "x-ms-examples": { + "Add Face to PersonGroup Person": { + "$ref": "./examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json" + } + } + } + }, + "/persons/{personId}/recognitionModels/{recognitionModel}/persistedfaces?_overload=addPersonFaceFromUrl": { + "post": { + "operationId": "PersonDirectoryOperations_AddPersonFaceFromUrl", + "summary": "Add a face to a person (see Person Directory \"Create Person\") for face identification or verification.", + "description": "To deal with an image containing multiple faces, input face can be specified as an image with a targetFace rectangle. It returns a persistedFaceId representing the added face. No image will be stored. Only the extracted face feature(s) will be stored on server until Person Directory \"Delete Person Face\" or \"Delete Person\" is called.\n\nNote that persistedFaceId is different from faceId generated by \"Detect\".\n>\n*\n * Higher face image quality means better recognition precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n * Each person entry can hold up to 248 faces.\n * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.\n * \"targetFace\" rectangle should contain one face. Zero or multiple faces will be regarded as an error. If the provided \"targetFace\" rectangle is not returned from \"Detect\", there's no guarantee to detect and add the face successfully.\n * Out of detectable face size (36x36 - 4096x4096 pixels), large head-pose, or large occlusions will cause failures.\n * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.\n * Different 'detectionModel' values can be provided. 
To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model\n*\n * Adding/deleting faces to/from a same person will be processed sequentially. Adding/deleting faces to/from different persons are processed in parallel.\n * This is a long running operation. Use Response Header \"Operation-Location\" to determine when the AddFace operation has successfully propagated for future requests to \"Identify\". For further information about Operation-Locations see \"Get Face Operation Status\".", + "parameters": [ + { + "name": "personId", + "in": "path", + "description": "Person ID of the person.", + "required": true, + "type": "string", + "format": "uuid" + }, + { + "name": "recognitionModel", + "in": "path", + "description": "The 'recognitionModel' associated with faces.", + "required": true, + "type": "string", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." + }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." 
+ } + ] + } + }, + { + "name": "targetFace", + "in": "query", + "description": "A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.", + "required": false, + "type": "array", + "items": { + "type": "integer", + "format": "int32" + }, + "collectionFormat": "csv", + "minItems": 4, + "maxItems": 4 + }, + { + "name": "detectionModel", + "in": "query", + "description": "The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.", + "required": false, + "type": "string", + "default": "detection_01", + "enum": [ + "detection_01", + "detection_02", + "detection_03" + ], + "x-ms-enum": { + "name": "DetectionModel", + "modelAsString": true, + "values": [ + { + "name": "detection_01", + "value": "detection_01", + "description": "The default detection model. Recommend for near frontal face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image orientation, the faces in such cases may not be detected." + }, + { + "name": "detection_02", + "value": "detection_02", + "description": "Detection model released in 2019 May with improved accuracy especially on small, side and blurry faces." + }, + { + "name": "detection_03", + "value": "detection_03", + "description": "Detection model released in 2021 February with improved accuracy especially on small faces." + } + ] + } + }, + { + "name": "userData", + "in": "query", + "description": "User-provided data attached to the face. The size limit is 1K.", + "required": false, + "type": "string", + "maxLength": 1024 + }, + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "URL of input image." 
+ } + }, + "required": [ + "url" + ] + } + } + ], + "responses": { + "202": { + "description": "A successful call returns an empty response body. The service has accepted the request and will start processing soon. The client can query the operation status and result using the URL specified in the 'Operation-Location' response header. The URL expires in 48 hours.", + "schema": { + "$ref": "#/definitions/AddFaceResult" + }, + "headers": { + "Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of PersonDirectoryFace" + }, + "operation-Location": { + "type": "string", + "format": "uri", + "description": "The location of an instance of OperationResult" + } + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Add Face to PersonDirectory Person from Url": { + "$ref": "./examples/PersonDirectoryOperations_AddPersonFaceFromUrl.json" + } + }, + "x-ms-long-running-operation": true + } + }, + "/verify?_overload=verifyFromLargePersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFromLargePersonGroup", + "summary": "Verify whether a face belongs to a person in a Large Person Group.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Large Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of the face, comes from \"Detect\"." + }, + "largePersonGroupId": { + "type": "string", + "description": "Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in \"Create Large Person Group\"." + }, + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Specify a certain person in Large Person Group." + } + }, + "required": [ + "faceId", + "largePersonGroupId", + "personId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Verify from LargePersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json" + } + } + } + }, + "/verify?_overload=verifyFromPersonDirectory": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFromPersonDirectory", + "summary": "Verify whether a face belongs to a person in Person Directory.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The Verify operation can only match faces obtained with the same recognition model, that is associated with the query face.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of the face, come from \"Detect\"." + }, + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Specify a certain person in PersonDirectory Person." + } + }, + "required": [ + "faceId", + "personId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Verify from PersonDirectory": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json" + } + } + } + }, + "/verify?_overload=verifyFromPersonGroup": { + "post": { + "operationId": "FaceRecognitionOperations_VerifyFromPersonGroup", + "summary": "Verify whether a face belongs to a person in a Person Group.", + "description": "> [!NOTE]\n>\n> *\n> * Higher face image quality means better identification precision. 
Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.\n> * For the scenarios that are sensitive to accuracy please make your own judgment.\n> * The 'recognitionModel' associated with the query face should be the same as the 'recognitionModel' used by the Person Group.", + "parameters": [ + { + "name": "body", + "in": "body", + "required": true, + "schema": { + "type": "object", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "The faceId of the face, come from \"Detect\"." + }, + "personGroupId": { + "type": "string", + "description": "Using existing personGroupId and personId for fast loading a specified person. personGroupId is created in \"Create Person Group\"." + }, + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Specify a certain person in Person Group." + } + }, + "required": [ + "faceId", + "personGroupId", + "personId" + ] + } + } + ], + "responses": { + "200": { + "description": "A successful call returns the verification result.", + "schema": { + "$ref": "#/definitions/VerificationResult" + } + }, + "default": { + "description": "An unexpected error response.", + "schema": { + "$ref": "#/definitions/FaceErrorResponse" + }, + "headers": { + "x-ms-error-code": { + "type": "string", + "description": "String error code indicating what went wrong." + } + } + } + }, + "x-ms-examples": { + "Verify from PersonGroup": { + "$ref": "./examples/FaceRecognitionOperations_VerifyFromPersonGroup.json" + } + } + } + } + }, + "definitions": { + "AccessoryItem": { + "type": "object", + "description": "Accessory item and corresponding confidence level.", + "properties": { + "type": { + "$ref": "#/definitions/AccessoryType", + "description": "Type of the accessory." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence level of the accessory type. 
Range between [0,1].", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "type", + "confidence" + ] + }, + "AccessoryType": { + "type": "string", + "description": "Type of the accessory.", + "enum": [ + "headwear", + "glasses", + "mask" + ], + "x-ms-enum": { + "name": "AccessoryType", + "modelAsString": true, + "values": [ + { + "name": "headwear", + "value": "headwear", + "description": "Head wear." + }, + { + "name": "glasses", + "value": "glasses", + "description": "Glasses." + }, + { + "name": "mask", + "value": "mask", + "description": "Mask." + } + ] + } + }, + "AddFaceResult": { + "type": "object", + "description": "Response body for adding face.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in \"Detect\" and will expire in 24 hours after the detection call." + } + }, + "required": [ + "persistedFaceId" + ] + }, + "AuditLivenessResponseInfo": { + "type": "object", + "description": "Audit entry for a response in the session.", + "properties": { + "body": { + "$ref": "#/definitions/LivenessResponseBody", + "description": "The response body. The schema of this field will depend on the request.url and request.method used by the client." + }, + "statusCode": { + "type": "integer", + "format": "int32", + "description": "The HTTP status code returned to the client." + }, + "latencyInMilliseconds": { + "type": "integer", + "format": "int64", + "description": "The server measured latency for this request in milliseconds." + } + }, + "required": [ + "body", + "statusCode", + "latencyInMilliseconds" + ] + }, + "AuditRequestInfo": { + "type": "object", + "description": "Audit entry for a request in the session.", + "properties": { + "url": { + "type": "string", + "description": "The relative URL and query of the liveness request." 
+ }, + "method": { + "type": "string", + "description": "The HTTP method of the request (i.e., GET, POST, DELETE)." + }, + "contentLength": { + "type": "integer", + "format": "int64", + "description": "The length of the request body in bytes." + }, + "contentType": { + "type": "string", + "description": "The content type of the request." + }, + "userAgent": { + "type": "string", + "description": "The user agent used to submit the request." + } + }, + "required": [ + "url", + "method", + "contentType" + ] + }, + "Azure.Core.uuid": { + "type": "string", + "format": "uuid", + "description": "Universally Unique Identifier" + }, + "BlurLevel": { + "type": "string", + "description": "Indicates level of blurriness.", + "enum": [ + "low", + "medium", + "high" + ], + "x-ms-enum": { + "name": "BlurLevel", + "modelAsString": true, + "values": [ + { + "name": "low", + "value": "low", + "description": "Low blur level." + }, + { + "name": "medium", + "value": "medium", + "description": "Medium blur level." + }, + { + "name": "high", + "value": "high", + "description": "High blur level." + } + ] + } + }, + "BlurProperties": { + "type": "object", + "description": "Properties describing any presence of blur within the image.", + "properties": { + "blurLevel": { + "$ref": "#/definitions/BlurLevel", + "description": "An enum value indicating level of blurriness." + }, + "value": { + "type": "number", + "format": "float", + "description": "A number indicating level of blurriness ranging from 0 to 1.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "blurLevel", + "value" + ] + }, + "CreateLivenessSessionContent": { + "type": "object", + "description": "Request for creating liveness session.", + "properties": { + "livenessOperationMode": { + "$ref": "#/definitions/LivenessOperationMode", + "description": "Type of liveness mode the client should follow." 
+ }, + "sendResultsToClient": { + "type": "boolean", + "description": "Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented." + }, + "deviceCorrelationIdSetInClient": { + "type": "boolean", + "description": "Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body." + }, + "deviceCorrelationId": { + "type": "string", + "description": "Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null." + }, + "authTokenTimeToLiveInSeconds": { + "type": "integer", + "format": "int32", + "description": "Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.", + "default": 600, + "minimum": 60, + "maximum": 86400 + } + }, + "required": [ + "livenessOperationMode" + ] + }, + "CreateLivenessSessionContentForMultipart": { + "type": "object", + "description": "Dedicated parameter model for multipart/form-data.", + "properties": { + "livenessOperationMode": { + "$ref": "#/definitions/LivenessOperationMode", + "description": "Type of liveness mode the client should follow." + }, + "sendResultsToClient": { + "type": "boolean", + "description": "Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented." 
+ }, + "deviceCorrelationIdSetInClient": { + "type": "boolean", + "description": "Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body." + }, + "deviceCorrelationId": { + "type": "string", + "description": "Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null." + }, + "authTokenTimeToLiveInSeconds": { + "type": "integer", + "format": "int32", + "description": "Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.", + "default": 600, + "minimum": 60, + "maximum": 86400 + } + }, + "required": [ + "livenessOperationMode" + ] + }, + "CreateLivenessSessionResult": { + "type": "object", + "description": "Response of liveness session creation.", + "properties": { + "sessionId": { + "type": "string", + "description": "The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation." + }, + "authToken": { + "type": "string", + "description": "Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable." + } + }, + "required": [ + "sessionId", + "authToken" + ] + }, + "CreateLivenessWithVerifySessionContent": { + "type": "object", + "description": "Request of liveness with verify session creation.", + "properties": { + "Parameters": { + "$ref": "#/definitions/CreateLivenessSessionContentForMultipart", + "description": "The parameters for creating session." + }, + "VerifyImage": { + "type": "string", + "format": "byte", + "description": "The image stream for verify. Content-Disposition header field for this part must have filename." 
+ } + }, + "required": [ + "Parameters", + "VerifyImage" + ] + }, + "CreateLivenessWithVerifySessionResult": { + "type": "object", + "description": "Response of liveness session with verify creation with verify image provided.", + "properties": { + "sessionId": { + "type": "string", + "description": "The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation." + }, + "authToken": { + "type": "string", + "description": "Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable." + }, + "verifyImage": { + "$ref": "#/definitions/LivenessWithVerifyImage", + "description": "The detail of face for verification." + } + }, + "required": [ + "sessionId", + "authToken" + ] + }, + "CreatePersonResult": { + "type": "object", + "description": "Response of create person.", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Person ID of the person." + } + }, + "required": [ + "personId" + ] + }, + "DynamicPersonGroup": { + "type": "object", + "description": "A container that references Person Directory \"Create Person\".", + "properties": { + "dynamicPersonGroupId": { + "$ref": "#/definitions/collectionId", + "description": "ID of the dynamic person group.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + } + }, + "required": [ + "dynamicPersonGroupId", + "name" + ] + }, + "ExposureLevel": { + "type": "string", + "description": "Indicates level of exposure.", + "enum": [ + "underExposure", + "goodExposure", + "overExposure" + ], + "x-ms-enum": { + "name": "ExposureLevel", + "modelAsString": true, + "values": [ + { + "name": "underExposure", + "value": "underExposure", + "description": "Low exposure level." + }, + { + "name": "goodExposure", + "value": "goodExposure", + "description": "Good exposure level." + }, + { + "name": "overExposure", + "value": "overExposure", + "description": "High exposure level." + } + ] + } + }, + "ExposureProperties": { + "type": "object", + "description": "Properties describing exposure level of the image.", + "properties": { + "exposureLevel": { + "$ref": "#/definitions/ExposureLevel", + "description": "An enum value indicating level of exposure." + }, + "value": { + "type": "number", + "format": "float", + "description": "A number indicating level of exposure level ranging from 0 to 1. [0, 0.25) is under exposure. [0.25, 0.75) is good exposure. [0.75, 1] is over exposure.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "exposureLevel", + "value" + ] + }, + "FaceAttributes": { + "type": "object", + "description": "Face attributes for the detected face.", + "properties": { + "age": { + "type": "number", + "format": "float", + "description": "Age in years." + }, + "smile": { + "type": "number", + "format": "float", + "description": "Smile intensity, a number between [0,1].", + "minimum": 0, + "maximum": 1 + }, + "facialHair": { + "$ref": "#/definitions/FacialHair", + "description": "Properties describing facial hair attributes." + }, + "glasses": { + "$ref": "#/definitions/GlassesType", + "description": "Glasses type if any of the face." + }, + "headPose": { + "$ref": "#/definitions/HeadPose", + "description": "3-D roll/yaw/pitch angles for face direction." 
+ }, + "hair": { + "$ref": "#/definitions/HairProperties", + "description": "Properties describing hair attributes." + }, + "occlusion": { + "$ref": "#/definitions/OcclusionProperties", + "description": "Properties describing occlusions on a given face." + }, + "accessories": { + "type": "array", + "description": "Properties describing any accessories on a given face.", + "items": { + "$ref": "#/definitions/AccessoryItem" + }, + "x-ms-identifiers": [] + }, + "blur": { + "$ref": "#/definitions/BlurProperties", + "description": "Properties describing any presence of blur within the image." + }, + "exposure": { + "$ref": "#/definitions/ExposureProperties", + "description": "Properties describing exposure level of the image." + }, + "noise": { + "$ref": "#/definitions/NoiseProperties", + "description": "Properties describing noise level of the image." + }, + "mask": { + "$ref": "#/definitions/MaskProperties", + "description": "Properties describing the presence of a mask on a given face." + }, + "qualityForRecognition": { + "$ref": "#/definitions/QualityForRecognition", + "description": "Properties describing the overall image quality regarding whether the image being used in the detection is of sufficient quality to attempt face recognition on." + } + } + }, + "FaceDetectionResult": { + "type": "object", + "description": "Response for detect API.", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true." + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true." + }, + "faceRectangle": { + "$ref": "#/definitions/FaceRectangle", + "description": "A rectangle area for the face location on image." 
+ }, + "faceLandmarks": { + "$ref": "#/definitions/FaceLandmarks", + "description": "An array of 27-point face landmarks pointing to the important positions of face components. To return this, it requires 'returnFaceLandmarks' parameter to be true." + }, + "faceAttributes": { + "$ref": "#/definitions/FaceAttributes", + "description": "Face attributes for detected face." + } + }, + "required": [ + "faceRectangle" + ] + }, + "FaceError": { + "type": "object", + "description": "The error object. For comprehensive details on error codes and messages returned by the Face Service, please refer to the following link: https://aka.ms/face-error-codes-and-messages.", + "properties": { + "code": { + "type": "string", + "description": "One of a server-defined set of error codes." + }, + "message": { + "type": "string", + "description": "A human-readable representation of the error." + } + }, + "required": [ + "code", + "message" + ] + }, + "FaceErrorResponse": { + "type": "object", + "description": "A response containing error details.", + "properties": { + "error": { + "$ref": "#/definitions/FaceError", + "description": "The error object." + } + }, + "required": [ + "error" + ] + }, + "FaceLandmarks": { + "type": "object", + "description": "A collection of 27-point face landmarks pointing to the important positions of face components.", + "properties": { + "pupilLeft": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye pupil." + }, + "pupilRight": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye pupil." + }, + "noseTip": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose tip." + }, + "mouthLeft": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the mouth left." + }, + "mouthRight": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the mouth right." 
+ }, + "eyebrowLeftOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eyebrow outer." + }, + "eyebrowLeftInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eyebrow inner." + }, + "eyeLeftOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye outer." + }, + "eyeLeftTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye top." + }, + "eyeLeftBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye bottom." + }, + "eyeLeftInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the left eye inner." + }, + "eyebrowRightInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eyebrow inner." + }, + "eyebrowRightOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eyebrow outer." + }, + "eyeRightInner": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye inner." + }, + "eyeRightTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye top." + }, + "eyeRightBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye bottom." + }, + "eyeRightOuter": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the right eye outer." + }, + "noseRootLeft": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose root left." + }, + "noseRootRight": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose root right." + }, + "noseLeftAlarTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose left alar top." 
+ }, + "noseRightAlarTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose right alar top." + }, + "noseLeftAlarOutTip": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose left alar out tip." + }, + "noseRightAlarOutTip": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the nose right alar out tip." + }, + "upperLipTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the upper lip top." + }, + "upperLipBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the upper lip bottom." + }, + "underLipTop": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the under lip top." + }, + "underLipBottom": { + "$ref": "#/definitions/LandmarkCoordinate", + "description": "The coordinates of the under lip bottom." + } + }, + "required": [ + "pupilLeft", + "pupilRight", + "noseTip", + "mouthLeft", + "mouthRight", + "eyebrowLeftOuter", + "eyebrowLeftInner", + "eyeLeftOuter", + "eyeLeftTop", + "eyeLeftBottom", + "eyeLeftInner", + "eyebrowRightInner", + "eyebrowRightOuter", + "eyeRightInner", + "eyeRightTop", + "eyeRightBottom", + "eyeRightOuter", + "noseRootLeft", + "noseRootRight", + "noseLeftAlarTop", + "noseRightAlarTop", + "noseLeftAlarOutTip", + "noseRightAlarOutTip", + "upperLipTop", + "upperLipBottom", + "underLipTop", + "underLipBottom" + ] + }, + "FaceList": { + "type": "object", + "description": "Face list is a list of faces, up to 1,000 faces.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "faceListId": { + "$ref": "#/definitions/collectionId", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "readOnly": true + }, + "persistedFaces": { + "type": "array", + "description": "Face ids of registered faces in the face list.", + "items": { + "$ref": "#/definitions/FaceListFace" + }, + "x-ms-identifiers": [] + } + }, + "required": [ + "name", + "faceListId" + ] + }, + "FaceListFace": { + "type": "object", + "description": "Face resource for face list.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "FaceListItem": { + "type": "object", + "description": "Face list item for list face list.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "faceListId": { + "$ref": "#/definitions/collectionId", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64." 
+ } + }, + "required": [ + "name", + "faceListId" + ] + }, + "FaceRectangle": { + "type": "object", + "description": "A rectangle within which a face can be found.", + "properties": { + "top": { + "type": "integer", + "format": "int32", + "description": "The distance from the top edge of the image to the top edge of the rectangle, in pixels." + }, + "left": { + "type": "integer", + "format": "int32", + "description": "The distance from the left edge of the image to the left edge of the rectangle, in pixels." + }, + "width": { + "type": "integer", + "format": "int32", + "description": "The width of the rectangle, in pixels." + }, + "height": { + "type": "integer", + "format": "int32", + "description": "The height of the rectangle, in pixels." + } + }, + "required": [ + "top", + "left", + "width", + "height" + ] + }, + "FaceSessionStatus": { + "type": "string", + "description": "The current status of the session.", + "enum": [ + "NotStarted", + "Started", + "ResultAvailable" + ], + "x-ms-enum": { + "name": "FaceSessionStatus", + "modelAsString": true, + "values": [ + { + "name": "NotStarted", + "value": "NotStarted", + "description": "Session has not started." + }, + { + "name": "Started", + "value": "Started", + "description": "Session has started." + }, + { + "name": "ResultAvailable", + "value": "ResultAvailable", + "description": "Session has available result." 
+ } + ] + } + }, + "FacialHair": { + "type": "object", + "description": "Properties describing facial hair attributes.", + "properties": { + "moustache": { + "type": "number", + "format": "float", + "description": "A number ranging from 0 to 1 indicating a level of confidence associated with a property.", + "minimum": 0, + "maximum": 1 + }, + "beard": { + "type": "number", + "format": "float", + "description": "A number ranging from 0 to 1 indicating a level of confidence associated with a property.", + "minimum": 0, + "maximum": 1 + }, + "sideburns": { + "type": "number", + "format": "float", + "description": "A number ranging from 0 to 1 indicating a level of confidence associated with a property.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "moustache", + "beard", + "sideburns" + ] + }, + "FindSimilarResult": { + "type": "object", + "description": "Response body for find similar face operation.", + "properties": { + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1].", + "minimum": 0, + "maximum": 1 + }, + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of candidate face when find by faceIds. faceId is created by \"Detect\" and will expire 24 hours after the detection call." + }, + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "persistedFaceId of candidate face when find by faceListId or largeFaceListId. persistedFaceId in face list/large face list is persisted and will not expire." 
+ } + }, + "required": [ + "confidence" + ] + }, + "GlassesType": { + "type": "string", + "description": "Glasses type of the face.", + "enum": [ + "noGlasses", + "readingGlasses", + "sunglasses", + "swimmingGoggles" + ], + "x-ms-enum": { + "name": "GlassesType", + "modelAsString": true, + "values": [ + { + "name": "noGlasses", + "value": "noGlasses", + "description": "No glasses on the face." + }, + { + "name": "readingGlasses", + "value": "readingGlasses", + "description": "Normal glasses on the face." + }, + { + "name": "sunglasses", + "value": "sunglasses", + "description": "Sunglasses on the face." + }, + { + "name": "swimmingGoggles", + "value": "swimmingGoggles", + "description": "Swimming goggles on the face." + } + ] + } + }, + "GroupingResult": { + "type": "object", + "description": "Response body for group face operation.", + "properties": { + "groups": { + "type": "array", + "description": "A partition of the original faces based on face similarity. Groups are ranked by number of faces.", + "items": { + "type": "array", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + }, + "x-ms-identifiers": [] + }, + "messyGroup": { + "type": "array", + "description": "Face ids array of faces that cannot find any similar faces from original faces.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "groups", + "messyGroup" + ] + }, + "HairColor": { + "type": "object", + "description": "An array of candidate colors and confidence level in the presence of each.", + "properties": { + "color": { + "$ref": "#/definitions/HairColorType", + "description": "Name of the hair color." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence level of the color. 
Range between [0,1].", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "color", + "confidence" + ] + }, + "HairColorType": { + "type": "string", + "description": "Name of the hair color.", + "enum": [ + "unknown", + "white", + "gray", + "blond", + "brown", + "red", + "black", + "other" + ], + "x-ms-enum": { + "name": "HairColorType", + "modelAsString": true, + "values": [ + { + "name": "unknownHairColor", + "value": "unknown", + "description": "Unknown." + }, + { + "name": "white", + "value": "white", + "description": "White." + }, + { + "name": "gray", + "value": "gray", + "description": "Gray." + }, + { + "name": "blond", + "value": "blond", + "description": "Blond." + }, + { + "name": "brown", + "value": "brown", + "description": "Brown." + }, + { + "name": "red", + "value": "red", + "description": "Red." + }, + { + "name": "black", + "value": "black", + "description": "Black." + }, + { + "name": "other", + "value": "other", + "description": "Other." + } + ] + } + }, + "HairProperties": { + "type": "object", + "description": "Properties describing hair attributes.", + "properties": { + "bald": { + "type": "number", + "format": "float", + "description": "A number describing confidence level of whether the person is bald.", + "minimum": 0, + "maximum": 1 + }, + "invisible": { + "type": "boolean", + "description": "A boolean value describing whether the hair is visible in the image." + }, + "hairColor": { + "type": "array", + "description": "An array of candidate colors and confidence level in the presence of each.", + "items": { + "$ref": "#/definitions/HairColor" + }, + "x-ms-identifiers": [] + } + }, + "required": [ + "bald", + "invisible", + "hairColor" + ] + }, + "HeadPose": { + "type": "object", + "description": "3-D roll/yaw/pitch angles for face direction.", + "properties": { + "pitch": { + "type": "number", + "format": "float", + "description": "Value of angles." 
+ }, + "roll": { + "type": "number", + "format": "float", + "description": "Value of angles." + }, + "yaw": { + "type": "number", + "format": "float", + "description": "Value of angles." + } + }, + "required": [ + "pitch", + "roll", + "yaw" + ] + }, + "IdentificationCandidate": { + "type": "object", + "description": "Candidate for identify call.", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "personId of candidate person." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1].", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "personId", + "confidence" + ] + }, + "IdentificationResult": { + "type": "object", + "description": "Identify result.", + "properties": { + "faceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "faceId of the query face." + }, + "candidates": { + "type": "array", + "description": "Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array.", + "items": { + "$ref": "#/definitions/IdentificationCandidate" + }, + "x-ms-identifiers": [] + } + }, + "required": [ + "faceId", + "candidates" + ] + }, + "ImageType": { + "type": "string", + "description": "The type of image.", + "enum": [ + "Color", + "Infrared", + "Depth" + ], + "x-ms-enum": { + "name": "ImageType", + "modelAsString": true + } + }, + "LandmarkCoordinate": { + "type": "object", + "description": "Landmark coordinates within an image.", + "properties": { + "x": { + "type": "number", + "format": "float", + "description": "The horizontal component, in pixels." + }, + "y": { + "type": "number", + "format": "float", + "description": "The vertical component, in pixels." 
+ } + }, + "required": [ + "x", + "y" + ] + }, + "LargeFaceList": { + "type": "object", + "description": "Large face list is a list of faces, up to 1,000,000 faces.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "largeFaceListId": { + "$ref": "#/definitions/collectionId", + "description": "Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.", + "readOnly": true + } + }, + "required": [ + "name", + "largeFaceListId" + ] + }, + "LargeFaceListFace": { + "type": "object", + "description": "Face resource for large face list.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "LargePersonGroup": { + "type": "object", + "description": "The container of the uploaded person data, including face recognition feature, and up to 1,000,000 people.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. 
Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "largePersonGroupId": { + "$ref": "#/definitions/collectionId", + "description": "ID of the container.", + "readOnly": true + } + }, + "required": [ + "name", + "largePersonGroupId" + ] + }, + "LargePersonGroupPerson": { + "type": "object", + "description": "The person in a specified large person group. To add face to this person, please call \"Add Large Person Group Person Face\".", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "ID of the person.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "persistedFaceIds": { + "type": "array", + "description": "Face ids of registered faces in the person.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "personId", + "name" + ] + }, + "LargePersonGroupPersonFace": { + "type": "object", + "description": "Face resource for large person group person.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "ListFaceResult": { + "type": "object", + "description": "Response of list face of person.", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Id of person." 
+ }, + "persistedFaceIds": { + "type": "array", + "description": "Array of persisted face ids.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "personId", + "persistedFaceIds" + ] + }, + "ListGroupReferenceResult": { + "type": "object", + "description": "Response of list dynamic person group of person.", + "properties": { + "dynamicPersonGroupIds": { + "type": "array", + "description": "Array of PersonDirectory DynamicPersonGroup ids.", + "items": { + "$ref": "#/definitions/collectionId" + } + } + }, + "required": [ + "dynamicPersonGroupIds" + ] + }, + "ListPersonResult": { + "type": "object", + "description": "Response of list dynamic person group person.", + "properties": { + "personIds": { + "type": "array", + "description": "Array of PersonDirectory Person ids.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "personIds" + ] + }, + "LivenessDecision": { + "type": "string", + "description": "The outcome of the liveness classification.", + "enum": [ + "uncertain", + "realface", + "spoofface" + ], + "x-ms-enum": { + "name": "LivenessDecision", + "modelAsString": true, + "values": [ + { + "name": "uncertain", + "value": "uncertain", + "description": "The algorithm could not classify the target face as either real or spoof." + }, + { + "name": "realface", + "value": "realface", + "description": "The algorithm has classified the target face as real." + }, + { + "name": "spoofface", + "value": "spoofface", + "description": "The algorithm has classified the target face as a spoof." 
+ } + ] + } + }, + "LivenessModel": { + "type": "string", + "description": "The model version used for liveness classification.", + "enum": [ + "2020-02-15-preview.01", + "2021-11-12-preview.03", + "2022-10-15-preview.04", + "2023-03-02-preview.05" + ], + "x-ms-enum": { + "name": "LivenessModel", + "modelAsString": true + } + }, + "LivenessOperationMode": { + "type": "string", + "description": "The operation mode for the liveness modal.", + "enum": [ + "Passive" + ], + "x-ms-enum": { + "name": "LivenessOperationMode", + "modelAsString": true, + "values": [ + { + "name": "Passive", + "value": "Passive", + "description": "The operation mode for the liveness modal." + } + ] + } + }, + "LivenessOutputsTarget": { + "type": "object", + "description": "The liveness classification for target face.", + "properties": { + "faceRectangle": { + "$ref": "#/definitions/FaceRectangle", + "description": "The face region where the liveness classification was made on." + }, + "fileName": { + "type": "string", + "description": "The file name which contains the face rectangle where the liveness classification was made on." + }, + "timeOffsetWithinFile": { + "type": "integer", + "format": "int32", + "description": "The time offset within the file of the frame which contains the face rectangle where the liveness classification was made on." + }, + "imageType": { + "$ref": "#/definitions/ImageType", + "description": "The image type which contains the face rectangle where the liveness classification was made on." + } + }, + "required": [ + "faceRectangle", + "fileName", + "timeOffsetWithinFile", + "imageType" + ] + }, + "LivenessResponseBody": { + "type": "object", + "description": "The response body of detect liveness API call.", + "properties": { + "livenessDecision": { + "$ref": "#/definitions/LivenessDecision", + "description": "The liveness classification for the target face." 
+ }, + "target": { + "$ref": "#/definitions/LivenessOutputsTarget", + "description": "Specific targets used for liveness classification." + }, + "modelVersionUsed": { + "$ref": "#/definitions/LivenessModel", + "description": "The model version used for liveness classification." + }, + "verifyResult": { + "$ref": "#/definitions/LivenessWithVerifyOutputs", + "description": "The face verification output. Only available when the request is liveness with verify." + } + }, + "allOf": [ + { + "type": "object", + "additionalProperties": {} + } + ] + }, + "LivenessSession": { + "type": "object", + "description": "Session result of detect liveness.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID to reference this session.", + "readOnly": true + }, + "createdDateTime": { + "type": "string", + "format": "date-time", + "description": "DateTime when this session was created." + }, + "sessionStartDateTime": { + "type": "string", + "format": "date-time", + "description": "DateTime when this session was started by the client." + }, + "sessionExpired": { + "type": "boolean", + "description": "Whether or not the session is expired." + }, + "deviceCorrelationId": { + "type": "string", + "description": "Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null." + }, + "authTokenTimeToLiveInSeconds": { + "type": "integer", + "format": "int32", + "description": "Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.", + "default": 600, + "minimum": 60, + "maximum": 86400 + }, + "status": { + "$ref": "#/definitions/FaceSessionStatus", + "description": "The current status of the session." + }, + "result": { + "$ref": "#/definitions/LivenessSessionAuditEntry", + "description": "The latest session audit result only populated if status == 'ResultAvailable'." 
+ } + }, + "required": [ + "id", + "createdDateTime", + "sessionExpired", + "status" + ] + }, + "LivenessSessionAuditEntry": { + "type": "object", + "description": "Audit entry for a request in session.", + "properties": { + "id": { + "type": "integer", + "format": "int64", + "description": "The unique id to refer to this audit request. Use this id with the 'start' query parameter to continue on to the next page of audit results." + }, + "sessionId": { + "type": "string", + "description": "The unique sessionId of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding session DELETE operation." + }, + "requestId": { + "type": "string", + "description": "The unique requestId that is returned by the service to the client in the 'apim-request-id' header." + }, + "clientRequestId": { + "type": "string", + "description": "The unique clientRequestId that is sent by the client in the 'client-request-id' header." + }, + "receivedDateTime": { + "type": "string", + "format": "date-time", + "description": "The UTC DateTime that the request was received." + }, + "request": { + "$ref": "#/definitions/AuditRequestInfo", + "description": "The request of this entry." + }, + "response": { + "$ref": "#/definitions/AuditLivenessResponseInfo", + "description": "The response of this entry." + }, + "digest": { + "type": "string", + "description": "The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution." 
+ } + }, + "required": [ + "id", + "sessionId", + "requestId", + "clientRequestId", + "receivedDateTime", + "request", + "response", + "digest" + ] + }, + "LivenessSessionItem": { + "type": "object", + "description": "Session data returned for enumeration.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID to reference this session.", + "readOnly": true + }, + "createdDateTime": { + "type": "string", + "format": "date-time", + "description": "DateTime when this session was created." + }, + "sessionStartDateTime": { + "type": "string", + "format": "date-time", + "description": "DateTime when this session was started by the client." + }, + "sessionExpired": { + "type": "boolean", + "description": "Whether or not the session is expired." + }, + "deviceCorrelationId": { + "type": "string", + "description": "Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null." + }, + "authTokenTimeToLiveInSeconds": { + "type": "integer", + "format": "int32", + "description": "Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600.", + "default": 600, + "minimum": 60, + "maximum": 86400 + } + }, + "required": [ + "id", + "createdDateTime", + "sessionExpired" + ] + }, + "LivenessWithVerifyImage": { + "type": "object", + "description": "The detail of face for verification.", + "properties": { + "faceRectangle": { + "$ref": "#/definitions/FaceRectangle", + "description": "The face region where the comparison image's classification was made." + }, + "qualityForRecognition": { + "$ref": "#/definitions/QualityForRecognition", + "description": "Quality of face image for recognition." 
+ } + }, + "required": [ + "faceRectangle", + "qualityForRecognition" + ] + }, + "LivenessWithVerifyOutputs": { + "type": "object", + "description": "The face verification output.", + "properties": { + "verifyImage": { + "$ref": "#/definitions/LivenessWithVerifyImage", + "description": "The detail of face for verification." + }, + "matchConfidence": { + "type": "number", + "format": "float", + "description": "The target face liveness face and comparison image face verification confidence.", + "minimum": 0, + "maximum": 1 + }, + "isIdentical": { + "type": "boolean", + "description": "Whether the target liveness face and comparison image face match." + } + }, + "required": [ + "verifyImage", + "matchConfidence", + "isIdentical" + ] + }, + "LivenessWithVerifySession": { + "type": "object", + "description": "Session result of detect liveness with verify.", + "properties": { + "id": { + "type": "string", + "description": "The unique ID to reference this session.", + "readOnly": true + }, + "createdDateTime": { + "type": "string", + "format": "date-time", + "description": "DateTime when this session was created." + }, + "sessionStartDateTime": { + "type": "string", + "format": "date-time", + "description": "DateTime when this session was started by the client." + }, + "sessionExpired": { + "type": "boolean", + "description": "Whether or not the session is expired." + }, + "deviceCorrelationId": { + "type": "string", + "description": "Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null." + }, + "authTokenTimeToLiveInSeconds": { + "type": "integer", + "format": "int32", + "description": "Seconds the session should last for. Range is 60 to 86400 seconds. 
Default value is 600.", + "default": 600, + "minimum": 60, + "maximum": 86400 + }, + "status": { + "$ref": "#/definitions/FaceSessionStatus", + "description": "The current status of the session." + }, + "result": { + "$ref": "#/definitions/LivenessSessionAuditEntry", + "description": "The latest session audit result only populated if status == 'ResultAvailable'." + } + }, + "required": [ + "id", + "createdDateTime", + "sessionExpired", + "status" + ] + }, + "MaskProperties": { + "type": "object", + "description": "Properties describing the presence of a mask on a given face.", + "properties": { + "noseAndMouthCovered": { + "type": "boolean", + "description": "A boolean value indicating whether nose and mouth are covered." + }, + "type": { + "$ref": "#/definitions/MaskType", + "description": "Type of the mask." + } + }, + "required": [ + "noseAndMouthCovered", + "type" + ] + }, + "MaskType": { + "type": "string", + "description": "Type of the mask.", + "enum": [ + "faceMask", + "noMask", + "otherMaskOrOcclusion", + "uncertain" + ], + "x-ms-enum": { + "name": "MaskType", + "modelAsString": true, + "values": [ + { + "name": "faceMask", + "value": "faceMask", + "description": "Face mask." + }, + { + "name": "noMask", + "value": "noMask", + "description": "No mask." + }, + { + "name": "otherMaskOrOcclusion", + "value": "otherMaskOrOcclusion", + "description": "Other types of mask or occlusion." + }, + { + "name": "uncertain", + "value": "uncertain", + "description": "Uncertain." + } + ] + } + }, + "NoiseLevel": { + "type": "string", + "description": "Indicates level of noise.", + "enum": [ + "low", + "medium", + "high" + ], + "x-ms-enum": { + "name": "NoiseLevel", + "modelAsString": true, + "values": [ + { + "name": "low", + "value": "low", + "description": "Low noise level." + }, + { + "name": "medium", + "value": "medium", + "description": "Medium noise level." + }, + { + "name": "high", + "value": "high", + "description": "High noise level." 
+ } + ] + } + }, + "NoiseProperties": { + "type": "object", + "description": "Properties describing noise level of the image.", + "properties": { + "noiseLevel": { + "$ref": "#/definitions/NoiseLevel", + "description": "An enum value indicating level of noise." + }, + "value": { + "type": "number", + "format": "float", + "description": "A number indicating the noise level ranging from 0 to 1. [0, 0.3) is low noise level. [0.3, 0.7) is medium noise level. [0.7, 1] is high noise level.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "noiseLevel", + "value" + ] + }, + "OcclusionProperties": { + "type": "object", + "description": "Properties describing occlusions on a given face.", + "properties": { + "foreheadOccluded": { + "type": "boolean", + "description": "A boolean value indicating whether forehead is occluded." + }, + "eyeOccluded": { + "type": "boolean", + "description": "A boolean value indicating whether eyes are occluded." + }, + "mouthOccluded": { + "type": "boolean", + "description": "A boolean value indicating whether the mouth is occluded." + } + }, + "required": [ + "foreheadOccluded", + "eyeOccluded", + "mouthOccluded" + ] + }, + "OperationResult": { + "type": "object", + "description": "Long running operation resource for person directory.", + "properties": { + "operationId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Operation ID of the operation.", + "readOnly": true + }, + "status": { + "$ref": "#/definitions/OperationStatus", + "description": "Current status of the operation." + }, + "createdTime": { + "type": "string", + "format": "date-time", + "description": "Date and time the operation was created." + }, + "lastActionTime": { + "type": "string", + "format": "date-time", + "description": "Date and time the operation was last updated." 
+ }, + "finishedTime": { + "type": "string", + "format": "date-time", + "description": "Date and time the operation was finished." + }, + "message": { + "type": "string", + "description": "Message for the operation." + } + }, + "required": [ + "operationId", + "status", + "createdTime" + ] + }, + "OperationStatus": { + "type": "string", + "description": "The status of long running operation.", + "enum": [ + "notStarted", + "running", + "succeeded", + "failed" + ], + "x-ms-enum": { + "name": "OperationStatus", + "modelAsString": true, + "values": [ + { + "name": "notStarted", + "value": "notStarted", + "description": "The operation has not started." + }, + { + "name": "running", + "value": "running", + "description": "The operation is still running." + }, + { + "name": "succeeded", + "value": "succeeded", + "description": "The operation succeeded." + }, + { + "name": "failed", + "value": "failed", + "description": "The operation failed." + } + ] + } + }, + "PersonDirectoryFace": { + "type": "object", + "description": "Face resource for person directory person.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "PersonDirectoryPerson": { + "type": "object", + "description": "Person resource for person directory.", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Person ID of the person.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + } + }, + "required": [ + "personId", + "name" + ] + }, + "PersonGroup": { + "type": "object", + "description": "The container of the uploaded person data, including face recognition feature, and up to 10,000 persons. To handle larger scale face identification problem, please consider using Large Person Group.", + "properties": { + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. Length should not exceed 16K.", + "maxLength": 16384 + }, + "recognitionModel": { + "$ref": "#/definitions/RecognitionModel", + "description": "Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds." + }, + "personGroupId": { + "$ref": "#/definitions/collectionId", + "description": "ID of the container.", + "readOnly": true + } + }, + "required": [ + "name", + "personGroupId" + ] + }, + "PersonGroupPerson": { + "type": "object", + "description": "The person in a specified person group. To add face to this person, please call \"Add Person Group Person Face\".", + "properties": { + "personId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "ID of the person.", + "readOnly": true + }, + "name": { + "type": "string", + "description": "User defined name, maximum length is 128.", + "minLength": 1, + "maxLength": 128 + }, + "userData": { + "type": "string", + "description": "Optional user defined data. 
Length should not exceed 16K.", + "maxLength": 16384 + }, + "persistedFaceIds": { + "type": "array", + "description": "Face ids of registered faces in the person.", + "items": { + "$ref": "#/definitions/Azure.Core.uuid" + } + } + }, + "required": [ + "personId", + "name" + ] + }, + "PersonGroupPersonFace": { + "type": "object", + "description": "Face resource for person group person.", + "properties": { + "persistedFaceId": { + "$ref": "#/definitions/Azure.Core.uuid", + "description": "Face ID of the face.", + "readOnly": true + }, + "userData": { + "type": "string", + "description": "User-provided data attached to the face. The length limit is 1K.", + "maxLength": 1024 + } + }, + "required": [ + "persistedFaceId" + ] + }, + "QualityForRecognition": { + "type": "string", + "description": "Indicates quality of image for recognition.", + "enum": [ + "low", + "medium", + "high" + ], + "x-ms-enum": { + "name": "QualityForRecognition", + "modelAsString": true, + "values": [ + { + "name": "low", + "value": "low", + "description": "Low quality." + }, + { + "name": "medium", + "value": "medium", + "description": "Medium quality." + }, + { + "name": "high", + "value": "high", + "description": "High quality." + } + ] + } + }, + "RecognitionModel": { + "type": "string", + "description": "The recognition model for the face.", + "enum": [ + "recognition_01", + "recognition_02", + "recognition_03", + "recognition_04" + ], + "x-ms-enum": { + "name": "RecognitionModel", + "modelAsString": true, + "values": [ + { + "name": "recognition_01", + "value": "recognition_01", + "description": "The default recognition model for \"Detect\". All those faceIds created before 2019 March are bonded with this recognition model." + }, + { + "name": "recognition_02", + "value": "recognition_02", + "description": "Recognition model released in 2019 March." + }, + { + "name": "recognition_03", + "value": "recognition_03", + "description": "Recognition model released in 2020 May." 
+ }, + { + "name": "recognition_04", + "value": "recognition_04", + "description": "Recognition model released in 2021 February. It's recommended to use this recognition model for better recognition accuracy." + } + ] + } + }, + "TrainingResult": { + "type": "object", + "description": "Training result of a container.", + "properties": { + "status": { + "$ref": "#/definitions/OperationStatus", + "description": "Training status of the container." + }, + "createdDateTime": { + "type": "string", + "format": "date-time", + "description": "A combined UTC date and time string that describes the created time of the person group, large person group or large face list." + }, + "lastActionDateTime": { + "type": "string", + "format": "date-time", + "description": "A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained." + }, + "lastSuccessfulTrainingDateTime": { + "type": "string", + "format": "date-time", + "description": "A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list." + }, + "message": { + "type": "string", + "description": "Show failure message when training failed (omitted when training succeeds)." + } + }, + "required": [ + "status", + "createdDateTime", + "lastActionDateTime", + "lastSuccessfulTrainingDateTime" + ] + }, + "VerificationResult": { + "type": "object", + "description": "Verify result.", + "properties": { + "isIdentical": { + "type": "boolean", + "description": "True if the two faces belong to the same person or the face belongs to the person, otherwise false." + }, + "confidence": { + "type": "number", + "format": "float", + "description": "A number indicates the similarity confidence of whether two faces belong to the same person, or whether the face belongs to the person. 
By default, isIdentical is set to True if similarity confidence is greater than or equal to 0.5. This is useful for advanced users to override 'isIdentical' and fine-tune the result on their own data.", + "minimum": 0, + "maximum": 1 + } + }, + "required": [ + "isIdentical", + "confidence" + ] + }, + "collectionId": { + "type": "string", + "minLength": 1, + "maxLength": 64, + "pattern": "^[a-z0-9-_]+$" + } + }, + "parameters": { + "CreateLivenessWithVerifySessionContent.Parameters": { + "name": "Parameters", + "in": "formData", + "description": "The parameters for creating session.", + "required": true, + "type": "string", + "x-ms-parameter-location": "method" + }, + "CreateLivenessWithVerifySessionContent.VerifyImage": { + "name": "VerifyImage", + "in": "formData", + "description": "The image stream for verify. Content-Disposition header field for this part must have filename.", + "required": true, + "type": "file", + "x-ms-parameter-location": "method" + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/Detect.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/Detect.json new file mode 100644 index 000000000000..06b9e7ecf543 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/Detect.json @@ -0,0 +1,181 @@ +{ + "title": "Detect with Image", + "operationId": "FaceDetectionOperations_Detect", + "parameters": { + "apiVersion": "v1.1-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "imageContent": "" + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + 
}, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + }, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 
0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/DetectFromUrl.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/DetectFromUrl.json new file mode 100644 index 000000000000..469992dd6154 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/DetectFromUrl.json @@ -0,0 +1,183 @@ +{ + "title": "Detect with Image URL", + "operationId": "FaceDetectionOperations_DetectFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "returnFaceId": true, + "returnFaceLandmarks": true, + "returnFaceAttributes": "glasses,headPose,occlusion,accessories,blur,exposure,noise,qualityForRecognition", + "recognitionModel": "recognition_03", + "returnRecognitionModel": true, + "detectionModel": "detection_01", + "faceIdTimeToLive": 60, + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "recognitionModel": "recognition_03", + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "faceLandmarks": { + "pupilLeft": { + "x": 412.7, + "y": 78.4 + }, + "pupilRight": { + "x": 446.8, + "y": 74.2 + }, + "noseTip": { + "x": 437.7, + "y": 92.4 + }, + "mouthLeft": { + "x": 417.8, + "y": 114.4 + }, + "mouthRight": { + "x": 451.3, + "y": 109.3 + }, + "eyebrowLeftOuter": { + "x": 397.9, + "y": 78.5 + }, + "eyebrowLeftInner": { + "x": 425.4, + "y": 70.5 + }, + "eyeLeftOuter": { + "x": 406.7, + "y": 80.6 + }, + "eyeLeftTop": { + "x": 412.2, + "y": 76.2 + }, + "eyeLeftBottom": { + "x": 413.0, + "y": 80.1 + }, + "eyeLeftInner": { + "x": 418.9, + "y": 78.0 + }, + "eyebrowRightInner": { + "x": 4.8, + "y": 69.7 + }, + "eyebrowRightOuter": { + "x": 5.5, + "y": 68.5 + }, + "eyeRightInner": { + "x": 441.5, + "y": 75.0 + }, + "eyeRightTop": { + "x": 446.4, + "y": 71.7 + }, + "eyeRightBottom": { + "x": 447.0, + "y": 75.3 + 
}, + "eyeRightOuter": { + "x": 451.7, + "y": 73.4 + }, + "noseRootLeft": { + "x": 428.0, + "y": 77.1 + }, + "noseRootRight": { + "x": 435.8, + "y": 75.6 + }, + "noseLeftAlarTop": { + "x": 428.3, + "y": 89.7 + }, + "noseRightAlarTop": { + "x": 442.2, + "y": 87.0 + }, + "noseLeftAlarOutTip": { + "x": 424.3, + "y": 96.4 + }, + "noseRightAlarOutTip": { + "x": 446.6, + "y": 92.5 + }, + "upperLipTop": { + "x": 437.6, + "y": 105.9 + }, + "upperLipBottom": { + "x": 437.6, + "y": 108.2 + }, + "underLipTop": { + "x": 436.8, + "y": 111.4 + }, + "underLipBottom": { + "x": 437.3, + "y": 114.5 + } + }, + "faceAttributes": { + "glasses": "sunglasses", + "headPose": { + "roll": 2.1, + "yaw": 3, + "pitch": 1.6 + }, + "occlusion": { + "foreheadOccluded": false, + "eyeOccluded": false, + "mouthOccluded": false + }, + "accessories": [ + { + "type": "headwear", + "confidence": 0.99 + }, + { + "type": "glasses", + "confidence": 1.0 + }, + { + "type": "mask", + "confidence": 0.87 + } + ], + "blur": { + "blurLevel": "medium", + "value": 0.51 + }, + "exposure": { + "exposureLevel": "goodExposure", + "value": 0.55 + }, + "noise": { + "noiseLevel": "low", + "value": 0.12 + }, + "qualityForRecognition": "high" + } + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromStream.json new file mode 100644 index 000000000000..42601eb7f278 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to FaceList", + "operationId": "FaceListOperations_AddFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": 
{ + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromUrl.json new file mode 100644 index 000000000000..f0da5f86413b --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to FaceList from Url", + "operationId": "FaceListOperations_AddFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromStream.json new file mode 100644 index 000000000000..d35761dda3ff --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromStream.json @@ -0,0 +1,19 @@ +{ + "title": "Add Face to LargeFaceList", + "operationId": "FaceListOperations_AddLargeFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json new file mode 100644 index 000000000000..99d7ed8a9d2c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_AddLargeFaceListFaceFromUrl.json @@ -0,0 +1,21 @@ +{ + "title": "Add Face to LargeFaceList from Url", + "operationId": "FaceListOperations_AddLargeFaceListFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateFaceList.json new file mode 100644 index 000000000000..94c1e9a6775e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create FaceList", + "operationId": "FaceListOperations_CreateFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateLargeFaceList.json new file mode 100644 index 000000000000..f402a08149a4 --- 
/dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_CreateLargeFaceList.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargeFaceList", + "operationId": "FaceListOperations_CreateLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceList.json new file mode 100644 index 000000000000..0c47e068577d --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete FaceList", + "operationId": "FaceListOperations_DeleteFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceListFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceListFace.json new file mode 100644 index 000000000000..a6fdb812eeac --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face from FaceList", + "operationId": "FaceListOperations_DeleteFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceList.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceList.json new file mode 100644 index 000000000000..7dd5267be454 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceList.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceListFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceListFace.json new file mode 100644 index 000000000000..31a2b680969b --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_DeleteLargeFaceListFace.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Face From LargeFaceList", + "operationId": "FaceListOperations_DeleteLargeFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceList.json new file mode 100644 index 000000000000..9821416e1b95 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get FaceList", + "operationId": "FaceListOperations_GetFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_face_list_name", + 
"userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceLists.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceLists.json new file mode 100644 index 000000000000..53a5d318ae64 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetFaceLists.json @@ -0,0 +1,20 @@ +{ + "title": "Get FaceLists", + "operationId": "FaceListOperations_GetFaceLists", + "parameters": { + "apiVersion": "v1.1-preview.1", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "faceListId": "your_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceList.json new file mode 100644 index 000000000000..ea56dc176324 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceList.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFace.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFace.json new file mode 100644 index 000000000000..17e867ce7136 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFace.json @@ -0,0 +1,17 @@ +{ + "title": "Get Face from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFaces.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFaces.json new file mode 100644 index 000000000000..46f8bf8a6205 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListFaces.json @@ -0,0 +1,20 @@ +{ + "title": "Get Faces from LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListFaces", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListTrainingStatus.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListTrainingStatus.json new file mode 100644 index 000000000000..cbc6d28a63ca --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceListTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargeFaceList", + "operationId": "FaceListOperations_GetLargeFaceListTrainingStatus", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceLists.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceLists.json new file mode 100644 index 000000000000..83578707c7e4 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_GetLargeFaceLists.json @@ -0,0 +1,22 @@ +{ + "title": "Get LargeFaceLists", + "operationId": "FaceListOperations_GetLargeFaceLists", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "my_list_id", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_face_list_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largeFaceListId": "your_large_face_list_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_TrainLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_TrainLargeFaceList.json new file mode 100644 index 000000000000..a3525a208a5e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_TrainLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargeFaceList", + "operationId": 
"FaceListOperations_TrainLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateFaceList.json new file mode 100644 index 000000000000..7619a581e7be --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update FaceList", + "operationId": "FaceListOperations_UpdateFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "faceListId": "your_face_list_id", + "body": { + "name": "your_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceList.json new file mode 100644 index 000000000000..d61a004eadb4 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceList.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "body": { + "name": "your_large_face_list_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceListFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceListFace.json new file mode 100644 
index 000000000000..7798cb324a97 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceListOperations_UpdateLargeFaceListFace.json @@ -0,0 +1,15 @@ +{ + "title": "Update Face in LargeFaceList", + "operationId": "FaceListOperations_UpdateLargeFaceListFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largeFaceListId": "your_large_face_list_id", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilar.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilar.json new file mode 100644 index 000000000000..3a78056208ca --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilar.json @@ -0,0 +1,26 @@ +{ + "title": "Find Similar among Face IDs", + "operationId": "FaceRecognitionOperations_FindSimilar", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceIds": [ + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7" + ] + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.9, + "faceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromFaceList.json new file mode 100644 index 000000000000..7c761424783e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar 
from FaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "faceListId": "your_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json new file mode 100644 index 000000000000..f83d2ed8e685 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_FindSimilarFromLargeFaceList.json @@ -0,0 +1,23 @@ +{ + "title": "Find Similar from LargeFaceList", + "operationId": "FaceRecognitionOperations_FindSimilarFromLargeFaceList", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "maxNumOfCandidatesReturned": 3, + "mode": "matchPerson", + "largeFaceListId": "your_large_face_list_id" + } + }, + "responses": { + "200": { + "body": [ + { + "confidence": 0.8, + "persistedFaceId": "015839fb-fbd9-4f79-ace9-7675fc2f1dd9" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_Group.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_Group.json new file mode 100644 index 000000000000..659611be7f2c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_Group.json @@ -0,0 +1,41 @@ +{ + "title": "Group Face IDs", + "operationId": "FaceRecognitionOperations_Group", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + 
"c5c24a82-6845-4031-9d5d-978df9175426", + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "65d083d4-9447-47d1-af30-b626144bf0fb", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315", + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ] + } + }, + "responses": { + "200": { + "body": { + "groups": [ + [ + "c5c24a82-6845-4031-9d5d-978df9175426", + "015839fb-fbd9-4f79-ace9-7675fc2f1dd9", + "fce92aed-d578-4d2e-8114-068f8af4492e", + "b64d5e15-8257-4af2-b20a-5a750f8940e7" + ], + [ + "65d083d4-9447-47d1-af30-b626144bf0fb", + "30ea1073-cc9e-4652-b1e3-d08fb7b95315" + ] + ], + "messyGroup": [ + "be386ab3-af91-4104-9e6d-4dae4c9fddb7", + "fbd2a038-dbff-452c-8e79-2ee81b1aa84e" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json new file mode 100644 index 000000000000..f1bcf858243d --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromDynamicPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from DynamicPersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json new file mode 100644 index 000000000000..bb9d12973c50 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromLargePersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from LargePersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "largePersonGroupId": "your_large_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json new file mode 100644 index 000000000000..68ea2773abf0 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonDirectory.json @@ -0,0 +1,32 @@ +{ + "title": "Identify from PersonDirectory", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ], + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": 
"c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json new file mode 100644 index 000000000000..aa429dd22a70 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_IdentifyFromPersonGroup.json @@ -0,0 +1,30 @@ +{ + "title": "Identify from PersonGroup", + "operationId": "FaceRecognitionOperations_IdentifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceIds": [ + "c5c24a82-6845-4031-9d5d-978df9175426" + ], + "personGroupId": "your_person_group_id", + "maxNumOfCandidatesReturned": 9, + "confidenceThreshold": 0.7 + } + }, + "responses": { + "200": { + "body": [ + { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "candidates": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "confidence": 0.8 + } + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFaceToFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFaceToFace.json new file mode 100644 index 000000000000..56f14664116a --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFaceToFace.json @@ -0,0 +1,19 @@ +{ + "title": "Verify Face to Face", + "operationId": "FaceRecognitionOperations_VerifyFaceToFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId1": "c5c24a82-6845-4031-9d5d-978df9175426", + "faceId2": "3aa87e30-b380-48eb-ad9e-1aa54fc52bd3" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + 
"confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json new file mode 100644 index 000000000000..1f179351b860 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromLargePersonGroup.json @@ -0,0 +1,20 @@ +{ + "title": "Verify from LargePersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "largePersonGroupId": "your_large_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json new file mode 100644 index 000000000000..088f9560ae71 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonDirectory.json @@ -0,0 +1,19 @@ +{ + "title": "Verify from PersonDirectory", + "operationId": "FaceRecognitionOperations_VerifyFromPersonDirectory", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonGroup.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonGroup.json new file mode 100644 index 000000000000..8ce21790ac88 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/FaceRecognitionOperations_VerifyFromPersonGroup.json @@ -0,0 +1,20 @@ +{ + "title": "Verify from PersonGroup", + "operationId": "FaceRecognitionOperations_VerifyFromPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "faceId": "c5c24a82-6845-4031-9d5d-978df9175426", + "personId": "815df99c-598f-4926-930a-a734b3fd651c", + "personGroupId": "your_person_group" + } + }, + "responses": { + "200": { + "body": { + "isIdentical": true, + "confidence": 0.8 + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/GetOperationResult.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/GetOperationResult.json new file mode 100644 index 000000000000..9393fc18a96c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/GetOperationResult.json @@ -0,0 +1,20 @@ +{ + "title": "Get Face Operation Status", + "operationId": "GetOperationResult", + "parameters": { + "apiVersion": "v1.1-preview.1", + "operationId": "1b22ff44-c7dc-43ce-93be-67d3283ba86c" + }, + "responses": { + "200": { + "body": { + "operationId": "1b22ff44-c7dc-43ce-93be-67d3283ba86c", + "status": "notStarted", + "createdTime": "2024-03-05T11:08:20.193Z", + "finishedTime": "2024-03-05T11:08:20.193Z", + "lastActionTime": "2024-03-05T11:08:20.193Z", + "message": null + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessSession.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessSession.json new file mode 100644 index 000000000000..1c111f7fcd7a --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessSession.json @@ -0,0 +1,22 @@ +{ + "title": "Create Liveness Session", + "operationId": "LivenessSessionOperations_CreateLivenessSession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "livenessOperationMode": "Passive", + "sendResultsToClient": true, + "deviceCorrelationIdSetInClient": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json new file mode 100644 index 000000000000..72158007876e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySession.json @@ -0,0 +1,22 @@ +{ + "title": "Create LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "livenessOperationMode": "Passive", + "sendResultsToClient": true, + "deviceCorrelationIdSetInClient": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json new file mode 100644 index 000000000000..f1a0cdad2bcb --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage.json @@ -0,0 +1,26 @@ +{ + "title": "Create LivenessWithVerify Session with VerifyImage", + "operationId": "LivenessSessionOperations_CreateLivenessWithVerifySessionWithVerifyImage", + "parameters": { + "apiVersion": "v1.1-preview.1", + "Parameters": "{\"livenessOperationMode\": \"Passive\", \"sendResultsToClient\": true, \"deviceCorrelationIdSetInClient\": true, \"deviceCorrelationId\": \"your_device_correlation_id\", \"authTokenTimeToLiveInSeconds\": 60}", + "VerifyImage": "" + }, + "responses": { + "200": { + "body": { + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "authToken": "eyJhbGciOiJFUzI1NiIsIm", + "verifyImage": { + "faceRectangle": { + "width": 78, + "height": 78, + "left": 394, + "top": 54 + }, + "qualityForRecognition": "high" + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessSession.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessSession.json new file mode 100644 index 000000000000..c5a6e71328cc --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessSession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete Liveness Session", + "operationId": "LivenessSessionOperations_DeleteLivenessSession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json new file mode 100644 index 000000000000..9c432670b62b --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_DeleteLivenessWithVerifySession.json @@ -0,0 +1,11 @@ +{ + "title": "Delete LivenessWithVerify Session", + "operationId": "LivenessSessionOperations_DeleteLivenessWithVerifySession", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionAuditEntries.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionAuditEntries.json new file mode 100644 index 000000000000..091839dfa977 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionAuditEntries.json @@ -0,0 +1,36 @@ +{ + "title": "Get LivenessSession Audit Entries", + "operationId": "LivenessSessionOperations_GetLivenessSessionAuditEntries", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "start": "0", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, + "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + 
"latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionResult.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionResult.json new file mode 100644 index 000000000000..ffabf3220489 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessionResult.json @@ -0,0 +1,41 @@ +{ + "title": "Get LivenessSession Result", + "operationId": "LivenessSessionOperations_GetLivenessSessionResult", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:29.698Z", + "sessionStartDateTime": "2024-03-05T11:07:29.698Z", + "sessionExpired": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 600, + "status": "NotStarted", + "result": { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, + "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + "latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + } + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessions.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessions.json new file mode 100644 index 000000000000..38ca5ad52bd6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessSessions.json @@ -0,0 +1,23 @@ +{ + "title": "Get LivenessSessions", + "operationId": "LivenessSessionOperations_GetLivenessSessions", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:28.540Z", + "sessionStartDateTime": "2024-03-05T11:07:28.540Z", + "sessionExpired": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json new file mode 100644 index 000000000000..70befacd6ece --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries.json @@ -0,0 +1,36 @@ +{ + "title": "Get LivenessWithVerify Session Audit Entries", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionAuditEntries", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "start": "0", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": 
"4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, + "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + "latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json new file mode 100644 index 000000000000..d4cdc31dc549 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessionResult.json @@ -0,0 +1,41 @@ +{ + "title": "Get LivenessWithVerify Session Result", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessionResult", + "parameters": { + "apiVersion": "v1.1-preview.1", + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e" + }, + "responses": { + "200": { + "body": { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:29.698Z", + "sessionStartDateTime": "2024-03-05T11:07:29.698Z", + "sessionExpired": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 600, + "status": "NotStarted", + "result": { + "id": 4, + "sessionId": "b12e033e-bda7-4b83-a211-e721c661f30e", + "requestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "clientRequestId": "4af681e9-0e25-43e9-9922-a7adebf13b2f", + "receivedDateTime": "2024-03-05T11:07:29.698Z", + "request": { + "url": "/face/v1.1-preview.1/detectliveness/singlemodal", + "method": "POST", + "contentLength": 18, 
+ "contentType": "multipart/form-data", + "userAgent": "Mozilla/5.0 (Linux) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/117.0.0.0 Mobile Safari/537.36" + }, + "response": { + "body": {}, + "statusCode": 200, + "latencyInMilliseconds": 1200 + }, + "digest": "1CC98BA83EAF1D0FF7F566FAEFCCCC787819FFA01251E2D9299143F7AD6651DB" + } + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessions.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessions.json new file mode 100644 index 000000000000..36a4283123d6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/LivenessSessionOperations_GetLivenessWithVerifySessions.json @@ -0,0 +1,23 @@ +{ + "title": "Get LivenessWithVerify Sessions", + "operationId": "LivenessSessionOperations_GetLivenessWithVerifySessions", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "id": "b12e033e-bda7-4b83-a211-e721c661f30e", + "createdDateTime": "2024-03-05T11:07:28.540Z", + "sessionStartDateTime": "2024-03-05T11:07:28.540Z", + "sessionExpired": true, + "deviceCorrelationId": "your_device_correlation_id", + "authTokenTimeToLiveInSeconds": 60 + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromStream.json new file mode 100644 index 000000000000..799b950ae8db --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromStream.json @@ -0,0 +1,24 @@ +{ + "title": "Add Face to a PersonDirectory Person", + "operationId": "PersonDirectoryOperations_AddPersonFace", + 
"parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + }, + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromUrl.json new file mode 100644 index 000000000000..7bb0e2ca61a2 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_AddPersonFaceFromUrl.json @@ -0,0 +1,26 @@ +{ + "title": "Add Face to PersonDirectory Person from Url", + "operationId": "PersonDirectoryOperations_AddPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + }, + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroup.json new file mode 100644 index 000000000000..75aaef75ebfe --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Create DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_CreateDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json new file mode 100644 index 000000000000..8aca52123292 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson.json @@ -0,0 +1,22 @@ +{ + "title": "Create DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_CreateDynamicPersonGroupWithPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data", + "addPersonIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ] + } + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreatePerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreatePerson.json new file mode 100644 index 000000000000..5a24293e2e57 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_CreatePerson.json @@ -0,0 +1,22 @@ +{ + "title": "Create Person in PersonDirectory", + 
"operationId": "PersonDirectoryOperations_CreatePerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "body": { + "name": "your_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + }, + "body": { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeleteDynamicPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeleteDynamicPersonGroup.json new file mode 100644 index 000000000000..ce885996a49c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeleteDynamicPersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Delete DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_DeleteDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id" + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePerson.json new file mode 100644 index 000000000000..ec878a74a629 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePerson.json @@ -0,0 +1,16 @@ +{ + "title": "Delete Person", + "operationId": "PersonDirectoryOperations_DeletePerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + }, + "responses": { + "202": { + "headers": { + "location": 
"https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePersonFace.json new file mode 100644 index 000000000000..b5888b351645 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_DeletePersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Delete Face from PersonDirectory Person", + "operationId": "PersonDirectoryOperations_DeletePersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "202": { + "headers": { + "location": "https://contoso.com/operationstatus", + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroup.json new file mode 100644 index 000000000000..d9c77ab11b0c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroup.json @@ -0,0 +1,17 @@ +{ + "title": "Get DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id" + }, + "responses": { + "200": { + "body": { + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json new file mode 100644 index 000000000000..ad56754e3966 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupPersons.json @@ -0,0 +1,20 @@ +{ + "title": "Get DynamicPersonGroup Persons", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroupPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": { + "personIds": [ + "1d44651f-fadb-41f5-8918-c30609964489", + "c1d3b745-2548-4abf-b057-a386c9bd52f1" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json new file mode 100644 index 000000000000..9713e78774dd --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroupReferences.json @@ -0,0 +1,19 @@ +{ + "title": "Get DynamicPersonGroup References", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroupReferences", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": { + "dynamicPersonGroupIds": [ + "your_dynamic_person_group_id" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroups.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroups.json new file mode 100644 index 000000000000..193635dd48c6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetDynamicPersonGroups.json @@ -0,0 +1,20 @@ +{ + "title": "Get DynamicPersonGroups", + "operationId": "PersonDirectoryOperations_GetDynamicPersonGroups", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "dynamic_person_group_id", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPerson.json new file mode 100644 index 000000000000..a44a64dade8e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPerson.json @@ -0,0 +1,17 @@ +{ + "title": "Get Person from PersonDirectory", + "operationId": "PersonDirectoryOperations_GetPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + }, + "responses": { + "200": { + "body": { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "name": "your_person_name", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFace.json new file mode 100644 index 000000000000..c9936d951197 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFace.json @@ -0,0 +1,18 @@ +{ + 
"title": "Get Face from PersonDirectory Person", + "operationId": "PersonDirectoryOperations_GetPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFaces.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFaces.json new file mode 100644 index 000000000000..be30b43b5dc5 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersonFaces.json @@ -0,0 +1,19 @@ +{ + "title": "Get Faces from PersonDirectory Person", + "operationId": "PersonDirectoryOperations_GetPersonFaces", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01" + }, + "responses": { + "200": { + "body": { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersons.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersons.json new file mode 100644 index 000000000000..3375158f280c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_GetPersons.json @@ -0,0 +1,20 @@ +{ + "title": "Get Persons from PersonDirectory", + "operationId": "PersonDirectoryOperations_GetPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 
+ }, + "responses": { + "200": { + "body": [ + { + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "name": "your_person_name", + "userData": "your_user_data" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroup.json new file mode 100644 index 000000000000..1cbe9e6f2fa4 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_UpdateDynamicPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json new file mode 100644 index 000000000000..d329ed1854d0 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges.json @@ -0,0 +1,25 @@ +{ + "title": "Update DynamicPersonGroup", + "operationId": "PersonDirectoryOperations_UpdateDynamicPersonGroupWithPersonChanges", + "parameters": { + "apiVersion": "v1.1-preview.1", + "dynamicPersonGroupId": "your_dynamic_person_group_id", + "body": { + "name": "your_dynamic_person_group_name", + "userData": "your_user_data", + "addPersonIds": [ + "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5" + ], + "removePersonIds": [ + "67f7e96d-823a-4318-9bf6-e9a2a2608899" + ] + } + }, 
+ "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePerson.json new file mode 100644 index 000000000000..c9b5dd6f09a6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePerson.json @@ -0,0 +1,15 @@ +{ + "title": "Update Person in PersonDirectory", + "operationId": "PersonDirectoryOperations_UpdatePerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "body": { + "name": "your_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePersonFace.json new file mode 100644 index 000000000000..4820362ff86d --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonDirectoryOperations_UpdatePersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face of PersonDirectory Person", + "operationId": "PersonDirectoryOperations_UpdatePersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personId": "85c0c630-c9c9-40f8-8a4e-f9ae4f926ea5", + "recognitionModel": "recognition_01", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..141ddfa3f570 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face in LargePersonGroup Person", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..d508fe448032 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face in LargePersonGroup Person from Url", + "operationId": "PersonGroupOperations_AddLargePersonGroupPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json new file mode 100644 index 000000000000..a5284dd3dd85 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromStream.json @@ -0,0 +1,20 @@ +{ + "title": "Add Face to PersonGroup Person", + "operationId": "PersonGroupOperations_AddPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "imageContent": "" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json new file mode 100644 index 000000000000..2ffd39e216cf --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_AddPersonGroupPersonFaceFromUrl.json @@ -0,0 +1,22 @@ +{ + "title": "Add Face to PersonGroupPerson from Url", + "operationId": "PersonGroupOperations_AddPersonGroupPersonFaceFromUrl", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "targetFace": "10,10,100,100", + "detectionModel": "detection_01", + "userData": "your_user_data", + "body": { + "url": "https://microsoft.com/example.jpg" + } + }, + "responses": { + "200": { + "body": { + "persistedFaceId": 
"43897a75-8d6f-42cf-885e-74832febb055" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroup.json new file mode 100644 index 000000000000..331f4fea89e2 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroupPerson.json new file mode 100644 index 000000000000..b20a03adf246 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreateLargePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in LargePersonGroup", + "operationId": "PersonGroupOperations_CreateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroup.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroup.json new file mode 100644 index 000000000000..326e8074f4d5 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroup.json @@ -0,0 +1,16 @@ +{ + "title": "Create PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroupPerson.json new file mode 100644 index 000000000000..e9002c1d37a1 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_CreatePersonGroupPerson.json @@ -0,0 +1,19 @@ +{ + "title": "Create Person in PersonGroup", + "operationId": "PersonGroupOperations_CreatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroup.json new file mode 100644 index 000000000000..2eb1187dc33c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": 
"Delete LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json new file mode 100644 index 000000000000..84b5b5abe4ef --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from LargePersonGroup", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..2a8d88a8506d --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeleteLargePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from LargePersonGroup Person", + "operationId": "PersonGroupOperations_DeleteLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroup.json new file mode 100644 index 000000000000..b5848fb96e01 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroup.json @@ -0,0 +1,11 @@ +{ + "title": "Delete PersonGroup", + "operationId": "PersonGroupOperations_DeletePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPerson.json new file mode 100644 index 000000000000..380bec45e4b9 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPerson.json @@ -0,0 +1,12 @@ +{ + "title": "Delete Person from PersonGroup", + "operationId": "PersonGroupOperations_DeletePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPersonFace.json new file mode 100644 index 000000000000..1a1bcd0103d6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_DeletePersonGroupPersonFace.json @@ -0,0 +1,13 @@ +{ + "title": "Delete Face from PersonGroup Person", + "operationId": 
"PersonGroupOperations_DeletePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroup.json new file mode 100644 index 000000000000..7c292178d50c --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPerson.json new file mode 100644 index 000000000000..5f17d932ce46 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + 
"personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..ec0074928330 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from LargePersonGroup Person", + "operationId": "PersonGroupOperations_GetLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersons.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersons.json new file mode 100644 index 000000000000..6301d513e1e5 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + 
"personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_large_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json new file mode 100644 index 000000000000..37bfa8a54448 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of LargePersonGroup", + "operationId": "PersonGroupOperations_GetLargePersonGroupTrainingStatus", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroups.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroups.json new file mode 100644 index 000000000000..41c1030f6917 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetLargePersonGroups.json @@ -0,0 +1,22 @@ +{ + "title": "Get LargePersonGroups", + "operationId": "PersonGroupOperations_GetLargePersonGroups", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_large_person_group_name", 
+ "userData": "your_user_data", + "recognitionModel": "recognition_01", + "largePersonGroupId": "your_large_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroup.json new file mode 100644 index 000000000000..d3dffc82041f --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroup.json @@ -0,0 +1,19 @@ +{ + "title": "Get PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": "your_person_group_id" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPerson.json new file mode 100644 index 000000000000..19e331d9a9d4 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPerson.json @@ -0,0 +1,21 @@ +{ + "title": "Get Person from PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1" + }, + "responses": { + "200": { + "body": { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersonFace.json new file mode 100644 index 000000000000..3159157cb95f --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersonFace.json @@ -0,0 +1,18 @@ +{ + "title": "Get Face from PersonGroup Person", + "operationId": "PersonGroupOperations_GetPersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055" + }, + "responses": { + "200": { + "body": { + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "userData": "your_user_data" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersons.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersons.json new file mode 100644 index 000000000000..9105ebc4274d --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupPersons.json @@ -0,0 +1,24 @@ +{ + "title": "Get Persons from PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupPersons", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20 + }, + "responses": { + "200": { + "body": [ + { + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "name": "your_person_group_person_name", + "userData": "your_user_data", + "persistedFaceIds": [ + "43897a75-8d6f-42cf-885e-74832febb055" + ] + } + ] + } + } +} diff --git 
a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json new file mode 100644 index 000000000000..747ce9df7199 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroupTrainingStatus.json @@ -0,0 +1,19 @@ +{ + "title": "Get Training Status of PersonGroup", + "operationId": "PersonGroupOperations_GetPersonGroupTrainingStatus", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "200": { + "body": { + "status": "notStarted", + "createdDateTime": "2024-03-05T11:07:58.371Z", + "lastActionDateTime": "2024-03-05T11:07:58.371Z", + "lastSuccessfulTrainingDateTime": "2024-03-05T11:07:58.371Z", + "message": null + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroups.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroups.json new file mode 100644 index 000000000000..81b6bee61596 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_GetPersonGroups.json @@ -0,0 +1,22 @@ +{ + "title": "Get PersonGroups", + "operationId": "PersonGroupOperations_GetPersonGroups", + "parameters": { + "apiVersion": "v1.1-preview.1", + "start": "00000000-0000-0000-0000-000000000000", + "top": 20, + "returnRecognitionModel": true + }, + "responses": { + "200": { + "body": [ + { + "name": "your_person_group_name", + "userData": "your_user_data", + "recognitionModel": "recognition_01", + "personGroupId": "your_person_group_id" + } + ] + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainLargePersonGroup.json 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainLargePersonGroup.json new file mode 100644 index 000000000000..99a4c4ba537a --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train LargePersonGroup", + "operationId": "PersonGroupOperations_TrainLargePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainPersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainPersonGroup.json new file mode 100644 index 000000000000..014adaab4dd6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_TrainPersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Train PersonGroup", + "operationId": "PersonGroupOperations_TrainPersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id" + }, + "responses": { + "202": { + "headers": { + "operation-Location": "https://contoso.com/operationstatus" + } + } + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroup.json new file mode 100644 index 000000000000..a0811844aaf3 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroup", + "parameters": { + "apiVersion": 
"v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "body": { + "name": "your_large_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json new file mode 100644 index 000000000000..65b8960f057e --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update Person in LargePersonGroup", + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_large_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json new file mode 100644 index 000000000000..7c615968cc63 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdateLargePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face in LargePersonGroup Person", + "operationId": "PersonGroupOperations_UpdateLargePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "largePersonGroupId": "your_large_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + 
"200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroup.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroup.json new file mode 100644 index 000000000000..05451bf387f7 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroup.json @@ -0,0 +1,15 @@ +{ + "title": "Update PersonGroup", + "operationId": "PersonGroupOperations_UpdatePersonGroup", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "body": { + "name": "your_person_group_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPerson.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPerson.json new file mode 100644 index 000000000000..4a81657da4a6 --- /dev/null +++ b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPerson.json @@ -0,0 +1,16 @@ +{ + "title": "Update Person in PersonGroup", + "operationId": "PersonGroupOperations_UpdatePersonGroupPerson", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "body": { + "name": "your_person_group_person_name", + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json new file mode 100644 index 000000000000..9713f646dc88 --- /dev/null +++ 
b/specification/ai/data-plane/Face/preview/v1.1-preview.1/examples/PersonGroupOperations_UpdatePersonGroupPersonFace.json @@ -0,0 +1,16 @@ +{ + "title": "Update Face in PersonGroup Person", + "operationId": "PersonGroupOperations_UpdatePersonGroupPersonFace", + "parameters": { + "apiVersion": "v1.1-preview.1", + "personGroupId": "your_person_group_id", + "personId": "25985303-c537-4467-b41d-bdb45cd95ca1", + "persistedFaceId": "43897a75-8d6f-42cf-885e-74832febb055", + "body": { + "userData": "your_user_data" + } + }, + "responses": { + "200": {} + } +} diff --git a/specification/ai/data-plane/Face/readme.md b/specification/ai/data-plane/Face/readme.md index 1d7ef37c8b09..1120d5b2634d 100644 --- a/specification/ai/data-plane/Face/readme.md +++ b/specification/ai/data-plane/Face/readme.md @@ -1,17 +1,18 @@ -# Cognitive Services Face SDK +# Azure AI Face SDK > see https://aka.ms/autorest Configuration for generating Face SDK. -The current release is `v1.0`. +The current release is `v1.1-preview.1`. ``` yaml -tag: v1.0 +tag: v1.1-preview.1 add-credentials: true openapi-type: data-plane ``` + # Releases ### Release v1.0 @@ -33,3 +34,17 @@ suppressions: from: Face.json reason: Legacy swagger file ``` + +### Release v1.1-preview.1 +These settings apply only when `--tag=v1.1-preview.1` is specified on the command line. +``` yaml $(tag) == 'v1.1-preview.1' +input-file: + - preview/v1.1-preview.1/Face.json +suppressions: + - code: LroExtension + from: Face.json + reason: Our LRO behavior does not fit the default generated poller + - code: AvoidAnonymousParameter + from: Face.json + reason: Use anonymous parameter to provide interface with flatten parameters +```