diff --git a/sdk/face/Azure.AI.Vision.Face/CHANGELOG.md b/sdk/face/Azure.AI.Vision.Face/CHANGELOG.md
index d6f4e1c0204c..a87086134479 100644
--- a/sdk/face/Azure.AI.Vision.Face/CHANGELOG.md
+++ b/sdk/face/Azure.AI.Vision.Face/CHANGELOG.md
@@ -1,15 +1,28 @@
 # Release History
 
-## 1.0.0-beta.2 (Unreleased)
+## 1.0.0-beta.2 (2024-10-23)
 
-### Features Added
+- Added support for the Large Face List and Large Person Group:
+  - Added clients `LargeFaceListClient` and `LargePersonGroupClient`.
+  - Added operations `FindSimilarFromLargeFaceList`, `IdentifyFromLargePersonGroup`, and `VerifyFromLargePersonGroup` to `FaceClient`.
+  - Added models to support Large Face List and Large Person Group.
+- Added support for the latest Detect Liveness Session API:
+  - Added operations `GetSessionImage` and `DetectFromSessionImage` to `FaceSessionClient`.
+  - Added properties `EnableSessionImage` and `LivenessSingleModalModel` to model `CreateLivenessSessionContent`.
+  - Added model `CreateLivenessWithVerifySessionContent`.
 
 ### Breaking Changes
 
+- Changed the parameter of `CreateLivenessWithVerifySession` from model `CreateLivenessSessionContent` to `CreateLivenessWithVerifySessionContent`.
+
 ### Bugs Fixed
 
+- Removed `Mask` from `FaceAttributeType.Detection01`, which is not supported.
+
 ### Other Changes
 
+- Changed the default service API version to `v1.2-preview.1`.
+
 ## 1.0.0-beta.1 (2024-05-27)
 
 This is the first preview Azure AI Face client library that follows the [.NET Azure SDK Design Guidelines](https://azure.github.io/azure-sdk/dotnet_introduction.html).
diff --git a/sdk/face/Azure.AI.Vision.Face/README.md b/sdk/face/Azure.AI.Vision.Face/README.md
index 9fc534e3991f..aa672c377782 100644
--- a/sdk/face/Azure.AI.Vision.Face/README.md
+++ b/sdk/face/Azure.AI.Vision.Face/README.md
@@ -6,6 +6,7 @@ The Azure AI Face service provides AI algorithms that detect, recognize, and ana
 - Liveness detection
 - Face recognition
   - Face verification ("one-to-one" matching)
+  - Face identification ("one-to-many" matching)
 - Find similar faces
 - Group faces
 
@@ -97,6 +98,41 @@ AzureKeyCredential credential = new AzureKeyCredential("");
 var client = new FaceClient(endpoint, credential);
 ```
 
+### Service API versions
+
+The client library targets the latest service API version by default. A client instance accepts an optional service API version parameter in its options, which specifies the API version it uses to communicate with the service.
+
+#### Select a service API version
+
+You can explicitly select a supported service API version when instantiating a client by configuring its associated options. This ensures that the client communicates with the service using the specified API version.
+
+For example,
+
+```C# Snippet:CreateFaceClientWithVersion
+Uri endpoint = new Uri("");
+DefaultAzureCredential credential = new DefaultAzureCredential();
+AzureAIVisionFaceClientOptions options = new AzureAIVisionFaceClientOptions(AzureAIVisionFaceClientOptions.ServiceVersion.V1_2_Preview_1);
+FaceClient client = new FaceClient(endpoint, credential, options);
+```
+
+When selecting an API version, verify that there are no breaking changes relative to the latest API version; if there are significant differences, API calls may fail due to incompatibility.
+
+Always ensure that the chosen API version is fully supported and operational for your specific use case and that it aligns with the service's versioning policy.
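+
+The same `AzureAIVisionFaceClientOptions` instance can be shared across the clients in this package, so they all target the pinned API version. Below is a minimal sketch; the endpoint and key placeholders are assumptions to replace with your own resource values:
+
+```C#
+// Hypothetical placeholders: substitute your Face resource endpoint and key.
+Uri endpoint = new Uri("<your-face-endpoint>");
+AzureKeyCredential credential = new AzureKeyCredential("<your-api-key>");
+AzureAIVisionFaceClientOptions options = new AzureAIVisionFaceClientOptions(AzureAIVisionFaceClientOptions.ServiceVersion.V1_2_Preview_1);
+
+// Each client accepts the same options type, so all three call the service with v1.2-preview.1.
+var faceClient = new FaceClient(endpoint, credential, options);
+var sessionClient = new FaceSessionClient(endpoint, credential, options);
+var adminClient = new FaceAdministrationClient(endpoint, credential, options);
+```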
+
+
 ## Key concepts
 
 ### FaceClient
@@ -104,11 +127,18 @@ var client = new FaceClient(endpoint, credential);
 `FaceClient` provides operations for:
 
 - Face detection and analysis: Detect human faces in an image and return the rectangle coordinates of their locations, optionally along with landmarks and face-related attributes. This operation is required as a first step in all the other face recognition scenarios.
-- Face recognition: Confirm that a user is who they claim to be based on how closely their face data matches the target face.
-  - Support Face verification ("one-to-one" matching).
+- Face recognition: Confirm that a user is who they claim to be based on how closely their face data matches the target face. It includes Face verification ("one-to-one" matching) and Face identification ("one-to-many" matching).
 - Finding similar faces from a smaller set of faces that look similar to the target face.
 - Grouping faces into several smaller groups based on similarity.
 
+### FaceAdministrationClient
+
+`FaceAdministrationClient` is provided to interact with the following data structures that hold data on faces and
+persons for Face recognition:
+
+- LargeFaceList
+- LargePersonGroup
+
 ### FaceSessionClient
 
 `FaceSessionClient` is provided to interact with sessions, which are used for Liveness detection.
@@ -163,7 +193,7 @@ foreach (var detectedFace in detectedFaces)
 {
     Console.WriteLine($"Face Rectangle: left={detectedFace.FaceRectangle.Left}, top={detectedFace.FaceRectangle.Top}, width={detectedFace.FaceRectangle.Width}, height={detectedFace.FaceRectangle.Height}");
     Console.WriteLine($"Head pose: pitch={detectedFace.FaceAttributes.HeadPose.Pitch}, roll={detectedFace.FaceAttributes.HeadPose.Roll}, yaw={detectedFace.FaceAttributes.HeadPose.Yaw}");
-    Console.WriteLine($"Mask: {detectedFace.FaceAttributes.Mask}");
+    Console.WriteLine($"Mask: NoseAndMouthCovered={detectedFace.FaceAttributes.Mask.NoseAndMouthCovered}, Type={detectedFace.FaceAttributes.Mask.Type}");
     Console.WriteLine($"Quality: {detectedFace.FaceAttributes.QualityForRecognition}");
     Console.WriteLine($"Recognition model: {detectedFace.RecognitionModel}");
     Console.WriteLine($"Landmarks: ");
diff --git a/sdk/face/Azure.AI.Vision.Face/api/Azure.AI.Vision.Face.netstandard2.0.cs b/sdk/face/Azure.AI.Vision.Face/api/Azure.AI.Vision.Face.netstandard2.0.cs
index b437cafe7b63..b929533c8f99 100644
--- a/sdk/face/Azure.AI.Vision.Face/api/Azure.AI.Vision.Face.netstandard2.0.cs
+++ b/sdk/face/Azure.AI.Vision.Face/api/Azure.AI.Vision.Face.netstandard2.0.cs
@@ -30,32 +30,53 @@ internal AccessoryItem() { }
         public static bool operator !=(Azure.AI.Vision.Face.AccessoryType left, Azure.AI.Vision.Face.AccessoryType right) { throw null; }
         public override string ToString() { throw null; }
     }
+    public partial class AddFaceResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        internal AddFaceResult() { }
+        public System.Guid PersistedFaceId { get { throw null; } }
+        Azure.AI.Vision.Face.AddFaceResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.AddFaceResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
     public static partial class AIVisionFaceModelFactory
     {
         public static Azure.AI.Vision.Face.AccessoryItem AccessoryItem(Azure.AI.Vision.Face.AccessoryType type = default(Azure.AI.Vision.Face.AccessoryType), float confidence = 0f) { throw null; }
+        public static Azure.AI.Vision.Face.AddFaceResult AddFaceResult(System.Guid persistedFaceId = default(System.Guid)) { throw null; }
         public static Azure.AI.Vision.Face.AuditLivenessResponseInfo AuditLivenessResponseInfo(Azure.AI.Vision.Face.LivenessResponseBody body = null, int statusCode = 0, long latencyInMilliseconds = (long)0) { throw null; }
         public static Azure.AI.Vision.Face.AuditRequestInfo AuditRequestInfo(string url = null, string method = null, long? contentLength = default(long?), string contentType = null, string userAgent = null) { throw null; }
         public static Azure.AI.Vision.Face.BlurProperties BlurProperties(Azure.AI.Vision.Face.BlurLevel blurLevel = default(Azure.AI.Vision.Face.BlurLevel), float value = 0f) { throw null; }
-        public static Azure.AI.Vision.Face.CreateLivenessSessionContent CreateLivenessSessionContent(Azure.AI.Vision.Face.LivenessOperationMode livenessOperationMode = default(Azure.AI.Vision.Face.LivenessOperationMode), bool? sendResultsToClient = default(bool?), bool? deviceCorrelationIdSetInClient = default(bool?), string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = default(int?)) { throw null; }
+        public static Azure.AI.Vision.Face.CreateLivenessSessionContent CreateLivenessSessionContent(Azure.AI.Vision.Face.LivenessOperationMode livenessOperationMode = default(Azure.AI.Vision.Face.LivenessOperationMode), bool? sendResultsToClient = default(bool?), bool? deviceCorrelationIdSetInClient = default(bool?), bool? enableSessionImage = default(bool?), Azure.AI.Vision.Face.LivenessModel? livenessSingleModalModel = default(Azure.AI.Vision.Face.LivenessModel?), string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = default(int?)) { throw null; }
         public static Azure.AI.Vision.Face.CreateLivenessSessionResult CreateLivenessSessionResult(string sessionId = null, string authToken = null) { throw null; }
+        public static Azure.AI.Vision.Face.CreateLivenessWithVerifySessionContent CreateLivenessWithVerifySessionContent(Azure.AI.Vision.Face.LivenessOperationMode livenessOperationMode = default(Azure.AI.Vision.Face.LivenessOperationMode), bool? sendResultsToClient = default(bool?), bool? deviceCorrelationIdSetInClient = default(bool?), bool? enableSessionImage = default(bool?), Azure.AI.Vision.Face.LivenessModel? livenessSingleModalModel = default(Azure.AI.Vision.Face.LivenessModel?), string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = default(int?), bool? returnVerifyImageHash = default(bool?), float? verifyConfidenceThreshold = default(float?)) { throw null; }
         public static Azure.AI.Vision.Face.CreateLivenessWithVerifySessionResult CreateLivenessWithVerifySessionResult(string sessionId = null, string authToken = null, Azure.AI.Vision.Face.LivenessWithVerifyImage verifyImage = null) { throw null; }
+        public static Azure.AI.Vision.Face.CreatePersonResult CreatePersonResult(System.Guid personId = default(System.Guid)) { throw null; }
         public static Azure.AI.Vision.Face.ExposureProperties ExposureProperties(Azure.AI.Vision.Face.ExposureLevel exposureLevel = default(Azure.AI.Vision.Face.ExposureLevel), float value = 0f) { throw null; }
         public static Azure.AI.Vision.Face.FaceAttributes FaceAttributes(float? age = default(float?), float? smile = default(float?), Azure.AI.Vision.Face.FacialHair facialHair = null, Azure.AI.Vision.Face.GlassesType? glasses = default(Azure.AI.Vision.Face.GlassesType?), Azure.AI.Vision.Face.HeadPose headPose = null, Azure.AI.Vision.Face.HairProperties hair = null, Azure.AI.Vision.Face.OcclusionProperties occlusion = null, System.Collections.Generic.IEnumerable accessories = null, Azure.AI.Vision.Face.BlurProperties blur = null, Azure.AI.Vision.Face.ExposureProperties exposure = null, Azure.AI.Vision.Face.NoiseProperties noise = null, Azure.AI.Vision.Face.MaskProperties mask = null, Azure.AI.Vision.Face.QualityForRecognition? qualityForRecognition = default(Azure.AI.Vision.Face.QualityForRecognition?)) { throw null; }
         public static Azure.AI.Vision.Face.FaceDetectionResult FaceDetectionResult(System.Guid? faceId = default(System.Guid?), Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), Azure.AI.Vision.Face.FaceRectangle faceRectangle = null, Azure.AI.Vision.Face.FaceLandmarks faceLandmarks = null, Azure.AI.Vision.Face.FaceAttributes faceAttributes = null) { throw null; }
         public static Azure.AI.Vision.Face.FaceFindSimilarResult FaceFindSimilarResult(float confidence = 0f, System.Guid? faceId = default(System.Guid?), System.Guid? persistedFaceId = default(System.Guid?)) { throw null; }
         public static Azure.AI.Vision.Face.FaceGroupingResult FaceGroupingResult(System.Collections.Generic.IEnumerable> groups = null, System.Collections.Generic.IEnumerable messyGroup = null) { throw null; }
+        public static Azure.AI.Vision.Face.FaceIdentificationCandidate FaceIdentificationCandidate(System.Guid personId = default(System.Guid), float confidence = 0f) { throw null; }
+        public static Azure.AI.Vision.Face.FaceIdentificationResult FaceIdentificationResult(System.Guid faceId = default(System.Guid), System.Collections.Generic.IEnumerable candidates = null) { throw null; }
         public static Azure.AI.Vision.Face.FaceLandmarks FaceLandmarks(Azure.AI.Vision.Face.LandmarkCoordinate pupilLeft = null, Azure.AI.Vision.Face.LandmarkCoordinate pupilRight = null, Azure.AI.Vision.Face.LandmarkCoordinate noseTip = null, Azure.AI.Vision.Face.LandmarkCoordinate mouthLeft = null, Azure.AI.Vision.Face.LandmarkCoordinate mouthRight = null, Azure.AI.Vision.Face.LandmarkCoordinate eyebrowLeftOuter = null, Azure.AI.Vision.Face.LandmarkCoordinate eyebrowLeftInner = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeLeftOuter = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeLeftTop = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeLeftBottom = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeLeftInner = null, Azure.AI.Vision.Face.LandmarkCoordinate eyebrowRightInner = null, Azure.AI.Vision.Face.LandmarkCoordinate eyebrowRightOuter = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeRightInner = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeRightTop = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeRightBottom = null, Azure.AI.Vision.Face.LandmarkCoordinate eyeRightOuter = null, Azure.AI.Vision.Face.LandmarkCoordinate noseRootLeft = null, Azure.AI.Vision.Face.LandmarkCoordinate noseRootRight = null, Azure.AI.Vision.Face.LandmarkCoordinate noseLeftAlarTop = null, Azure.AI.Vision.Face.LandmarkCoordinate noseRightAlarTop = null, Azure.AI.Vision.Face.LandmarkCoordinate noseLeftAlarOutTip = null, Azure.AI.Vision.Face.LandmarkCoordinate noseRightAlarOutTip = null, Azure.AI.Vision.Face.LandmarkCoordinate upperLipTop = null, Azure.AI.Vision.Face.LandmarkCoordinate upperLipBottom = null, Azure.AI.Vision.Face.LandmarkCoordinate underLipTop = null, Azure.AI.Vision.Face.LandmarkCoordinate underLipBottom = null) { throw null; }
         public static Azure.AI.Vision.Face.FaceRectangle FaceRectangle(int top = 0, int left = 0, int width = 0, int height = 0) { throw null; }
+        public static Azure.AI.Vision.Face.FaceTrainingResult FaceTrainingResult(Azure.AI.Vision.Face.FaceOperationStatus status = default(Azure.AI.Vision.Face.FaceOperationStatus), System.DateTimeOffset createdDateTime = default(System.DateTimeOffset), System.DateTimeOffset lastActionDateTime = default(System.DateTimeOffset), System.DateTimeOffset lastSuccessfulTrainingDateTime = default(System.DateTimeOffset), string message = null) { throw null; }
         public static Azure.AI.Vision.Face.FaceVerificationResult FaceVerificationResult(bool isIdentical = false, float confidence = 0f) { throw null; }
         public static Azure.AI.Vision.Face.FacialHair FacialHair(float moustache = 0f, float beard = 0f, float sideburns = 0f) { throw null; }
         public static Azure.AI.Vision.Face.HairColor HairColor(Azure.AI.Vision.Face.HairColorType color = default(Azure.AI.Vision.Face.HairColorType), float confidence = 0f) { throw null; }
         public static Azure.AI.Vision.Face.HairProperties HairProperties(float bald = 0f, bool invisible = false, System.Collections.Generic.IEnumerable hairColor = null) { throw null; }
         public static Azure.AI.Vision.Face.HeadPose HeadPose(float pitch = 0f, float roll = 0f, float yaw = 0f) { throw null; }
         public static Azure.AI.Vision.Face.LandmarkCoordinate LandmarkCoordinate(float x = 0f, float y = 0f) { throw null; }
+        public static Azure.AI.Vision.Face.LargeFaceList LargeFaceList(string name = null, string userData = null, Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), string largeFaceListId = null) { throw null; }
+        public static Azure.AI.Vision.Face.LargeFaceListFace LargeFaceListFace(System.Guid persistedFaceId = default(System.Guid), string userData = null) { throw null; }
+        public static Azure.AI.Vision.Face.LargePersonGroup LargePersonGroup(string name = null, string userData = null, Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), string largePersonGroupId = null) { throw null; }
+        public static Azure.AI.Vision.Face.LargePersonGroupPerson LargePersonGroupPerson(System.Guid personId = default(System.Guid), string name = null, string userData = null, System.Collections.Generic.IEnumerable persistedFaceIds = null) { throw null; }
+        public static Azure.AI.Vision.Face.LargePersonGroupPersonFace LargePersonGroupPersonFace(System.Guid persistedFaceId = default(System.Guid), string userData = null) { throw null; }
         public static Azure.AI.Vision.Face.LivenessOutputsTarget LivenessOutputsTarget(Azure.AI.Vision.Face.FaceRectangle faceRectangle = null, string fileName = null, int timeOffsetWithinFile = 0, Azure.AI.Vision.Face.FaceImageType imageType = default(Azure.AI.Vision.Face.FaceImageType)) { throw null; }
         public static Azure.AI.Vision.Face.LivenessResponseBody LivenessResponseBody(Azure.AI.Vision.Face.FaceLivenessDecision? livenessDecision = default(Azure.AI.Vision.Face.FaceLivenessDecision?), Azure.AI.Vision.Face.LivenessOutputsTarget target = null, Azure.AI.Vision.Face.LivenessModel? modelVersionUsed = default(Azure.AI.Vision.Face.LivenessModel?), Azure.AI.Vision.Face.LivenessWithVerifyOutputs verifyResult = null, System.Collections.Generic.IReadOnlyDictionary additionalProperties = null) { throw null; }
         public static Azure.AI.Vision.Face.LivenessSession LivenessSession(string id = null, System.DateTimeOffset createdDateTime = default(System.DateTimeOffset), System.DateTimeOffset? sessionStartDateTime = default(System.DateTimeOffset?), bool sessionExpired = false, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = default(int?), Azure.AI.Vision.Face.FaceSessionStatus status = default(Azure.AI.Vision.Face.FaceSessionStatus), Azure.AI.Vision.Face.LivenessSessionAuditEntry result = null) { throw null; }
-        public static Azure.AI.Vision.Face.LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = (long)0, string sessionId = null, string requestId = null, string clientRequestId = null, System.DateTimeOffset receivedDateTime = default(System.DateTimeOffset), Azure.AI.Vision.Face.AuditRequestInfo request = null, Azure.AI.Vision.Face.AuditLivenessResponseInfo response = null, string digest = null) { throw null; }
+        public static Azure.AI.Vision.Face.LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = (long)0, string sessionId = null, string requestId = null, string clientRequestId = null, System.DateTimeOffset receivedDateTime = default(System.DateTimeOffset), Azure.AI.Vision.Face.AuditRequestInfo request = null, Azure.AI.Vision.Face.AuditLivenessResponseInfo response = null, string digest = null, string sessionImageId = null, string verifyImageHash = null) { throw null; }
         public static Azure.AI.Vision.Face.LivenessSessionItem LivenessSessionItem(string id = null, System.DateTimeOffset createdDateTime = default(System.DateTimeOffset), System.DateTimeOffset? sessionStartDateTime = default(System.DateTimeOffset?), bool sessionExpired = false, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = default(int?)) { throw null; }
         public static Azure.AI.Vision.Face.LivenessWithVerifyImage LivenessWithVerifyImage(Azure.AI.Vision.Face.FaceRectangle faceRectangle = null, Azure.AI.Vision.Face.QualityForRecognition qualityForRecognition = default(Azure.AI.Vision.Face.QualityForRecognition)) { throw null; }
         public static Azure.AI.Vision.Face.LivenessWithVerifyOutputs LivenessWithVerifyOutputs(Azure.AI.Vision.Face.LivenessWithVerifyImage verifyImage = null, float matchConfidence = 0f, bool isIdentical = false) { throw null; }
@@ -92,10 +113,11 @@ internal AuditRequestInfo() { }
     }
     public partial class AzureAIVisionFaceClientOptions : Azure.Core.ClientOptions
     {
-        public AzureAIVisionFaceClientOptions(Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions.ServiceVersion version = Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions.ServiceVersion.V1_1_Preview_1) { }
+        public AzureAIVisionFaceClientOptions(Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions.ServiceVersion version = Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions.ServiceVersion.V1_2_Preview_1) { }
         public enum ServiceVersion
         {
             V1_1_Preview_1 = 1,
+            V1_2_Preview_1 = 2,
         }
     }
     [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
@@ -134,7 +156,9 @@ public CreateLivenessSessionContent(Azure.AI.Vision.Face.LivenessOperationMode l
         public int? AuthTokenTimeToLiveInSeconds { get { throw null; } set { } }
         public string DeviceCorrelationId { get { throw null; } set { } }
         public bool? DeviceCorrelationIdSetInClient { get { throw null; } set { } }
+        public bool? EnableSessionImage { get { throw null; } set { } }
         public Azure.AI.Vision.Face.LivenessOperationMode LivenessOperationMode { get { throw null; } }
+        public Azure.AI.Vision.Face.LivenessModel? LivenessSingleModalModel { get { throw null; } set { } }
         public bool? SendResultsToClient { get { throw null; } set { } }
         Azure.AI.Vision.Face.CreateLivenessSessionContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
         void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
@@ -153,6 +177,24 @@ internal CreateLivenessSessionResult() { }
         string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
         System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
     }
+    public partial class CreateLivenessWithVerifySessionContent : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        public CreateLivenessWithVerifySessionContent(Azure.AI.Vision.Face.LivenessOperationMode livenessOperationMode) { }
+        public int? AuthTokenTimeToLiveInSeconds { get { throw null; } set { } }
+        public string DeviceCorrelationId { get { throw null; } set { } }
+        public bool? DeviceCorrelationIdSetInClient { get { throw null; } set { } }
+        public bool? EnableSessionImage { get { throw null; } set { } }
+        public Azure.AI.Vision.Face.LivenessOperationMode LivenessOperationMode { get { throw null; } }
+        public Azure.AI.Vision.Face.LivenessModel? LivenessSingleModalModel { get { throw null; } set { } }
+        public bool? ReturnVerifyImageHash { get { throw null; } set { } }
+        public bool? SendResultsToClient { get { throw null; } set { } }
+        public float? VerifyConfidenceThreshold { get { throw null; } set { } }
+        Azure.AI.Vision.Face.CreateLivenessWithVerifySessionContent System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.CreateLivenessWithVerifySessionContent System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
     public partial class CreateLivenessWithVerifySessionResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
     {
         internal CreateLivenessWithVerifySessionResult() { }
@@ -165,6 +207,16 @@ internal CreateLivenessWithVerifySessionResult() { }
         string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
         System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
     }
+    public partial class CreatePersonResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        internal CreatePersonResult() { }
+        public System.Guid PersonId { get { throw null; } }
+        Azure.AI.Vision.Face.CreatePersonResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.CreatePersonResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
     [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
     public readonly partial struct ExposureLevel : System.IEquatable
     {
@@ -195,6 +247,17 @@ internal ExposureProperties() { }
         string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
         System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
     }
+    public partial class FaceAdministrationClient
+    {
+        protected FaceAdministrationClient() { }
+        public FaceAdministrationClient(System.Uri endpoint, Azure.AzureKeyCredential credential) { }
+        public FaceAdministrationClient(System.Uri endpoint, Azure.AzureKeyCredential credential, Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions options) { }
+        public FaceAdministrationClient(System.Uri endpoint, Azure.Core.TokenCredential credential) { }
+        public FaceAdministrationClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions options) { }
+        public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } }
+        public virtual Azure.AI.Vision.Face.LargeFaceListClient GetLargeFaceListClient(string largeFaceListId) { throw null; }
+        public virtual Azure.AI.Vision.Face.LargePersonGroupClient GetLargePersonGroupClient(string largePersonGroupId) { throw null; }
+    }
     public partial class FaceAttributes : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
     {
         internal FaceAttributes() { }
@@ -253,7 +316,6 @@ public partial struct Detection01
         public static Azure.AI.Vision.Face.FaceAttributeType Exposure { get { throw null; } }
         public static Azure.AI.Vision.Face.FaceAttributeType Glasses { get { throw null; } }
         public static Azure.AI.Vision.Face.FaceAttributeType HeadPose { get { throw null; } }
-        public static Azure.AI.Vision.Face.FaceAttributeType Mask { get { throw null; } }
         public static Azure.AI.Vision.Face.FaceAttributeType Noise { get { throw null; } }
         public static Azure.AI.Vision.Face.FaceAttributeType Occlusion { get { throw null; } }
     }
@@ -291,14 +353,26 @@ public FaceClient(System.Uri endpoint, Azure.Core.TokenCredential credential, Az
         public virtual Azure.Response> FindSimilar(System.Guid faceId, System.Collections.Generic.IEnumerable faceIds, int? maxNumOfCandidatesReturned = default(int?), Azure.AI.Vision.Face.FindSimilarMatchMode? mode = default(Azure.AI.Vision.Face.FindSimilarMatchMode?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task FindSimilarAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
         public virtual System.Threading.Tasks.Task>> FindSimilarAsync(System.Guid faceId, System.Collections.Generic.IEnumerable faceIds, int? maxNumOfCandidatesReturned = default(int?), Azure.AI.Vision.Face.FindSimilarMatchMode? mode = default(Azure.AI.Vision.Face.FindSimilarMatchMode?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response FindSimilarFromLargeFaceList(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response> FindSimilarFromLargeFaceList(System.Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = default(int?), Azure.AI.Vision.Face.FindSimilarMatchMode? mode = default(Azure.AI.Vision.Face.FindSimilarMatchMode?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task FindSimilarFromLargeFaceListAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task>> FindSimilarFromLargeFaceListAsync(System.Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = default(int?), Azure.AI.Vision.Face.FindSimilarMatchMode? mode = default(Azure.AI.Vision.Face.FindSimilarMatchMode?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual Azure.Response Group(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
         public virtual Azure.Response Group(System.Collections.Generic.IEnumerable faceIds, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task GroupAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
         public virtual System.Threading.Tasks.Task> GroupAsync(System.Collections.Generic.IEnumerable faceIds, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response IdentifyFromLargePersonGroup(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response> IdentifyFromLargePersonGroup(System.Collections.Generic.IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = default(int?), float? confidenceThreshold = default(float?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task IdentifyFromLargePersonGroupAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task>> IdentifyFromLargePersonGroupAsync(System.Collections.Generic.IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = default(int?), float? confidenceThreshold = default(float?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual Azure.Response VerifyFaceToFace(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
         public virtual Azure.Response VerifyFaceToFace(System.Guid faceId1, System.Guid faceId2, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task VerifyFaceToFaceAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
         public virtual System.Threading.Tasks.Task> VerifyFaceToFaceAsync(System.Guid faceId1, System.Guid faceId2, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response VerifyFromLargePersonGroup(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response VerifyFromLargePersonGroup(System.Guid faceId, string largePersonGroupId, System.Guid personId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task VerifyFromLargePersonGroupAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task> VerifyFromLargePersonGroupAsync(System.Guid faceId, string largePersonGroupId, System.Guid personId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
     }
     [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
     public readonly partial struct FaceDetectionModel : System.IEquatable
@@ -356,6 +430,28 @@ internal FaceGroupingResult() { }
         string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
         System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
     }
+    public partial class FaceIdentificationCandidate : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        internal FaceIdentificationCandidate() { }
+        public float Confidence { get { throw null; } }
+        public System.Guid PersonId { get { throw null; } }
+        Azure.AI.Vision.Face.FaceIdentificationCandidate System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.FaceIdentificationCandidate System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class FaceIdentificationResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        internal FaceIdentificationResult() { }
+        public System.Collections.Generic.IReadOnlyList Candidates { get { throw null; } }
+        public System.Guid FaceId { get { throw null; } }
+        Azure.AI.Vision.Face.FaceIdentificationResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.FaceIdentificationResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
     [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
     public readonly partial struct FaceImageType : System.IEquatable
     {
@@ -431,6 +527,26 @@ internal FaceLandmarks() { }
         public override string ToString() { throw null; }
     }
     [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
+    public readonly partial struct FaceOperationStatus : System.IEquatable
+    {
+        private readonly object _dummy;
+        private readonly int _dummyPrimitive;
+        public FaceOperationStatus(string value) { throw null; }
+        public static Azure.AI.Vision.Face.FaceOperationStatus Failed { get { throw null; } }
+        public static Azure.AI.Vision.Face.FaceOperationStatus NotStarted { get { throw null; } }
+        public static Azure.AI.Vision.Face.FaceOperationStatus Running { get { throw null; } }
+        public static Azure.AI.Vision.Face.FaceOperationStatus Succeeded { get { throw null; } }
+        public bool Equals(Azure.AI.Vision.Face.FaceOperationStatus other) { throw null; }
+        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
+        public override bool Equals(object obj) { throw null; }
+        [System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
+        public override int GetHashCode() { throw null; }
+        public static bool operator ==(Azure.AI.Vision.Face.FaceOperationStatus left, Azure.AI.Vision.Face.FaceOperationStatus right) { throw null; }
+        public static implicit operator Azure.AI.Vision.Face.FaceOperationStatus (string value) { throw null; }
+        public static bool operator !=(Azure.AI.Vision.Face.FaceOperationStatus left, Azure.AI.Vision.Face.FaceOperationStatus right) { throw null; }
+        public override string ToString() { throw null; }
+    }
+    [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
     public readonly partial struct FaceRecognitionModel : System.IEquatable
     {
         private readonly object _dummy;
@@ -475,12 +591,16 @@ public FaceSessionClient(System.Uri endpoint, Azure.Core.TokenCredent
         public virtual Azure.Response CreateLivenessSession(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
         public virtual System.Threading.Tasks.Task> CreateLivenessSessionAsync(Azure.AI.Vision.Face.CreateLivenessSessionContent body, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task CreateLivenessSessionAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
-        public virtual Azure.Response CreateLivenessWithVerifySession(Azure.AI.Vision.Face.CreateLivenessSessionContent createLivenessSessionContent, System.IO.Stream verifyImage, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
-        public virtual System.Threading.Tasks.Task> CreateLivenessWithVerifySessionAsync(Azure.AI.Vision.Face.CreateLivenessSessionContent createLivenessSessionContent, System.IO.Stream verifyImage, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response CreateLivenessWithVerifySession(Azure.AI.Vision.Face.CreateLivenessWithVerifySessionContent jsonContent, System.IO.Stream verifyImage, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task> CreateLivenessWithVerifySessionAsync(Azure.AI.Vision.Face.CreateLivenessWithVerifySessionContent jsonContent, System.IO.Stream verifyImage, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual Azure.Response DeleteLivenessSession(string sessionId, Azure.RequestContext context = null) { throw null; }
         public virtual System.Threading.Tasks.Task DeleteLivenessSessionAsync(string sessionId, Azure.RequestContext context = null) { throw null; }
         public virtual Azure.Response DeleteLivenessWithVerifySession(string sessionId, Azure.RequestContext context = null) { throw null; }
         public virtual System.Threading.Tasks.Task DeleteLivenessWithVerifySessionAsync(string sessionId, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response DetectFromSessionImage(Azure.Core.RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = default(bool?), System.Collections.Generic.IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = default(bool?), bool? returnRecognitionModel = default(bool?), int? faceIdTimeToLive = default(int?), Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response> DetectFromSessionImage(string sessionImageId, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), bool? returnFaceId = default(bool?), System.Collections.Generic.IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = default(bool?), bool? returnRecognitionModel = default(bool?), int? faceIdTimeToLive = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task DetectFromSessionImageAsync(Azure.Core.RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = default(bool?), System.Collections.Generic.IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = default(bool?), bool? returnRecognitionModel = default(bool?), int? faceIdTimeToLive = default(int?), Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task>> DetectFromSessionImageAsync(string sessionImageId, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), bool? returnFaceId = default(bool?), System.Collections.Generic.IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = default(bool?), bool? returnRecognitionModel = default(bool?), int? faceIdTimeToLive = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual Azure.Response GetLivenessSessionAuditEntries(string sessionId, string start, int? top, Azure.RequestContext context) { throw null; }
         public virtual Azure.Response> GetLivenessSessionAuditEntries(string sessionId, string start = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task GetLivenessSessionAuditEntriesAsync(string sessionId, string start, int? top, Azure.RequestContext context) { throw null; }
@@ -505,6 +625,10 @@ public FaceSessionClient(System.Uri endpoint, Azure.Core.TokenCredential credent
         public virtual Azure.Response> GetLivenessWithVerifySessions(string start = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
         public virtual System.Threading.Tasks.Task GetLivenessWithVerifySessionsAsync(string start, int? top, Azure.RequestContext context) { throw null; }
         public virtual System.Threading.Tasks.Task>> GetLivenessWithVerifySessionsAsync(string start = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetSessionImage(string sessionImageId, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response GetSessionImage(string sessionImageId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task GetSessionImageAsync(string sessionImageId, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task> GetSessionImageAsync(string sessionImageId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
     }
     [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)]
     public readonly partial struct FaceSessionStatus : System.IEquatable
@@ -525,6 +649,20 @@ public FaceSessionClient(System.Uri endpoint, Azure.Core.TokenCredential credent
         public static bool operator !=(Azure.AI.Vision.Face.FaceSessionStatus left, Azure.AI.Vision.Face.FaceSessionStatus right) { throw null; }
         public override string ToString() { throw null; }
     }
+    public partial class FaceTrainingResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        internal FaceTrainingResult() { }
+        public System.DateTimeOffset CreatedDateTime { get { throw null; } }
+        public System.DateTimeOffset LastActionDateTime { get { throw null; } }
+        public System.DateTimeOffset LastSuccessfulTrainingDateTime { get { throw null; } }
+        public string Message { get { throw null; } }
+        public Azure.AI.Vision.Face.FaceOperationStatus Status { get { throw null; } }
+        Azure.AI.Vision.Face.FaceTrainingResult System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.FaceTrainingResult System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
     public partial class FaceVerificationResult : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
     {
         internal FaceVerificationResult() { }
@@ -656,16 +794,185 @@ internal LandmarkCoordinate() { }
         string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
         System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
     }
+    public partial class LargeFaceList : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        internal LargeFaceList() { }
+        public string LargeFaceListId { get { throw null; } }
+        public string Name { get { throw null; } }
+        public Azure.AI.Vision.Face.FaceRecognitionModel? RecognitionModel { get { throw null; } }
+        public string UserData { get { throw null; } }
+        Azure.AI.Vision.Face.LargeFaceList System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.LargeFaceList System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+    }
+    public partial class LargeFaceListClient
+    {
+        protected LargeFaceListClient() { }
+        public LargeFaceListClient(System.Uri endpoint, Azure.AzureKeyCredential credential, string largeFaceListId) { }
+        public LargeFaceListClient(System.Uri endpoint, Azure.AzureKeyCredential credential, string largeFaceListId, Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions options) { }
+        public LargeFaceListClient(System.Uri endpoint, Azure.Core.TokenCredential credential, string largeFaceListId) { }
+        public LargeFaceListClient(System.Uri endpoint, Azure.Core.TokenCredential credential, string largeFaceListId, Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions options) { }
+        public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } }
+        public virtual Azure.Response AddFace(Azure.Core.RequestContent content, System.Collections.Generic.IEnumerable targetFace = null, string detectionModel = null, string userData = null, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response AddFace(System.BinaryData imageContent, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response AddFace(System.Uri uri, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task AddFaceAsync(Azure.Core.RequestContent content, System.Collections.Generic.IEnumerable targetFace = null, string detectionModel = null, string userData = null, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task> AddFaceAsync(System.BinaryData imageContent, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task> AddFaceAsync(System.Uri uri, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response Create(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response Create(string name, string userData = null, Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task CreateAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task CreateAsync(string name, string userData = null, Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response Delete(Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task DeleteAsync(Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response DeleteFace(System.Guid persistedFaceId, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task DeleteFaceAsync(System.Guid persistedFaceId, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response GetFace(System.Guid persistedFaceId, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response GetFace(System.Guid persistedFaceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task GetFaceAsync(System.Guid persistedFaceId, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task> GetFaceAsync(System.Guid persistedFaceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetFaces(string start, int? top, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response> GetFaces(string start = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task GetFacesAsync(string start, int? top, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task>> GetFacesAsync(string start = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetLargeFaceList(bool? returnRecognitionModel, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response GetLargeFaceList(bool? returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task GetLargeFaceListAsync(bool? returnRecognitionModel, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task> GetLargeFaceListAsync(bool? returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetLargeFaceLists(string start, int? top, bool? returnRecognitionModel, Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response> GetLargeFaceLists(string start = null, int? top = default(int?), bool? returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task GetLargeFaceListsAsync(string start, int? top, bool? returnRecognitionModel, Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task>> GetLargeFaceListsAsync(string start = null, int? top = default(int?), bool? returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Response GetTrainingStatus(Azure.RequestContext context) { throw null; }
+        public virtual Azure.Response GetTrainingStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual System.Threading.Tasks.Task GetTrainingStatusAsync(Azure.RequestContext context) { throw null; }
+        public virtual System.Threading.Tasks.Task> GetTrainingStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; }
+        public virtual Azure.Operation Train(Azure.WaitUntil waitUntil, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task TrainAsync(Azure.WaitUntil waitUntil, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response Update(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task UpdateAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual Azure.Response UpdateFace(System.Guid persistedFaceId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+        public virtual System.Threading.Tasks.Task UpdateFaceAsync(System.Guid persistedFaceId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; }
+    }
+    public partial class LargeFaceListFace : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel
+    {
+        internal LargeFaceListFace() { }
+        public System.Guid PersistedFaceId { get { throw null; } }
+        public string UserData { get { throw null; } }
+        Azure.AI.Vision.Face.LargeFaceListFace System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
+        Azure.AI.Vision.Face.LargeFaceListFace System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
+        System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class LargePersonGroup : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal LargePersonGroup() { } + public string LargePersonGroupId { get { throw null; } } + public string Name { get { throw null; } } + public Azure.AI.Vision.Face.FaceRecognitionModel? RecognitionModel { get { throw null; } } + public string UserData { get { throw null; } } + Azure.AI.Vision.Face.LargePersonGroup System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.Vision.Face.LargePersonGroup System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class LargePersonGroupClient + { + protected LargePersonGroupClient() { } + public LargePersonGroupClient(System.Uri endpoint, Azure.AzureKeyCredential credential, string largePersonGroupId) { } + public LargePersonGroupClient(System.Uri endpoint, Azure.AzureKeyCredential credential, string largePersonGroupId, Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions options) { } + public LargePersonGroupClient(System.Uri endpoint, Azure.Core.TokenCredential credential, string largePersonGroupId) { } + public LargePersonGroupClient(System.Uri endpoint, Azure.Core.TokenCredential credential, string largePersonGroupId, Azure.AI.Vision.Face.AzureAIVisionFaceClientOptions options) { } + public virtual Azure.Core.Pipeline.HttpPipeline Pipeline { get { throw null; } } + public virtual Azure.Response AddFace(System.Guid personId, Azure.Core.RequestContent content, System.Collections.Generic.IEnumerable targetFace = null, string detectionModel = null, string userData = null, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response AddFace(System.Guid personId, System.BinaryData imageContent, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response AddFace(System.Guid personId, System.Uri uri, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? 
detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task AddFaceAsync(System.Guid personId, Azure.Core.RequestContent content, System.Collections.Generic.IEnumerable targetFace = null, string detectionModel = null, string userData = null, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task> AddFaceAsync(System.Guid personId, System.BinaryData imageContent, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task> AddFaceAsync(System.Guid personId, System.Uri uri, System.Collections.Generic.IEnumerable targetFace = null, Azure.AI.Vision.Face.FaceDetectionModel? detectionModel = default(Azure.AI.Vision.Face.FaceDetectionModel?), string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Create(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response Create(string name, string userData = null, Azure.AI.Vision.Face.FaceRecognitionModel? recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task CreateAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task CreateAsync(string name, string userData = null, Azure.AI.Vision.Face.FaceRecognitionModel? 
recognitionModel = default(Azure.AI.Vision.Face.FaceRecognitionModel?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response CreatePerson(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response CreatePerson(string name, string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task CreatePersonAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task> CreatePersonAsync(string name, string userData = null, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response Delete(Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task DeleteAsync(Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response DeleteFace(System.Guid personId, System.Guid persistedFaceId, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task DeleteFaceAsync(System.Guid personId, System.Guid persistedFaceId, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response DeletePerson(System.Guid personId, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task DeletePersonAsync(System.Guid personId, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response GetFace(System.Guid personId, System.Guid persistedFaceId, Azure.RequestContext context) { throw null; } + public virtual Azure.Response GetFace(System.Guid personId, System.Guid persistedFaceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetFaceAsync(System.Guid personId, System.Guid persistedFaceId, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task> GetFaceAsync(System.Guid personId, System.Guid persistedFaceId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetLargePersonGroup(bool? returnRecognitionModel, Azure.RequestContext context) { throw null; } + public virtual Azure.Response GetLargePersonGroup(bool? returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetLargePersonGroupAsync(bool? returnRecognitionModel, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task> GetLargePersonGroupAsync(bool? returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetLargePersonGroups(string start, int? top, bool? returnRecognitionModel, Azure.RequestContext context) { throw null; } + public virtual Azure.Response> GetLargePersonGroups(string start = null, int? top = default(int?), bool? 
returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetLargePersonGroupsAsync(string start, int? top, bool? returnRecognitionModel, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task>> GetLargePersonGroupsAsync(string start = null, int? top = default(int?), bool? returnRecognitionModel = default(bool?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetPerson(System.Guid personId, Azure.RequestContext context) { throw null; } + public virtual Azure.Response GetPerson(System.Guid personId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetPersonAsync(System.Guid personId, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task> GetPersonAsync(System.Guid personId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetPersons(string start, int? top, Azure.RequestContext context) { throw null; } + public virtual Azure.Response> GetPersons(string start = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetPersonsAsync(string start, int? top, Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task>> GetPersonsAsync(string start = null, int? top = default(int?), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Response GetTrainingStatus(Azure.RequestContext context) { throw null; } + public virtual Azure.Response GetTrainingStatus(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual System.Threading.Tasks.Task GetTrainingStatusAsync(Azure.RequestContext context) { throw null; } + public virtual System.Threading.Tasks.Task> GetTrainingStatusAsync(System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) { throw null; } + public virtual Azure.Operation Train(Azure.WaitUntil waitUntil, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task TrainAsync(Azure.WaitUntil waitUntil, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response Update(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task UpdateAsync(Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response UpdateFace(System.Guid personId, System.Guid persistedFaceId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual System.Threading.Tasks.Task UpdateFaceAsync(System.Guid personId, System.Guid persistedFaceId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual Azure.Response UpdatePerson(System.Guid personId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + public virtual 
System.Threading.Tasks.Task UpdatePersonAsync(System.Guid personId, Azure.Core.RequestContent content, Azure.RequestContext context = null) { throw null; } + } + public partial class LargePersonGroupPerson : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal LargePersonGroupPerson() { } + public string Name { get { throw null; } } + public System.Collections.Generic.IReadOnlyList PersistedFaceIds { get { throw null; } } + public System.Guid PersonId { get { throw null; } } + public string UserData { get { throw null; } } + Azure.AI.Vision.Face.LargePersonGroupPerson System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.Vision.Face.LargePersonGroupPerson System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } + public partial class LargePersonGroupPersonFace : System.ClientModel.Primitives.IJsonModel, System.ClientModel.Primitives.IPersistableModel + { + internal LargePersonGroupPersonFace() { } + public System.Guid PersistedFaceId { get { throw null; } } + public string UserData { get { throw null; } } + Azure.AI.Vision.Face.LargePersonGroupPersonFace System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } + Azure.AI.Vision.Face.LargePersonGroupPersonFace System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + string System.ClientModel.Primitives.IPersistableModel.GetFormatFromOptions(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + System.BinaryData System.ClientModel.Primitives.IPersistableModel.Write(System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } + } [System.Runtime.InteropServices.StructLayoutAttribute(System.Runtime.InteropServices.LayoutKind.Sequential)] public readonly partial struct LivenessModel : System.IEquatable { private readonly object _dummy; private readonly int _dummyPrimitive; public LivenessModel(string value) { throw null; } - public static Azure.AI.Vision.Face.LivenessModel V20200215Preview01 { get { throw null; } } - public static Azure.AI.Vision.Face.LivenessModel V20211112Preview03 { get { throw null; } } public static Azure.AI.Vision.Face.LivenessModel V20221015Preview04 { get { throw null; } } - public static Azure.AI.Vision.Face.LivenessModel V20230302Preview05 { get { throw null; } } + public static Azure.AI.Vision.Face.LivenessModel V20231220Preview06 { get { throw null; } } public bool Equals(Azure.AI.Vision.Face.LivenessModel other) { throw null; } 
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)] public override bool Equals(object obj) { throw null; } @@ -749,6 +1056,8 @@ internal LivenessSessionAuditEntry() { } public string RequestId { get { throw null; } } public Azure.AI.Vision.Face.AuditLivenessResponseInfo Response { get { throw null; } } public string SessionId { get { throw null; } } + public string SessionImageId { get { throw null; } } + public string VerifyImageHash { get { throw null; } } Azure.AI.Vision.Face.LivenessSessionAuditEntry System.ClientModel.Primitives.IJsonModel.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } void System.ClientModel.Primitives.IJsonModel.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { } Azure.AI.Vision.Face.LivenessSessionAuditEntry System.ClientModel.Primitives.IPersistableModel.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; } @@ -907,6 +1216,9 @@ namespace Microsoft.Extensions.Azure { public static partial class AIVisionFaceClientBuilderExtensions { + public static Azure.Core.Extensions.IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, System.Uri endpoint) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithCredential { throw null; } + public static Azure.Core.Extensions.IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, System.Uri endpoint, Azure.AzureKeyCredential credential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; } + public static Azure.Core.Extensions.IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration { throw null; } public static Azure.Core.Extensions.IAzureClientBuilder AddFaceClient(this TBuilder builder, System.Uri endpoint) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithCredential { throw null; } public static Azure.Core.Extensions.IAzureClientBuilder AddFaceClient(this TBuilder builder, System.Uri endpoint, Azure.AzureKeyCredential credential) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilder { throw null; } public static Azure.Core.Extensions.IAzureClientBuilder AddFaceClient(this TBuilder builder, TConfiguration configuration) where TBuilder : Azure.Core.Extensions.IAzureClientFactoryBuilderWithConfiguration { throw null; } diff --git a/sdk/face/Azure.AI.Vision.Face/assets.json b/sdk/face/Azure.AI.Vision.Face/assets.json index 8436ed4868b6..d3d91b3944d1 100644 --- a/sdk/face/Azure.AI.Vision.Face/assets.json +++ b/sdk/face/Azure.AI.Vision.Face/assets.json @@ -2,5 +2,5 @@ "AssetsRepo": "Azure/azure-sdk-assets", "AssetsRepoPrefixPath": "net", "TagPrefix": "net/face/Azure.AI.Vision.Face", - "Tag": "net/face/Azure.AI.Vision.Face_7088055bd6" + "Tag": "net/face/Azure.AI.Vision.Face_a2e6c14099" } diff --git a/sdk/face/Azure.AI.Vision.Face/samples/README.md b/sdk/face/Azure.AI.Vision.Face/samples/README.md index a03d90e21460..a232abf279ae 100644 --- a/sdk/face/Azure.AI.Vision.Face/samples/README.md +++ b/sdk/face/Azure.AI.Vision.Face/samples/README.md @@ -17,3 +17,6 @@ Azure AI Vision Face is a cloud service that gives you access to advanced algor - From URL - Detect liveness in faces with session 
[synchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample2_DetectLivenessWithSession.cs) or [asynchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample2_DetectLivenessWithSessionAsync.cs) - Detect liveness with face verification with session [synchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSession.cs) or [asynchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.cs) +- Stateless face recognition [synchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognition.cs) or [asynchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognitionAsync.cs) +- Verification and identification from Large Person Group [synchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroup.cs) or [asynchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroupAsync.cs) +- Find similar faces from a large face list [synchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceList.cs) or [asynchronously](https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceListAsync.cs) diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetection.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetection.md index 525a132c8c0a..3c4bc395708c 100644 --- a/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetection.md +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetection.md @@ -37,7 +37,7 @@ foreach (var detectedFace in detectedFaces) { Console.WriteLine($"Face Rectangle: left={detectedFace.FaceRectangle.Left}, top={detectedFace.FaceRectangle.Top}, width={detectedFace.FaceRectangle.Width}, height={detectedFace.FaceRectangle.Height}"); Console.WriteLine($"Head pose: pitch={detectedFace.FaceAttributes.HeadPose.Pitch}, roll={detectedFace.FaceAttributes.HeadPose.Roll}, yaw={detectedFace.FaceAttributes.HeadPose.Yaw}"); - Console.WriteLine($"Mask: {detectedFace.FaceAttributes.Mask}"); + Console.WriteLine($"Mask: NoseAndMouthCovered={detectedFace.FaceAttributes.Mask.NoseAndMouthCovered}, Type={detectedFace.FaceAttributes.Mask.Type}"); Console.WriteLine($"Quality: {detectedFace.FaceAttributes.QualityForRecognition}"); Console.WriteLine($"Recognition model: {detectedFace.RecognitionModel}"); Console.WriteLine($"Landmarks: "); diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetectionAsync.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetectionAsync.md index 927aee767096..e209e80a57f0 100644 --- a/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetectionAsync.md +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample1_FaceDetectionAsync.md @@ -37,7 +37,7 @@ foreach (var detectedFace in detectedFaces) { Console.WriteLine($"Face Rectangle: left={detectedFace.FaceRectangle.Left}, top={detectedFace.FaceRectangle.Top}, 
width={detectedFace.FaceRectangle.Width}, height={detectedFace.FaceRectangle.Height}"); Console.WriteLine($"Head pose: pitch={detectedFace.FaceAttributes.HeadPose.Pitch}, roll={detectedFace.FaceAttributes.HeadPose.Roll}, yaw={detectedFace.FaceAttributes.HeadPose.Yaw}"); - Console.WriteLine($"Mask: {detectedFace.FaceAttributes.Mask}"); + Console.WriteLine($"Mask: NoseAndMouthCovered={detectedFace.FaceAttributes.Mask.NoseAndMouthCovered}, Type={detectedFace.FaceAttributes.Mask.Type}"); Console.WriteLine($"Quality: {detectedFace.FaceAttributes.QualityForRecognition}"); Console.WriteLine($"Recognition model: {detectedFace.RecognitionModel}"); Console.WriteLine($"Landmarks: "); diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSession.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSession.md index 3431006c1ade..ac54d7b9e9cc 100644 --- a/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSession.md +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSession.md @@ -29,7 +29,7 @@ var sessionClient = new FaceSessionClient(endpoint, credential); Before you can detect liveness in a face, you need to create a liveness detection session with Azure AI Face Service. The service creates a liveness-session and responds back with a session-authorization-token. ```C# Snippet:CreateLivenessWithVerifySession -var parameters = new CreateLivenessSessionContent(LivenessOperationMode.Passive) { +var parameters = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive) { SendResultsToClient = true, DeviceCorrelationId = Guid.NewGuid().ToString(), }; diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.md index c81086e225e4..0b18fbcb5109 100644 --- a/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.md +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.md @@ -29,7 +29,7 @@ var sessionClient = new FaceSessionClient(endpoint, credential); Before you can detect liveness in a face, you need to create a liveness detection session with Azure AI Face Service. The service creates a liveness-session and responds back with a session-authorization-token. ```C# Snippet:CreateLivenessWithVerifySessionAsync -var parameters = new CreateLivenessSessionContent(LivenessOperationMode.Passive) { +var parameters = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive) { SendResultsToClient = true, DeviceCorrelationId = Guid.NewGuid().ToString(), }; diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample4_StatelessFaceRecognition.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample4_StatelessFaceRecognition.md new file mode 100644 index 000000000000..51e00cce5df4 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample4_StatelessFaceRecognition.md @@ -0,0 +1,113 @@ +# Stateless face recognition + +This sample demonstrates how to recognize faces in an image without any stored face data structure. + +To get started you'll need an Azure AI resource or a Face resource. See [README][README] for prerequisites and instructions. + +## Creating a `FaceClient` + +To create a new `FaceClient` you need the endpoint and credentials from your resource. In the sample below you'll use a `DefaultAzureCredential` object to authenticate.
You can set `endpoint` based on an environment variable, a configuration setting, or any way that works for your application. See [Authenticate the client][README_authenticate] for instructions. + +```C# Snippet:CreateFaceClient +Uri endpoint = new Uri(""); +DefaultAzureCredential credential = new DefaultAzureCredential(); +var client = new FaceClient(endpoint, credential); +``` + +## Verify whether two faces belong to the same person + +To verify whether two faces belong to the same person, you can use the `VerifyFaceToFace` method. This method returns a `FaceVerificationResult` object that contains a `Confidence` score indicating the similarity between the two faces. + +```C# Snippet:VerifyFaceToFace +var data = new (string Name, Uri Uri)[] { + ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)), + ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)), + ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image)) +}; +var faceIds = new List<Guid>(); + +foreach (var tuple in data) +{ + var detectResponse = client.Detect(tuple.Uri, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); + Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{tuple.Name}'."); + faceIds.Add(detectResponse.Value.Single().FaceId.Value); +} + +var verifyDad1Dad2Response = client.VerifyFaceToFace(faceIds[0], faceIds[1]); +Console.WriteLine($"Verification between Dad image 1 and Dad image 2: {verifyDad1Dad2Response.Value.Confidence}"); +Console.WriteLine($"Is the same person: {verifyDad1Dad2Response.Value.IsIdentical}"); + +var verifyDad1SonResponse = client.VerifyFaceToFace(faceIds[0], faceIds[2]); +Console.WriteLine($"Verification between Dad image 1 and Son image 1: {verifyDad1SonResponse.Value.Confidence}"); +Console.WriteLine($"Is the same person: {verifyDad1SonResponse.Value.IsIdentical}"); +``` + +## Find similar faces from a list of faces + +To find similar faces from a list of faces, you can use the `FindSimilar` method. This method returns a list of `FaceFindSimilarResult` objects that contain the `FaceId` of the face and a `Confidence` score indicating the similarity between the face and the query face. + +```C# Snippet:FindSimilar +var dadImage = new Uri(FaceTestConstant.UrlFamily1Dad1Image); +var detectDadResponse = client.Detect(dadImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); +Console.WriteLine($"Detected {detectDadResponse.Value.Count} face(s) in the Dad image."); +var dadFaceId = detectDadResponse.Value.Single().FaceId.Value; + +var targetImage = new Uri(FaceTestConstant.UrlIdentification1Image); +var detectResponse = client.Detect(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); +Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image."); +var faceIds = detectResponse.Value.Select(face => face.FaceId.Value); + +var response = client.FindSimilar(dadFaceId, faceIds); +var similarFaces = response.Value; +Console.WriteLine($"Found {similarFaces.Count} similar face(s) in the target image."); +foreach (var similarFace in similarFaces) +{ + Console.WriteLine($"Face ID: {similarFace.FaceId}, confidence: {similarFace.Confidence}"); +} +```
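+The `FaceTestConstant.Url*` values used throughout these snippets are image-URL constants from the SDK's test project. When running the samples yourself, any publicly reachable image URLs will do; a hypothetical stand-in might look like this (the URLs below are placeholders, not real sample assets):
+
+```C#
+// Illustrative stand-in for the test-project constants used in these snippets.
+// Point these at your own publicly accessible images.
+public static class FaceTestConstant
+{
+    public const string UrlFamily1Dad1Image = "https://example.com/family1-dad1.jpg";
+    public const string UrlFamily1Dad2Image = "https://example.com/family1-dad2.jpg";
+    public const string UrlFamily1Son1Image = "https://example.com/family1-son1.jpg";
+    public const string UrlIdentification1Image = "https://example.com/identification1.jpg";
+}
+```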
+ +## Group faces + +To group faces, you can use the `Group` method. This method returns a `FaceGroupingResult` object whose `Groups` property is a two-dimensional array of face IDs; each inner array represents a group of faces that belong to the same person. The `MessyGroup` array contains the face IDs that could not be grouped. + +```C# Snippet:Group +var targetImages = new (string, Uri)[] { + ("Group image", new Uri(FaceTestConstant.UrlIdentification1Image)), + ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)), + ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)), + ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image)) +}; +var faceIds = new Dictionary<Guid, (FaceDetectionResult, string)>(); + +foreach (var (imageName, targetImage) in targetImages) +{ + var detectResponse = client.Detect(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); + Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{imageName}'."); + foreach (var face in detectResponse.Value) + { + faceIds[face.FaceId.Value] = (face, imageName); + } +} + +var groupResponse = client.Group(faceIds.Keys); +var groups = groupResponse.Value; + +Console.WriteLine($"Found {groups.Groups.Count} group(s) in the target images."); +foreach (var group in groups.Groups) +{ + Console.WriteLine($"Group: "); + foreach (var faceId in group) + { + Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}"); + } +} + +Console.WriteLine($"Found {groups.MessyGroup.Count} face(s) that are not in any group."); +foreach (var faceId in groups.MessyGroup) +{ + Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}"); +} +``` + +[README]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#getting-started +[README_authenticate]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#authenticate-the-client diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample4_StatelessFaceRecognitionAsync.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample4_StatelessFaceRecognitionAsync.md new file mode 100644 index 000000000000..d92f93d64690 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample4_StatelessFaceRecognitionAsync.md @@ -0,0 +1,113 @@ +# Stateless face recognition + +This sample demonstrates how to recognize faces in an image without any stored face data structure. + +To get started you'll need an Azure AI resource or a Face resource. See [README][README] for prerequisites and instructions. + +## Creating a `FaceClient` + +To create a new `FaceClient` you need the endpoint and credentials from your resource. In the sample below you'll use a `DefaultAzureCredential` object to authenticate. You can set `endpoint` based on an environment variable, a configuration setting, or any way that works for your application. See [Authenticate the client][README_authenticate] for instructions. + +```C# Snippet:CreateFaceClient +Uri endpoint = new Uri(""); +DefaultAzureCredential credential = new DefaultAzureCredential(); +var client = new FaceClient(endpoint, credential); +``` + +## Verify whether two faces belong to the same person + +To verify whether two faces belong to the same person, you can use the `VerifyFaceToFaceAsync` method. This method returns a `FaceVerificationResult` object that contains a `Confidence` score indicating the similarity between the two faces.
+ +```C# Snippet:VerifyFaceToFaceAsync +var data = new (string Name, Uri Uri)[] { + ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)), + ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)), + ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image)) +}; +var faceIds = new List<Guid>(); + +foreach (var tuple in data) +{ + var detectResponse = await client.DetectAsync(tuple.Uri, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); + Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{tuple.Name}'."); + faceIds.Add(detectResponse.Value.Single().FaceId.Value); +} + +var verifyDad1Dad2Response = await client.VerifyFaceToFaceAsync(faceIds[0], faceIds[1]); +Console.WriteLine($"Verification between Dad image 1 and Dad image 2: {verifyDad1Dad2Response.Value.Confidence}"); +Console.WriteLine($"Is the same person: {verifyDad1Dad2Response.Value.IsIdentical}"); + +var verifyDad1SonResponse = await client.VerifyFaceToFaceAsync(faceIds[0], faceIds[2]); +Console.WriteLine($"Verification between Dad image 1 and Son image 1: {verifyDad1SonResponse.Value.Confidence}"); +Console.WriteLine($"Is the same person: {verifyDad1SonResponse.Value.IsIdentical}"); +``` + +## Find similar faces from a list of faces + +To find similar faces from a list of faces, you can use the `FindSimilarAsync` method. This method returns a list of `FaceFindSimilarResult` objects that contain the `FaceId` of the face and a `Confidence` score indicating the similarity between the face and the query face. + +```C# Snippet:FindSimilarAsync +var dadImage = new Uri(FaceTestConstant.UrlFamily1Dad1Image); +var detectDadResponse = await client.DetectAsync(dadImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); +Console.WriteLine($"Detected {detectDadResponse.Value.Count} face(s) in the Dad image."); +var dadFaceId = detectDadResponse.Value.Single().FaceId.Value; + +var targetImage = new Uri(FaceTestConstant.UrlIdentification1Image); +var detectResponse = await client.DetectAsync(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); +Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image."); +var faceIds = detectResponse.Value.Select(face => face.FaceId.Value); + +var response = await client.FindSimilarAsync(dadFaceId, faceIds); +var similarFaces = response.Value; +Console.WriteLine($"Found {similarFaces.Count} similar face(s) in the target image."); +foreach (var similarFace in similarFaces) +{ + Console.WriteLine($"Face ID: {similarFace.FaceId}, confidence: {similarFace.Confidence}"); +} +``` + +## Group faces + +To group faces, you can use the `GroupAsync` method. This method returns a `FaceGroupingResult` object whose `Groups` property is a two-dimensional array of face IDs; each inner array represents a group of faces that belong to the same person. The `MessyGroup` array contains the face IDs that could not be grouped.
+ +```C# Snippet:GroupAsync +var targetImages = new (string, Uri)[] { + ("Group image", new Uri(FaceTestConstant.UrlIdentification1Image)), + ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)), + ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)), + ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image)) +}; +var faceIds = new Dictionary<Guid, (FaceDetectionResult, string)>(); + +foreach (var (imageName, targetImage) in targetImages) +{ + var detectResponse = await client.DetectAsync(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); + Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{imageName}'."); + foreach (var face in detectResponse.Value) + { + faceIds[face.FaceId.Value] = (face, imageName); + } +} + +var groupResponse = await client.GroupAsync(faceIds.Keys); +var groups = groupResponse.Value; + +Console.WriteLine($"Found {groups.Groups.Count} group(s) in the target images."); +foreach (var group in groups.Groups) +{ + Console.WriteLine($"Group: "); + foreach (var faceId in group) + { + Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}"); + } +} + +Console.WriteLine($"Found {groups.MessyGroup.Count} face(s) that are not in any group."); +foreach (var faceId in groups.MessyGroup) +{ + Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}"); +} +``` + +[README]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#getting-started +[README_authenticate]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#authenticate-the-client diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample5_LargePersonGroup.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample5_LargePersonGroup.md new file mode 100644 index 000000000000..45ccc1266a85 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample5_LargePersonGroup.md @@ -0,0 +1,91 @@ +# Verification and identification from Large Person Group + +This sample demonstrates how to verify and identify faces from a large person group. + +To get started you'll need an Azure AI resource or a Face resource. See [README][README] for prerequisites and instructions. + +## Create the Large Person Group + +To create a large person group, you'll need a `LargePersonGroupClient` object. + +```C# Snippet:CreateLargePersonGroupClient +Uri endpoint = new Uri(""); +DefaultAzureCredential credential = new DefaultAzureCredential(); +var groupClient = new LargePersonGroupClient(endpoint, credential, id); +``` + +Call `Create` to create the large person group. The group ID was supplied when the client was constructed; here you provide a name and optional user data. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_CreateLargePersonGroup +groupClient.Create("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04); +``` + +## Create the `Person` with faces in the Large Person Group + +The `Person` object is used to represent the individual you want to identify. You can call `CreatePerson` to create it within the Large Person Group.
Call `AddFace` to add faces to the person. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_CreatePersonAndAddFaces +var persons = new[] +{ + new { Name = "Bill", UserData = "Dad", ImageUrls = new[] { FaceTestConstant.UrlFamily1Dad1Image, FaceTestConstant.UrlFamily1Dad2Image } }, + new { Name = "Clare", UserData = "Mom", ImageUrls = new[] { FaceTestConstant.UrlFamily1Mom1Image, FaceTestConstant.UrlFamily1Mom2Image } }, + new { Name = "Ron", UserData = "Son", ImageUrls = new[] { FaceTestConstant.UrlFamily1Son1Image, FaceTestConstant.UrlFamily1Son2Image } } +}; +var personIds = new Dictionary<string, Guid>(); + +foreach (var person in persons) +{ + var createPersonResponse = groupClient.CreatePerson(person.Name, userData: person.UserData); + var personId = createPersonResponse.Value.PersonId; + personIds.Add(person.Name, personId); + + foreach (var imageUrl in person.ImageUrls) + { + groupClient.AddFace(personId, new Uri(imageUrl), userData: $"{person.UserData}-{imageUrl}", detectionModel: FaceDetectionModel.Detection03); + } +} +``` + +## Train the Large Person Group before performing identification + +Before you can identify faces, you must train the large person group. Call `Train` to start the training process; training is a long-running operation that may take a while to complete. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_Train +var operation = groupClient.Train(WaitUntil.Completed); +operation.WaitForCompletionResponse(); +``` + +## Verify a face against a `Person` in the Large Person Group + +To verify a face against a `Person` in the large person group, call `VerifyFromLargePersonGroup`. This method returns a `FaceVerificationResult` object that contains the confidence score of the verification. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_Verify +var verifyDadResponse = faceClient.VerifyFromLargePersonGroup(faceId, groupId, personIds["Bill"]); +Console.WriteLine($"Is the detected face Bill? {verifyDadResponse.Value.IsIdentical} ({verifyDadResponse.Value.Confidence})"); + +var verifyMomResponse = faceClient.VerifyFromLargePersonGroup(faceId, groupId, personIds["Clare"]); +Console.WriteLine($"Is the detected face Clare? {verifyMomResponse.Value.IsIdentical} ({verifyMomResponse.Value.Confidence})"); +``` + +## Identify a face from the Large Person Group + +To identify a face from the large person group, call `IdentifyFromLargePersonGroup`. This method returns a list of `FaceIdentificationResult` objects; each contains the candidates for the corresponding face, with the `Person` ID and the confidence score of the identification. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_Identify +var identifyResponse = faceClient.IdentifyFromLargePersonGroup(new[] { faceId }, groupId); +foreach (var candidate in identifyResponse.Value[0].Candidates) +{ + var person = groupClient.GetPerson(candidate.PersonId); + Console.WriteLine($"The detected face belongs to {person.Value.Name} ({candidate.Confidence})"); +} +```
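+The verification and identification snippets above assume a `FaceClient` named `faceClient`, a transient `faceId` obtained from a prior detection, and the `groupId` that was passed to the `LargePersonGroupClient` constructor. A minimal sketch of that setup (the image URL is a placeholder, not part of the sample):
+
+```C#
+// Hypothetical setup for the verify/identify snippets above.
+var faceClient = new FaceClient(endpoint, credential);
+string groupId = id; // the same ID passed to the LargePersonGroupClient constructor
+
+// Detect a face to obtain a transient faceId; use your own test image URL.
+var detectResponse = faceClient.Detect(new Uri("https://example.com/test-face.jpg"), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+Guid faceId = detectResponse.Value.Single().FaceId.Value;
+```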
+ +## Delete the Large Person Group + +When you no longer need the large person group, you can delete it by calling `Delete`. The associated persons and faces will also be deleted. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_DeleteLargePersonGroup +groupClient.Delete(); +``` + +[README]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#getting-started \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample5_LargePersonGroupAsync.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample5_LargePersonGroupAsync.md new file mode 100644 index 000000000000..aad36e849fe3 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample5_LargePersonGroupAsync.md @@ -0,0 +1,91 @@ +# Verification and identification from Large Person Group + +This sample demonstrates how to verify and identify faces from a large person group. + +To get started you'll need an Azure AI resource or a Face resource. See [README][README] for prerequisites and instructions. + +## Create the Large Person Group + +To create a large person group, you'll need a `LargePersonGroupClient` object. + +```C# Snippet:CreateLargePersonGroupClient +Uri endpoint = new Uri(""); +DefaultAzureCredential credential = new DefaultAzureCredential(); +var groupClient = new LargePersonGroupClient(endpoint, credential, id); +``` + +Call `CreateAsync` to create the large person group. The group ID was supplied when the client was constructed; here you provide a name and optional user data. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_CreateLargePersonGroupAsync +await groupClient.CreateAsync("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04); +``` + +## Create the `Person` with faces in the Large Person Group + +The `Person` object is used to represent the individual you want to identify. You can call `CreatePersonAsync` to create it within the Large Person Group. Call `AddFaceAsync` to add faces to the person. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_CreatePersonAndAddFacesAsync +var persons = new[] +{ + new { Name = "Bill", UserData = "Dad", ImageUrls = new[] { FaceTestConstant.UrlFamily1Dad1Image, FaceTestConstant.UrlFamily1Dad2Image } }, + new { Name = "Clare", UserData = "Mom", ImageUrls = new[] { FaceTestConstant.UrlFamily1Mom1Image, FaceTestConstant.UrlFamily1Mom2Image } }, + new { Name = "Ron", UserData = "Son", ImageUrls = new[] { FaceTestConstant.UrlFamily1Son1Image, FaceTestConstant.UrlFamily1Son2Image } } +}; +var personIds = new Dictionary<string, Guid>(); + +foreach (var person in persons) +{ + var createPersonResponse = await groupClient.CreatePersonAsync(person.Name, userData: person.UserData); + var personId = createPersonResponse.Value.PersonId; + personIds.Add(person.Name, personId); + + foreach (var imageUrl in person.ImageUrls) + { + await groupClient.AddFaceAsync(personId, new Uri(imageUrl), userData: $"{person.UserData}-{imageUrl}", detectionModel: FaceDetectionModel.Detection03); + } +} +``` + +## Train the Large Person Group before performing identification + +Before you can identify faces, you must train the large person group. Call `TrainAsync` to start the training process; training is a long-running operation that may take a while to complete. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_TrainAsync +var operation = await groupClient.TrainAsync(WaitUntil.Completed); +await operation.WaitForCompletionResponseAsync(); +``` + +## Verify a face against a `Person` in the Large Person Group + +To verify a face against a `Person` in the large person group, call `VerifyFromLargePersonGroupAsync`.
This method returns a `FaceVerificationResult` object that contains the confidence score of the verification. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_VerifyAsync +var verifyDadResponse = await faceClient.VerifyFromLargePersonGroupAsync(faceId, groupId, personIds["Bill"]); +Console.WriteLine($"Is the detected face Bill? {verifyDadResponse.Value.IsIdentical} ({verifyDadResponse.Value.Confidence})"); + +var verifyMomResponse = await faceClient.VerifyFromLargePersonGroupAsync(faceId, groupId, personIds["Clare"]); +Console.WriteLine($"Is the detected face Clare? {verifyMomResponse.Value.IsIdentical} ({verifyMomResponse.Value.Confidence})"); +``` + +## Identify a face from the Large Person Group + +To identify a face from the large person group, call `IdentifyFromLargePersonGroupAsync`. This method returns a list of `FaceIdentificationResult` objects; each contains the candidates for the corresponding face, with the `Person` ID and the confidence score of the identification. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_IdentifyAsync +var identifyResponse = await faceClient.IdentifyFromLargePersonGroupAsync(new[] { faceId }, groupId); +foreach (var candidate in identifyResponse.Value[0].Candidates) +{ + var person = await groupClient.GetPersonAsync(candidate.PersonId); + Console.WriteLine($"The detected face belongs to {person.Value.Name} ({candidate.Confidence})"); +} +``` + +## Delete the Large Person Group + +When you no longer need the large person group, you can delete it by calling `DeleteAsync`. The associated persons and faces will also be deleted. + +```C# Snippet:VerifyAndIdentifyFromLargePersonGroup_DeleteLargePersonGroupAsync +await groupClient.DeleteAsync(); +``` + +[README]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#getting-started \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample6_LargeFaceList.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample6_LargeFaceList.md new file mode 100644 index 000000000000..48a672b4153f --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample6_LargeFaceList.md @@ -0,0 +1,76 @@ +# Find similar faces from a large face list + +This sample demonstrates how to find similar faces from a large face list. + +To get started you'll need an Azure AI resource or a Face resource. See [README][README] for prerequisites and instructions. + +## Create the Large Face List + +To create a large face list, you'll need a `LargeFaceListClient` object. + +```C# Snippet:CreateLargeFaceListClient +Uri endpoint = new Uri(""); +DefaultAzureCredential credential = new DefaultAzureCredential(); +var listClient = new LargeFaceListClient(endpoint, credential, id); +``` + +Call `Create` to create a large face list. You can specify the `name` and `userData` for the large face list. + +```C# Snippet:CreateLargeFaceList +listClient.Create("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04); +``` + +## Add faces to the Large Face List + +To add faces to the large face list, call `AddFace`. You can specify the `imageUri` and `userData` for the face.
+ +```C# Snippet:AddFacesToLargeFaceList +var faces = new[] +{ + new { UserData = "Dad", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Dad1Image) }, + new { UserData = "Mom", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Mom1Image) }, + new { UserData = "Son", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Son1Image) } +}; +var faceIds = new Dictionary<Guid, string>(); + +foreach (var face in faces) +{ + var addFaceResponse = listClient.AddFace(face.ImageUrl, userData: face.UserData); + faceIds[addFaceResponse.Value.PersistedFaceId] = face.UserData; +} +``` + +## Train the Large Face List before finding similar faces + +Before you can find similar faces, you must train the large face list by calling `Train`. This starts a long-running operation and returns an `Operation` object that you can use to wait for the training to complete. + +```C# Snippet:TrainLargeFaceList +var operation = listClient.Train(WaitUntil.Completed); +operation.WaitForCompletionResponse(); +``` + +## Find similar faces from the Large Face List + +To find similar faces from the large face list, call `FindSimilar`. This method returns a list of `FaceFindSimilarResult` objects that contain the `PersistedFaceId` of the stored face and a `Confidence` score indicating the similarity between the stored face and the query face. + +```C# Snippet:FindSimilarFromLargeFaceList +var faceClient = CreateClient(); +var detectResponse = faceClient.Detect(new Uri(FaceTestConstant.UrlFamily1Dad3Image), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); +var faceId = detectResponse.Value[0].FaceId.Value; + +var findSimilarResponse = faceClient.FindSimilarFromLargeFaceList(faceId, listId); +foreach (var similarFace in findSimilarResponse.Value) +{ + Console.WriteLine($"The detected face is similar to the face with '{faceIds[similarFace.PersistedFaceId.Value]}' ID {similarFace.PersistedFaceId} ({similarFace.Confidence})"); +} +```
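+The snippet above assumes a `listId` (the same ID passed to the `LargeFaceListClient` constructor) and a `CreateClient()` helper from the SDK's test project that builds a `FaceClient`. Outside the tests, a rough equivalent would be:
+
+```C#
+// Illustrative stand-ins for the test helpers used above.
+var faceClient = new FaceClient(endpoint, credential);
+string listId = id; // the same ID passed to the LargeFaceListClient constructor
+```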
+ +## Delete the Large Face List + +When you no longer need the large face list, you can delete it by calling `Delete`. + +```C# Snippet:DeleteLargeFaceList +listClient.Delete(); +``` + +[README]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#getting-started \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/samples/Sample6_LargeFaceListAsync.md b/sdk/face/Azure.AI.Vision.Face/samples/Sample6_LargeFaceListAsync.md new file mode 100644 index 000000000000..d75368e1d4cd --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/samples/Sample6_LargeFaceListAsync.md @@ -0,0 +1,76 @@ +# Find similar faces from a large face list + +This sample demonstrates how to find similar faces from a large face list. + +To get started you'll need an Azure AI resource or a Face resource. See [README][README] for prerequisites and instructions. + +## Create the Large Face List + +To create a large face list, you'll need a `LargeFaceListClient` object. + +```C# Snippet:CreateLargeFaceListClient +Uri endpoint = new Uri(""); +DefaultAzureCredential credential = new DefaultAzureCredential(); +var listClient = new LargeFaceListClient(endpoint, credential, id); +``` + +Call `CreateAsync` to create a large face list. You can specify the `name` and `userData` for the large face list. + +```C# Snippet:CreateLargeFaceListAsync +await listClient.CreateAsync("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04); +``` + +## Add faces to the Large Face List + +To add faces to the large face list, call `AddFaceAsync`. You can specify the `imageUri` and `userData` for the face. + +```C# Snippet:AddFacesToLargeFaceListAsync +var faces = new[] +{ + new { UserData = "Dad", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Dad1Image) }, + new { UserData = "Mom", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Mom1Image) }, + new { UserData = "Son", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Son1Image) } +}; +var faceIds = new Dictionary<Guid, string>(); + +foreach (var face in faces) +{ + var addFaceResponse = await listClient.AddFaceAsync(face.ImageUrl, userData: face.UserData); + faceIds[addFaceResponse.Value.PersistedFaceId] = face.UserData; +} +``` + +## Train the Large Face List before finding similar faces + +Before you can find similar faces, you must train the large face list by calling `TrainAsync`. This starts a long-running operation and returns an `Operation` object that you can use to wait for the training to complete. + +```C# Snippet:TrainLargeFaceListAsync +var operation = await listClient.TrainAsync(WaitUntil.Completed); +await operation.WaitForCompletionResponseAsync(); +``` + +## Find similar faces from the Large Face List + +To find similar faces from the large face list, call `FindSimilarAsync`. This method returns a list of `FaceFindSimilarResult` objects that contain the `PersistedFaceId` of the stored face and a `Confidence` score indicating the similarity between the stored face and the query face. + +```C# Snippet:FindSimilarFromLargeFaceListAsync +var faceClient = CreateClient(); +var detectResponse = await faceClient.DetectAsync(new Uri(FaceTestConstant.UrlFamily1Dad3Image), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true); +var faceId = detectResponse.Value[0].FaceId.Value; + +var findSimilarResponse = await faceClient.FindSimilarFromLargeFaceListAsync(faceId, listId); +foreach (var similarFace in findSimilarResponse.Value) +{ + Console.WriteLine($"The detected face is similar to the face with '{faceIds[similarFace.PersistedFaceId.Value]}' ID {similarFace.PersistedFaceId} ({similarFace.Confidence})"); +} +``` + +## Delete the Large Face List + +When you no longer need the large face list, you can delete it by calling `DeleteAsync`. + +```C# Snippet:DeleteLargeFaceListAsync +await listClient.DeleteAsync(); +``` + +[README]: https://github.com/Azure/azure-sdk-for-net/tree/main/sdk/face/Azure.AI.Vision.Face#getting-started \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceAttributeType.cs b/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceAttributeType.cs index 0ce23a8eabef..7385634d4772 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceAttributeType.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceAttributeType.cs @@ -24,8 +24,6 @@ public struct Detection01 { public static FaceAttributeType Exposure { get; } = FaceAttributeType.Exposure; /// Noise level of face pixels. Level returns 'Low', 'Medium' and 'High'. Value returns a number between [0,1], the larger the noisier. public static FaceAttributeType Noise { get; } = FaceAttributeType.Noise; - /// Whether each face is wearing a mask. Mask type returns 'noMask', 'faceMask', 'otherMaskOrOcclusion', or 'uncertain'. Value returns a boolean 'noseAndMouthCovered' indicating whether nose and mouth are covered. - public static FaceAttributeType Mask { get; } = FaceAttributeType.Mask; } /// Available attributes for detection03 model.
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceClient.cs index 3cda0ed2a9a9..3035e6864a98 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceClient.cs @@ -25,22 +25,7 @@ public partial class FaceClient /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/en-us/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask and headPose only) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. [ForwardsClientCalls] public virtual Task>> DetectAsync( Uri url, @@ -72,22 +57,7 @@ public virtual Task>> DetectAsync( /// The number of seconds for the face ID being cached. 
Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/en-us/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask and headPose only) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. [ForwardsClientCalls] public virtual Response> Detect( Uri url, @@ -119,22 +89,7 @@ public virtual Response> Detect( /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. 
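The `BinaryData` overloads documented here accept raw image bytes rather than a URL. A hedged sketch, reusing the `client` from the previous sketch and a placeholder file path:

```C#
// Hedged sketch: detect from local image bytes via the BinaryData overload.
BinaryData imageContent = BinaryData.FromBytes(File.ReadAllBytes("<path-to-image>.jpg"));

var detectResponse = client.Detect(
    imageContent,
    FaceDetectionModel.Detection03,
    FaceRecognitionModel.Recognition04,
    returnFaceId: true);

Console.WriteLine($"Detected {detectResponse.Value.Count} face(s).");
```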
- /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/en-us/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask and headPose only) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details. [ForwardsClientCalls] public virtual Task>> DetectAsync( BinaryData imageContent, @@ -166,22 +121,7 @@ public virtual Task>> DetectAsync( /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. 
Read more about this decision https://azure.microsoft.com/en-us/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask and headPose only) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/en-us/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details. [ForwardsClientCalls] public virtual Response> Detect( BinaryData imageContent, diff --git a/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceSessionClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceSessionClient.cs index a49be1714b3b..dd468dfa9589 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceSessionClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Custom/FaceSessionClient.cs @@ -13,83 +13,43 @@ namespace Azure.AI.Vision.Face public partial class FaceSessionClient { /// Create a new liveness session with verify. Provide the verify image during session creation. - /// Parameters for liveness with verify session creation. + /// Parameters for liveness with verify session creation. /// Image binary data for verify image, can be provided as session creation time or during the /detectLivenessWithVerify/singleModal /// /// The cancellation token to use. - /// is null. 
- /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Recommended Option: VerifyImage is provided during session creation. - /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. - /// > [!NOTE] - /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage. - /// + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details. [ForwardsClientCalls] - public virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessSessionContent createLivenessSessionContent, Stream verifyImage, CancellationToken cancellationToken = default) + public virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessWithVerifySessionContent jsonContent, Stream verifyImage, CancellationToken cancellationToken = default) { - Argument.AssertNotNull(createLivenessSessionContent, nameof(createLivenessSessionContent)); + Argument.AssertNotNull(jsonContent, nameof(jsonContent)); if (verifyImage == null) { - return await CreateLivenessWithVerifySessionAsync(createLivenessSessionContent, cancellationToken).ConfigureAwait(false); + return await CreateLivenessWithVerifySessionAsync(jsonContent, cancellationToken).ConfigureAwait(false); } - var createLivenessWithVerifySessionContent = new CreateLivenessWithVerifySessionContent(createLivenessSessionContent, verifyImage); - return await CreateLivenessWithVerifySessionWithVerifyImageAsync(createLivenessWithVerifySessionContent, cancellationToken).ConfigureAwait(false); + CreateLivenessWithVerifySessionMultipartContent multipartContent = new CreateLivenessWithVerifySessionMultipartContent(jsonContent, verifyImage); + return await CreateLivenessWithVerifySessionWithVerifyImageAsync(multipartContent, cancellationToken).ConfigureAwait(false); } /// Create a new liveness session with verify. Provide the verify image during session creation. - /// Parameters for liveness with verify session creation. + /// Parameters for liveness with verify session creation. /// Image binary data for verify image, can be provided as session creation time or during the /detectLivenessWithVerify/singleModal /// /// The cancellation token to use. - /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... 
- /// >
- /// *
- /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries.
- /// * A token lifetime of 10 minutes.
- ///
- /// > [!NOTE]
- /// >
- /// > *
- /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation.
- /// > * To retrieve a result, use the Get Liveness With Verify Session.
- /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries.
- ///
- /// Recommended Option: VerifyImage is provided during session creation.
- /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call.
- /// > [!NOTE]
- /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage.
- ///
+ /// is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details.
 [ForwardsClientCalls]
- public virtual Response CreateLivenessWithVerifySession(CreateLivenessSessionContent createLivenessSessionContent, Stream verifyImage, CancellationToken cancellationToken = default)
+ public virtual Response CreateLivenessWithVerifySession(CreateLivenessWithVerifySessionContent jsonContent, Stream verifyImage, CancellationToken cancellationToken = default)
 {
- Argument.AssertNotNull(createLivenessSessionContent, nameof(createLivenessSessionContent));
+ Argument.AssertNotNull(jsonContent, nameof(jsonContent));
 if (verifyImage == null)
 {
- return CreateLivenessWithVerifySession(createLivenessSessionContent, cancellationToken);
+ return CreateLivenessWithVerifySession(jsonContent, cancellationToken);
 }
- var createLivenessWithVerifySessionContent = new CreateLivenessWithVerifySessionContent(createLivenessSessionContent, verifyImage);
- return CreateLivenessWithVerifySessionWithVerifyImage(createLivenessWithVerifySessionContent, cancellationToken);
+ CreateLivenessWithVerifySessionMultipartContent multipartContent = new CreateLivenessWithVerifySessionMultipartContent(jsonContent, verifyImage);
+ return CreateLivenessWithVerifySessionWithVerifyImage(multipartContent, cancellationToken);
 }
 }
}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Custom/LargeFaceListClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Custom/LargeFaceListClient.cs
new file mode 100644
index 000000000000..2f62101aa423
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Custom/LargeFaceListClient.cs
@@ -0,0 +1,214 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Core.Pipeline;
+
+namespace Azure.AI.Vision.Face
+{
+ // Data plane generated sub-client.
+ /// The LargeFaceList sub-client.
+ [CodeGenClient("LargeFaceListClientImpl")]
+ public partial class LargeFaceListClient
+ {
+ /// Initializes a new instance of LargeFaceListClient.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.
+ /// A credential used to authenticate to an Azure Service.
+ /// or is null.
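With the breaking change above, callers now build a `CreateLivenessWithVerifySessionContent` instead of a `CreateLivenessSessionContent` when creating a liveness-with-verify session. A hedged sketch: it assumes the new model keeps the `LivenessOperationMode` constructor of its predecessor and that the result exposes a `SessionId`; the endpoint, key, and image path are placeholders:

```C#
// Hedged sketch of the updated overload; placeholders throughout.
var sessionClient = new FaceSessionClient(new Uri("<your-endpoint>"), new AzureKeyCredential("<your-key>"));

var content = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive)
{
    DeviceCorrelationId = Guid.NewGuid().ToString(),
    EnableSessionImage = true, // new in this API version
};

using Stream verifyImage = File.OpenRead("<verify-image>.jpg");
var createResponse = await sessionClient.CreateLivenessWithVerifySessionAsync(content, verifyImage);
Console.WriteLine($"Session ID: {createResponse.Value.SessionId}");
```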
+ public LargeFaceListClient(Uri endpoint, AzureKeyCredential credential, string largeFaceListId) : this(endpoint, credential, largeFaceListId, new AzureAIVisionFaceClientOptions())
+ {
+ }
+
+ /// Initializes a new instance of LargeFaceListClient.
+ ///
+ /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// A credential used to authenticate to an Azure Service.
+ /// or is null.
+ public LargeFaceListClient(Uri endpoint, TokenCredential credential, string largeFaceListId) : this(endpoint, credential, largeFaceListId, new AzureAIVisionFaceClientOptions())
+ {
+ }
+
+ /// Initializes a new instance of LargeFaceListClient.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.
+ /// A credential used to authenticate to an Azure Service.
+ /// The options for configuring the client.
+ /// or is null.
+ public LargeFaceListClient(Uri endpoint, AzureKeyCredential credential, string largeFaceListId, AzureAIVisionFaceClientOptions options)
+ {
+ Argument.AssertNotNull(endpoint, nameof(endpoint));
+ Argument.AssertNotNull(credential, nameof(credential));
+ options ??= new AzureAIVisionFaceClientOptions();
+
+ ClientDiagnostics = new ClientDiagnostics(options, true);
+ _keyCredential = credential;
+ _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier());
+ _endpoint = endpoint;
+ _apiVersion = options.Version;
+ _largeFaceListId = largeFaceListId;
+ }
+
+ /// Initializes a new instance of LargeFaceListClient.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64.
+ /// A credential used to authenticate to an Azure Service.
+ /// The options for configuring the client.
+ /// or is null.
+ public LargeFaceListClient(Uri endpoint, TokenCredential credential, string largeFaceListId, AzureAIVisionFaceClientOptions options)
+ {
+ Argument.AssertNotNull(endpoint, nameof(endpoint));
+ Argument.AssertNotNull(credential, nameof(credential));
+ options ??= new AzureAIVisionFaceClientOptions();
+
+ ClientDiagnostics = new ClientDiagnostics(options, true);
+ _tokenCredential = credential;
+ _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier());
+ _endpoint = endpoint;
+ _apiVersion = options.Version;
+ _largeFaceListId = largeFaceListId;
+ }
+
+ /// Add a face to a specified Large Face List, up to 1,000,000 faces.
+ /// URL of input image.
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
+ /// User-provided data attached to the face. The size limit is 1K.
+ /// The cancellation token to use.
+ /// is null.
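Tying the constructors above to the `AddFaceAsync` convenience overloads: a hedged end-to-end sketch. The `CreateAsync` call is an assumption about this client's surface (the operation exists in the REST API, but the exact convenience signature is not shown in this diff); endpoint, key, list ID, and image URL are placeholders:

```C#
// Hedged sketch: construct the sub-client bound to one list ID, create the
// list (assumed CreateAsync signature), then add a face by URL.
var listClient = new LargeFaceListClient(new Uri("<your-endpoint>"), new AzureKeyCredential("<your-key>"), "my_large_face_list");

await listClient.CreateAsync("My face list", recognitionModel: FaceRecognitionModel.Recognition04);

var addResponse = await listClient.AddFaceAsync(
    new Uri("<image-url>"),
    detectionModel: FaceDetectionModel.Detection03,
    userData: "example-face");

Console.WriteLine($"Persisted face ID: {addResponse.Value.PersistedFaceId}");
```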
+ /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face-from-url for more details. + [ForwardsClientCalls] + public virtual Task> AddFaceAsync(Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + return AddFaceFromUrlImplAsync(uri, targetFace, detectionModel, userData, cancellationToken); + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face-from-url for more details. + [ForwardsClientCalls] + public virtual Response AddFace(Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + return AddFaceFromUrlImpl(uri, targetFace, detectionModel, userData, cancellationToken); + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face for more details. + [ForwardsClientCalls] + public virtual Task> AddFaceAsync(BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + return AddFaceImplAsync(imageContent, targetFace, detectionModel, userData, cancellationToken); + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face for more details. + [ForwardsClientCalls] + public virtual Response AddFace(BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? 
detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + return AddFaceImpl(imageContent, targetFace, detectionModel, userData, cancellationToken); + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + [ForwardsClientCalls] + public virtual Task AddFaceAsync(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + return AddFaceImplAsync(content, targetFace, detectionModel, userData, context); + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + [ForwardsClientCalls] + public virtual Response AddFace(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + return AddFaceImpl(content, targetFace, detectionModel, userData, context); + } + } +} \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/src/Custom/LargePersonGroupClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Custom/LargePersonGroupClient.cs new file mode 100644 index 000000000000..7c8ecaf0975f --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Custom/LargePersonGroupClient.cs @@ -0,0 +1,220 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. 
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Core.Pipeline;
+
+namespace Azure.AI.Vision.Face
+{
+ // Data plane generated sub-client.
+ /// The LargePersonGroup sub-client.
+ [CodeGenClient("LargePersonGroupClientImpl")]
+ public partial class LargePersonGroupClient
+ {
+ /// Initializes a new instance of LargePersonGroupClient.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// ID of the container.
+ /// A credential used to authenticate to an Azure Service.
+ /// or is null.
+ public LargePersonGroupClient(Uri endpoint, AzureKeyCredential credential, string largePersonGroupId) : this(endpoint, credential, largePersonGroupId, new AzureAIVisionFaceClientOptions())
+ {
+ }
+
+ /// Initializes a new instance of LargePersonGroupClient.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// ID of the container.
+ /// A credential used to authenticate to an Azure Service.
+ /// or is null.
+ public LargePersonGroupClient(Uri endpoint, TokenCredential credential, string largePersonGroupId) : this(endpoint, credential, largePersonGroupId, new AzureAIVisionFaceClientOptions())
+ {
+ }
+
+ /// Initializes a new instance of LargePersonGroupClient.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// ID of the container.
+ /// A credential used to authenticate to an Azure Service.
+ /// The options for configuring the client.
+ /// or is null.
+ public LargePersonGroupClient(Uri endpoint, AzureKeyCredential credential, string largePersonGroupId, AzureAIVisionFaceClientOptions options)
+ {
+ Argument.AssertNotNull(endpoint, nameof(endpoint));
+ Argument.AssertNotNull(credential, nameof(credential));
+ options ??= new AzureAIVisionFaceClientOptions();
+
+ ClientDiagnostics = new ClientDiagnostics(options, true);
+ _keyCredential = credential;
+ _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier());
+ _endpoint = endpoint;
+ _apiVersion = options.Version;
+ _largePersonGroupId = largePersonGroupId;
+ }
+
+ /// Initializes a new instance of LargePersonGroupClient.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// ID of the container.
+ /// A credential used to authenticate to an Azure Service.
+ /// The options for configuring the client.
+ /// or is null.
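A hedged sketch connecting these constructors to the person-level `AddFaceAsync` overloads that follow. `CreatePersonAsync` is an assumption based on the `CreatePersonResult` model-factory method added later in this diff; endpoint, key, group ID, and image URL are placeholders:

```C#
// Hedged sketch: construct the sub-client bound to one group ID, create a
// person (assumed CreatePersonAsync signature), then add a face to them.
var groupClient = new LargePersonGroupClient(new Uri("<your-endpoint>"), new AzureKeyCredential("<your-key>"), "my_large_person_group");

var personResponse = await groupClient.CreatePersonAsync("Bill");
Guid personId = personResponse.Value.PersonId;

await groupClient.AddFaceAsync(
    personId,
    new Uri("<image-url>"),
    detectionModel: FaceDetectionModel.Detection03,
    userData: "Bill-frontal");
```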
+ public LargePersonGroupClient(Uri endpoint, TokenCredential credential, string largePersonGroupId, AzureAIVisionFaceClientOptions options) + { + Argument.AssertNotNull(endpoint, nameof(endpoint)); + Argument.AssertNotNull(credential, nameof(credential)); + options ??= new AzureAIVisionFaceClientOptions(); + + ClientDiagnostics = new ClientDiagnostics(options, true); + _tokenCredential = credential; + _pipeline = HttpPipelineBuilder.Build(options, Array.Empty(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier()); + _endpoint = endpoint; + _apiVersion = options.Version; + _largePersonGroupId = largePersonGroupId; + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face-from-url for more details. + [ForwardsClientCalls] + public virtual Task> AddFaceAsync(Guid personId, Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + return AddFaceFromUrlImplAsync(personId, uri, targetFace, detectionModel, userData, cancellationToken); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face-from-url for more details. + [ForwardsClientCalls] + public virtual Response AddFace(Guid personId, Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + return AddFaceFromUrlImpl(personId, uri, targetFace, detectionModel, userData, cancellationToken); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. 
+ /// is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face for more details.
+ [ForwardsClientCalls]
+ public virtual Task> AddFaceAsync(Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(imageContent, nameof(imageContent));
+
+ return AddFaceImplAsync(personId, imageContent, targetFace, detectionModel, userData, cancellationToken);
+ }
+
+ /// Add a face to a person into a Large Person Group for face identification or verification.
+ /// ID of the person.
+ /// The image to be analyzed.
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
+ /// User-provided data attached to the face. The size limit is 1K.
+ /// The cancellation token to use.
+ /// is null.
+ /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face for more details.
+ [ForwardsClientCalls]
+ public virtual Response AddFace(Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default)
+ {
+ Argument.AssertNotNull(imageContent, nameof(imageContent));
+
+ return AddFaceImpl(personId, imageContent, targetFace, detectionModel, userData, cancellationToken);
+ }
+
+ ///
+ /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification.
+ ///
+ ///
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+ ///
+ ///
+ ///
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// ID of the person.
+ /// The content to send as the body of the request.
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03".
+ /// User-provided data attached to the face. The size limit is 1K.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ [ForwardsClientCalls]
+ public virtual Task AddFaceAsync(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ return AddFaceImplAsync(personId, content, targetFace, detectionModel, userData, context);
+ }
+
+ ///
+ /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification.
+ ///
+ ///
+ ///
+ /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
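For the protocol overloads, the caller shapes the JSON body and handles the raw `Response` directly. A hedged sketch, reusing `groupClient` and `personId` from the previous sketch; the body shape follows the Add Face REST API's `url` field:

```C#
// Hedged sketch of the protocol overload: build the request body by hand and
// read the raw response content.
using RequestContent content = RequestContent.Create(BinaryData.FromObjectAsJson(new { url = "<image-url>" }));

Response rawResponse = await groupClient.AddFaceAsync(
    personId,
    content,
    detectionModel: "detection_03",
    userData: "protocol-example");

Console.WriteLine(rawResponse.Content.ToString());
```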
+ ///
+ ///
+ ///
+ ///
+ /// Please try the simpler convenience overload with strongly typed models first.
+ ///
+ ///
+ ///
+ ///
+ /// ID of the person.
+ /// The content to send as the body of the request.
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'.
+ /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03".
+ /// User-provided data attached to the face. The size limit is 1K.
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis.
+ /// is null.
+ /// Service returned a non-success status code.
+ /// The response returned from the service.
+ [ForwardsClientCalls]
+ public virtual Response AddFace(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null)
+ {
+ Argument.AssertNotNull(content, nameof(content));
+
+ return AddFaceImpl(personId, content, targetFace, detectionModel, userData, context);
+ }
+ }
+}
\ No newline at end of file
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs
index fdc44b3da4e3..d1ff0bc7e195 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceClientBuilderExtensions.cs
@@ -12,9 +12,34 @@ namespace Microsoft.Extensions.Azure
 {
- /// Extension methods to add , to client builder.
+ /// Extension methods to add , , to client builder.
 public static partial class AIVisionFaceClientBuilderExtensions
 {
+ /// Registers a instance.
+ /// The builder to register with.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ /// A credential used to authenticate to an Azure Service.
+ public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, Uri endpoint, AzureKeyCredential credential)
+ where TBuilder : IAzureClientFactoryBuilder
+ {
+ return builder.RegisterClientFactory((options) => new FaceAdministrationClient(endpoint, credential, options));
+ }
+
+ /// Registers a instance.
+ /// The builder to register with.
+ ///
+ /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+ /// https://{resource-name}.cognitiveservices.azure.com).
+ ///
+ public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, Uri endpoint)
+ where TBuilder : IAzureClientFactoryBuilderWithCredential
+ {
+ return builder.RegisterClientFactory((options, cred) => new FaceAdministrationClient(endpoint, cred, options));
+ }
+
 /// Registers a instance.
 /// The builder to register with.
 ///
@@ -65,6 +90,14 @@ public static IAzureClientBuilder((options, cred) => new FaceSessionClient(endpoint, cred, options));
 }
+ /// Registers a instance.
+ /// The builder to register with.
+ /// The configuration values.
+ public static IAzureClientBuilder AddFaceAdministrationClient(this TBuilder builder, TConfiguration configuration)
+ where TBuilder : IAzureClientFactoryBuilderWithConfiguration
+ {
+ return builder.RegisterClientFactory(configuration);
+ }
 /// Registers a instance.
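The new `AddFaceAdministrationClient` extensions register the client with the Azure clients builder from `Microsoft.Extensions.Azure`. A hedged sketch, assuming an `IServiceCollection` named `services`; endpoint and key are placeholders:

```C#
// Hedged sketch: dependency-injection registration mirroring the extension
// methods added above.
services.AddAzureClients(builder =>
{
    builder.AddFaceAdministrationClient(new Uri("<your-endpoint>"), new AzureKeyCredential("<your-key>"));
});
```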
/// The builder to register with. /// The configuration values. diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs index f85eca044688..49e4e7fad35a 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AIVisionFaceModelFactory.cs @@ -14,6 +14,93 @@ namespace Azure.AI.Vision.Face /// Model factory for models. public static partial class AIVisionFaceModelFactory { + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// ID of the container. + /// A new instance for mocking. + public static LargePersonGroup LargePersonGroup(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largePersonGroupId = null) + { + return new LargePersonGroup(name, userData, recognitionModel, largePersonGroupId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Training status of the container. + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. + /// Show failure message when training failed (omitted when training succeed). + /// A new instance for mocking. + public static FaceTrainingResult FaceTrainingResult(FaceOperationStatus status = default, DateTimeOffset createdDateTime = default, DateTimeOffset lastActionDateTime = default, DateTimeOffset lastSuccessfulTrainingDateTime = default, string message = null) + { + return new FaceTrainingResult( + status, + createdDateTime, + lastActionDateTime, + lastSuccessfulTrainingDateTime, + message, + serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Person ID of the person. + /// A new instance for mocking. + public static CreatePersonResult CreatePersonResult(Guid personId = default) + { + return new CreatePersonResult(personId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// ID of the person. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Face ids of registered faces in the person. + /// A new instance for mocking. + public static LargePersonGroupPerson LargePersonGroupPerson(Guid personId = default, string name = null, string userData = null, IEnumerable persistedFaceIds = null) + { + persistedFaceIds ??= new List(); + + return new LargePersonGroupPerson(personId, name, userData, persistedFaceIds?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + /// A new instance for mocking. 
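These model-factory methods exist so tests can fabricate otherwise read-only models when mocking client responses. A short sketch using the signatures above (the `Succeeded` value on `FaceOperationStatus` is an assumption about that extensible enum):

```C#
// Sketch: fabricate read-only models for unit tests via the model factory.
FaceTrainingResult training = AIVisionFaceModelFactory.FaceTrainingResult(
    status: FaceOperationStatus.Succeeded, // assumed enum value
    createdDateTime: DateTimeOffset.UtcNow.AddMinutes(-5),
    lastActionDateTime: DateTimeOffset.UtcNow,
    lastSuccessfulTrainingDateTime: DateTimeOffset.UtcNow);

CreatePersonResult person = AIVisionFaceModelFactory.CreatePersonResult(Guid.NewGuid());
Console.WriteLine($"{person.PersonId}: training {training.Status}");
```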
+ public static AddFaceResult AddFaceResult(Guid persistedFaceId = default) + { + return new AddFaceResult(persistedFaceId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// A new instance for mocking. + public static LargePersonGroupPersonFace LargePersonGroupPersonFace(Guid persistedFaceId = default, string userData = null) + { + return new LargePersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// A new instance for mocking. + public static LargeFaceList LargeFaceList(string name = null, string userData = null, FaceRecognitionModel? recognitionModel = null, string largeFaceListId = null) + { + return new LargeFaceList(name, userData, recognitionModel, largeFaceListId, serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// A new instance for mocking. + public static LargeFaceListFace LargeFaceListFace(Guid persistedFaceId = default, string userData = null) + { + return new LargeFaceListFace(persistedFaceId, userData, serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// Unique faceId of the detected face, created by detection API and it will expire 24 hours after the detection call. To return this, it requires 'returnFaceId' parameter to be true. /// The 'recognitionModel' associated with this faceId. This is only returned when 'returnRecognitionModel' is explicitly set as true. @@ -277,19 +364,43 @@ public static FaceGroupingResult FaceGroupingResult(IEnumerable> gro return new FaceGroupingResult(groups?.ToList(), messyGroup?.ToList(), serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// A new instance for mocking. + public static FaceIdentificationResult FaceIdentificationResult(Guid faceId = default, IEnumerable candidates = null) + { + candidates ??= new List(); + + return new FaceIdentificationResult(faceId, candidates?.ToList(), serializedAdditionalRawData: null); + } + + /// Initializes a new instance of . + /// personId of candidate person. + /// Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. + /// A new instance for mocking. + public static FaceIdentificationCandidate FaceIdentificationCandidate(Guid personId = default, float confidence = default) + { + return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// Type of liveness mode the client should follow. /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. 
Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not store the session image. + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. /// A new instance for mocking. - public static CreateLivenessSessionContent CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null) + public static CreateLivenessSessionContent CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, bool? enableSessionImage = null, LivenessModel? livenessSingleModalModel = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null) { return new CreateLivenessSessionContent( livenessOperationMode, sendResultsToClient, deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, deviceCorrelationId, authTokenTimeToLiveInSeconds, serializedAdditionalRawData: null); @@ -337,8 +448,10 @@ public static LivenessSession LivenessSession(string id = null, DateTimeOffset c /// The request of this entry. /// The response of this entry. /// The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. + /// The image ID of the session request. + /// The sha256 hash of the verify-image in the request. /// A new instance for mocking. - public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = default, string sessionId = null, string requestId = null, string clientRequestId = null, DateTimeOffset receivedDateTime = default, AuditRequestInfo request = null, AuditLivenessResponseInfo response = null, string digest = null) + public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = default, string sessionId = null, string requestId = null, string clientRequestId = null, DateTimeOffset receivedDateTime = default, AuditRequestInfo request = null, AuditLivenessResponseInfo response = null, string digest = null, string sessionImageId = null, string verifyImageHash = null) { return new LivenessSessionAuditEntry( id, @@ -349,6 +462,8 @@ public static LivenessSessionAuditEntry LivenessSessionAuditEntry(long id = defa request, response, digest, + sessionImageId, + verifyImageHash, serializedAdditionalRawData: null); } @@ -444,6 +559,32 @@ public static LivenessSessionItem LivenessSessionItem(string id = null, DateTime serializedAdditionalRawData: null); } + /// Initializes a new instance of . + /// Type of liveness mode the client should follow. 
+ /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. + /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not store the session image. + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// Whether or not return the verify image hash. + /// Threshold for confidence of the face verification. + /// A new instance for mocking. + public static CreateLivenessWithVerifySessionContent CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode = default, bool? sendResultsToClient = null, bool? deviceCorrelationIdSetInClient = null, bool? enableSessionImage = null, LivenessModel? livenessSingleModalModel = null, string deviceCorrelationId = null, int? authTokenTimeToLiveInSeconds = null, bool? returnVerifyImageHash = null, float? verifyConfidenceThreshold = null) + { + return new CreateLivenessWithVerifySessionContent( + livenessOperationMode, + sendResultsToClient, + deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + returnVerifyImageHash, + verifyConfidenceThreshold, + serializedAdditionalRawData: null); + } + /// Initializes a new instance of . /// The unique session ID of the created session. It will expire 48 hours after it was created or may be deleted sooner using the corresponding Session DELETE operation. /// Bearer token to provide authentication for the Vision SDK running on a client application. This Bearer token has limited permissions to perform only the required action and expires after the TTL time. It is also auditable. diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs new file mode 100644 index 000000000000..4068e6fba19a --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddFaceFromUrlRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Uri.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddFaceFromUrlRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddFaceFromUrlRequest(document.RootElement, options); + } + + internal static AddFaceFromUrlRequest DeserializeAddFaceFromUrlRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddFaceFromUrlRequest(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support writing '{options.Format}' format."); + } + } + + AddFaceFromUrlRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddFaceFromUrlRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddFaceFromUrlRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddFaceFromUrlRequest(document.RootElement); + } + + /// Convert into a . 
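The `IJsonModel`/`IPersistableModel` plumbing above is what lets `System.ClientModel`'s `ModelReaderWriter` round-trip these types. For a public model such as `AddFaceResult`, that looks roughly like the following sketch:

```C#
// Hedged sketch: serialize and rehydrate a model through ModelReaderWriter,
// which drives the IJsonModel implementation shown above.
AddFaceResult original = AIVisionFaceModelFactory.AddFaceResult(Guid.NewGuid());

BinaryData json = ModelReaderWriter.Write(original);
AddFaceResult roundTripped = ModelReaderWriter.Read<AddFaceResult>(json);

Console.WriteLine(roundTripped.PersistedFaceId);
```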
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs new file mode 100644 index 000000000000..f6d64f1102d7 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddFaceFromUrlRequest. + internal partial class AddFaceFromUrlRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + internal AddFaceFromUrlRequest(Uri uri) + { + Argument.AssertNotNull(uri, nameof(uri)); + + Uri = uri; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddFaceFromUrlRequest(Uri uri, IDictionary serializedAdditionalRawData) + { + Uri = uri; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddFaceFromUrlRequest() + { + } + + /// URL of input image. + public Uri Uri { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs new file mode 100644 index 000000000000..6afe63d8fb43 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class AddFaceFromUrlRequest1 : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("url"u8); + writer.WriteStringValue(Uri.AbsoluteUri); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddFaceFromUrlRequest1 IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddFaceFromUrlRequest1(document.RootElement, options); + } + + internal static AddFaceFromUrlRequest1 DeserializeAddFaceFromUrlRequest1(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Uri url = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("url"u8)) + { + url = new Uri(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddFaceFromUrlRequest1(url, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support writing '{options.Format}' format."); + } + } + + AddFaceFromUrlRequest1 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddFaceFromUrlRequest1(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddFaceFromUrlRequest1)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddFaceFromUrlRequest1 FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddFaceFromUrlRequest1(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs new file mode 100644 index 000000000000..2bce74bdbf08 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceFromUrlRequest1.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The AddFaceFromUrlRequest1. + internal partial class AddFaceFromUrlRequest1 + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// URL of input image. + /// is null. + internal AddFaceFromUrlRequest1(Uri uri) + { + Argument.AssertNotNull(uri, nameof(uri)); + + Uri = uri; + } + + /// Initializes a new instance of . + /// URL of input image. + /// Keeps track of any properties unknown to the library. + internal AddFaceFromUrlRequest1(Uri uri, IDictionary serializedAdditionalRawData) + { + Uri = uri; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddFaceFromUrlRequest1() + { + } + + /// URL of input image. + public Uri Uri { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs new file mode 100644 index 000000000000..716408b25a91 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class AddFaceResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + AddFaceResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeAddFaceResult(document.RootElement, options); + } + + internal static AddFaceResult DeserializeAddFaceResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new AddFaceResult(persistedFaceId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(AddFaceResult)} does not support writing '{options.Format}' format."); + } + } + + AddFaceResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeAddFaceResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(AddFaceResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static AddFaceResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeAddFaceResult(document.RootElement); + } + + /// Convert into a . 
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs new file mode 100644 index 000000000000..28ad972fc9d5 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AddFaceResult.cs @@ -0,0 +1,72 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Response body for adding face. + public partial class AddFaceResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + internal AddFaceResult(Guid persistedFaceId) + { + PersistedFaceId = persistedFaceId; + } + + /// Initializes a new instance of . + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + /// Keeps track of any properties unknown to the library. + internal AddFaceResult(Guid persistedFaceId, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal AddFaceResult() + { + } + + /// Persisted Face ID of the added face, which is persisted and will not expire. Different from faceId which is created in "Detect" and will expire in 24 hours after the detection call. + public Guid PersistedFaceId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs index a8cf7ed227cc..a27f25f2a008 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/AzureAIVisionFaceClientOptions.cs @@ -13,13 +13,15 @@ namespace Azure.AI.Vision.Face /// Client options for Azure.AI.Vision.Face library clients. public partial class AzureAIVisionFaceClientOptions : ClientOptions { - private const ServiceVersion LatestVersion = ServiceVersion.V1_1_Preview_1; + private const ServiceVersion LatestVersion = ServiceVersion.V1_2_Preview_1; /// The version of the service to use. public enum ServiceVersion { /// Service version "v1.1-preview.1". V1_1_Preview_1 = 1, + /// Service version "v1.2-preview.1". 
+ V1_2_Preview_1 = 2, } internal string Version { get; } @@ -30,6 +32,7 @@ public AzureAIVisionFaceClientOptions(ServiceVersion version = LatestVersion) Version = version switch { ServiceVersion.V1_1_Preview_1 => "v1.1-preview.1", + ServiceVersion.V1_2_Preview_1 => "v1.2-preview.1", _ => throw new NotSupportedException() }; } diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs index 5890a4f74258..b32ad1822811 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.Serialization.cs @@ -38,6 +38,16 @@ void IJsonModel.Write(Utf8JsonWriter writer, Model writer.WritePropertyName("deviceCorrelationIdSetInClient"u8); writer.WriteBooleanValue(DeviceCorrelationIdSetInClient.Value); } + if (Optional.IsDefined(EnableSessionImage)) + { + writer.WritePropertyName("enableSessionImage"u8); + writer.WriteBooleanValue(EnableSessionImage.Value); + } + if (Optional.IsDefined(LivenessSingleModalModel)) + { + writer.WritePropertyName("livenessSingleModalModel"u8); + writer.WriteStringValue(LivenessSingleModalModel.Value.ToString()); + } if (Optional.IsDefined(DeviceCorrelationId)) { writer.WritePropertyName("deviceCorrelationId"u8); @@ -89,6 +99,8 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon LivenessOperationMode livenessOperationMode = default; bool? sendResultsToClient = default; bool? deviceCorrelationIdSetInClient = default; + bool? enableSessionImage = default; + LivenessModel? livenessSingleModalModel = default; string deviceCorrelationId = default; int? authTokenTimeToLiveInSeconds = default; IDictionary serializedAdditionalRawData = default; @@ -118,6 +130,24 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon deviceCorrelationIdSetInClient = property.Value.GetBoolean(); continue; } + if (property.NameEquals("enableSessionImage"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableSessionImage = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("livenessSingleModalModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + livenessSingleModalModel = new LivenessModel(property.Value.GetString()); + continue; + } if (property.NameEquals("deviceCorrelationId"u8)) { deviceCorrelationId = property.Value.GetString(); @@ -142,6 +172,8 @@ internal static CreateLivenessSessionContent DeserializeCreateLivenessSessionCon livenessOperationMode, sendResultsToClient, deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, deviceCorrelationId, authTokenTimeToLiveInSeconds, serializedAdditionalRawData); diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs index eb27333cbe93..9de38ee841c6 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessSessionContent.cs @@ -10,7 +10,7 @@ namespace Azure.AI.Vision.Face { - /// Request for creating liveness session. + /// Request model for creating liveness session. 
public partial class CreateLivenessSessionContent { /// @@ -56,14 +56,18 @@ public CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode) /// Type of liveness mode the client should follow. /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not to store the session image. + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. /// Keeps track of any properties unknown to the library. - internal CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary serializedAdditionalRawData) + internal CreateLivenessSessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, bool? enableSessionImage, LivenessModel? livenessSingleModalModel, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, IDictionary serializedAdditionalRawData) { LivenessOperationMode = livenessOperationMode; SendResultsToClient = sendResultsToClient; DeviceCorrelationIdSetInClient = deviceCorrelationIdSetInClient; + EnableSessionImage = enableSessionImage; + LivenessSingleModalModel = livenessSingleModalModel; DeviceCorrelationId = deviceCorrelationId; AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -80,6 +84,10 @@ internal CreateLivenessSessionContent() public bool? SendResultsToClient { get; set; } /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. public bool? DeviceCorrelationIdSetInClient { get; set; } + /// Whether or not to store the session image. + public bool? EnableSessionImage { get; set; } + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + public LivenessModel? LivenessSingleModalModel { get; set; } /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. public string DeviceCorrelationId { get; set; } /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. 
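The `EnableSessionImage` and `LivenessSingleModalModel` properties added above are the client-side switches for the new session-image capability. For illustration only (this snippet is not part of the patch), a minimal sketch of how a caller might opt in when creating a liveness session; the endpoint and key are placeholders, and it assumes the `FaceSessionClient.CreateLivenessSessionAsync` convenience method and the `CreateLivenessSessionResult` model (session ID plus auth token) referenced elsewhere in this diff:

```C#
using System;
using Azure;
using Azure.AI.Vision.Face;

// Placeholders: substitute your own Face resource endpoint and key.
Uri endpoint = new Uri("<your-endpoint>");
AzureKeyCredential credential = new AzureKeyCredential("<your-key>");
FaceSessionClient sessionClient = new FaceSessionClient(endpoint, credential);

var content = new CreateLivenessSessionContent(LivenessOperationMode.Passive)
{
    DeviceCorrelationId = Guid.NewGuid().ToString(),
    // New in v1.2-preview.1: ask the service to retain the session image so it can
    // later be fetched with GetSessionImage or re-analyzed with DetectFromSessionImage.
    EnableSessionImage = true,
    // LivenessSingleModalModel could also be set here to pin a model version;
    // when omitted, the latest supported model version is chosen.
};

Response<CreateLivenessSessionResult> session =
    await sessionClient.CreateLivenessSessionAsync(content);
Console.WriteLine($"Session id: {session.Value.SessionId}");
Console.WriteLine($"Auth token: {session.Value.AuthToken}");
```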
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs index 6b713f68b180..443812ef9cf0 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.Serialization.cs @@ -8,13 +8,12 @@ using System; using System.ClientModel.Primitives; using System.Collections.Generic; -using System.IO; using System.Text.Json; using Azure.Core; namespace Azure.AI.Vision.Face { - internal partial class CreateLivenessWithVerifySessionContent : IUtf8JsonSerializable, IJsonModel + public partial class CreateLivenessWithVerifySessionContent : IUtf8JsonSerializable, IJsonModel { void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); @@ -27,17 +26,48 @@ void IJsonModel.Write(Utf8JsonWriter wri } writer.WriteStartObject(); - writer.WritePropertyName("Parameters"u8); - writer.WriteObjectValue(Parameters, options); - writer.WritePropertyName("VerifyImage"u8); -#if NET6_0_OR_GREATER - writer.WriteRawValue(global::System.BinaryData.FromStream(VerifyImage)); -#else - using (JsonDocument document = JsonDocument.Parse(BinaryData.FromStream(VerifyImage))) + writer.WritePropertyName("livenessOperationMode"u8); + writer.WriteStringValue(LivenessOperationMode.ToString()); + if (Optional.IsDefined(SendResultsToClient)) { - JsonSerializer.Serialize(writer, document.RootElement); + writer.WritePropertyName("sendResultsToClient"u8); + writer.WriteBooleanValue(SendResultsToClient.Value); + } + if (Optional.IsDefined(DeviceCorrelationIdSetInClient)) + { + writer.WritePropertyName("deviceCorrelationIdSetInClient"u8); + writer.WriteBooleanValue(DeviceCorrelationIdSetInClient.Value); + } + if (Optional.IsDefined(EnableSessionImage)) + { + writer.WritePropertyName("enableSessionImage"u8); + writer.WriteBooleanValue(EnableSessionImage.Value); + } + if (Optional.IsDefined(LivenessSingleModalModel)) + { + writer.WritePropertyName("livenessSingleModalModel"u8); + writer.WriteStringValue(LivenessSingleModalModel.Value.ToString()); + } + if (Optional.IsDefined(DeviceCorrelationId)) + { + writer.WritePropertyName("deviceCorrelationId"u8); + writer.WriteStringValue(DeviceCorrelationId); + } + if (Optional.IsDefined(AuthTokenTimeToLiveInSeconds)) + { + writer.WritePropertyName("authTokenTimeToLiveInSeconds"u8); + writer.WriteNumberValue(AuthTokenTimeToLiveInSeconds.Value); + } + if (Optional.IsDefined(ReturnVerifyImageHash)) + { + writer.WritePropertyName("returnVerifyImageHash"u8); + writer.WriteBooleanValue(ReturnVerifyImageHash.Value); + } + if (Optional.IsDefined(VerifyConfidenceThreshold)) + { + writer.WritePropertyName("verifyConfidenceThreshold"u8); + writer.WriteNumberValue(VerifyConfidenceThreshold.Value); } -#endif if (options.Format != "W" && _serializedAdditionalRawData != null) { foreach (var item in _serializedAdditionalRawData) @@ -76,20 +106,90 @@ internal static CreateLivenessWithVerifySessionContent DeserializeCreateLiveness { return null; } - CreateLivenessSessionContent parameters = default; - Stream verifyImage = default; + LivenessOperationMode livenessOperationMode = default; + bool? sendResultsToClient = default; + bool? deviceCorrelationIdSetInClient = default; + bool? enableSessionImage = default; + LivenessModel? 
livenessSingleModalModel = default; + string deviceCorrelationId = default; + int? authTokenTimeToLiveInSeconds = default; + bool? returnVerifyImageHash = default; + float? verifyConfidenceThreshold = default; IDictionary serializedAdditionalRawData = default; Dictionary rawDataDictionary = new Dictionary(); foreach (var property in element.EnumerateObject()) { - if (property.NameEquals("Parameters"u8)) + if (property.NameEquals("livenessOperationMode"u8)) + { + livenessOperationMode = new LivenessOperationMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("sendResultsToClient"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + sendResultsToClient = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("deviceCorrelationIdSetInClient"u8)) { - parameters = CreateLivenessSessionContent.DeserializeCreateLivenessSessionContent(property.Value, options); + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + deviceCorrelationIdSetInClient = property.Value.GetBoolean(); continue; } - if (property.NameEquals("VerifyImage"u8)) + if (property.NameEquals("enableSessionImage"u8)) { - verifyImage = BinaryData.FromString(property.Value.GetRawText()).ToStream(); + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + enableSessionImage = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("livenessSingleModalModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + livenessSingleModalModel = new LivenessModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("deviceCorrelationId"u8)) + { + deviceCorrelationId = property.Value.GetString(); + continue; + } + if (property.NameEquals("authTokenTimeToLiveInSeconds"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + authTokenTimeToLiveInSeconds = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("returnVerifyImageHash"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + returnVerifyImageHash = property.Value.GetBoolean(); + continue; + } + if (property.NameEquals("verifyConfidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + verifyConfidenceThreshold = property.Value.GetSingle(); continue; } if (options.Format != "W") @@ -98,30 +198,17 @@ internal static CreateLivenessWithVerifySessionContent DeserializeCreateLiveness } } serializedAdditionalRawData = rawDataDictionary; - return new CreateLivenessWithVerifySessionContent(parameters, verifyImage, serializedAdditionalRawData); - } - - private BinaryData SerializeMultipart(ModelReaderWriterOptions options) - { - using MultipartFormDataRequestContent content = ToMultipartRequestContent(); - using MemoryStream stream = new MemoryStream(); - content.WriteTo(stream); - if (stream.Position > int.MaxValue) - { - return BinaryData.FromStream(stream); - } - else - { - return new BinaryData(stream.GetBuffer().AsMemory(0, (int)stream.Position)); - } - } - - internal virtual MultipartFormDataRequestContent ToMultipartRequestContent() - { - MultipartFormDataRequestContent content = new MultipartFormDataRequestContent(); - content.Add(ModelReaderWriter.Write(Parameters, ModelSerializationExtensions.WireOptions), "Parameters"); - content.Add(VerifyImage, "VerifyImage", "VerifyImage", "application/octet-stream"); - return content; + return new CreateLivenessWithVerifySessionContent( + livenessOperationMode, + 
sendResultsToClient, + deviceCorrelationIdSetInClient, + enableSessionImage, + livenessSingleModalModel, + deviceCorrelationId, + authTokenTimeToLiveInSeconds, + returnVerifyImageHash, + verifyConfidenceThreshold, + serializedAdditionalRawData); } BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) @@ -132,8 +219,6 @@ BinaryData IPersistableModel.Write(Model { case "J": return ModelReaderWriter.Write(this, options); - case "MFD": - return SerializeMultipart(options); default: throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionContent)} does not support writing '{options.Format}' format."); } @@ -155,7 +240,7 @@ CreateLivenessWithVerifySessionContent IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "MFD"; + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; /// Deserializes the model from a raw response. /// The response to deserialize the model from. diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs index 5d6d724fdbda..99e890eaf7e3 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionContent.cs @@ -7,12 +7,11 @@ using System; using System.Collections.Generic; -using System.IO; namespace Azure.AI.Vision.Face { - /// Request of liveness with verify session creation. - internal partial class CreateLivenessWithVerifySessionContent + /// Request for creating liveness with verify session. + public partial class CreateLivenessWithVerifySessionContent { /// /// Keeps track of any properties unknown to the library. @@ -47,26 +46,34 @@ internal partial class CreateLivenessWithVerifySessionContent private IDictionary _serializedAdditionalRawData; /// Initializes a new instance of . - /// The parameters for creating session. - /// The image stream for verify. Content-Disposition header field for this part must have filename. - /// or is null. - public CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage) + /// Type of liveness mode the client should follow. + public CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode) { - Argument.AssertNotNull(parameters, nameof(parameters)); - Argument.AssertNotNull(verifyImage, nameof(verifyImage)); - - Parameters = parameters; - VerifyImage = verifyImage; + LivenessOperationMode = livenessOperationMode; } /// Initializes a new instance of . - /// The parameters for creating session. - /// The image stream for verify. Content-Disposition header field for this part must have filename. + /// Type of liveness mode the client should follow. + /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. + /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + /// Whether or not to store the session image. + /// The model version used for liveness classification. 
This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + /// Whether or not to return the verify image hash. + /// Threshold for confidence of the face verification. /// Keeps track of any properties unknown to the library. - internal CreateLivenessWithVerifySessionContent(CreateLivenessSessionContent parameters, Stream verifyImage, IDictionary serializedAdditionalRawData) + internal CreateLivenessWithVerifySessionContent(LivenessOperationMode livenessOperationMode, bool? sendResultsToClient, bool? deviceCorrelationIdSetInClient, bool? enableSessionImage, LivenessModel? livenessSingleModalModel, string deviceCorrelationId, int? authTokenTimeToLiveInSeconds, bool? returnVerifyImageHash, float? verifyConfidenceThreshold, IDictionary serializedAdditionalRawData) { - Parameters = parameters; - VerifyImage = verifyImage; + LivenessOperationMode = livenessOperationMode; + SendResultsToClient = sendResultsToClient; + DeviceCorrelationIdSetInClient = deviceCorrelationIdSetInClient; + EnableSessionImage = enableSessionImage; + LivenessSingleModalModel = livenessSingleModalModel; + DeviceCorrelationId = deviceCorrelationId; + AuthTokenTimeToLiveInSeconds = authTokenTimeToLiveInSeconds; + ReturnVerifyImageHash = returnVerifyImageHash; + VerifyConfidenceThreshold = verifyConfidenceThreshold; _serializedAdditionalRawData = serializedAdditionalRawData; } @@ -75,9 +82,23 @@ internal CreateLivenessWithVerifySessionContent() { } - /// The parameters for creating session. - public CreateLivenessSessionContent Parameters { get; } - /// The image stream for verify. Content-Disposition header field for this part must have filename. - public Stream VerifyImage { get; } + /// Type of liveness mode the client should follow. + public LivenessOperationMode LivenessOperationMode { get; } + /// Whether or not to allow a '200 - Success' response body to be sent to the client, which may be undesirable for security reasons. Default is false, clients will receive a '204 - NoContent' empty body response. Regardless of selection, calling Session GetResult will always contain a response body enabling business logic to be implemented. + public bool? SendResultsToClient { get; set; } + /// Whether or not to allow client to set their own 'deviceCorrelationId' via the Vision SDK. Default is false, and 'deviceCorrelationId' must be set in this request body. + public bool? DeviceCorrelationIdSetInClient { get; set; } + /// Whether or not to store the session image. + public bool? EnableSessionImage { get; set; } + /// The model version used for liveness classification. This is an optional parameter, and if this is not specified, then the latest supported model version will be chosen. + public LivenessModel? LivenessSingleModalModel { get; set; } + /// Unique Guid per each end-user device. This is to provide rate limiting and anti-hammering. If 'deviceCorrelationIdSetInClient' is true in this request, this 'deviceCorrelationId' must be null. + public string DeviceCorrelationId { get; set; } + /// Seconds the session should last for. Range is 60 to 86400 seconds. Default value is 600. + public int? 
AuthTokenTimeToLiveInSeconds { get; set; } + /// Whether or not to return the verify image hash. + public bool? ReturnVerifyImageHash { get; set; } + /// Threshold for confidence of the face verification. + public float? VerifyConfidenceThreshold { get; set; } } } diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs new file mode 100644 index 000000000000..cccaed59a390 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.IO; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreateLivenessWithVerifySessionMultipartContent : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("Parameters"u8); + writer.WriteObjectValue(Parameters, options); + writer.WritePropertyName("VerifyImage"u8); +#if NET6_0_OR_GREATER + writer.WriteRawValue(global::System.BinaryData.FromStream(VerifyImage)); +#else + using (JsonDocument document = JsonDocument.Parse(BinaryData.FromStream(VerifyImage))) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreateLivenessWithVerifySessionMultipartContent IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement, options); + } + + internal static CreateLivenessWithVerifySessionMultipartContent DeserializeCreateLivenessWithVerifySessionMultipartContent(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + CreateLivenessWithVerifySessionContent parameters = default; + Stream verifyImage = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("Parameters"u8)) + { + parameters = CreateLivenessWithVerifySessionContent.DeserializeCreateLivenessWithVerifySessionContent(property.Value, options); + continue; + } + if (property.NameEquals("VerifyImage"u8)) + { + verifyImage = BinaryData.FromString(property.Value.GetRawText()).ToStream(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateLivenessWithVerifySessionMultipartContent(parameters, verifyImage, serializedAdditionalRawData); + } + + private BinaryData SerializeMultipart(ModelReaderWriterOptions options) + { + using MultipartFormDataRequestContent content = ToMultipartRequestContent(); + using MemoryStream stream = new MemoryStream(); + content.WriteTo(stream); + if (stream.Position > int.MaxValue) + { + return BinaryData.FromStream(stream); + } + else + { + return new BinaryData(stream.GetBuffer().AsMemory(0, (int)stream.Position)); + } + } + + internal virtual MultipartFormDataRequestContent ToMultipartRequestContent() + { + MultipartFormDataRequestContent content = new MultipartFormDataRequestContent(); + content.Add(ModelReaderWriter.Write(Parameters, ModelSerializationExtensions.WireOptions), "Parameters"); + content.Add(VerifyImage, "VerifyImage", "VerifyImage", "application/octet-stream"); + return content; + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + case "MFD": + return SerializeMultipart(options); + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support writing '{options.Format}' format."); + } + } + + CreateLivenessWithVerifySessionMultipartContent IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateLivenessWithVerifySessionMultipartContent)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "MFD"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateLivenessWithVerifySessionMultipartContent FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateLivenessWithVerifySessionMultipartContent(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs new file mode 100644 index 000000000000..972c14733528 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateLivenessWithVerifySessionMultipartContent.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.IO; + +namespace Azure.AI.Vision.Face +{ + /// Request of liveness with verify session creation. + internal partial class CreateLivenessWithVerifySessionMultipartContent + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// The parameters for creating session. + /// The image stream for verify. Content-Disposition header field for this part must have filename. + /// or is null. + public CreateLivenessWithVerifySessionMultipartContent(CreateLivenessWithVerifySessionContent parameters, Stream verifyImage) + { + Argument.AssertNotNull(parameters, nameof(parameters)); + Argument.AssertNotNull(verifyImage, nameof(verifyImage)); + + Parameters = parameters; + VerifyImage = verifyImage; + } + + /// Initializes a new instance of . + /// The parameters for creating session. + /// The image stream for verify. Content-Disposition header field for this part must have filename. + /// Keeps track of any properties unknown to the library. 
+ internal CreateLivenessWithVerifySessionMultipartContent(CreateLivenessWithVerifySessionContent parameters, Stream verifyImage, IDictionary serializedAdditionalRawData) + { + Parameters = parameters; + VerifyImage = verifyImage; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateLivenessWithVerifySessionMultipartContent() + { + } + + /// The parameters for creating session. + public CreateLivenessWithVerifySessionContent Parameters { get; } + /// The image stream for verify. Content-Disposition header field for this part must have filename. + public Stream VerifyImage { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs new file mode 100644 index 000000000000..682c4f08d863 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.Serialization.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class CreatePersonRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonRequest(document.RootElement, options); + } + + internal static CreatePersonRequest DeserializeCreatePersonRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonRequest(name, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs new file mode 100644 index 000000000000..5f2cefbcce5b --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonRequest.cs @@ -0,0 +1,79 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreatePersonRequest. 
+ internal partial class CreatePersonRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal CreatePersonRequest(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Keeps track of any properties unknown to the library. + internal CreatePersonRequest(string name, string userData, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreatePersonRequest() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs new file mode 100644 index 000000000000..e0afa23647b8 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class CreatePersonResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + CreatePersonResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeCreatePersonResult(document.RootElement, options); + } + + internal static CreatePersonResult DeserializeCreatePersonResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreatePersonResult(personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support writing '{options.Format}' format."); + } + } + + CreatePersonResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreatePersonResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreatePersonResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreatePersonResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreatePersonResult(document.RootElement); + } + + /// Convert into a . 
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs
new file mode 100644
index 000000000000..d2b993228dd6
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreatePersonResult.cs
@@ -0,0 +1,72 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> Response of create person. </summary>
+    public partial class CreatePersonResult
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="CreatePersonResult"/>. </summary>
+        /// <param name="personId"> Person ID of the person. </param>
+        internal CreatePersonResult(Guid personId)
+        {
+            PersonId = personId;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="CreatePersonResult"/>. </summary>
+        /// <param name="personId"> Person ID of the person. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal CreatePersonResult(Guid personId, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            PersonId = personId;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="CreatePersonResult"/> for deserialization. </summary>
+        internal CreatePersonResult()
+        {
+        }
+
+        /// <summary> Person ID of the person. </summary>
+        public Guid PersonId { get; }
+    }
+}
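Every serializer in this PR repeats the same format-negotiation preamble: a requested wire format ("W") is resolved through `GetFormatFromOptions`, which for these JSON-only models always answers "J", and any other format is rejected with a `FormatException`. A small sketch of that contract from the caller's side (the "X" format is a deliberately unsupported placeholder):

```C#
using System;
using System.ClientModel.Primitives;
using Azure.AI.Vision.Face;

IPersistableModel<CreatePersonResult> model = ModelReaderWriter.Read<CreatePersonResult>(
    BinaryData.FromString("{\"personId\":\"25985303-c537-4467-b41d-bdb45cd95ca1\"}"));

// Asking for the wire format resolves to the model's native format, JSON.
ModelReaderWriterOptions wire = new ModelReaderWriterOptions("W");
Console.WriteLine(model.GetFormatFromOptions(wire)); // prints: J

// Any format other than JSON falls through to the FormatException branch.
try
{
    model.Write(new ModelReaderWriterOptions("X")); // "X" is not a real format
}
catch (FormatException ex)
{
    Console.WriteLine(ex.Message);
}
```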
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs
new file mode 100644
index 000000000000..5dddb239ac5c
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.Serialization.cs
@@ -0,0 +1,161 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+    internal partial class CreateRequest : IUtf8JsonSerializable, IJsonModel<CreateRequest>
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<CreateRequest>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+        void IJsonModel<CreateRequest>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(CreateRequest)} does not support writing '{format}' format.");
+            }
+
+            writer.WriteStartObject();
+            writer.WritePropertyName("name"u8);
+            writer.WriteStringValue(Name);
+            if (Optional.IsDefined(UserData))
+            {
+                writer.WritePropertyName("userData"u8);
+                writer.WriteStringValue(UserData);
+            }
+            if (Optional.IsDefined(RecognitionModel))
+            {
+                writer.WritePropertyName("recognitionModel"u8);
+                writer.WriteStringValue(RecognitionModel.Value.ToString());
+            }
+            if (options.Format != "W" && _serializedAdditionalRawData != null)
+            {
+                foreach (var item in _serializedAdditionalRawData)
+                {
+                    writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item.Value);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item.Value))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+            }
+            writer.WriteEndObject();
+        }
+
+        CreateRequest IJsonModel<CreateRequest>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(CreateRequest)} does not support reading '{format}' format.");
+            }
+
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeCreateRequest(document.RootElement, options);
+        }
+
+        internal static CreateRequest DeserializeCreateRequest(JsonElement element, ModelReaderWriterOptions options = null)
+        {
+            options ??= ModelSerializationExtensions.WireOptions;
+
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            string name = default;
+            string userData = default;
+            FaceRecognitionModel? recognitionModel = default;
+            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("name"u8))
+                {
+                    name = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("userData"u8))
+                {
+                    userData = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("recognitionModel"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    recognitionModel = new FaceRecognitionModel(property.Value.GetString());
+                    continue;
+                }
+                if (options.Format != "W")
+                {
+                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+                }
+            }
+            serializedAdditionalRawData = rawDataDictionary;
+            return new CreateRequest(name, userData, recognitionModel, serializedAdditionalRawData);
+        }
+
+        BinaryData IPersistableModel<CreateRequest>.Write(ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options);
+                default:
+                    throw new FormatException($"The model {nameof(CreateRequest)} does not support writing '{options.Format}' format.");
+            }
+        }
+        CreateRequest IPersistableModel<CreateRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<CreateRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    {
+                        using JsonDocument document = JsonDocument.Parse(data);
+                        return DeserializeCreateRequest(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(CreateRequest)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<CreateRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+        /// <summary> Deserializes the model from a raw response. </summary>
+        /// <param name="response"> The response to deserialize the model from. </param>
+        internal static CreateRequest FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeCreateRequest(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
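One practical note on the `recognitionModel` field serialized above: the recognition model is fixed when the collection is created and every face added afterwards is indexed under it, so switching models later means rebuilding the list. The model docs below recommend `recognition_04`. A sketch of selecting it at creation time, following the same protocol-method pattern the generated samples use; the endpoint, key, and list ID are placeholders:

```C#
using System;
using Azure;
using Azure.AI.Vision.Face;
using Azure.Core;

Uri endpoint = new Uri("<your-endpoint>");
AzureKeyCredential credential = new AzureKeyCredential("<your-key>");
LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential)
    .GetLargeFaceListClient("my_large_face_list_id");

// recognition_04 is the recommended model for new lists: better overall
// accuracy, and better accuracy on faces wearing masks, than older models.
using RequestContent content = RequestContent.Create(new
{
    name = "my_large_face_list_name",
    userData = "demo list",
    recognitionModel = "recognition_04",
});
Response response = client.Create(content);
Console.WriteLine(response.Status);
```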
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs
new file mode 100644
index 000000000000..28b037d301c9
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest.cs
@@ -0,0 +1,83 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> The CreateRequest. </summary>
+    internal partial class CreateRequest
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="CreateRequest"/>. </summary>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="name"/> is null. </exception>
+        internal CreateRequest(string name)
+        {
+            Argument.AssertNotNull(name, nameof(name));
+
+            Name = name;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="CreateRequest"/>. </summary>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <param name="recognitionModel"> The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal CreateRequest(string name, string userData, FaceRecognitionModel? recognitionModel, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            Name = name;
+            UserData = userData;
+            RecognitionModel = recognitionModel;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="CreateRequest"/> for deserialization. </summary>
+        internal CreateRequest()
+        {
+        }
+
+        /// <summary> User defined name, maximum length is 128. </summary>
+        public string Name { get; }
+        /// <summary> Optional user defined data. Length should not exceed 16K. </summary>
+        public string UserData { get; }
+        /// <summary> The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. </summary>
+        public FaceRecognitionModel? RecognitionModel { get; }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs
new file mode 100644
index 000000000000..2d7461262254
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.Serialization.cs
@@ -0,0 +1,161 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+    internal partial class CreateRequest1 : IUtf8JsonSerializable, IJsonModel<CreateRequest1>
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<CreateRequest1>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+        void IJsonModel<CreateRequest1>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<CreateRequest1>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(CreateRequest1)} does not support writing '{format}' format.");
+            }
+
+            writer.WriteStartObject();
+            writer.WritePropertyName("name"u8);
+            writer.WriteStringValue(Name);
+            if (Optional.IsDefined(UserData))
+            {
+                writer.WritePropertyName("userData"u8);
+                writer.WriteStringValue(UserData);
+            }
+            if (Optional.IsDefined(RecognitionModel))
+            {
+                writer.WritePropertyName("recognitionModel"u8);
+                writer.WriteStringValue(RecognitionModel.Value.ToString());
+            }
+            if (options.Format != "W" && _serializedAdditionalRawData != null)
+            {
+                foreach (var item in _serializedAdditionalRawData)
+                {
+                    writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item.Value);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item.Value))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+            }
+            writer.WriteEndObject();
+        }
+
+        CreateRequest1 IJsonModel<CreateRequest1>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<CreateRequest1>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(CreateRequest1)} does not support reading '{format}' format.");
+            }
+
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeCreateRequest1(document.RootElement, options);
+        }
+
+        internal static CreateRequest1 DeserializeCreateRequest1(JsonElement element, ModelReaderWriterOptions options = null)
+        {
+            options ??= ModelSerializationExtensions.WireOptions;
+
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            string name = default;
+            string userData = default;
+            FaceRecognitionModel?
recognitionModel = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new CreateRequest1(name, userData, recognitionModel, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(CreateRequest1)} does not support writing '{options.Format}' format."); + } + } + + CreateRequest1 IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeCreateRequest1(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(CreateRequest1)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static CreateRequest1 FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeCreateRequest1(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs new file mode 100644 index 000000000000..6c354abd8419 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/CreateRequest1.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The CreateRequest1. + internal partial class CreateRequest1 + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal CreateRequest1(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Keeps track of any properties unknown to the library. + internal CreateRequest1(string name, string userData, FaceRecognitionModel? recognitionModel, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal CreateRequest1() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + public FaceRecognitionModel? RecognitionModel { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs new file mode 100644 index 000000000000..df96544d127a --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.Serialization.cs @@ -0,0 +1,135 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class DetectFromSessionImageRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("sessionImageId"u8); + writer.WriteStringValue(SessionImageId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + DetectFromSessionImageRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeDetectFromSessionImageRequest(document.RootElement, options); + } + + internal static DetectFromSessionImageRequest DeserializeDetectFromSessionImageRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string sessionImageId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("sessionImageId"u8)) + { + sessionImageId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new DetectFromSessionImageRequest(sessionImageId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support writing '{options.Format}' format."); + } + } + + DetectFromSessionImageRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeDetectFromSessionImageRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(DetectFromSessionImageRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+ internal static DetectFromSessionImageRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeDetectFromSessionImageRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs new file mode 100644 index 000000000000..637773ec68ff --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/DetectFromSessionImageRequest.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The DetectFromSessionImageRequest. + internal partial class DetectFromSessionImageRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Id of session image. + /// is null. + internal DetectFromSessionImageRequest(string sessionImageId) + { + Argument.AssertNotNull(sessionImageId, nameof(sessionImageId)); + + SessionImageId = sessionImageId; + } + + /// Initializes a new instance of . + /// Id of session image. + /// Keeps track of any properties unknown to the library. + internal DetectFromSessionImageRequest(string sessionImageId, IDictionary serializedAdditionalRawData) + { + SessionImageId = sessionImageId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal DetectFromSessionImageRequest() + { + } + + /// Id of session image. + public string SessionImageId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml index 14e05d4067a3..e4da2138d7f3 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceClient.xml @@ -213,6 +213,208 @@ Response response = client.Group(content); JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; Console.WriteLine(result.GetProperty("groups")[0][0].ToString()); Console.WriteLine(result.GetProperty("messyGroup")[0].ToString()); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceListAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", +}); +Response response = await client.FindSimilarFromLargeFaceListAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call FindSimilarFromLargeFaceList and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", +}); +Response response = client.FindSimilarFromLargeFaceList(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroupAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "c5c24a82-6845-4031-9d5d-978df9175426" + }, + largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7F, +}); +Response response = await client.IdentifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call IdentifyFromLargePersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceIds = new object[] + { + "c5c24a82-6845-4031-9d5d-978df9175426" + }, + largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7F, +}); +Response response = client.IdentifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroup. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +Response response = client.VerifyFromLargePersonGroup(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", +}); +Response response = await client.VerifyFromLargePersonGroupAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); +]]> + + + +This sample shows how to call VerifyFromLargePersonGroup and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceClient client = new FaceClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", +}); +Response response = client.VerifyFromLargePersonGroup(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("isIdentical").ToString()); +Console.WriteLine(result.GetProperty("confidence").ToString()); ]]> diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml index 3ab65974a572..3754a0217479 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/FaceSessionClient.xml @@ -497,6 +497,120 @@ Console.WriteLine(result[0].GetProperty("response").GetProperty("body").ToString Console.WriteLine(result[0].GetProperty("response").GetProperty("statusCode").ToString()); Console.WriteLine(result[0].GetProperty("response").GetProperty("latencyInMilliseconds").ToString()); Console.WriteLine(result[0].GetProperty("digest").ToString()); +]]> + + + +This sample shows how to call DetectFromSessionImageAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = await client.DetectFromSessionImageAsync("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); +]]> + + + +This sample shows how to call DetectFromSessionImage. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response> response = client.DetectFromSessionImage("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); +]]> + + + +This sample shows how to call DetectFromSessionImageAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", +}); +Response response = await client.DetectFromSessionImageAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); +]]> + + + +This sample shows how to call DetectFromSessionImage and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +using RequestContent content = RequestContent.Create(new +{ + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", +}); +Response response = client.DetectFromSessionImage(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); +Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); +]]> + + + +This sample shows how to call GetSessionImageAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa"); +]]> + + + +This sample shows how to call GetSessionImage. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa"); +]]> + + + +This sample shows how to call GetSessionImageAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); +]]> + + + +This sample shows how to call GetSessionImage and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +FaceSessionClient client = new FaceSessionClient(endpoint, credential); + +Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.ToString()); ]]> diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClient.xml new file mode 100644 index 000000000000..67af72d4e83f --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargeFaceListClient.xml @@ -0,0 +1,469 @@ + + + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.CreateAsync("your_large_face_list_name"); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.Create("your_large_face_list_name"); +]]> + + + +This sample shows how to call CreateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = await client.CreateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = client.Create(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.DeleteAsync(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.Delete(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.GetLargeFaceListAsync(); +]]> + + + +This sample shows how to call GetLargeFaceList. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.GetLargeFaceList(); +]]> + + + +This sample shows how to call GetLargeFaceListAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.GetLargeFaceListAsync(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceList and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.GetLargeFaceList(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call UpdateAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", +}); +Response response = await client.UpdateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Update. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_face_list_name", + userData = "your_user_data", +}); +Response response = client.Update(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargeFaceListsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient(null); + +Response> response = await client.GetLargeFaceListsAsync(); +]]> + + + +This sample shows how to call GetLargeFaceLists. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient(null); + +Response> response = client.GetLargeFaceLists(); +]]> + + + +This sample shows how to call GetLargeFaceListsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient(null); + +Response response = await client.GetLargeFaceListsAsync("my_list_id", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetLargeFaceLists and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient(null); + +Response response = client.GetLargeFaceLists("my_list_id", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatusAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.GetTrainingStatusAsync(); +]]> + + + +This sample shows how to call GetTrainingStatus. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.GetTrainingStatus(); +]]> + + + +This sample shows how to call GetTrainingStatusAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.GetTrainingStatusAsync(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatus and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.GetTrainingStatus(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call DeleteFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.DeleteFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.DeleteFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call GetFace and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call UpdateFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = await client.UpdateFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = client.UpdateFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFacesAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response> response = await client.GetFacesAsync(); +]]> + + + +This sample shows how to call GetFaces. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response> response = client.GetFaces(); +]]> + + + +This sample shows how to call GetFacesAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = await client.GetFacesAsync("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call GetFaces and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Response response = client.GetFaces("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call TrainAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Operation operation = await client.TrainAsync(WaitUntil.Completed); +]]> + + + +This sample shows how to call Train. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + +Operation operation = client.Train(WaitUntil.Completed); +]]> + + + \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClient.xml b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClient.xml new file mode 100644 index 000000000000..99d900e0bce4 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/Docs/LargePersonGroupClient.xml @@ -0,0 +1,645 @@ + + + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.CreateAsync("your_large_person_group_name"); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.Create("your_large_person_group_name"); +]]> + + + +This sample shows how to call CreateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = await client.CreateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Create. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", + recognitionModel = "recognition_01", +}); +Response response = client.Create(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.DeleteAsync(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Delete. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.Delete(); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetLargePersonGroupAsync(); +]]> + + + +This sample shows how to call GetLargePersonGroup. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetLargePersonGroup(); +]]> + + + +This sample shows how to call GetLargePersonGroupAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetLargePersonGroupAsync(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroup and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetLargePersonGroup(true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("name").ToString()); +Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call UpdateAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", +}); +Response response = await client.UpdateAsync(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call Update. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_name", + userData = "your_user_data", +}); +Response response = client.Update(content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetLargePersonGroupsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + +Response> response = await client.GetLargePersonGroupsAsync(); +]]> + + + +This sample shows how to call GetLargePersonGroups. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + +Response> response = client.GetLargePersonGroups(); +]]> + + + +This sample shows how to call GetLargePersonGroupsAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + +Response response = await client.GetLargePersonGroupsAsync("00000000-0000-0000-0000-000000000000", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetLargePersonGroups and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + +Response response = client.GetLargePersonGroups("00000000-0000-0000-0000-000000000000", 20, true, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("name").ToString()); +Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatusAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetTrainingStatusAsync(); +]]> + + + +This sample shows how to call GetTrainingStatus. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetTrainingStatus(); +]]> + + + +This sample shows how to call GetTrainingStatusAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetTrainingStatusAsync(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call GetTrainingStatus and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetTrainingStatus(null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("status").ToString()); +Console.WriteLine(result.GetProperty("createdDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); +Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); +]]> + + + +This sample shows how to call CreatePersonAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.CreatePersonAsync("your_large_person_group_person_name"); +]]> + + + +This sample shows how to call CreatePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.CreatePerson("your_large_person_group_person_name"); +]]> + + + +This sample shows how to call CreatePersonAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = await client.CreatePersonAsync(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call CreatePerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = client.CreatePerson(content); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +]]> + + + +This sample shows how to call DeletePersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.DeletePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeletePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.DeletePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); +]]> + + + +This sample shows how to call GetPerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1")); +]]> + + + +This sample shows how to call GetPersonAsync and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetPerson and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("personId").ToString()); +Console.WriteLine(result.GetProperty("name").ToString()); +]]> + + + +This sample shows how to call UpdatePersonAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = await client.UpdatePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdatePerson. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + name = "your_large_person_group_person_name", + userData = "your_user_data", +}); +Response response = client.UpdatePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetPersonsAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response> response = await client.GetPersonsAsync(); +]]> + + + +This sample shows how to call GetPersons. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response> response = client.GetPersons(); +]]> + + + +This sample shows how to call GetPersonsAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetPersonsAsync("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call GetPersons and parse the result. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetPersons("00000000-0000-0000-0000-000000000000", 20, null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result[0].GetProperty("personId").ToString()); +Console.WriteLine(result[0].GetProperty("name").ToString()); +]]> + + + +This sample shows how to call DeleteFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.DeleteFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call DeleteFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.DeleteFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call GetFaceAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); +]]> + + + +This sample shows how to call GetFaceAsync and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call GetFace and parse the result. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + +JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; +Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); +]]> + + + +This sample shows how to call UpdateFaceAsync. 
+"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = await client.UpdateFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call UpdateFace. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +using RequestContent content = RequestContent.Create(new +{ + userData = "your_user_data", +}); +Response response = client.UpdateFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + +Console.WriteLine(response.Status); +]]> + + + +This sample shows how to call TrainAsync. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Operation operation = await client.TrainAsync(WaitUntil.Completed); +]]> + + + +This sample shows how to call Train. +"); +AzureKeyCredential credential = new AzureKeyCredential(""); +LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + +Operation operation = client.Train(WaitUntil.Completed); +]]> + + + \ No newline at end of file diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs new file mode 100644 index 000000000000..f1f0cc7b85f6 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceAdministrationClient.cs @@ -0,0 +1,123 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated client. + /// The FaceAdministration service client. + public partial class FaceAdministrationClient + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. + public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of FaceAdministrationClient for mocking. + protected FaceAdministrationClient() + { + } + + /// Initializes a new instance of FaceAdministrationClient. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// A credential used to authenticate to an Azure Service. + /// or is null. 
+        public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions())
+        {
+        }
+
+        /// Initializes a new instance of FaceAdministrationClient.
+        ///
+        /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+        /// https://{resource-name}.cognitiveservices.azure.com).
+        ///
+        /// A credential used to authenticate to an Azure Service.
+        /// or is null.
+        public FaceAdministrationClient(Uri endpoint, TokenCredential credential) : this(endpoint, credential, new AzureAIVisionFaceClientOptions())
+        {
+        }
+
+        /// Initializes a new instance of FaceAdministrationClient.
+        ///
+        /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+        /// https://{resource-name}.cognitiveservices.azure.com).
+        ///
+        /// A credential used to authenticate to an Azure Service.
+        /// The options for configuring the client.
+        /// or is null.
+        public FaceAdministrationClient(Uri endpoint, AzureKeyCredential credential, AzureAIVisionFaceClientOptions options)
+        {
+            Argument.AssertNotNull(endpoint, nameof(endpoint));
+            Argument.AssertNotNull(credential, nameof(credential));
+            options ??= new AzureAIVisionFaceClientOptions();
+
+            ClientDiagnostics = new ClientDiagnostics(options, true);
+            _keyCredential = credential;
+            _pipeline = HttpPipelineBuilder.Build(options, Array.Empty<HttpPipelinePolicy>(), new HttpPipelinePolicy[] { new AzureKeyCredentialPolicy(_keyCredential, AuthorizationHeader) }, new ResponseClassifier());
+            _endpoint = endpoint;
+            _apiVersion = options.Version;
+        }
+
+        /// Initializes a new instance of FaceAdministrationClient.
+        ///
+        /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+        /// https://{resource-name}.cognitiveservices.azure.com).
+        ///
+        /// A credential used to authenticate to an Azure Service.
+        /// The options for configuring the client.
+        /// or is null.
+        public FaceAdministrationClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceClientOptions options)
+        {
+            Argument.AssertNotNull(endpoint, nameof(endpoint));
+            Argument.AssertNotNull(credential, nameof(credential));
+            options ??= new AzureAIVisionFaceClientOptions();
+
+            ClientDiagnostics = new ClientDiagnostics(options, true);
+            _tokenCredential = credential;
+            _pipeline = HttpPipelineBuilder.Build(options, Array.Empty<HttpPipelinePolicy>(), new HttpPipelinePolicy[] { new BearerTokenAuthenticationPolicy(_tokenCredential, AuthorizationScopes) }, new ResponseClassifier());
+            _endpoint = endpoint;
+            _apiVersion = options.Version;
+        }
+
+        /// Initializes a new instance of LargeFaceListClient.
+        /// Valid characters are lower-case letters, digits, '-' and '_'; the maximum length is 64.
+        /// is null.
+        /// is an empty string, and was expected to be non-empty.
+        public virtual LargeFaceListClient GetLargeFaceListClient(string largeFaceListId)
+        {
+            Argument.AssertNotNullOrEmpty(largeFaceListId, nameof(largeFaceListId));
+
+            return new LargeFaceListClient(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, largeFaceListId, _apiVersion);
+        }
+
+        /// Initializes a new instance of LargePersonGroupClient.
+        /// ID of the container.
+        /// is null.
+        /// is an empty string, and was expected to be non-empty.
+        public virtual LargePersonGroupClient GetLargePersonGroupClient(string largePersonGroupId)
+        {
+            Argument.AssertNotNullOrEmpty(largePersonGroupId, nameof(largePersonGroupId));
+
+            return new LargePersonGroupClient(ClientDiagnostics, _pipeline, _keyCredential, _tokenCredential, _endpoint, largePersonGroupId, _apiVersion);
+        }
+    }
+}
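For reference, a minimal sketch of how the two factory methods above are meant to be used together. The endpoint, key, and IDs are hypothetical placeholders; the `TrainAsync` long-running operation mirrors the generated samples earlier in this diff:

```C#
using System;
using Azure;
using Azure.AI.Vision.Face;

// Hypothetical placeholders; substitute your own resource endpoint and key.
Uri endpoint = new Uri("https://your-resource.cognitiveservices.azure.com");
AzureKeyCredential credential = new AzureKeyCredential("<your-key>");

// One FaceAdministrationClient hands out sub-clients that share its pipeline and API version.
FaceAdministrationClient adminClient = new FaceAdministrationClient(endpoint, credential);
LargeFaceListClient faceListClient = adminClient.GetLargeFaceListClient("your_large_face_list_id");
LargePersonGroupClient personGroupClient = adminClient.GetLargePersonGroupClient("your_large_person_group_id");

// Training is a long-running operation; WaitUntil.Completed blocks until it finishes.
Operation operation = await faceListClient.TrainAsync(WaitUntil.Completed);
```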
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs
index 1a0b973e9201..7cb8c3849ea7 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceClient.cs
@@ -105,7 +105,7 @@ public FaceClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceCli
         /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes.
         /// URL of input image.
-        /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'.
+        /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations.
         /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'.
         /// Return faceIds of the detected faces or not. The default value is true.
         /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost.
@@ -114,22 +114,7 @@ public FaceClient(Uri endpoint, TokenCredential credential, AzureAIVisionFaceCli
         /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours).
         /// The cancellation token to use.
         /// is null.
-        ///
-        /// > [!IMPORTANT]
-        /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/.
-        ///
-        /// *
-        /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call.
-        /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate.
-        /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
- /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. internal virtual async Task>> DetectFromUrlImplAsync(Uri uri, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(uri, nameof(uri)); @@ -150,7 +135,7 @@ internal virtual async Task>> Detect /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. /// URL of input image. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. 
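The removed remarks above are now covered by the linked REST reference. As a quick orientation, here is a sketch of a detect-from-URL call with the recommended models. It assumes a public `DetectAsync` convenience wrapper fronting the `DetectFromUrlImpl` methods (the hand-written wrapper is not shown in this diff), so treat the exact method name and signature as an assumption:

```C#
using System;
using System.Collections.Generic;
using Azure;
using Azure.AI.Vision.Face;

Uri endpoint = new Uri("https://your-resource.cognitiveservices.azure.com");
FaceClient client = new FaceClient(endpoint, new AzureKeyCredential("<your-key>"));

// detection_03 and recognition_04 are the models recommended by the parameter docs above.
Response<IReadOnlyList<FaceDetectionResult>> response = await client.DetectAsync(
    new Uri("https://example.com/photo.jpg"),   // hypothetical image URL
    FaceDetectionModel.Detection03,
    FaceRecognitionModel.Recognition04,
    returnFaceId: true);

foreach (FaceDetectionResult face in response.Value)
{
    Console.WriteLine($"faceId={face.FaceId}, rectangle=({face.FaceRectangle.Left},{face.FaceRectangle.Top})");
}
```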
@@ -159,22 +144,7 @@ internal virtual async Task>> Detect /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-url for more details. internal virtual Response> DetectFromUrlImpl(Uri uri, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? 
faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(uri, nameof(uri)); @@ -209,7 +179,7 @@ internal virtual Response> DetectFromUrlImpl( /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -254,7 +224,7 @@ internal virtual async Task DetectFromUrlImplAsync(RequestContent cont /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -285,7 +255,7 @@ internal virtual Response DetectFromUrlImpl(RequestContent content, string detec /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. /// The input image binary. 
- /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -294,22 +264,7 @@ internal virtual Response DetectFromUrlImpl(RequestContent content, string detec /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. 
- /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details. internal virtual async Task>> DetectImplAsync(BinaryData imageContent, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(imageContent, nameof(imageContent)); @@ -330,7 +285,7 @@ internal virtual async Task>> Detect /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. /// The input image binary. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -339,22 +294,7 @@ internal virtual async Task>> Detect /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). /// The cancellation token to use. /// is null. - /// - /// > [!IMPORTANT] - /// > To mitigate potential misuse that can subject people to stereotyping, discrimination, or unfair denial of services, we are retiring Face API attributes that predict emotion, gender, age, smile, facial hair, hair, and makeup. Read more about this decision https://azure.microsoft.com/blog/responsible-ai-investments-and-safeguards-for-facial-recognition/. - /// - /// * - /// * No image will be stored. Only the extracted face feature(s) will be stored on server. 
The faceId is an identifier of the face feature and will be used in "Identify", "Verify", and "Find Similar". The stored face features will expire and be deleted at the time specified by faceIdTimeToLive after the original detection call. - /// * Optional parameters include faceId, landmarks, and attributes. Attributes include headPose, glasses, occlusion, accessories, blur, exposure, noise, mask, and qualityForRecognition. Some of the results returned for specific attributes may not be highly accurate. - /// * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB. - /// * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size. - /// * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small. - /// * For optimal results when querying "Identify", "Verify", and "Find Similar" ('returnFaceId' is true), please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes). - /// * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-detection-model - /// * 'detection_02': Face attributes and landmarks are disabled if you choose this detection model. - /// * 'detection_03': Face attributes (mask, blur, and headPose) and landmarks are supported if you choose this detection model. - /// * Different 'recognitionModel' values are provided. If follow-up operations like "Verify", "Identify", "Find Similar" are needed, please specify the recognition model with 'recognitionModel' parameter. The default value for 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More details, please refer to https://learn.microsoft.com/azure/ai-services/computer-vision/how-to/specify-recognition-model. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect for more details. internal virtual Response> DetectImpl(BinaryData imageContent, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) { Argument.AssertNotNull(imageContent, nameof(imageContent)); @@ -389,7 +329,7 @@ internal virtual Response> DetectImpl(BinaryD /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". 
/// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -434,7 +374,7 @@ internal virtual async Task DetectImplAsync(RequestContent content, st /// /// /// The content to send as the body of the request. - /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". /// Return faceIds of the detected faces or not. The default value is true. /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. @@ -470,13 +410,7 @@ internal virtual Response DetectImpl(RequestContent content, string detectionMod /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. /// The cancellation token to use. /// is null. - /// - /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. - /// - /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. - /// - /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array. 
- /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar for more details. /// public virtual async Task>> FindSimilarAsync(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) { @@ -503,13 +437,7 @@ public virtual async Task>> FindSi /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. /// The cancellation token to use. /// is null. - /// - /// Depending on the input the returned similar faces list contains faceIds or persistedFaceIds ranked by similarity. - /// - /// Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds. "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low. It can be used in the cases like searching celebrity-looking faces. - /// - /// The 'recognitionModel' associated with the query faceId should be the same as the 'recognitionModel' used by the target faceId array. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar for more details. /// public virtual Response> FindSimilar(Guid faceId, IEnumerable faceIds, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default) { @@ -611,14 +539,7 @@ public virtual Response FindSimilar(RequestContent content, RequestContext conte /// The faceId of one face, come from "Detect". /// The faceId of another face, come from "Detect". /// The cancellation token to use. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. - /// > * For the scenarios that are sensitive to accuracy please make your own judgment. - /// > * The 'recognitionModel' associated with the both faces should be the same. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-face-to-face for more details. /// public virtual async Task> VerifyFaceToFaceAsync(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default) { @@ -632,14 +553,7 @@ public virtual async Task> VerifyFaceToFaceAsyn /// The faceId of one face, come from "Detect". /// The faceId of another face, come from "Detect". /// The cancellation token to use. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Higher face image quality means better identification precision. Please consider high-quality faces: frontal, clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger. - /// > * For the scenarios that are sensitive to accuracy please make your own judgment. - /// > * The 'recognitionModel' associated with the both faces should be the same. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-face-to-face for more details. 
         ///
         public virtual Response<FaceVerificationResult> VerifyFaceToFace(Guid faceId1, Guid faceId2, CancellationToken cancellationToken = default)
         {
@@ -731,14 +645,7 @@ public virtual Response VerifyFaceToFace(RequestContent content, RequestContext
         /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces.
         /// The cancellation token to use.
         /// is null.
-        ///
-        /// >
-        /// *
-        /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result.
-        /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts.
-        /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces.
-        /// * The 'recognitionModel' associated with the query faces' faceIds should be the same.
-        ///
+        /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/group for more details.
         ///
         public virtual async Task<Response<FaceGroupingResult>> GroupAsync(IEnumerable<Guid> faceIds, CancellationToken cancellationToken = default)
         {
@@ -754,14 +661,7 @@ public virtual async Task<Response<FaceGroupingResult>> GroupAsync(IEnumerable<G
         /// Array of candidate faceIds created by "Detect". The maximum is 1000 faces.
         /// The cancellation token to use.
         /// is null.
-        ///
-        /// >
-        /// *
-        /// * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces belonging to a same person might be split into several groups in the result.
-        /// * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original faces. The messyGroup will not appear in the result if all faces found their counterparts.
-        /// * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try "Verify Face To Face" when you only have 2 candidate faces.
-        /// * The 'recognitionModel' associated with the query faces' faceIds should be the same.
-        ///
+        /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/group for more details.
         ///
         public virtual Response<FaceGroupingResult> Group(IEnumerable<Guid> faceIds, CancellationToken cancellationToken = default)
         {
@@ -851,6 +751,386 @@ public virtual Response Group(RequestContent content, RequestContext context = n
             }
         }
 
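With Group's long remarks replaced by the REST reference link, a compact sketch of the convenience call may help here. The endpoint, key, and faceIds are hypothetical, and the `Groups`/`MessyGroup` property names are taken from the REST schema's `groups`/`messyGroup` fields:

```C#
using System;
using System.Collections.Generic;
using Azure;
using Azure.AI.Vision.Face;

Uri endpoint = new Uri("https://your-resource.cognitiveservices.azure.com");
FaceClient client = new FaceClient(endpoint, new AzureKeyCredential("<your-key>"));

// Hypothetical faceIds from earlier Detect calls; Group requires 2 to 1000 faceIds
// that were all detected with the same 'recognitionModel'.
IEnumerable<Guid> faceIds = new List<Guid>
{
    Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"),
    Guid.Parse("65d083d4-9447-47d1-af30-b626144bf0fb"),
    Guid.Parse("fce92aed-d578-4d2e-8114-068f8af4492e"),
};

Response<FaceGroupingResult> response = await client.GroupAsync(faceIds);
Console.WriteLine($"Similar-face groups: {response.Value.Groups.Count}");
Console.WriteLine($"Faces without a counterpart: {response.Value.MessyGroup.Count}");
```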
+        /// Given a query face's faceId, search for similar-looking faces in a Large Face List. A 'largeFaceListId' is created by "Create Large Face List".
+        /// faceId of the query face. Call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.
+        /// An existing user-specified unique candidate Large Face List, created by "Create Large Face List". A Large Face List contains a set of persistedFaceIds which are persisted and never expire.
+        /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.
+        /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
+        /// The cancellation token to use.
+        /// is null.
+        /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar-from-large-face-list for more details.
+        ///
+        public virtual async Task<Response<IReadOnlyList<FaceFindSimilarResult>>> FindSimilarFromLargeFaceListAsync(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default)
+        {
+            Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId));
+
+            FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null);
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = await FindSimilarFromLargeFaceListAsync(findSimilarFromLargeFaceListRequest.ToRequestContent(), context).ConfigureAwait(false);
+            IReadOnlyList<FaceFindSimilarResult> value = default;
+            using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false);
+            List<FaceFindSimilarResult> array = new List<FaceFindSimilarResult>();
+            foreach (var item in document.RootElement.EnumerateArray())
+            {
+                array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item));
+            }
+            value = array;
+            return Response.FromValue(value, response);
+        }
+
+        /// Given a query face's faceId, search for similar-looking faces in a Large Face List. A 'largeFaceListId' is created by "Create Large Face List".
+        /// faceId of the query face. Call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call.
+        /// An existing user-specified unique candidate Large Face List, created by "Create Large Face List". A Large Face List contains a set of persistedFaceIds which are persisted and never expire.
+        /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20.
+        /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'.
+        /// The cancellation token to use.
+        /// is null.
+        /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/find-similar-from-large-face-list for more details.
+        ///
+        public virtual Response<IReadOnlyList<FaceFindSimilarResult>> FindSimilarFromLargeFaceList(Guid faceId, string largeFaceListId, int? maxNumOfCandidatesReturned = null, FindSimilarMatchMode? mode = null, CancellationToken cancellationToken = default)
+        {
+            Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId));
+
+            FindSimilarFromLargeFaceListRequest findSimilarFromLargeFaceListRequest = new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, null);
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = FindSimilarFromLargeFaceList(findSimilarFromLargeFaceListRequest.ToRequestContent(), context);
+            IReadOnlyList<FaceFindSimilarResult> value = default;
+            using var document = JsonDocument.Parse(response.ContentStream);
+            List<FaceFindSimilarResult> array = new List<FaceFindSimilarResult>();
+            foreach (var item in document.RootElement.EnumerateArray())
+            {
+                array.Add(FaceFindSimilarResult.DeserializeFaceFindSimilarResult(item));
+            }
+            value = array;
+            return Response.FromValue(value, response);
+        }
+
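A usage sketch for the convenience overload above. The list ID and query faceId are hypothetical, and the target Large Face List must already be populated and trained (see the LargeFaceListClient samples earlier in this diff):

```C#
using System;
using System.Collections.Generic;
using Azure;
using Azure.AI.Vision.Face;

Uri endpoint = new Uri("https://your-resource.cognitiveservices.azure.com");
FaceClient client = new FaceClient(endpoint, new AzureKeyCredential("<your-key>"));

// queryFaceId is a hypothetical, non-expired faceId returned by a prior Detect call.
Guid queryFaceId = Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426");

Response<IReadOnlyList<FaceFindSimilarResult>> similar = await client.FindSimilarFromLargeFaceListAsync(
    queryFaceId,
    "your_large_face_list_id",        // must already be populated and trained
    maxNumOfCandidatesReturned: 3,
    mode: FindSimilarMatchMode.MatchPerson);

foreach (FaceFindSimilarResult match in similar.Value)
{
    Console.WriteLine($"persistedFaceId={match.PersistedFaceId}, confidence={match.Confidence}");
}
```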
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task FindSimilarFromLargeFaceListAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Given query face's faceId, to search the similar-looking faces from a Large Face List. A 'largeFaceListId' is created by Create Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response FindSimilarFromLargeFaceList(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.FindSimilarFromLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateFindSimilarFromLargeFaceListRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/identify-from-person-group for more details. + /// + public virtual async Task>> IdentifyFromLargePersonGroupAsync(IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? 
confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await IdentifyFromLargePersonGroupAsync(identifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// Array of query faces faceIds, created by the "Detect". Each of the faces are identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameter personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// The cancellation token to use. + /// or is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/identify-from-person-group for more details. + /// + public virtual Response> IdentifyFromLargePersonGroup(IEnumerable faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned = null, float? confidenceThreshold = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + IdentifyFromLargePersonGroupRequest identifyFromLargePersonGroupRequest = new IdentifyFromLargePersonGroupRequest(faceIds.ToList(), largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = IdentifyFromLargePersonGroup(identifyFromLargePersonGroupRequest.ToRequestContent(), context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceIdentificationResult.DeserializeFaceIdentificationResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
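Similarly, a hedged sketch of one-to-many identification against a Large Person Group, reusing the `client` and `faceId` from the previous snippet; `"my-large-person-group"` is a placeholder for an existing, trained group:

```C#
Response<IReadOnlyList<FaceIdentificationResult>> identification = await client.IdentifyFromLargePersonGroupAsync(
    new[] { faceId },
    "my-large-person-group",
    maxNumOfCandidatesReturned: 1);

foreach (FaceIdentificationResult result in identification.Value)
{
    // Candidates are ranked by confidence; an empty list means no match.
    foreach (FaceIdentificationCandidate candidate in result.Candidates)
    {
        Console.WriteLine($"faceId={result.FaceId}: personId={candidate.PersonId}, confidence={candidate.Confidence}");
    }
}
```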
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task IdentifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] 1-to-many identification to find the closest matches of the specific query person face from a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response IdentifyFromLargePersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.IdentifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateIdentifyFromLargePersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Verify whether a face belongs to a person in a Large Person Group. + /// The faceId of the face, come from "Detect". + /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-from-large-person-group for more details. + /// + public virtual async Task> VerifyFromLargePersonGroupAsync(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await VerifyFromLargePersonGroupAsync(verifyFromLargePersonGroupRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// Verify whether a face belongs to a person in a Large Person Group. + /// The faceId of the face, come from "Detect". 
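And a sketch for the `VerifyFromLargePersonGroup` overloads defined just below; the `personId` is assumed to come from the Large Person Group's person directory:

```C#
Response<FaceVerificationResult> verification = await client.VerifyFromLargePersonGroupAsync(
    faceId,
    "my-large-person-group",
    personId);

Console.WriteLine($"IsIdentical={verification.Value.IsIdentical}, Confidence={verification.Value.Confidence}");
```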
+ /// Using existing largePersonGroupId and personId for fast loading a specified person. largePersonGroupId is created in "Create Large Person Group". + /// Specify a certain person in Large Person Group. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-recognition-operations/verify-from-large-person-group for more details. + /// + public virtual Response VerifyFromLargePersonGroup(Guid faceId, string largePersonGroupId, Guid personId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + VerifyFromLargePersonGroupRequest verifyFromLargePersonGroupRequest = new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = VerifyFromLargePersonGroup(verifyFromLargePersonGroupRequest.ToRequestContent(), context); + return Response.FromValue(FaceVerificationResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task VerifyFromLargePersonGroupAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Verify whether a face belongs to a person in a Large Person Group. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response VerifyFromLargePersonGroup(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceClient.VerifyFromLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateVerifyFromLargePersonGroupRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + internal HttpMessage CreateDetectFromUrlImplRequest(RequestContent content, string detectionModel, string recognitionModel, bool? 
returnFaceId, IEnumerable returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? faceIdTimeToLive, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); @@ -992,6 +1272,57 @@ internal HttpMessage CreateGroupRequest(RequestContent content, RequestContext c return message; } + internal HttpMessage CreateFindSimilarFromLargeFaceListRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/findsimilars", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateIdentifyFromLargePersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/identify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateVerifyFromLargePersonGroupRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/verify", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + private static RequestContext DefaultRequestContext = new RequestContext(); internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) { diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs new file mode 100644 index 000000000000..3d0dcf5633df --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.Serialization.cs @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceIdentificationCandidate : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + writer.WritePropertyName("confidence"u8); + writer.WriteNumberValue(Confidence); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceIdentificationCandidate IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceIdentificationCandidate(document.RootElement, options); + } + + internal static FaceIdentificationCandidate DeserializeFaceIdentificationCandidate(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid personId = default; + float confidence = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("confidence"u8)) + { + confidence = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceIdentificationCandidate(personId, confidence, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support writing '{options.Format}' format."); + } + } + + FaceIdentificationCandidate IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceIdentificationCandidate(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceIdentificationCandidate)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. 
+        internal static FaceIdentificationCandidate FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeFaceIdentificationCandidate(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs
new file mode 100644
index 000000000000..b4fcaba1400b
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationCandidate.cs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> Candidate for identify call. </summary>
+    public partial class FaceIdentificationCandidate
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="FaceIdentificationCandidate"/>. </summary>
+        /// <param name="personId"> personId of candidate person. </param>
+        /// <param name="confidence"> Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. </param>
+        internal FaceIdentificationCandidate(Guid personId, float confidence)
+        {
+            PersonId = personId;
+            Confidence = confidence;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="FaceIdentificationCandidate"/>. </summary>
+        /// <param name="personId"> personId of candidate person. </param>
+        /// <param name="confidence"> Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal FaceIdentificationCandidate(Guid personId, float confidence, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            PersonId = personId;
+            Confidence = confidence;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="FaceIdentificationCandidate"/> for deserialization. </summary>
+        internal FaceIdentificationCandidate()
+        {
+        }
+
+        /// <summary> personId of candidate person. </summary>
+        public Guid PersonId { get; }
+        /// <summary> Confidence value of the candidate. The higher confidence, the more similar. Range between [0,1]. </summary>
+        public float Confidence { get; }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs
new file mode 100644
index 000000000000..edee38653822
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.Serialization.cs
@@ -0,0 +1,153 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceIdentificationResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("candidates"u8); + writer.WriteStartArray(); + foreach (var item in Candidates) + { + writer.WriteObjectValue(item, options); + } + writer.WriteEndArray(); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceIdentificationResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceIdentificationResult(document.RootElement, options); + } + + internal static FaceIdentificationResult DeserializeFaceIdentificationResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + IReadOnlyList candidates = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("candidates"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(FaceIdentificationCandidate.DeserializeFaceIdentificationCandidate(item, options)); + } + candidates = array; + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceIdentificationResult(faceId, candidates, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support writing '{options.Format}' format."); + } + } + + FaceIdentificationResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceIdentificationResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceIdentificationResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceIdentificationResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceIdentificationResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs new file mode 100644 index 000000000000..1489f5061145 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceIdentificationResult.cs @@ -0,0 +1,82 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// Identify result. + public partial class FaceIdentificationResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// is null. + internal FaceIdentificationResult(Guid faceId, IEnumerable candidates) + { + Argument.AssertNotNull(candidates, nameof(candidates)); + + FaceId = faceId; + Candidates = candidates.ToList(); + } + + /// Initializes a new instance of . + /// faceId of the query face. + /// Identified person candidates for that face (ranked by confidence). 
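Because `FaceIdentificationResult` implements `IPersistableModel`, a stored identify payload can be rehydrated without a live service response. A minimal sketch, with an illustrative JSON body:

```C#
using System.ClientModel.Primitives;

BinaryData json = BinaryData.FromString(
    "{\"faceId\":\"c5c24a82-6845-4031-9d5d-978df9175426\"," +
    "\"candidates\":[{\"personId\":\"25985303-c537-4467-b41d-bdb45cd95ca1\",\"confidence\":0.92}]}");

// Read uses the JSON ("J") format implemented above; Write round-trips it back.
FaceIdentificationResult result = ModelReaderWriter.Read<FaceIdentificationResult>(json);
BinaryData roundTripped = ModelReaderWriter.Write(result);

Console.WriteLine($"{result.FaceId}: {result.Candidates[0].PersonId} ({result.Candidates[0].Confidence})");
```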
Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + /// Keeps track of any properties unknown to the library. + internal FaceIdentificationResult(Guid faceId, IReadOnlyList candidates, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + Candidates = candidates; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceIdentificationResult() + { + } + + /// faceId of the query face. + public Guid FaceId { get; } + /// Identified person candidates for that face (ranked by confidence). Array size should be no larger than input maxNumOfCandidatesReturned. If no person is identified, will return an empty array. + public IReadOnlyList Candidates { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs new file mode 100644 index 000000000000..67683c753b30 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceOperationStatus.cs @@ -0,0 +1,57 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ComponentModel; + +namespace Azure.AI.Vision.Face +{ + /// The status of long running operation. + public readonly partial struct FaceOperationStatus : IEquatable + { + private readonly string _value; + + /// Initializes a new instance of . + /// is null. + public FaceOperationStatus(string value) + { + _value = value ?? throw new ArgumentNullException(nameof(value)); + } + + private const string NotStartedValue = "notStarted"; + private const string RunningValue = "running"; + private const string SucceededValue = "succeeded"; + private const string FailedValue = "failed"; + + /// The operation is not started. + public static FaceOperationStatus NotStarted { get; } = new FaceOperationStatus(NotStartedValue); + /// The operation is still running. + public static FaceOperationStatus Running { get; } = new FaceOperationStatus(RunningValue); + /// The operation is succeeded. + public static FaceOperationStatus Succeeded { get; } = new FaceOperationStatus(SucceededValue); + /// The operation is failed. + public static FaceOperationStatus Failed { get; } = new FaceOperationStatus(FailedValue); + /// Determines if two values are the same. + public static bool operator ==(FaceOperationStatus left, FaceOperationStatus right) => left.Equals(right); + /// Determines if two values are not the same. + public static bool operator !=(FaceOperationStatus left, FaceOperationStatus right) => !left.Equals(right); + /// Converts a to a . + public static implicit operator FaceOperationStatus(string value) => new FaceOperationStatus(value); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override bool Equals(object obj) => obj is FaceOperationStatus other && Equals(other); + /// + public bool Equals(FaceOperationStatus other) => string.Equals(_value, other._value, StringComparison.InvariantCultureIgnoreCase); + + /// + [EditorBrowsable(EditorBrowsableState.Never)] + public override int GetHashCode() => _value != null ? 
StringComparer.InvariantCultureIgnoreCase.GetHashCode(_value) : 0; + /// + public override string ToString() => _value; + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs index 4019a5bc3493..f3f4c70329ee 100644 --- a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceSessionClient.cs @@ -106,18 +106,7 @@ public FaceSessionClient(Uri endpoint, TokenCredential credential, AzureAIVision /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLiveness/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-session for more details. /// public virtual async Task> CreateLivenessSessionAsync(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) { @@ -133,18 +122,7 @@ public virtual async Task> CreateLivenessS /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLiveness/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > Client access can be revoked by deleting the session using the Delete Liveness Session operation. To retrieve a result, use the Get Liveness Session. To audit the individual requests that a client has made to your resource, use the List Liveness Session Audit Entries. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-session for more details. /// public virtual Response CreateLivenessSession(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) { @@ -306,7 +284,7 @@ public virtual Response DeleteLivenessSession(string sessionId, RequestContext c } } - /// Get session result of detectLiveness/singleModal call. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -321,7 +299,7 @@ public virtual async Task> GetLivenessSessionResultAsy return Response.FromValue(LivenessSession.FromResponse(response), response); } - /// Get session result of detectLiveness/singleModal call. 
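A short sketch of how the `FaceOperationStatus` extensible enum defined above behaves: unknown service values round-trip as plain strings instead of throwing, and comparisons are case-insensitive:

```C#
FaceOperationStatus status = "succeeded"; // implicit conversion from string
bool finished = status == FaceOperationStatus.Succeeded || status == FaceOperationStatus.Failed;
Console.WriteLine($"status={status}, finished={finished}");
```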
+ /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -337,7 +315,7 @@ public virtual Response GetLivenessSessionResult(string session } /// - /// [Protocol Method] Get session result of detectLiveness/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// /// /// @@ -377,7 +355,7 @@ public virtual async Task GetLivenessSessionResultAsync(string session } /// - /// [Protocol Method] Get session result of detectLiveness/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-result for more details. /// /// /// @@ -420,11 +398,7 @@ public virtual Response GetLivenessSessionResult(string sessionId, RequestContex /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the 'start'. - /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-sessions for more details. /// public virtual async Task>> GetLivenessSessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -445,11 +419,7 @@ public virtual async Task>> GetLiven /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the 'start'. - /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-sessions for more details. /// public virtual Response> GetLivenessSessions(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -540,7 +510,7 @@ public virtual Response GetLivenessSessions(string start, int? top, RequestConte } } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -565,7 +535,7 @@ public virtual async Task>> Ge return Response.FromValue(value, response); } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -591,7 +561,7 @@ public virtual Response> GetLivenessSes } /// - /// [Protocol Method] Gets session requests and response body for the session. 
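For the session operations being relinked here, a hedged end-to-end sketch; `endpoint` and `credential` are placeholders, the device correlation id is an illustrative value, and property names follow the surface shown elsewhere in this diff:

```C#
var sessionClient = new FaceSessionClient(endpoint, credential);

var createContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive)
{
    DeviceCorrelationId = Guid.NewGuid().ToString(), // illustrative correlation id
};

Response<CreateLivenessSessionResult> created = await sessionClient.CreateLivenessSessionAsync(createContent);
string sessionId = created.Value.SessionId;

// After the client device finishes its liveness calls, fetch the outcome.
Response<LivenessSession> session = await sessionClient.GetLivenessSessionResultAsync(sessionId);
```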
+ /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// /// /// @@ -633,7 +603,7 @@ public virtual async Task GetLivenessSessionAuditEntriesAsync(string s } /// - /// [Protocol Method] Gets session requests and response body for the session. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-session-audit-entries for more details. /// /// /// @@ -678,27 +648,8 @@ public virtual Response GetLivenessSessionAuditEntries(string sessionId, string /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. - /// > [!NOTE] - /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage. - /// - internal virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session for more details. + internal virtual async Task> CreateLivenessWithVerifySessionAsync(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -712,27 +663,8 @@ internal virtual async Task> Cre /// Body parameter. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Alternative Option: Client device submits VerifyImage during the /detectLivenessWithVerify/singleModal call. - /// > [!NOTE] - /// > Extra measures should be taken to validate that the client is sending the expected VerifyImage. 
- /// - internal virtual Response CreateLivenessWithVerifySession(CreateLivenessSessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session for more details. + internal virtual Response CreateLivenessWithVerifySession(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -752,7 +684,7 @@ internal virtual Response CreateLivenessW /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -790,7 +722,7 @@ internal virtual async Task CreateLivenessWithVerifySessionAsync(Reque /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -822,25 +754,8 @@ internal virtual Response CreateLivenessWithVerifySession(RequestContent content /// Request content of liveness with verify session creation. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. - /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Recommended Option: VerifyImage is provided during session creation. - /// - internal virtual async Task> CreateLivenessWithVerifySessionWithVerifyImageAsync(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details. + internal virtual async Task> CreateLivenessWithVerifySessionWithVerifyImageAsync(CreateLivenessWithVerifySessionMultipartContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -854,25 +769,8 @@ internal virtual async Task> Cre /// Request content of liveness with verify session creation. /// The cancellation token to use. /// is null. - /// - /// A session is best for client device scenarios where developers want to authorize a client device to perform only a liveness detection without granting full access to their resource. Created sessions have a limited life span and only authorize clients to perform the desired action before access is expired. - /// - /// Permissions includes... - /// > - /// * - /// * Ability to call /detectLivenessWithVerify/singleModal for up to 3 retries. - /// * A token lifetime of 10 minutes. 
- /// - /// > [!NOTE] - /// > - /// > * - /// > * Client access can be revoked by deleting the session using the Delete Liveness With Verify Session operation. - /// > * To retrieve a result, use the Get Liveness With Verify Session. - /// > * To audit the individual requests that a client has made to your resource, use the List Liveness With Verify Session Audit Entries. - /// - /// Recommended Option: VerifyImage is provided during session creation. - /// - internal virtual Response CreateLivenessWithVerifySessionWithVerifyImage(CreateLivenessWithVerifySessionContent body, CancellationToken cancellationToken = default) + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/create-liveness-with-verify-session-with-verify-image for more details. + internal virtual Response CreateLivenessWithVerifySessionWithVerifyImage(CreateLivenessWithVerifySessionMultipartContent body, CancellationToken cancellationToken = default) { Argument.AssertNotNull(body, nameof(body)); @@ -892,7 +790,7 @@ internal virtual Response CreateLivenessW /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -931,7 +829,7 @@ internal virtual async Task CreateLivenessWithVerifySessionWithVerifyI /// /// /// - /// Please try the simpler convenience overload with strongly typed models first. + /// Please try the simpler convenience overload with strongly typed models first. /// /// /// @@ -1032,7 +930,7 @@ public virtual Response DeleteLivenessWithVerifySession(string sessionId, Reques } } - /// Get session result of detectLivenessWithVerify/singleModal call. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -1047,7 +945,7 @@ public virtual async Task> GetLivenessWithVe return Response.FromValue(LivenessWithVerifySession.FromResponse(response), response); } - /// Get session result of detectLivenessWithVerify/singleModal call. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// The unique ID to reference this session. /// The cancellation token to use. /// is null. @@ -1063,7 +961,7 @@ public virtual Response GetLivenessWithVerifySessionR } /// - /// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// /// /// @@ -1103,7 +1001,7 @@ public virtual async Task GetLivenessWithVerifySessionResultAsync(stri } /// - /// [Protocol Method] Get session result of detectLivenessWithVerify/singleModal call. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-result for more details. /// /// /// @@ -1146,11 +1044,7 @@ public virtual Response GetLivenessWithVerifySessionResult(string sessionId, Req /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the "start". 
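As this hunk shows, the with-verify session is now created from `CreateLivenessWithVerifySessionContent` rather than `CreateLivenessSessionContent`. Since the methods here are `internal`, the public wrapper shape sketched below is an assumption:

```C#
var verifyContent = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive)
{
    DeviceCorrelationId = Guid.NewGuid().ToString(),
};

// Assumed public convenience wrapper; passing a null verify image would defer
// the VerifyImage to the client device, per the removed remarks above.
Response<CreateLivenessWithVerifySessionResult> withVerify =
    await sessionClient.CreateLivenessWithVerifySessionAsync(verifyContent, verifyImage: null);
```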
- /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-sessions for more details. /// public virtual async Task>> GetLivenessWithVerifySessionsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -1171,11 +1065,7 @@ public virtual async Task>> GetLiven /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. /// The cancellation token to use. - /// - /// List sessions from the last sessionId greater than the "start". - /// - /// The result should be ordered by sessionId in ascending order. - /// + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-sessions for more details. /// public virtual Response> GetLivenessWithVerifySessions(string start = null, int? top = null, CancellationToken cancellationToken = default) { @@ -1266,7 +1156,7 @@ public virtual Response GetLivenessWithVerifySessions(string start, int? top, Re } } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -1291,7 +1181,7 @@ public virtual async Task>> Ge return Response.FromValue(value, response); } - /// Gets session requests and response body for the session. + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// The unique ID to reference this session. /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. /// The number of items to list, ranging in [1, 1000]. Default is 1000. @@ -1317,7 +1207,7 @@ public virtual Response> GetLivenessWit } /// - /// [Protocol Method] Gets session requests and response body for the session. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// /// /// @@ -1359,7 +1249,7 @@ public virtual async Task GetLivenessWithVerifySessionAuditEntriesAsyn } /// - /// [Protocol Method] Gets session requests and response body for the session. + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-liveness-with-verify-session-audit-entries for more details. /// /// /// @@ -1400,6 +1290,270 @@ public virtual Response GetLivenessWithVerifySessionAuditEntries(string sessionI } } + /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// Id of session image. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. + /// The 'recognitionModel' associated with the detected faceIds. 
Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-session-image-id for more details. + /// + public virtual async Task>> DetectFromSessionImageAsync(string sessionImageId, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(sessionImageId, nameof(sessionImageId)); + + DetectFromSessionImageRequest detectFromSessionImageRequest = new DetectFromSessionImageRequest(sessionImageId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await DetectFromSessionImageAsync(detectFromSessionImageRequest.ToRequestContent(), detectionModel?.ToString(), recognitionModel?.ToString(), returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceDetectionResult.DeserializeFaceDetectionResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// Id of session image. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// Return faceIds of the detected faces or not. The default value is true. 
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-detection-operations/detect-from-session-image-id for more details. + /// + public virtual Response> DetectFromSessionImage(string sessionImageId, FaceDetectionModel? detectionModel = null, FaceRecognitionModel? recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(sessionImageId, nameof(sessionImageId)); + + DetectFromSessionImageRequest detectFromSessionImageRequest = new DetectFromSessionImageRequest(sessionImageId, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = DetectFromSessionImage(detectFromSessionImageRequest.ToRequestContent(), detectionModel?.ToString(), recognitionModel?.ToString(), returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(FaceDetectionResult.DeserializeFaceDetectionResult(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. 
+ /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. + /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DetectFromSessionImageAsync(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DetectFromSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectFromSessionImageRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and attributes. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. 'detection_03' is recommended since its accuracy is improved on smaller faces (64x64 pixels) and rotated face orientations. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// The 'recognitionModel' associated with the detected faceIds. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03' or 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. Allowed values: "recognition_01" | "recognition_02" | "recognition_03" | "recognition_04". + /// Return faceIds of the detected faces or not. The default value is true. + /// Analyze and return the one or more specified face attributes in the comma-separated string like 'returnFaceAttributes=headPose,glasses'. Face attribute analysis has additional computational and time cost. + /// Return face landmarks of the detected faces or not. The default value is false. + /// Return 'recognitionModel' or not. The default value is false. This is only applicable when returnFaceId = true. 
+ /// The number of seconds for the face ID being cached. Supported range from 60 seconds up to 86400 seconds. The default value is 86400 (24 hours). + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DetectFromSessionImage(RequestContent content, string detectionModel = null, string recognitionModel = null, bool? returnFaceId = null, IEnumerable returnFaceAttributes = null, bool? returnFaceLandmarks = null, bool? returnRecognitionModel = null, int? faceIdTimeToLive = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.DetectFromSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateDetectFromSessionImageRequest(content, detectionModel, recognitionModel, returnFaceId, returnFaceAttributes, returnFaceLandmarks, returnRecognitionModel, faceIdTimeToLive, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// The request ID of the image to be retrieved. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual async Task> GetSessionImageAsync(string sessionImageId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetSessionImageAsync(sessionImageId, context).ConfigureAwait(false); + return Response.FromValue(response.Content, response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// The request ID of the image to be retrieved. + /// The cancellation token to use. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// + public virtual Response GetSessionImage(string sessionImageId, CancellationToken cancellationToken = default) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetSessionImage(sessionImageId, context); + return Response.FromValue(response.Content, response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request ID of the image to be retrieved. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task GetSessionImageAsync(string sessionImageId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSessionImageRequest(sessionImageId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/liveness-session-operations/get-session-image for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request ID of the image to be retrieved. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// is an empty string, and was expected to be non-empty. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetSessionImage(string sessionImageId, RequestContext context) + { + Argument.AssertNotNullOrEmpty(sessionImageId, nameof(sessionImageId)); + + using var scope = ClientDiagnostics.CreateScope("FaceSessionClient.GetSessionImage"); + scope.Start(); + try + { + using HttpMessage message = CreateGetSessionImageRequest(sessionImageId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + internal HttpMessage CreateCreateLivenessSessionRequest(RequestContent content, RequestContext context) { var message = _pipeline.CreateMessage(context, ResponseClassifier200); @@ -1611,6 +1765,67 @@ internal HttpMessage CreateGetLivenessWithVerifySessionAuditEntriesRequest(strin return message; } + internal HttpMessage CreateDetectFromSessionImageRequest(RequestContent content, string detectionModel, string recognitionModel, bool? returnFaceId, IEnumerable returnFaceAttributes, bool? returnFaceLandmarks, bool? returnRecognitionModel, int? 
faceIdTimeToLive, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/detect", false); + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (recognitionModel != null) + { + uri.AppendQuery("recognitionModel", recognitionModel, true); + } + if (returnFaceId != null) + { + uri.AppendQuery("returnFaceId", returnFaceId.Value, true); + } + if (returnFaceAttributes != null && !(returnFaceAttributes is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("returnFaceAttributes", returnFaceAttributes, ",", true); + } + if (returnFaceLandmarks != null) + { + uri.AppendQuery("returnFaceLandmarks", returnFaceLandmarks.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + if (faceIdTimeToLive != null) + { + uri.AppendQuery("faceIdTimeToLive", faceIdTimeToLive.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetSessionImageRequest(string sessionImageId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/session/sessionImages/", false); + uri.AppendPath(sessionImageId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/octet-stream"); + return message; + } + private static RequestContext DefaultRequestContext = new RequestContext(); internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) { diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs new file mode 100644 index 000000000000..3baf11bb5f72 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.Serialization.cs @@ -0,0 +1,176 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class FaceTrainingResult : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("status"u8); + writer.WriteStringValue(Status.ToString()); + writer.WritePropertyName("createdDateTime"u8); + writer.WriteStringValue(CreatedDateTime, "O"); + writer.WritePropertyName("lastActionDateTime"u8); + writer.WriteStringValue(LastActionDateTime, "O"); + writer.WritePropertyName("lastSuccessfulTrainingDateTime"u8); + writer.WriteStringValue(LastSuccessfulTrainingDateTime, "O"); + if (Optional.IsDefined(Message)) + { + writer.WritePropertyName("message"u8); + writer.WriteStringValue(Message); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FaceTrainingResult IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFaceTrainingResult(document.RootElement, options); + } + + internal static FaceTrainingResult DeserializeFaceTrainingResult(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + FaceOperationStatus status = default; + DateTimeOffset createdDateTime = default; + DateTimeOffset lastActionDateTime = default; + DateTimeOffset lastSuccessfulTrainingDateTime = default; + string message = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("status"u8)) + { + status = new FaceOperationStatus(property.Value.GetString()); + continue; + } + if (property.NameEquals("createdDateTime"u8)) + { + createdDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("lastActionDateTime"u8)) + { + lastActionDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("lastSuccessfulTrainingDateTime"u8)) + { + lastSuccessfulTrainingDateTime = property.Value.GetDateTimeOffset("O"); + continue; + } + if (property.NameEquals("message"u8)) + { + message = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FaceTrainingResult( + status, + createdDateTime, + lastActionDateTime, + lastSuccessfulTrainingDateTime, + message, + serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support writing '{options.Format}' format."); + } + } + + FaceTrainingResult IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFaceTrainingResult(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FaceTrainingResult)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FaceTrainingResult FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFaceTrainingResult(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs new file mode 100644 index 000000000000..79ac7a85a12b --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FaceTrainingResult.cs @@ -0,0 +1,94 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Training result of a container. + public partial class FaceTrainingResult + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Training status of the container. + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. 
+ internal FaceTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime) + { + Status = status; + CreatedDateTime = createdDateTime; + LastActionDateTime = lastActionDateTime; + LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime; + } + + /// Initializes a new instance of . + /// Training status of the container. + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. + /// Show failure message when training failed (omitted when training succeeds). + /// Keeps track of any properties unknown to the library. + internal FaceTrainingResult(FaceOperationStatus status, DateTimeOffset createdDateTime, DateTimeOffset lastActionDateTime, DateTimeOffset lastSuccessfulTrainingDateTime, string message, IDictionary serializedAdditionalRawData) + { + Status = status; + CreatedDateTime = createdDateTime; + LastActionDateTime = lastActionDateTime; + LastSuccessfulTrainingDateTime = lastSuccessfulTrainingDateTime; + Message = message; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FaceTrainingResult() + { + } + + /// Training status of the container. + public FaceOperationStatus Status { get; } + /// A combined UTC date and time string that describes the created time of the person group, large person group or large face list. + public DateTimeOffset CreatedDateTime { get; } + /// A combined UTC date and time string that describes the last modify time of the person group, large person group or large face list, could be null value when the group is not successfully trained. + public DateTimeOffset LastActionDateTime { get; } + /// A combined UTC date and time string that describes the last successful training time of the person group, large person group or large face list. + public DateTimeOffset LastSuccessfulTrainingDateTime { get; } + /// Show failure message when training failed (omitted when training succeeds). + public string Message { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs new file mode 100644 index 000000000000..dedc938a5798 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.Serialization.cs @@ -0,0 +1,173 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License.
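For orientation, `FaceTrainingResult` above is the status payload a caller polls after starting training on a Large Face List or Large Person Group. The sketch below is a hypothetical polling loop, not code from this PR: it assumes an authenticated `LargeFaceListClient` named `client`, and it assumes the member names `TrainAsync` and `GetTrainingStatusAsync` from the "Train Large Face List" and "Get Large Face List Training Status" REST operations rather than from this generated client.

```C#
// Hypothetical sketch: poll training status until it settles.
// `client`, `TrainAsync`, and `GetTrainingStatusAsync` are assumptions (see above).
await client.TrainAsync(WaitUntil.Started);

FaceTrainingResult training;
do
{
    await Task.Delay(TimeSpan.FromSeconds(1));
    training = await client.GetTrainingStatusAsync(); // Response<FaceTrainingResult> converts implicitly
} while (training.Status == FaceOperationStatus.NotStarted || training.Status == FaceOperationStatus.Running);

Console.WriteLine(training.Status == FaceOperationStatus.Succeeded
    ? $"Training succeeded at {training.LastSuccessfulTrainingDateTime:O}"
    : $"Training failed: {training.Message}");
```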
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class FindSimilarFromLargeFaceListRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(Mode)) + { + writer.WritePropertyName("mode"u8); + writer.WriteStringValue(Mode.Value.ToString()); + } + writer.WritePropertyName("largeFaceListId"u8); + writer.WriteStringValue(LargeFaceListId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + FindSimilarFromLargeFaceListRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options); + } + + internal static FindSimilarFromLargeFaceListRequest DeserializeFindSimilarFromLargeFaceListRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + int? maxNumOfCandidatesReturned = default; + FindSimilarMatchMode? 
mode = default; + string largeFaceListId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("mode"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + mode = new FindSimilarMatchMode(property.Value.GetString()); + continue; + } + if (property.NameEquals("largeFaceListId"u8)) + { + largeFaceListId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new FindSimilarFromLargeFaceListRequest(faceId, maxNumOfCandidatesReturned, mode, largeFaceListId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support writing '{options.Format}' format."); + } + } + + FindSimilarFromLargeFaceListRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(FindSimilarFromLargeFaceListRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static FindSimilarFromLargeFaceListRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeFindSimilarFromLargeFaceListRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs new file mode 100644 index 000000000000..8603dc4b91cf --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/FindSimilarFromLargeFaceListRequest.cs @@ -0,0 +1,89 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The FindSimilarFromLargeFaceListRequest. 
+ internal partial class FindSimilarFromLargeFaceListRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// is null. + internal FindSimilarFromLargeFaceListRequest(Guid faceId, string largeFaceListId) + { + Argument.AssertNotNull(largeFaceListId, nameof(largeFaceListId)); + + FaceId = faceId; + LargeFaceListId = largeFaceListId; + } + + /// Initializes a new instance of . + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. + /// Keeps track of any properties unknown to the library. + internal FindSimilarFromLargeFaceListRequest(Guid faceId, int? maxNumOfCandidatesReturned, FindSimilarMatchMode? mode, string largeFaceListId, IDictionary serializedAdditionalRawData) + { + FaceId = faceId; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + Mode = mode; + LargeFaceListId = largeFaceListId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal FindSimilarFromLargeFaceListRequest() + { + } + + /// faceId of the query face. User needs to call "Detect" first to get a valid faceId. Note that this faceId is not persisted and will expire 24 hours after the detection call. + public Guid FaceId { get; } + /// The number of top similar faces returned. The valid range is [1, 1000]. Default value is 20. + public int? MaxNumOfCandidatesReturned { get; } + /// Similar face searching mode. It can be 'matchPerson' or 'matchFace'. Default value is 'matchPerson'. + public FindSimilarMatchMode? Mode { get; } + /// An existing user-specified unique candidate Large Face List, created in "Create Large Face List". Large Face List contains a set of persistedFaceIds which are persisted and will never expire. 
+ public string LargeFaceListId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs new file mode 100644 index 000000000000..44d5b2573408 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.Serialization.cs @@ -0,0 +1,183 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class IdentifyFromLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceIds"u8); + writer.WriteStartArray(); + foreach (var item in FaceIds) + { + writer.WriteStringValue(item); + } + writer.WriteEndArray(); + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + if (Optional.IsDefined(MaxNumOfCandidatesReturned)) + { + writer.WritePropertyName("maxNumOfCandidatesReturned"u8); + writer.WriteNumberValue(MaxNumOfCandidatesReturned.Value); + } + if (Optional.IsDefined(ConfidenceThreshold)) + { + writer.WritePropertyName("confidenceThreshold"u8); + writer.WriteNumberValue(ConfidenceThreshold.Value); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + IdentifyFromLargePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options); + } + + internal static IdentifyFromLargePersonGroupRequest DeserializeIdentifyFromLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + IReadOnlyList faceIds = default; + string largePersonGroupId = default; + int? maxNumOfCandidatesReturned = default; + float? 
confidenceThreshold = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceIds"u8)) + { + List array = new List(); + foreach (var item in property.Value.EnumerateArray()) + { + array.Add(item.GetGuid()); + } + faceIds = array; + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("maxNumOfCandidatesReturned"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + maxNumOfCandidatesReturned = property.Value.GetInt32(); + continue; + } + if (property.NameEquals("confidenceThreshold"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + confidenceThreshold = property.Value.GetSingle(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new IdentifyFromLargePersonGroupRequest(faceIds, largePersonGroupId, maxNumOfCandidatesReturned, confidenceThreshold, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support writing '{options.Format}' format."); + } + } + + IdentifyFromLargePersonGroupRequest IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(IdentifyFromLargePersonGroupRequest)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static IdentifyFromLargePersonGroupRequest FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeIdentifyFromLargePersonGroupRequest(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs new file mode 100644 index 000000000000..f8ae4acdf23c --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/IdentifyFromLargePersonGroupRequest.cs @@ -0,0 +1,91 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
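The request model just serialized backs the new `IdentifyFromLargePersonGroup` operation on `FaceClient` (listed in the CHANGELOG). Below is a minimal, hypothetical usage sketch: `faceClient` is assumed to be an authenticated `FaceClient`, `queryFaceId` a faceId from a prior Detect call, and the overload shape is inferred from the request model's fields rather than verified against this PR.

```C#
// Hypothetical sketch; the names called out above are assumptions, not part of this PR.
Response<IReadOnlyList<FaceIdentificationResult>> identification =
    await faceClient.IdentifyFromLargePersonGroupAsync(
        new[] { queryFaceId },           // 1 to 10 faceIds; each face is identified independently
        "my-large-person-group",         // existing largePersonGroupId
        maxNumOfCandidatesReturned: 10,  // valid range [1, 100], default 10
        confidenceThreshold: 0.75f);     // optional override within [0, 1]

foreach (FaceIdentificationResult result in identification.Value)
{
    foreach (FaceIdentificationCandidate candidate in result.Candidates)
    {
        Console.WriteLine($"{result.FaceId} -> person {candidate.PersonId} ({candidate.Confidence})");
    }
}
```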
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Linq; + +namespace Azure.AI.Vision.Face +{ + /// The IdentifyFromLargePersonGroupRequest. + internal partial class IdentifyFromLargePersonGroupRequest + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces is identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameters personGroupId and largePersonGroupId should not be provided at the same time. + /// or is null. + internal IdentifyFromLargePersonGroupRequest(IEnumerable faceIds, string largePersonGroupId) + { + Argument.AssertNotNull(faceIds, nameof(faceIds)); + Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId)); + + FaceIds = faceIds.ToList(); + LargePersonGroupId = largePersonGroupId; + } + + /// Initializes a new instance of . + /// Array of query faces faceIds, created by the "Detect". Each of the faces is identified independently. The valid number of faceIds is between [1, 10]. + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameters personGroupId and largePersonGroupId should not be provided at the same time. + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + /// Keeps track of any properties unknown to the library. + internal IdentifyFromLargePersonGroupRequest(IReadOnlyList faceIds, string largePersonGroupId, int? maxNumOfCandidatesReturned, float? confidenceThreshold, IDictionary serializedAdditionalRawData) + { + FaceIds = faceIds; + LargePersonGroupId = largePersonGroupId; + MaxNumOfCandidatesReturned = maxNumOfCandidatesReturned; + ConfidenceThreshold = confidenceThreshold; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal IdentifyFromLargePersonGroupRequest() + { + } + + /// Array of query faces faceIds, created by the "Detect". Each of the faces is identified independently. The valid number of faceIds is between [1, 10]. + public IReadOnlyList FaceIds { get; } + /// largePersonGroupId of the target Large Person Group, created by "Create Large Person Group". Parameters personGroupId and largePersonGroupId should not be provided at the same time.
+ public string LargePersonGroupId { get; } + /// The range of maxNumOfCandidatesReturned is between 1 and 100. Default value is 10. + public int? MaxNumOfCandidatesReturned { get; } + /// Customized identification confidence threshold, in the range of [0, 1]. Advanced user can tweak this value to override default internal threshold for better precision on their scenario data. Note there is no guarantee of this threshold value working on other data and after algorithm updates. + public float? ConfidenceThreshold { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs new file mode 100644 index 000000000000..d52d68e0bc12 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.Serialization.cs @@ -0,0 +1,172 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargeFaceList : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceList)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("name"u8); + writer.WriteStringValue(Name); + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (Optional.IsDefined(RecognitionModel)) + { + writer.WritePropertyName("recognitionModel"u8); + writer.WriteStringValue(RecognitionModel.Value.ToString()); + } + if (options.Format != "W") + { + writer.WritePropertyName("largeFaceListId"u8); + writer.WriteStringValue(LargeFaceListId); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargeFaceList IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargeFaceList)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargeFaceList(document.RootElement, options); + } + + internal static LargeFaceList DeserializeLargeFaceList(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + string name = default; + string userData = default; + FaceRecognitionModel? 
recognitionModel = default; + string largeFaceListId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("name"u8)) + { + name = property.Value.GetString(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (property.NameEquals("recognitionModel"u8)) + { + if (property.Value.ValueKind == JsonValueKind.Null) + { + continue; + } + recognitionModel = new FaceRecognitionModel(property.Value.GetString()); + continue; + } + if (property.NameEquals("largeFaceListId"u8)) + { + largeFaceListId = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargeFaceList(name, userData, recognitionModel, largeFaceListId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargeFaceList)} does not support writing '{options.Format}' format."); + } + } + + LargeFaceList IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargeFaceList(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargeFaceList)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargeFaceList FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargeFaceList(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs new file mode 100644 index 000000000000..586366e51336 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceList.cs @@ -0,0 +1,87 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Large face list is a list of faces, up to 1,000,000 faces. + public partial class LargeFaceList + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". 
+ /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargeFaceList(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + } + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// Keeps track of any properties unknown to the library. + internal LargeFaceList(string name, string userData, FaceRecognitionModel? recognitionModel, string largeFaceListId, IDictionary serializedAdditionalRawData) + { + Name = name; + UserData = userData; + RecognitionModel = recognitionModel; + LargeFaceListId = largeFaceListId; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LargeFaceList() + { + } + + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. + public FaceRecognitionModel? RecognitionModel { get; } + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + public string LargeFaceListId { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListClient.cs new file mode 100644 index 000000000000..2d63f135f43c --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListClient.cs @@ -0,0 +1,1548 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Core.Pipeline; + +namespace Azure.AI.Vision.Face +{ + // Data plane generated sub-client. + /// The LargeFaceList sub-client. + public partial class LargeFaceListClient + { + private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key"; + private readonly AzureKeyCredential _keyCredential; + private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" }; + private readonly TokenCredential _tokenCredential; + private readonly HttpPipeline _pipeline; + private readonly Uri _endpoint; + private readonly string _largeFaceListId; + private readonly string _apiVersion; + + /// The ClientDiagnostics is used to provide tracing support for the client library. + internal ClientDiagnostics ClientDiagnostics { get; } + + /// The HTTP pipeline for sending and receiving REST requests and responses. 
+ public virtual HttpPipeline Pipeline => _pipeline; + + /// Initializes a new instance of LargeFaceListClient for mocking. + protected LargeFaceListClient() + { + } + + /// Initializes a new instance of LargeFaceListClient. + /// The handler for diagnostic messaging in the client. + /// The HTTP pipeline for sending and receiving REST requests and responses. + /// The key credential to copy. + /// The token credential to copy. + /// + /// Supported Cognitive Services endpoints (protocol and hostname, for example: + /// https://{resource-name}.cognitiveservices.azure.com). + /// + /// Valid character is letter in lower case or digit or '-' or '_', maximum length is 64. + /// API Version. Allowed values: "v1.1-preview.1" | "v1.2-preview.1". + internal LargeFaceListClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string largeFaceListId, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _largeFaceListId = largeFaceListId; + _apiVersion = apiVersion; + } + + /// Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/create-large-face-list for more details. + /// + public virtual async Task CreateAsync(string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreateRequest1 createRequest1 = new CreateRequest1(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(createRequest1.ToRequestContent(), context).ConfigureAwait(false); + return response; + } + + /// Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02', 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/create-large-face-list for more details. + /// + public virtual Response Create(string name, string userData = null, FaceRecognitionModel?
recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreateRequest1 createRequest1 = new CreateRequest1(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(createRequest1.ToRequestContent(), context); + return response; + } + + /// + /// [Protocol Method] Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create an empty Large Face List with user-specified largeFaceListId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
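Each operation in this client follows the generated dual-method pattern: a strongly typed convenience overload that builds the request body (here via `CreateRequest1`) and a protocol overload that takes raw `RequestContent`. A hedged sketch of driving the protocol overload directly; the JSON property names mirror the convenience overload's parameters and the Face REST API, not anything shown verbatim in this hunk:

```C#
// Sketch: calling the protocol Create overload with a hand-built JSON body,
// continuing the acquisition sketch above (listClient in scope).
using Azure;
using Azure.Core;

RequestContent body = RequestContent.Create(BinaryData.FromObjectAsJson(new
{
    name = "My face list",
    userData = "created by sample",
    recognitionModel = "recognition_04"
}));
Response response = await listClient.CreateAsync(body);
Console.WriteLine(response.Status); // 200 expected, per ResponseClassifier200 below
```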
+ /// + public virtual async Task DeleteAsync(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a Large Face List by specified largeFaceListId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual async Task> GetLargeFaceListAsync(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargeFaceListAsync(returnRecognitionModel, context).ConfigureAwait(false); + return Response.FromValue(LargeFaceList.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual Response GetLargeFaceList(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargeFaceList(returnRecognitionModel, context); + return Response.FromValue(LargeFaceList.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargeFaceListAsync(bool? 
returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListRequest(returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargeFaceList(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetLargeFaceList"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListRequest(returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response Update(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-lists for more details. + /// + public virtual async Task>> GetLargeFaceListsAsync(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargeFaceListsAsync(start, top, returnRecognitionModel, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceList.DeserializeLargeFaceList(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-lists for more details. + /// + public virtual Response> GetLargeFaceLists(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargeFaceLists(start, top, returnRecognitionModel, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceList.DeserializeLargeFaceList(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargeFaceListsAsync(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetLargeFaceLists"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListsRequest(start, top, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List Large Face Lists' information of largeFaceListId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargeFaceLists(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetLargeFaceLists"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargeFaceListsRequest(start, top, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// The cancellation token to use. + /// + public virtual async Task> GetTrainingStatusAsync(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetTrainingStatusAsync(context).ConfigureAwait(false); + return Response.FromValue(FaceTrainingResult.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// The cancellation token to use. + /// + public virtual Response GetTrainingStatus(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetTrainingStatus(context); + return Response.FromValue(FaceTrainingResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. 
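The `start`/`top` parameters above implement cursor-style paging: each call returns items whose IDs sort after `start`. A sketch of draining all pages, assuming (as the parameter docs suggest) that `LargeFaceListId` serves as the cursor and that a short page means there are no further results:

```C#
// Sketch: enumerate all Large Face Lists using the start/top cursor.
using System.Collections.Generic;
using Azure.AI.Vision.Face;

string cursor = null;
var all = new List<LargeFaceList>();
while (true)
{
    IReadOnlyList<LargeFaceList> page =
        (await listClient.GetLargeFaceListsAsync(start: cursor, top: 100)).Value;
    all.AddRange(page);
    if (page.Count < 100) break;                   // short page => done
    cursor = page[page.Count - 1].LargeFaceListId; // next call returns IDs after this one
}
```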
+ /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetTrainingStatusAsync(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetTrainingStatusRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-training-status for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetTrainingStatus(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetTrainingStatusRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face-from-url for more details. + internal virtual async Task> AddFaceFromUrlImplAsync(Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest1 addFaceFromUrlRequest1 = new AddFaceFromUrlRequest1(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceFromUrlImplAsync(addFaceFromUrlRequest1.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. 
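The `AddFaceFromUrlImpl*` methods are `internal`, which in this codebase usually signals hand-written public wrappers in a customization partial that is not part of this hunk. A sketch assuming such a wrapper named `AddFaceFromUrlAsync` with the same parameters as the internal method:

```C#
// Hypothetical wrapper call; AddFaceFromUrlAsync is an assumed public shim over
// the internal AddFaceFromUrlImplAsync shown here.
using Azure;
using Azure.AI.Vision.Face;

Uri imageUri = new Uri("https://example.com/photos/employee-42.jpg");
Response<AddFaceResult> added = await listClient.AddFaceFromUrlAsync(
    imageUri,
    targetFace: new[] { 10, 10, 100, 100 }, // left, top, width, height
    userData: "employee-42");
Console.WriteLine(added.Value.PersistedFaceId);
```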
+ /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face-from-url for more details. + internal virtual Response AddFaceFromUrlImpl(Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest1 addFaceFromUrlRequest1 = new AddFaceFromUrlRequest1(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceFromUrlImpl(addFaceFromUrlRequest1.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceFromUrlImplAsync(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual Response AddFaceFromUrlImpl(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face for more details. + internal virtual async Task> AddFaceImplAsync(BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceImplAsync(content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a specified Large Face List, up to 1,000,000 faces. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/add-large-face-list-face for more details. + internal virtual Response AddFaceImpl(BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceImpl(content, targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. 
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceImplAsync(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a specified Large Face List, up to 1,000,000 faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
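The binary counterpart accepts the image bytes directly as the request body (sent as `application/octet-stream` per the `CreateAddFaceImplRequest` builder later in this file). A sketch, again assuming a public wrapper (`AddFaceAsync`) over the internal `AddFaceImplAsync`:

```C#
// Hypothetical wrapper call over the internal AddFaceImplAsync; the body is
// raw image bytes rather than JSON.
using System.IO;
using Azure;
using Azure.AI.Vision.Face;

BinaryData image = BinaryData.FromBytes(await File.ReadAllBytesAsync("face.jpg"));
Response<AddFaceResult> result = await listClient.AddFaceAsync(image, userData: "from-disk");
Console.WriteLine(result.Value.PersistedFaceId);
```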
+ internal virtual Response AddFaceImpl(RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/delete-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteFaceAsync(Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/delete-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeleteFace(Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// Face ID of the face. + /// The cancellation token to use. + /// + public virtual async Task> GetFaceAsync(Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetFaceAsync(persistedFaceId, context).ConfigureAwait(false); + return Response.FromValue(LargeFaceListFace.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// Face ID of the face. 
+ /// The cancellation token to use. + /// + public virtual Response GetFace(Guid persistedFaceId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetFace(persistedFaceId, context); + return Response.FromValue(LargeFaceListFace.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetFaceAsync(Guid persistedFaceId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceRequest(persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetFace(Guid persistedFaceId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetFace"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFaceRequest(persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task UpdateFaceAsync(Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.UpdateFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceRequest(persistedFaceId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/update-large-face-list-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response UpdateFace(Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.UpdateFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceRequest(persistedFaceId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List faces' persistedFaceId and userData in a specified Large Face List. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-faces for more details. + /// + public virtual async Task>> GetFacesAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetFacesAsync(start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceListFace.DeserializeLargeFaceListFace(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List faces' persistedFaceId and userData in a specified Large Face List. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/face-list-operations/get-large-face-list-faces for more details. + /// + public virtual Response> GetFaces(string start = null, int? 
top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetFaces(start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargeFaceListFace.DeserializeLargeFaceListFace(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List faces' persistedFaceId and userData in a specified Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetFacesAsync(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetFaces"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFacesRequest(start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List faces' persistedFaceId and userData in a specified Large Face List. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetFaces(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.GetFaces"); + scope.Start(); + try + { + using HttpMessage message = CreateGetFacesRequest(start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Face List training task. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. 
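`Train` is surfaced as a long-running operation: the request builder below posts to `/train`, expects 202, and `ProtocolOperationHelpers` polls completion via the Operation-Location header. A sketch of the typical call pattern; the `Status` member on `FaceTrainingResult` is an assumption, since that model's shape is not shown in this hunk:

```C#
// Sketch: kick off training and block until the service reports completion.
using Azure;
using Azure.AI.Vision.Face;

Operation trainOp = await listClient.TrainAsync(WaitUntil.Completed);

// Or start it without waiting and poll the dedicated status endpoint instead:
Operation started = await listClient.TrainAsync(WaitUntil.Started);
Response<FaceTrainingResult> training = await listClient.GetTrainingStatusAsync();
Console.WriteLine(training.Value.Status); // member name assumed
```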
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual async Task TrainAsync(WaitUntil waitUntil, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Train"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainRequest(context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "LargeFaceListClient.Train", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Face List training task. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation Train(WaitUntil waitUntil, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargeFaceListClient.Train"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainRequest(context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "LargeFaceListClient.Train", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargeFaceListRequest(bool? 
returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargeFaceListsRequest(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetTrainingStatusRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/training", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateTrainRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/train", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAddFaceFromUrlImplRequest(RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + 
request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateAddFaceImplRequest(RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/octet-stream"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteFaceRequest(Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetFaceRequest(Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateFaceRequest(Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var 
uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetFacesRequest(string start, int? top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largefacelists/", false); + uri.AppendPath(_largeFaceListId, true); + uri.AppendPath("/persistedfaces", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier202; + private static ResponseClassifier ResponseClassifier202 => _responseClassifier202 ??= new StatusCodeClassifier(stackalloc ushort[] { 202 }); + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs new file mode 100644 index 000000000000..2f9621a39bbc --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargeFaceListFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel<LargeFaceListFace>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support writing '{format}' format.");
+            }
+
+            writer.WriteStartObject();
+            if (options.Format != "W")
+            {
+                writer.WritePropertyName("persistedFaceId"u8);
+                writer.WriteStringValue(PersistedFaceId);
+            }
+            if (Optional.IsDefined(UserData))
+            {
+                writer.WritePropertyName("userData"u8);
+                writer.WriteStringValue(UserData);
+            }
+            if (options.Format != "W" && _serializedAdditionalRawData != null)
+            {
+                foreach (var item in _serializedAdditionalRawData)
+                {
+                    writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item.Value);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item.Value))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+            }
+            writer.WriteEndObject();
+        }
+
+        LargeFaceListFace IJsonModel<LargeFaceListFace>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargeFaceListFace>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support reading '{format}' format.");
+            }
+
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeLargeFaceListFace(document.RootElement, options);
+        }
+
+        internal static LargeFaceListFace DeserializeLargeFaceListFace(JsonElement element, ModelReaderWriterOptions options = null)
+        {
+            options ??= ModelSerializationExtensions.WireOptions;
+
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            Guid persistedFaceId = default;
+            string userData = default;
+            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("persistedFaceId"u8))
+                {
+                    persistedFaceId = property.Value.GetGuid();
+                    continue;
+                }
+                if (property.NameEquals("userData"u8))
+                {
+                    userData = property.Value.GetString();
+                    continue;
+                }
+                if (options.Format != "W")
+                {
+                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+                }
+            }
+            serializedAdditionalRawData = rawDataDictionary;
+            return new LargeFaceListFace(persistedFaceId, userData, serializedAdditionalRawData);
+        }
+
+        BinaryData IPersistableModel<LargeFaceListFace>.Write(ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargeFaceListFace>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options);
+                default:
+                    throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support writing '{options.Format}' format.");
+            }
+        }
+
+        LargeFaceListFace IPersistableModel<LargeFaceListFace>.Create(BinaryData data, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargeFaceListFace>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    {
+                        using JsonDocument document = JsonDocument.Parse(data);
+                        return DeserializeLargeFaceListFace(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(LargeFaceListFace)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<LargeFaceListFace>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+        /// <summary> Deserializes the model from a raw response. </summary>
+        /// <param name="response"> The response to deserialize the model from. </param>
+        internal static LargeFaceListFace FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeLargeFaceListFace(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue<LargeFaceListFace>(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs
new file mode 100644
index 000000000000..6a0efa75f2a0
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargeFaceListFace.cs
@@ -0,0 +1,69 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> Face resource for large face list. </summary>
+    public partial class LargeFaceListFace
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="LargeFaceListFace"/>. </summary>
+        internal LargeFaceListFace()
+        {
+        }
+
+        /// <summary> Initializes a new instance of <see cref="LargeFaceListFace"/>. </summary>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="userData"> User-provided data attached to the face. The length limit is 1K. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal LargeFaceListFace(Guid persistedFaceId, string userData, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            PersistedFaceId = persistedFaceId;
+            UserData = userData;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Face ID of the face. </summary>
+        public Guid PersistedFaceId { get; }
+        /// <summary> User-provided data attached to the face. The length limit is 1K. </summary>
+        public string UserData { get; }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs
new file mode 100644
index 000000000000..0a39ef9f4066
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.Serialization.cs
@@ -0,0 +1,172 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+    public partial class LargePersonGroup : IUtf8JsonSerializable, IJsonModel<LargePersonGroup>
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<LargePersonGroup>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+        void IJsonModel<LargePersonGroup>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroup>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(LargePersonGroup)} does not support writing '{format}' format.");
+            }
+
+            writer.WriteStartObject();
+            writer.WritePropertyName("name"u8);
+            writer.WriteStringValue(Name);
+            if (Optional.IsDefined(UserData))
+            {
+                writer.WritePropertyName("userData"u8);
+                writer.WriteStringValue(UserData);
+            }
+            if (Optional.IsDefined(RecognitionModel))
+            {
+                writer.WritePropertyName("recognitionModel"u8);
+                writer.WriteStringValue(RecognitionModel.Value.ToString());
+            }
+            if (options.Format != "W")
+            {
+                writer.WritePropertyName("largePersonGroupId"u8);
+                writer.WriteStringValue(LargePersonGroupId);
+            }
+            if (options.Format != "W" && _serializedAdditionalRawData != null)
+            {
+                foreach (var item in _serializedAdditionalRawData)
+                {
+                    writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item.Value);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item.Value))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+            }
+            writer.WriteEndObject();
+        }
+
+        LargePersonGroup IJsonModel<LargePersonGroup>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroup>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(LargePersonGroup)} does not support reading '{format}' format.");
+            }
+
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeLargePersonGroup(document.RootElement, options);
+        }
+
+        internal static LargePersonGroup DeserializeLargePersonGroup(JsonElement element, ModelReaderWriterOptions options = null)
+        {
+            options ??= ModelSerializationExtensions.WireOptions;
+
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            string name = default;
+            string userData = default;
+            FaceRecognitionModel? recognitionModel = default;
+            string largePersonGroupId = default;
+            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("name"u8))
+                {
+                    name = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("userData"u8))
+                {
+                    userData = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("recognitionModel"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    recognitionModel = new FaceRecognitionModel(property.Value.GetString());
+                    continue;
+                }
+                if (property.NameEquals("largePersonGroupId"u8))
+                {
+                    largePersonGroupId = property.Value.GetString();
+                    continue;
+                }
+                if (options.Format != "W")
+                {
+                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+                }
+            }
+            serializedAdditionalRawData = rawDataDictionary;
+            return new LargePersonGroup(name, userData, recognitionModel, largePersonGroupId, serializedAdditionalRawData);
+        }
+
+        BinaryData IPersistableModel<LargePersonGroup>.Write(ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroup>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options);
+                default:
+                    throw new FormatException($"The model {nameof(LargePersonGroup)} does not support writing '{options.Format}' format.");
+            }
+        }
+
+        LargePersonGroup IPersistableModel<LargePersonGroup>.Create(BinaryData data, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroup>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    {
+                        using JsonDocument document = JsonDocument.Parse(data);
+                        return DeserializeLargePersonGroup(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(LargePersonGroup)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<LargePersonGroup>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+        /// <summary> Deserializes the model from a raw response. </summary>
+        /// <param name="response"> The response to deserialize the model from. </param>
+        internal static LargePersonGroup FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeLargePersonGroup(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue<LargePersonGroup>(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs
new file mode 100644
index 000000000000..a38758c190fd
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroup.cs
@@ -0,0 +1,87 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> The container of the uploaded person data, including face recognition feature, and up to 1,000,000 people. </summary>
+    public partial class LargePersonGroup
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="LargePersonGroup"/>. </summary>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="name"/> is null. </exception>
+        internal LargePersonGroup(string name)
+        {
+            Argument.AssertNotNull(name, nameof(name));
+
+            Name = name;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="LargePersonGroup"/>. </summary>
+        /// <param name="name"> User defined name, maximum length is 128. </param>
+        /// <param name="userData"> Optional user defined data. Length should not exceed 16K. </param>
+        /// <param name="recognitionModel"> Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. </param>
+        /// <param name="largePersonGroupId"> ID of the container. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal LargePersonGroup(string name, string userData, FaceRecognitionModel? recognitionModel, string largePersonGroupId, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            Name = name;
+            UserData = userData;
+            RecognitionModel = recognitionModel;
+            LargePersonGroupId = largePersonGroupId;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="LargePersonGroup"/> for deserialization. </summary>
+        internal LargePersonGroup()
+        {
+        }
+
+        /// <summary> User defined name, maximum length is 128. </summary>
+        public string Name { get; }
+        /// <summary> Optional user defined data. Length should not exceed 16K. </summary>
+        public string UserData { get; }
+        /// <summary> Name of recognition model. Recognition model is used when the face features are extracted and associated with detected faceIds. </summary>
+        public FaceRecognitionModel? RecognitionModel { get; }
+        /// <summary> ID of the container. </summary>
+        public string LargePersonGroupId { get; }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupClient.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupClient.cs
new file mode 100644
index 000000000000..0944da52e3a8
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupClient.cs
@@ -0,0 +1,1991 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure.Core;
+using Azure.Core.Pipeline;
+
+namespace Azure.AI.Vision.Face
+{
+    // Data plane generated sub-client.
+    /// <summary> The LargePersonGroup sub-client. </summary>
+    public partial class LargePersonGroupClient
+    {
+        private const string AuthorizationHeader = "Ocp-Apim-Subscription-Key";
+        private readonly AzureKeyCredential _keyCredential;
+        private static readonly string[] AuthorizationScopes = new string[] { "https://cognitiveservices.azure.com/.default" };
+        private readonly TokenCredential _tokenCredential;
+        private readonly HttpPipeline _pipeline;
+        private readonly Uri _endpoint;
+        private readonly string _largePersonGroupId;
+        private readonly string _apiVersion;
+
+        /// <summary> The ClientDiagnostics is used to provide tracing support for the client library. </summary>
+        internal ClientDiagnostics ClientDiagnostics { get; }
+
+        /// <summary> The HTTP pipeline for sending and receiving REST requests and responses. </summary>
+        public virtual HttpPipeline Pipeline => _pipeline;
+
+        /// <summary> Initializes a new instance of LargePersonGroupClient for mocking. </summary>
+        protected LargePersonGroupClient()
+        {
+        }
+
+        /// <summary> Initializes a new instance of LargePersonGroupClient. </summary>
+        /// <param name="clientDiagnostics"> The handler for diagnostic messaging in the client. </param>
+        /// <param name="pipeline"> The HTTP pipeline for sending and receiving REST requests and responses. </param>
+        /// <param name="keyCredential"> The key credential to copy. </param>
+        /// <param name="tokenCredential"> The token credential to copy. </param>
+        /// <param name="endpoint">
+        /// Supported Cognitive Services endpoints (protocol and hostname, for example:
+        /// https://{resource-name}.cognitiveservices.azure.com).
+        /// </param>
+        /// <param name="largePersonGroupId"> ID of the container. </param>
+        /// <param name="apiVersion"> API Version. Allowed values: "v1.1-preview.1" | "v1.2-preview.1". </param>
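
For orientation, the end-to-end flow these members support looks roughly like the sketch below. It is illustrative only: the `FaceAdministrationClient` factory method `GetLargePersonGroupClient` and the `FaceRecognitionModel.Recognition04` member name are assumptions based on this library's naming patterns, not something this diff confirms.

```C#
// Hedged sketch: obtain the sub-client, then create the group.
// GetLargePersonGroupClient(...) is assumed to be the factory that
// FaceAdministrationClient exposes for this sub-client.
Uri endpoint = new Uri("https://<resource-name>.cognitiveservices.azure.com");
DefaultAzureCredential credential = new DefaultAzureCredential();
FaceAdministrationClient admin = new FaceAdministrationClient(endpoint, credential);
LargePersonGroupClient group = admin.GetLargePersonGroupClient("my-group-id");

// recognition_04 is the model the documentation below recommends.
await group.CreateAsync("My Group", recognitionModel: FaceRecognitionModel.Recognition04);
```
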
+ internal LargePersonGroupClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, AzureKeyCredential keyCredential, TokenCredential tokenCredential, Uri endpoint, string largePersonGroupId, string apiVersion) + { + ClientDiagnostics = clientDiagnostics; + _pipeline = pipeline; + _keyCredential = keyCredential; + _tokenCredential = tokenCredential; + _endpoint = endpoint; + _largePersonGroupId = largePersonGroupId; + _apiVersion = apiVersion; + } + + /// Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group for more details. + /// + public virtual async Task CreateAsync(string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreateRequest createRequest = new CreateRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreateAsync(createRequest.ToRequestContent(), context).ConfigureAwait(false); + return response; + } + + /// Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The 'recognitionModel' associated with this face list. Supported 'recognitionModel' values include 'recognition_01', 'recognition_02, 'recognition_03', and 'recognition_04'. The default value is 'recognition_01'. 'recognition_04' is recommended since its accuracy is improved on faces wearing masks compared with 'recognition_03', and its overall accuracy is improved compared with 'recognition_01' and 'recognition_02'. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group for more details. + /// + public virtual Response Create(string name, string userData = null, FaceRecognitionModel? recognitionModel = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreateRequest createRequest = new CreateRequest(name, userData, recognitionModel, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = Create(createRequest.ToRequestContent(), context); + return response; + } + + /// + /// [Protocol Method] Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new Large Person Group with user-specified largePersonGroupId, name, an optional userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Create(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Create"); + scope.Start(); + try + { + using HttpMessage message = CreateCreateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteAsync(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Delete(RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Delete"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual async Task> GetLargePersonGroupAsync(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupAsync(returnRecognitionModel, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroup.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// + public virtual Response GetLargePersonGroup(bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroup(returnRecognitionModel, context); + return Response.FromValue(LargePersonGroup.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupAsync(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupRequest(returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// Return 'recognitionModel' or not. The default value is false. 
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroup(bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetLargePersonGroup"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupRequest(returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdateAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response Update(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Update"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-groups for more details. + /// + public virtual async Task>> GetLargePersonGroupsAsync(string start = null, int? top = null, bool? 
returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetLargePersonGroupsAsync(start, top, returnRecognitionModel, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroup.DeserializeLargePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-groups for more details. + /// + public virtual Response> GetLargePersonGroups(string start = null, int? top = null, bool? returnRecognitionModel = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetLargePersonGroups(start, top, returnRecognitionModel, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroup.DeserializeLargePersonGroup(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetLargePersonGroupsAsync(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetLargePersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupsRequest(start, top, returnRecognitionModel, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all existing Large Person Groups' largePersonGroupId, name, userData and recognitionModel. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. 
+ /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// Return 'recognitionModel' or not. The default value is false. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetLargePersonGroups(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetLargePersonGroups"); + scope.Start(); + try + { + using HttpMessage message = CreateGetLargePersonGroupsRequest(start, top, returnRecognitionModel, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-training-status for more details. + /// + public virtual async Task> GetTrainingStatusAsync(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetTrainingStatusAsync(context).ConfigureAwait(false); + return Response.FromValue(FaceTrainingResult.FromResponse(response), response); + } + + /// To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-training-status for more details. + /// + public virtual Response GetTrainingStatus(CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetTrainingStatus(context); + return Response.FromValue(FaceTrainingResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
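
Because training runs asynchronously on the service side, a caller typically polls `GetTrainingStatusAsync` until the operation leaves its running state. A minimal sketch, assuming `FaceTrainingResult` exposes a `Status` property of type `FaceOperationStatus` whose member names (`NotStarted`, `Running`) follow this library's conventions; `group` is the `LargePersonGroupClient` from the earlier sketch:

```C#
// Hedged sketch: poll until the asynchronous training operation settles.
// FaceOperationStatus member names are assumptions, not confirmed by this diff.
FaceTrainingResult training = await group.GetTrainingStatusAsync();
while (training.Status == FaceOperationStatus.NotStarted || training.Status == FaceOperationStatus.Running)
{
    await Task.Delay(TimeSpan.FromSeconds(1));
    training = await group.GetTrainingStatusAsync();
}
Console.WriteLine($"Training finished with status: {training.Status}");
```
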
+ /// + public virtual async Task GetTrainingStatusAsync(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetTrainingStatusRequest(context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] To check Large Person Group training status completed or still ongoing. Large Person Group training is an asynchronous operation triggered by "Train Large Person Group" API. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetTrainingStatus(RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetTrainingStatus"); + scope.Start(); + try + { + using HttpMessage message = CreateGetTrainingStatusRequest(context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Create a new person in a specified Large Person Group. To add face to this person, please call "Add Large Person Group Person Face". + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group-person for more details. + /// + public virtual async Task> CreatePersonAsync(string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonRequest createPersonRequest = new CreatePersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await CreatePersonAsync(createPersonRequest.ToRequestContent(), context).ConfigureAwait(false); + return Response.FromValue(CreatePersonResult.FromResponse(response), response); + } + + /// Create a new person in a specified Large Person Group. To add face to this person, please call "Add Large Person Group Person Face". + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/create-large-person-group-person for more details. + /// + public virtual Response CreatePerson(string name, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(name, nameof(name)); + + CreatePersonRequest createPersonRequest = new CreatePersonRequest(name, userData, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = CreatePerson(createPersonRequest.ToRequestContent(), context); + return Response.FromValue(CreatePersonResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Create a new person in a specified Large Person Group. 
To add face to this person, please call "Add Large Person Group Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task CreatePersonAsync(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.CreatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonRequest(content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Create a new person in a specified Large Person Group. To add face to this person, please call "Add Large Person Group Person Face". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response CreatePerson(RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.CreatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateCreatePersonRequest(content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
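
A short usage sketch for the convenience overload above, assuming `CreatePersonResult` exposes the created `PersonId`:

```C#
// Create a person in the group and keep the returned personId for later
// AddFace / Identify calls. PersonId on CreatePersonResult is an assumption
// based on the model naming in this diff.
CreatePersonResult person = await group.CreatePersonAsync("Bill", userData: "optional notes");
Guid personId = person.PersonId;
```
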
+ /// + public virtual async Task DeletePersonAsync(Guid personId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.DeletePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonRequest(personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/delete-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeletePerson(Guid personId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.DeletePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateDeletePersonRequest(personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// ID of the person. + /// The cancellation token to use. + /// + public virtual async Task> GetPersonAsync(Guid personId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonAsync(personId, context).ConfigureAwait(false); + return Response.FromValue(LargePersonGroupPerson.FromResponse(response), response); + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// ID of the person. + /// The cancellation token to use. + /// + public virtual Response GetPerson(Guid personId, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPerson(personId, context); + return Response.FromValue(LargePersonGroupPerson.FromResponse(response), response); + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual async Task GetPersonAsync(Guid personId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonRequest(personId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPerson(Guid personId, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetPerson"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonRequest(personId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdatePersonAsync(Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.UpdatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonRequest(personId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ /// + public virtual Response UpdatePerson(Guid personId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.UpdatePerson"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdatePersonRequest(personId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-persons for more details. + /// + public virtual async Task>> GetPersonsAsync(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await GetPersonsAsync(start, top, context).ConfigureAwait(false); + IReadOnlyList value = default; + using var document = await JsonDocument.ParseAsync(response.ContentStream, default, cancellationToken).ConfigureAwait(false); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroupPerson.DeserializeLargePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The cancellation token to use. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-persons for more details. + /// + public virtual Response> GetPersons(string start = null, int? top = null, CancellationToken cancellationToken = default) + { + RequestContext context = FromCancellationToken(cancellationToken); + Response response = GetPersons(start, top, context); + IReadOnlyList value = default; + using var document = JsonDocument.Parse(response.ContentStream); + List array = new List(); + foreach (var item in document.RootElement.EnumerateArray()) + { + array.Add(LargePersonGroupPerson.DeserializeLargePersonGroupPerson(item)); + } + value = array; + return Response.FromValue(value, response); + } + + /// + /// [Protocol Method] List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. 
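
Note that `start` is a cursor rather than an offset: each page is requested by passing the last ID of the previous page. A sketch of paging through all persons under that reading, assuming `LargePersonGroupPerson` exposes `PersonId` and `Name` as the surrounding generated code suggests:

```C#
// Hedged sketch: page through all persons in the group, 100 at a time.
// The "start" cursor is the stringified last personId of the previous page.
string start = null;
while (true)
{
    IReadOnlyList<LargePersonGroupPerson> page = await group.GetPersonsAsync(start: start, top: 100);
    if (page.Count == 0)
    {
        break;
    }
    foreach (LargePersonGroupPerson person in page)
    {
        Console.WriteLine($"{person.PersonId}: {person.Name}");
    }
    start = page[page.Count - 1].PersonId.ToString();
}
```
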
+ /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task GetPersonsAsync(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonsRequest(start, top, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] List all persons' information in the specified Large Person Group, including personId, name, userData and persistedFaceIds of registered person faces. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// List resources greater than the "start". It contains no more than 64 characters. Default is empty. + /// The number of items to list, ranging in [1, 1000]. Default is 1000. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response GetPersons(string start, int? top, RequestContext context) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetPersons"); + scope.Start(); + try + { + using HttpMessage message = CreateGetPersonsRequest(start, top, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// URL of input image. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face-from-url for more details. + internal virtual async Task> AddFaceFromUrlImplAsync(Guid personId, Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest addFaceFromUrlRequest = new AddFaceFromUrlRequest(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceFromUrlImplAsync(personId, addFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// URL of input image. 
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face-from-url for more details. + internal virtual Response AddFaceFromUrlImpl(Guid personId, Uri uri, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(uri, nameof(uri)); + + AddFaceFromUrlRequest addFaceFromUrlRequest = new AddFaceFromUrlRequest(uri, null); + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceFromUrlImpl(personId, addFaceFromUrlRequest.ToRequestContent(), targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceFromUrlImplAsync(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. 
+ /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual Response AddFaceFromUrlImpl(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.AddFaceFromUrlImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceFromUrlImplRequest(personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face for more details. + internal virtual async Task> AddFaceImplAsync(Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = await AddFaceImplAsync(personId, content, targetFace, detectionModel?.ToString(), userData, context).ConfigureAwait(false); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// Add a face to a person into a Large Person Group for face identification or verification. + /// ID of the person. + /// The image to be analyzed. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. + /// User-provided data attached to the face. The size limit is 1K. + /// The cancellation token to use. + /// is null. + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/add-large-person-group-person-face for more details. 
+ internal virtual Response AddFaceImpl(Guid personId, BinaryData imageContent, IEnumerable targetFace = null, FaceDetectionModel? detectionModel = null, string userData = null, CancellationToken cancellationToken = default) + { + Argument.AssertNotNull(imageContent, nameof(imageContent)); + + using RequestContent content = imageContent; + RequestContext context = FromCancellationToken(cancellationToken); + Response response = AddFaceImpl(personId, content, targetFace, detectionModel?.ToString(), userData, context); + return Response.FromValue(AddFaceResult.FromResponse(response), response); + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + internal virtual async Task AddFaceImplAsync(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(personId, content, targetFace, detectionModel, userData, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Add a face to a person into a Large Person Group for face identification or verification. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// Please try the simpler convenience overload with strongly typed models first. + /// + /// + /// + /// + /// ID of the person. + /// The content to send as the body of the request. + /// A face rectangle to specify the target face to be added to a person, in the format of 'targetFace=left,top,width,height'. + /// The 'detectionModel' associated with the detected faceIds. Supported 'detectionModel' values include 'detection_01', 'detection_02' and 'detection_03'. The default value is 'detection_01'. Allowed values: "detection_01" | "detection_02" | "detection_03". + /// User-provided data attached to the face. The size limit is 1K. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. 
+ internal virtual Response AddFaceImpl(Guid personId, RequestContent content, IEnumerable targetFace = null, string detectionModel = null, string userData = null, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.AddFaceImpl"); + scope.Start(); + try + { + using HttpMessage message = CreateAddFaceImplRequest(personId, content, targetFace, detectionModel, userData, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task DeleteFaceAsync(Guid personId, Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(personId, persistedFaceId, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Delete a face from a person in a Large Person Group by specified largePersonGroupId, personId and persistedFaceId. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response DeleteFace(Guid personId, Guid persistedFaceId, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.DeleteFace"); + scope.Start(); + try + { + using HttpMessage message = CreateDeleteFaceRequest(personId, persistedFaceId, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details. + /// ID of the person. + /// Face ID of the face. + /// The cancellation token to use. 
+        public virtual async Task<Response<LargePersonGroupPersonFace>> GetFaceAsync(Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default)
+        {
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = await GetFaceAsync(personId, persistedFaceId, context).ConfigureAwait(false);
+            return Response.FromValue(LargePersonGroupPersonFace.FromResponse(response), response);
+        }
+
+        /// <summary> Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details. </summary>
+        /// <param name="personId"> ID of the person. </param>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="cancellationToken"> The cancellation token to use. </param>
+        public virtual Response<LargePersonGroupPersonFace> GetFace(Guid personId, Guid persistedFaceId, CancellationToken cancellationToken = default)
+        {
+            RequestContext context = FromCancellationToken(cancellationToken);
+            Response response = GetFace(personId, persistedFaceId, context);
+            return Response.FromValue(LargePersonGroupPersonFace.FromResponse(response), response);
+        }
+
+        /// <summary>
+        /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details.
+        /// <list type="bullet">
+        /// <item>
+        /// <description>
+        /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+        /// </description>
+        /// </item>
+        /// <item>
+        /// <description>
+        /// Please try the simpler convenience overload with strongly typed models first.
+        /// </description>
+        /// </item>
+        /// </list>
+        /// </summary>
+        /// <param name="personId"> ID of the person. </param>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
+        /// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
+        /// <returns> The response returned from the service. </returns>
+        public virtual async Task<Response> GetFaceAsync(Guid personId, Guid persistedFaceId, RequestContext context)
+        {
+            using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetFace");
+            scope.Start();
+            try
+            {
+                using HttpMessage message = CreateGetFaceRequest(personId, persistedFaceId, context);
+                return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false);
+            }
+            catch (Exception e)
+            {
+                scope.Failed(e);
+                throw;
+            }
+        }
+
+        /// <summary>
+        /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/get-large-person-group-person-face for more details.
+        /// <list type="bullet">
+        /// <item>
+        /// <description>
+        /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios.
+        /// </description>
+        /// </item>
+        /// <item>
+        /// <description>
+        /// Please try the simpler convenience overload with strongly typed models first.
+        /// </description>
+        /// </item>
+        /// </list>
+        /// </summary>
+        /// <param name="personId"> ID of the person. </param>
+        /// <param name="persistedFaceId"> Face ID of the face. </param>
+        /// <param name="context"> The request context, which can override default behaviors of the client pipeline on a per-call basis. </param>
+        /// <exception cref="RequestFailedException"> Service returned a non-success status code. </exception>
+        /// <returns> The response returned from the service. </returns>
+        public virtual Response GetFace(Guid personId, Guid persistedFaceId, RequestContext context)
+        {
+            using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.GetFace");
+            scope.Start();
+            try
+            {
+                using HttpMessage message = CreateGetFaceRequest(personId, persistedFaceId, context);
+                return _pipeline.ProcessMessage(message, context);
+            }
+            catch (Exception e)
+            {
+                scope.Failed(e);
+                throw;
+            }
+        }
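+        // NOTE (illustrative, editor-added; not part of the generated client): reading a
+        // registered face back with the convenience overload above, assuming "client" plus
+        // personId/persistedFaceId values returned by earlier CreatePerson/AddFace calls:
+        //
+        //   Response<LargePersonGroupPersonFace> face = await client.GetFaceAsync(personId, persistedFaceId);
+        //   Console.WriteLine($"{face.Value.PersistedFaceId}: {face.Value.UserData}");
+
+        ///
+        /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person-face for more details.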
+ /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual async Task UpdateFaceAsync(Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.UpdateFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceRequest(personId, persistedFaceId, content, context); + return await _pipeline.ProcessMessageAsync(message, context).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + /// + /// [Protocol Method] Please refer to https://learn.microsoft.com/rest/api/face/person-group-operations/update-large-person-group-person-face for more details. + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// ID of the person. + /// Face ID of the face. + /// The content to send as the body of the request. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// is null. + /// Service returned a non-success status code. + /// The response returned from the service. + /// + public virtual Response UpdateFace(Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context = null) + { + Argument.AssertNotNull(content, nameof(content)); + + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.UpdateFace"); + scope.Start(); + try + { + using HttpMessage message = CreateUpdateFaceRequest(personId, persistedFaceId, content, context); + return _pipeline.ProcessMessage(message, context); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Person Group training task. Training is a crucial step that only a trained Large Person Group can be used by "Identify From Large Person Group". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. 
+ /// + public virtual async Task TrainAsync(WaitUntil waitUntil, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Train"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainRequest(context); + return await ProtocolOperationHelpers.ProcessMessageWithoutResponseValueAsync(_pipeline, message, ClientDiagnostics, "LargePersonGroupClient.Train", OperationFinalStateVia.OperationLocation, context, waitUntil).ConfigureAwait(false); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + // The convenience method is omitted here because it has exactly the same parameter list as the corresponding protocol method + /// + /// [Protocol Method] Submit a Large Person Group training task. Training is a crucial step that only a trained Large Person Group can be used by "Identify From Large Person Group". + /// + /// + /// + /// This protocol method allows explicit creation of the request and processing of the response for advanced scenarios. + /// + /// + /// + /// + /// if the method should wait to return until the long-running operation has completed on the service; if it should return after starting the operation. For more information on long-running operations, please see Azure.Core Long-Running Operation samples. + /// The request context, which can override default behaviors of the client pipeline on a per-call basis. + /// Service returned a non-success status code. + /// The representing an asynchronous operation on the service. + /// + public virtual Operation Train(WaitUntil waitUntil, RequestContext context = null) + { + using var scope = ClientDiagnostics.CreateScope("LargePersonGroupClient.Train"); + scope.Start(); + try + { + using HttpMessage message = CreateTrainRequest(context); + return ProtocolOperationHelpers.ProcessMessageWithoutResponseValue(_pipeline, message, ClientDiagnostics, "LargePersonGroupClient.Train", OperationFinalStateVia.OperationLocation, context, waitUntil); + } + catch (Exception e) + { + scope.Failed(e); + throw; + } + } + + internal HttpMessage CreateCreateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Put; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetLargePersonGroupRequest(bool? 
returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetLargePersonGroupsRequest(string start, int? top, bool? returnRecognitionModel, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + if (returnRecognitionModel != null) + { + uri.AppendQuery("returnRecognitionModel", returnRecognitionModel.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetTrainingStatusRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/training", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateTrainRequest(RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier202); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/train", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateCreatePersonRequest(RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = 
new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons", false); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeletePersonRequest(Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetPersonRequest(Guid personId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdatePersonRequest(Guid personId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateGetPersonsRequest(string start, int? 
top, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons", false); + if (start != null) + { + uri.AppendQuery("start", start, true); + } + if (top != null) + { + uri.AppendQuery("top", top.Value, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateAddFaceFromUrlImplRequest(Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + internal HttpMessage CreateAddFaceImplRequest(Guid personId, RequestContent content, IEnumerable targetFace, string detectionModel, string userData, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Post; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces", false); + if (targetFace != null && !(targetFace is ChangeTrackingList changeTrackingList && changeTrackingList.IsUndefined)) + { + uri.AppendQueryDelimited("targetFace", targetFace, ",", true); + } + if (detectionModel != null) + { + uri.AppendQuery("detectionModel", detectionModel, true); + } + if (userData != null) + { + uri.AppendQuery("userData", userData, true); + } + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/octet-stream"); + request.Content = content; + return message; + } + + internal HttpMessage CreateDeleteFaceRequest(Guid personId, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Delete; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + 
uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateGetFaceRequest(Guid personId, Guid persistedFaceId, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Get; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + return message; + } + + internal HttpMessage CreateUpdateFaceRequest(Guid personId, Guid persistedFaceId, RequestContent content, RequestContext context) + { + var message = _pipeline.CreateMessage(context, ResponseClassifier200); + var request = message.Request; + request.Method = RequestMethod.Patch; + var uri = new RawRequestUriBuilder(); + uri.Reset(_endpoint); + uri.AppendRaw("/face/", false); + uri.AppendRaw(_apiVersion, true); + uri.AppendPath("/largepersongroups/", false); + uri.AppendPath(_largePersonGroupId, true); + uri.AppendPath("/persons/", false); + uri.AppendPath(personId, true); + uri.AppendPath("/persistedfaces/", false); + uri.AppendPath(persistedFaceId, true); + request.Uri = uri; + request.Headers.Add("Accept", "application/json"); + request.Headers.Add("Content-Type", "application/json"); + request.Content = content; + return message; + } + + private static RequestContext DefaultRequestContext = new RequestContext(); + internal static RequestContext FromCancellationToken(CancellationToken cancellationToken = default) + { + if (!cancellationToken.CanBeCanceled) + { + return DefaultRequestContext; + } + + return new RequestContext() { CancellationToken = cancellationToken }; + } + + private static ResponseClassifier _responseClassifier200; + private static ResponseClassifier ResponseClassifier200 => _responseClassifier200 ??= new StatusCodeClassifier(stackalloc ushort[] { 200 }); + private static ResponseClassifier _responseClassifier202; + private static ResponseClassifier ResponseClassifier202 => _responseClassifier202 ??= new StatusCodeClassifier(stackalloc ushort[] { 202 }); + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs new file mode 100644 index 000000000000..eb363bd7f27c --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.Serialization.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.ClientModel.Primitives;
+using System.Collections.Generic;
+using System.Text.Json;
+using Azure.Core;
+
+namespace Azure.AI.Vision.Face
+{
+    public partial class LargePersonGroupPerson : IUtf8JsonSerializable, IJsonModel<LargePersonGroupPerson>
+    {
+        void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel<LargePersonGroupPerson>)this).Write(writer, ModelSerializationExtensions.WireOptions);
+
+        void IJsonModel<LargePersonGroupPerson>.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroupPerson>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support writing '{format}' format.");
+            }
+
+            writer.WriteStartObject();
+            if (options.Format != "W")
+            {
+                writer.WritePropertyName("personId"u8);
+                writer.WriteStringValue(PersonId);
+            }
+            writer.WritePropertyName("name"u8);
+            writer.WriteStringValue(Name);
+            if (Optional.IsDefined(UserData))
+            {
+                writer.WritePropertyName("userData"u8);
+                writer.WriteStringValue(UserData);
+            }
+            if (Optional.IsCollectionDefined(PersistedFaceIds))
+            {
+                writer.WritePropertyName("persistedFaceIds"u8);
+                writer.WriteStartArray();
+                foreach (var item in PersistedFaceIds)
+                {
+                    writer.WriteStringValue(item);
+                }
+                writer.WriteEndArray();
+            }
+            if (options.Format != "W" && _serializedAdditionalRawData != null)
+            {
+                foreach (var item in _serializedAdditionalRawData)
+                {
+                    writer.WritePropertyName(item.Key);
+#if NET6_0_OR_GREATER
+                    writer.WriteRawValue(item.Value);
+#else
+                    using (JsonDocument document = JsonDocument.Parse(item.Value))
+                    {
+                        JsonSerializer.Serialize(writer, document.RootElement);
+                    }
+#endif
+                }
+            }
+            writer.WriteEndObject();
+        }
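+        // NOTE (illustrative, editor-added; not part of the generated file): implementing
+        // IJsonModel<T>/IPersistableModel<T> lets callers round-trip the model through
+        // System.ClientModel's ModelReaderWriter, e.g. for caching or diagnostics:
+        //
+        //   BinaryData bytes = ModelReaderWriter.Write(person);                  // serialize
+        //   LargePersonGroupPerson copy =
+        //       ModelReaderWriter.Read<LargePersonGroupPerson>(bytes);           // deserialize
+        //
+        // "person" here is an assumed instance obtained from a service response.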
+
+        LargePersonGroupPerson IJsonModel<LargePersonGroupPerson>.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroupPerson>)this).GetFormatFromOptions(options) : options.Format;
+            if (format != "J")
+            {
+                throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support reading '{format}' format.");
+            }
+
+            using JsonDocument document = JsonDocument.ParseValue(ref reader);
+            return DeserializeLargePersonGroupPerson(document.RootElement, options);
+        }
+
+        internal static LargePersonGroupPerson DeserializeLargePersonGroupPerson(JsonElement element, ModelReaderWriterOptions options = null)
+        {
+            options ??= ModelSerializationExtensions.WireOptions;
+
+            if (element.ValueKind == JsonValueKind.Null)
+            {
+                return null;
+            }
+            Guid personId = default;
+            string name = default;
+            string userData = default;
+            IReadOnlyList<Guid> persistedFaceIds = default;
+            IDictionary<string, BinaryData> serializedAdditionalRawData = default;
+            Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
+            foreach (var property in element.EnumerateObject())
+            {
+                if (property.NameEquals("personId"u8))
+                {
+                    personId = property.Value.GetGuid();
+                    continue;
+                }
+                if (property.NameEquals("name"u8))
+                {
+                    name = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("userData"u8))
+                {
+                    userData = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("persistedFaceIds"u8))
+                {
+                    if (property.Value.ValueKind == JsonValueKind.Null)
+                    {
+                        continue;
+                    }
+                    List<Guid> array = new List<Guid>();
+                    foreach (var item in property.Value.EnumerateArray())
+                    {
+                        array.Add(item.GetGuid());
+                    }
+                    persistedFaceIds = array;
+                    continue;
+                }
+                if (options.Format != "W")
+                {
+                    rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
+                }
+            }
+            serializedAdditionalRawData = rawDataDictionary;
+            return new LargePersonGroupPerson(personId, name, userData, persistedFaceIds ?? new ChangeTrackingList<Guid>(), serializedAdditionalRawData);
+        }
+
+        BinaryData IPersistableModel<LargePersonGroupPerson>.Write(ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroupPerson>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options);
+                default:
+                    throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support writing '{options.Format}' format.");
+            }
+        }
+
+        LargePersonGroupPerson IPersistableModel<LargePersonGroupPerson>.Create(BinaryData data, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<LargePersonGroupPerson>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    {
+                        using JsonDocument document = JsonDocument.Parse(data);
+                        return DeserializeLargePersonGroupPerson(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(LargePersonGroupPerson)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<LargePersonGroupPerson>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+        /// <summary> Deserializes the model from a raw response. </summary>
+        /// <param name="response"> The response to deserialize the model from. </param>
+        internal static LargePersonGroupPerson FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeLargePersonGroupPerson(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+ internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs new file mode 100644 index 000000000000..1884b1812ec4 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPerson.cs @@ -0,0 +1,88 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// The person in a specified large person group. To add face to this person, please call "Add Large Person Group Person Face". + public partial class LargePersonGroupPerson + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + /// User defined name, maximum length is 128. + /// is null. + internal LargePersonGroupPerson(string name) + { + Argument.AssertNotNull(name, nameof(name)); + + Name = name; + PersistedFaceIds = new ChangeTrackingList(); + } + + /// Initializes a new instance of . + /// ID of the person. + /// User defined name, maximum length is 128. + /// Optional user defined data. Length should not exceed 16K. + /// Face ids of registered faces in the person. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroupPerson(Guid personId, string name, string userData, IReadOnlyList persistedFaceIds, IDictionary serializedAdditionalRawData) + { + PersonId = personId; + Name = name; + UserData = userData; + PersistedFaceIds = persistedFaceIds; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Initializes a new instance of for deserialization. + internal LargePersonGroupPerson() + { + } + + /// ID of the person. + public Guid PersonId { get; } + /// User defined name, maximum length is 128. + public string Name { get; } + /// Optional user defined data. Length should not exceed 16K. + public string UserData { get; } + /// Face ids of registered faces in the person. + public IReadOnlyList PersistedFaceIds { get; } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs new file mode 100644 index 000000000000..a602eb3457e0 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.Serialization.cs @@ -0,0 +1,149 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + public partial class LargePersonGroupPersonFace : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + if (options.Format != "W") + { + writer.WritePropertyName("persistedFaceId"u8); + writer.WriteStringValue(PersistedFaceId); + } + if (Optional.IsDefined(UserData)) + { + writer.WritePropertyName("userData"u8); + writer.WriteStringValue(UserData); + } + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + LargePersonGroupPersonFace IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeLargePersonGroupPersonFace(document.RootElement, options); + } + + internal static LargePersonGroupPersonFace DeserializeLargePersonGroupPersonFace(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid persistedFaceId = default; + string userData = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("persistedFaceId"u8)) + { + persistedFaceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("userData"u8)) + { + userData = property.Value.GetString(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new LargePersonGroupPersonFace(persistedFaceId, userData, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + return ModelReaderWriter.Write(this, options); + default: + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support writing '{options.Format}' format."); + } + } + + LargePersonGroupPersonFace IPersistableModel.Create(BinaryData data, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + + switch (format) + { + case "J": + { + using JsonDocument document = JsonDocument.Parse(data); + return DeserializeLargePersonGroupPersonFace(document.RootElement, options); + } + default: + throw new FormatException($"The model {nameof(LargePersonGroupPersonFace)} does not support reading '{options.Format}' format."); + } + } + + string IPersistableModel.GetFormatFromOptions(ModelReaderWriterOptions options) => "J"; + + /// Deserializes the model from a raw response. + /// The response to deserialize the model from. + internal static LargePersonGroupPersonFace FromResponse(Response response) + { + using var document = JsonDocument.Parse(response.Content); + return DeserializeLargePersonGroupPersonFace(document.RootElement); + } + + /// Convert into a . + internal virtual RequestContent ToRequestContent() + { + var content = new Utf8JsonRequestContent(); + content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions); + return content; + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs new file mode 100644 index 000000000000..56aa4d0061ff --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LargePersonGroupPersonFace.cs @@ -0,0 +1,69 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. + +// + +#nullable disable + +using System; +using System.Collections.Generic; + +namespace Azure.AI.Vision.Face +{ + /// Face resource for large person group person. + public partial class LargePersonGroupPersonFace + { + /// + /// Keeps track of any properties unknown to the library. + /// + /// To assign an object to the value of this property use . + /// + /// + /// To assign an already formatted json string to this property use . + /// + /// + /// Examples: + /// + /// + /// BinaryData.FromObjectAsJson("foo") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromString("\"foo\"") + /// Creates a payload of "foo". + /// + /// + /// BinaryData.FromObjectAsJson(new { key = "value" }) + /// Creates a payload of { "key": "value" }. + /// + /// + /// BinaryData.FromString("{\"key\": \"value\"}") + /// Creates a payload of { "key": "value" }. + /// + /// + /// + /// + private IDictionary _serializedAdditionalRawData; + + /// Initializes a new instance of . + internal LargePersonGroupPersonFace() + { + } + + /// Initializes a new instance of . + /// Face ID of the face. + /// User-provided data attached to the face. The length limit is 1K. + /// Keeps track of any properties unknown to the library. + internal LargePersonGroupPersonFace(Guid persistedFaceId, string userData, IDictionary serializedAdditionalRawData) + { + PersistedFaceId = persistedFaceId; + UserData = userData; + _serializedAdditionalRawData = serializedAdditionalRawData; + } + + /// Face ID of the face. + public Guid PersistedFaceId { get; } + /// User-provided data attached to the face. 
The length limit is 1K.
+        public string UserData { get; }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs
index 2a3c6d8b1280..f5a0af9fc967 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessModel.cs
@@ -22,19 +22,13 @@ public LivenessModel(string value)
             _value = value ?? throw new ArgumentNullException(nameof(value));
         }
 
-        private const string V20200215Preview01Value = "2020-02-15-preview.01";
-        private const string V20211112Preview03Value = "2021-11-12-preview.03";
         private const string V20221015Preview04Value = "2022-10-15-preview.04";
-        private const string V20230302Preview05Value = "2023-03-02-preview.05";
+        private const string V20231220Preview06Value = "2023-12-20-preview.06";
 
-        /// <summary> 2020-02-15-preview.01. </summary>
-        public static LivenessModel V20200215Preview01 { get; } = new LivenessModel(V20200215Preview01Value);
-        /// <summary> 2021-11-12-preview.03. </summary>
-        public static LivenessModel V20211112Preview03 { get; } = new LivenessModel(V20211112Preview03Value);
         /// <summary> 2022-10-15-preview.04. </summary>
         public static LivenessModel V20221015Preview04 { get; } = new LivenessModel(V20221015Preview04Value);
-        /// <summary> 2023-03-02-preview.05. </summary>
-        public static LivenessModel V20230302Preview05 { get; } = new LivenessModel(V20230302Preview05Value);
+        /// <summary> 2023-12-20-preview.06. </summary>
+        public static LivenessModel V20231220Preview06 { get; } = new LivenessModel(V20231220Preview06Value);
         /// <summary> Determines if two <see cref="LivenessModel"/> values are the same. </summary>
         public static bool operator ==(LivenessModel left, LivenessModel right) => left.Equals(right);
         /// <summary> Determines if two <see cref="LivenessModel"/> values are not the same. </summary>
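The diff above trims `LivenessModel` down to the two model versions accepted by the new default service API version. As a hedged, editor-added illustration (not part of this PR), a caller might pin the model when creating a liveness session; the `sessionClient` variable is an assumption, and the property names follow this release's `CreateLivenessSessionContent` surface:

```C#
// Illustrative sketch: pinning the liveness model on session creation.
// Assumes an already-constructed FaceSessionClient named "sessionClient".
var content = new CreateLivenessSessionContent(LivenessOperationMode.Passive)
{
    LivenessSingleModalModel = LivenessModel.V20231220Preview06, // one of the two remaining versions
    DeviceCorrelationId = Guid.NewGuid().ToString(),
};
Response<CreateLivenessSessionResult> session = await sessionClient.CreateLivenessSessionAsync(content);
```

diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs
index b29e45e21167..a1b1ed8b4884 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessOperationMode.cs
@@ -10,7 +10,7 @@
 namespace Azure.AI.Vision.Face
 {
-    /// <summary> The liveness operation mode to drive the client’s end-user experience. </summary>
+    /// <summary> The liveness operation mode to drive the client's end-user experience. </summary>
     public readonly partial struct LivenessOperationMode : IEquatable<LivenessOperationMode>
     {
         private readonly string _value;
@@ -25,9 +25,9 @@ public LivenessOperationMode(string value)
         private const string PassiveValue = "Passive";
         private const string PassiveActiveValue = "PassiveActive";
 
-        /// <summary> Utilizes a passive liveness technique that requires no additional actions from the user. Requires normal indoor lighting and high screen brightness for optimal performance. And thus, this mode has a narrow operational envelope and will not be suitable for scenarios that requires the end-user’s to be in bright lighting conditions. Note: this is the only supported mode for the Mobile (iOS and Android) solution. </summary>
+        /// <summary> Utilizes a passive liveness technique that requires no additional actions from the user. Requires normal indoor lighting and high screen brightness for optimal performance. And thus, this mode has a narrow operational envelope and will not be suitable for scenarios that requires the end-user's to be in bright lighting conditions. Note: this is the only supported mode for the Mobile (iOS and Android) solution. </summary>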
         public static LivenessOperationMode Passive { get; } = new LivenessOperationMode(PassiveValue);
-        /// <summary> This mode utilizes a hybrid passive or active liveness technique that necessitates user cooperation. It is optimized to require active motion only under suboptimal lighting conditions. Unlike the passive mode, this mode has no lighting restrictions, and thus offering a broader operational envelope. This mode is preferable on Web based solutions due to the lack of automatic screen brightness control available on browsers which hinders the Passive mode’s operational envelope on Web based solutions. </summary>
+        /// <summary> This mode utilizes a hybrid passive or active liveness technique that necessitates user cooperation. It is optimized to require active motion only under suboptimal lighting conditions. Unlike the passive mode, this mode has no lighting restrictions and thus offers a broader operational envelope. It is preferable for web-based solutions because browsers lack automatic screen brightness control, which hinders the Passive mode's operational envelope on the web. </summary>
         public static LivenessOperationMode PassiveActive { get; } = new LivenessOperationMode(PassiveActiveValue);
         /// <summary> Determines if two <see cref="LivenessOperationMode"/> values are the same. </summary>
         public static bool operator ==(LivenessOperationMode left, LivenessOperationMode right) => left.Equals(right);
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs
index 8db941366715..203873526c05 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.Serialization.cs
@@ -42,6 +42,16 @@ void IJsonModel<LivenessSessionAuditEntry>.Write(Utf8JsonWriter writer, ModelRea
             writer.WriteObjectValue(Response, options);
             writer.WritePropertyName("digest"u8);
             writer.WriteStringValue(Digest);
+            if (Optional.IsDefined(SessionImageId))
+            {
+                writer.WritePropertyName("sessionImageId"u8);
+                writer.WriteStringValue(SessionImageId);
+            }
+            if (Optional.IsDefined(VerifyImageHash))
+            {
+                writer.WritePropertyName("verifyImageHash"u8);
+                writer.WriteStringValue(VerifyImageHash);
+            }
             if (options.Format != "W" && _serializedAdditionalRawData != null)
             {
                 foreach (var item in _serializedAdditionalRawData)
@@ -88,6 +98,8 @@ internal static LivenessSessionAuditEntry DeserializeLivenessSessionAuditEntry(J
             AuditRequestInfo request = default;
             AuditLivenessResponseInfo response = default;
             string digest = default;
+            string sessionImageId = default;
+            string verifyImageHash = default;
             IDictionary<string, BinaryData> serializedAdditionalRawData = default;
             Dictionary<string, BinaryData> rawDataDictionary = new Dictionary<string, BinaryData>();
             foreach (var property in element.EnumerateObject())
@@ -132,6 +144,16 @@ internal static LivenessSessionAuditEntry DeserializeLivenessSessionAuditEntry(J
                     digest = property.Value.GetString();
                     continue;
                 }
+                if (property.NameEquals("sessionImageId"u8))
+                {
+                    sessionImageId = property.Value.GetString();
+                    continue;
+                }
+                if (property.NameEquals("verifyImageHash"u8))
+                {
+                    verifyImageHash = property.Value.GetString();
+                    continue;
+                }
                 if (options.Format != "W")
                 {
                     rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
@@ -147,6 +169,8 @@ internal static LivenessSessionAuditEntry DeserializeLivenessSessionAuditEntry(J
                 request,
                 response,
                 digest,
+                sessionImageId,
+                verifyImageHash,
                 serializedAdditionalRawData);
         }
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs
index 999391540240..46b364a13ffd 100644
--- a/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/LivenessSessionAuditEntry.cs
@@ -83,8 +83,10 @@ internal LivenessSessionAuditEntry(long id, string sessionId, string requestId,
         /// <param name="request"> The request of this entry. </param>
         /// <param name="response"> The response of this entry. </param>
         /// <param name="digest"> The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. </param>
+        /// <param name="sessionImageId"> The image ID of the session request. </param>
+        /// <param name="verifyImageHash"> The sha256 hash of the verify-image in the request. </param>
         /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
-        internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, string clientRequestId, DateTimeOffset receivedDateTime, AuditRequestInfo request, AuditLivenessResponseInfo response, string digest, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        internal LivenessSessionAuditEntry(long id, string sessionId, string requestId, string clientRequestId, DateTimeOffset receivedDateTime, AuditRequestInfo request, AuditLivenessResponseInfo response, string digest, string sessionImageId, string verifyImageHash, IDictionary<string, BinaryData> serializedAdditionalRawData)
         {
             Id = id;
             SessionId = sessionId;
@@ -94,6 +96,8 @@ internal LivenessSessionAuditEntry(long id, string sessionId, string requestId,
             Request = request;
             Response = response;
             Digest = digest;
+            SessionImageId = sessionImageId;
+            VerifyImageHash = verifyImageHash;
             _serializedAdditionalRawData = serializedAdditionalRawData;
         }
@@ -118,5 +122,9 @@ internal LivenessSessionAuditEntry()
         public AuditLivenessResponseInfo Response { get; }
         /// <summary> The server calculated digest for this request. If the client reported digest differs from the server calculated digest, then the message integrity between the client and service has been compromised and the result should not be trusted. For more information, see how to guides on how to leverage this value to secure your end-to-end solution. </summary>
         public string Digest { get; }
+        /// <summary> The image ID of the session request. </summary>
+        public string SessionImageId { get; }
+        /// <summary> The sha256 hash of the verify-image in the request. </summary>
+        public string VerifyImageHash { get; }
     }
 }
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs
new file mode 100644
index 000000000000..fa9cc27b3b68
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.Serialization.cs
@@ -0,0 +1,151 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
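The new `VerifyImageHash` on `LivenessSessionAuditEntry` above pairs naturally with the `Digest` integrity check: a client that retains its verify-image can recompute the hash and compare it against what the service reports. A minimal sketch, assuming (the diff does not spell this out) that the hash is a hex-encoded SHA-256 of the raw image bytes; the helper name is illustrative:

```C#
using System;
using System.IO;
using System.Security.Cryptography;

public static class AuditEntryChecks
{
    // Recompute SHA-256 over a locally retained verify-image and compare it
    // against LivenessSessionAuditEntry.VerifyImageHash.
    // The hex encoding of the reported hash is an assumption for illustration.
    public static bool VerifyImageHashMatches(string verifyImagePath, string reportedHash)
    {
        using FileStream stream = File.OpenRead(verifyImagePath);
        using SHA256 sha256 = SHA256.Create();
        byte[] digest = sha256.ComputeHash(stream);
        string localHash = Convert.ToHexString(digest);
        return string.Equals(localHash, reportedHash, StringComparison.OrdinalIgnoreCase);
    }
}
```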
+ +// + +#nullable disable + +using System; +using System.ClientModel.Primitives; +using System.Collections.Generic; +using System.Text.Json; +using Azure.Core; + +namespace Azure.AI.Vision.Face +{ + internal partial class VerifyFromLargePersonGroupRequest : IUtf8JsonSerializable, IJsonModel + { + void IUtf8JsonSerializable.Write(Utf8JsonWriter writer) => ((IJsonModel)this).Write(writer, ModelSerializationExtensions.WireOptions); + + void IJsonModel.Write(Utf8JsonWriter writer, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support writing '{format}' format."); + } + + writer.WriteStartObject(); + writer.WritePropertyName("faceId"u8); + writer.WriteStringValue(FaceId); + writer.WritePropertyName("largePersonGroupId"u8); + writer.WriteStringValue(LargePersonGroupId); + writer.WritePropertyName("personId"u8); + writer.WriteStringValue(PersonId); + if (options.Format != "W" && _serializedAdditionalRawData != null) + { + foreach (var item in _serializedAdditionalRawData) + { + writer.WritePropertyName(item.Key); +#if NET6_0_OR_GREATER + writer.WriteRawValue(item.Value); +#else + using (JsonDocument document = JsonDocument.Parse(item.Value)) + { + JsonSerializer.Serialize(writer, document.RootElement); + } +#endif + } + } + writer.WriteEndObject(); + } + + VerifyFromLargePersonGroupRequest IJsonModel.Create(ref Utf8JsonReader reader, ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? ((IPersistableModel)this).GetFormatFromOptions(options) : options.Format; + if (format != "J") + { + throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support reading '{format}' format."); + } + + using JsonDocument document = JsonDocument.ParseValue(ref reader); + return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement, options); + } + + internal static VerifyFromLargePersonGroupRequest DeserializeVerifyFromLargePersonGroupRequest(JsonElement element, ModelReaderWriterOptions options = null) + { + options ??= ModelSerializationExtensions.WireOptions; + + if (element.ValueKind == JsonValueKind.Null) + { + return null; + } + Guid faceId = default; + string largePersonGroupId = default; + Guid personId = default; + IDictionary serializedAdditionalRawData = default; + Dictionary rawDataDictionary = new Dictionary(); + foreach (var property in element.EnumerateObject()) + { + if (property.NameEquals("faceId"u8)) + { + faceId = property.Value.GetGuid(); + continue; + } + if (property.NameEquals("largePersonGroupId"u8)) + { + largePersonGroupId = property.Value.GetString(); + continue; + } + if (property.NameEquals("personId"u8)) + { + personId = property.Value.GetGuid(); + continue; + } + if (options.Format != "W") + { + rawDataDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText())); + } + } + serializedAdditionalRawData = rawDataDictionary; + return new VerifyFromLargePersonGroupRequest(faceId, largePersonGroupId, personId, serializedAdditionalRawData); + } + + BinaryData IPersistableModel.Write(ModelReaderWriterOptions options) + { + var format = options.Format == "W" ? 
((IPersistableModel<VerifyFromLargePersonGroupRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    return ModelReaderWriter.Write(this, options);
+                default:
+                    throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support writing '{options.Format}' format.");
+            }
+        }
+
+        VerifyFromLargePersonGroupRequest IPersistableModel<VerifyFromLargePersonGroupRequest>.Create(BinaryData data, ModelReaderWriterOptions options)
+        {
+            var format = options.Format == "W" ? ((IPersistableModel<VerifyFromLargePersonGroupRequest>)this).GetFormatFromOptions(options) : options.Format;
+
+            switch (format)
+            {
+                case "J":
+                    {
+                        using JsonDocument document = JsonDocument.Parse(data);
+                        return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement, options);
+                    }
+                default:
+                    throw new FormatException($"The model {nameof(VerifyFromLargePersonGroupRequest)} does not support reading '{options.Format}' format.");
+            }
+        }
+
+        string IPersistableModel<VerifyFromLargePersonGroupRequest>.GetFormatFromOptions(ModelReaderWriterOptions options) => "J";
+
+        /// <summary> Deserializes the model from a raw response. </summary>
+        /// <param name="response"> The response to deserialize the model from. </param>
+        internal static VerifyFromLargePersonGroupRequest FromResponse(Response response)
+        {
+            using var document = JsonDocument.Parse(response.Content);
+            return DeserializeVerifyFromLargePersonGroupRequest(document.RootElement);
+        }
+
+        /// <summary> Convert into a <see cref="RequestContent"/>. </summary>
+        internal virtual RequestContent ToRequestContent()
+        {
+            var content = new Utf8JsonRequestContent();
+            content.JsonWriter.WriteObjectValue(this, ModelSerializationExtensions.WireOptions);
+            return content;
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs
new file mode 100644
index 000000000000..e78b93c002c1
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/src/Generated/VerifyFromLargePersonGroupRequest.cs
@@ -0,0 +1,87 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+// <auto-generated/>
+
+#nullable disable
+
+using System;
+using System.Collections.Generic;
+
+namespace Azure.AI.Vision.Face
+{
+    /// <summary> The VerifyFromLargePersonGroupRequest. </summary>
+    internal partial class VerifyFromLargePersonGroupRequest
+    {
+        /// <summary>
+        /// Keeps track of any properties unknown to the library.
+        /// <para>
+        /// To assign an object to the value of this property use <see cref="BinaryData.FromObjectAsJson{T}(T, System.Text.Json.JsonSerializerOptions?)"/>.
+        /// </para>
+        /// <para>
+        /// To assign an already formatted json string to this property use <see cref="BinaryData.FromString(string)"/>.
+        /// </para>
+        /// <para>
+        /// Examples:
+        /// <list type="bullet">
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson("foo")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("\"foo\"")</term>
+        /// <description>Creates a payload of "foo".</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromObjectAsJson(new { key = "value" })</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// <item>
+        /// <term>BinaryData.FromString("{\"key\": \"value\"}")</term>
+        /// <description>Creates a payload of { "key": "value" }.</description>
+        /// </item>
+        /// </list>
+        /// </para>
+        /// </summary>
+        private IDictionary<string, BinaryData> _serializedAdditionalRawData;
+
+        /// <summary> Initializes a new instance of <see cref="VerifyFromLargePersonGroupRequest"/>. </summary>
+        /// <param name="faceId"> The faceId of the face, as returned by "Detect". </param>
+        /// <param name="largePersonGroupId"> Pass an existing largePersonGroupId and personId for fast loading of a specified person. largePersonGroupId is created in "Create Large Person Group". </param>
+        /// <param name="personId"> Specifies a certain person in the Large Person Group. </param>
+        /// <exception cref="ArgumentNullException"> <paramref name="largePersonGroupId"/> is null. </exception>
+        internal VerifyFromLargePersonGroupRequest(Guid faceId, string largePersonGroupId, Guid personId)
+        {
+            Argument.AssertNotNull(largePersonGroupId, nameof(largePersonGroupId));
+
+            FaceId = faceId;
+            LargePersonGroupId = largePersonGroupId;
+            PersonId = personId;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="VerifyFromLargePersonGroupRequest"/>. </summary>
+        /// <param name="faceId"> The faceId of the face, as returned by "Detect". </param>
+        /// <param name="largePersonGroupId"> Pass an existing largePersonGroupId and personId for fast loading of a specified person. largePersonGroupId is created in "Create Large Person Group". </param>
+        /// <param name="personId"> Specifies a certain person in the Large Person Group. </param>
+        /// <param name="serializedAdditionalRawData"> Keeps track of any properties unknown to the library. </param>
+        internal VerifyFromLargePersonGroupRequest(Guid faceId, string largePersonGroupId, Guid personId, IDictionary<string, BinaryData> serializedAdditionalRawData)
+        {
+            FaceId = faceId;
+            LargePersonGroupId = largePersonGroupId;
+            PersonId = personId;
+            _serializedAdditionalRawData = serializedAdditionalRawData;
+        }
+
+        /// <summary> Initializes a new instance of <see cref="VerifyFromLargePersonGroupRequest"/> for deserialization. </summary>
+        internal VerifyFromLargePersonGroupRequest()
+        {
+        }
+
+        /// <summary> The faceId of the face, as returned by "Detect". </summary>
+        public Guid FaceId { get; }
+        /// <summary> Pass an existing largePersonGroupId and personId for fast loading of a specified person. largePersonGroupId is created in "Create Large Person Group". </summary>
+        public string LargePersonGroupId { get; }
+        /// <summary> Specifies a certain person in the Large Person Group. </summary>
+        public Guid PersonId { get; }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/FaceSessionClient/FaceSessionClientTests.cs b/sdk/face/Azure.AI.Vision.Face/tests/FaceSessionClient/FaceSessionClientTests.cs
index 9dbc472ffddc..49c92be1ef92 100644
--- a/sdk/face/Azure.AI.Vision.Face/tests/FaceSessionClient/FaceSessionClientTests.cs
+++ b/sdk/face/Azure.AI.Vision.Face/tests/FaceSessionClient/FaceSessionClientTests.cs
@@ -200,7 +200,7 @@ protected async Task CreateLivenessWithVe
         {
             var client = CreateSessionClient(nonRecordingClient: nonRecordingClient);
 
-            var createContent = new CreateLivenessSessionContent(LivenessOperationMode.Passive)
+            var createContent = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive)
             {
                 DeviceCorrelationId = DeviceCorrelationId,
             };
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs
index 0e9369d88787..eb2118560eaf 100644
--- a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs
+++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceClient.cs
@@ -230,5 +230,207 @@ public async Task Example_FaceClient_Group_GroupFaceIDs_Convenience_Async()
             Response response = await client.GroupAsync(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), Guid.Parse("015839fb-fbd9-4f79-ace9-7675fc2f1dd9"), Guid.Parse("65d083d4-9447-47d1-af30-b626144bf0fb"), Guid.Parse("fce92aed-d578-4d2e-8114-068f8af4492e"), Guid.Parse("30ea1073-cc9e-4652-b1e3-d08fb7b95315"), Guid.Parse("be386ab3-af91-4104-9e6d-4dae4c9fddb7"), Guid.Parse("fbd2a038-dbff-452c-8e79-2ee81b1aa84e"), Guid.Parse("b64d5e15-8257-4af2-b20a-5a750f8940e7") });
         }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            FaceClient client = new FaceClient(endpoint, credential);
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                faceId = "c5c24a82-6845-4031-9d5d-978df9175426",
maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", + }); + Response response = client.FindSimilarFromLargeFaceList(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + maxNumOfCandidatesReturned = 3, + mode = "matchPerson", + largeFaceListId = "your_large_face_list_id", + }); + Response response = await client.FindSimilarFromLargeFaceListAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.FindSimilarFromLargeFaceList(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_FindSimilarFromLargeFaceList_FindSimilarFromLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.FindSimilarFromLargeFaceListAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_face_list_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"c5c24a82-6845-4031-9d5d-978df9175426" + }, + largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7F, + }); + Response response = client.IdentifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceIds = new object[] + { +"c5c24a82-6845-4031-9d5d-978df9175426" + }, + 
largePersonGroupId = "your_large_person_group_id", + maxNumOfCandidatesReturned = 9, + confidenceThreshold = 0.7F, + }); + Response response = await client.IdentifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("personId").ToString()); + Console.WriteLine(result[0].GetProperty("candidates")[0].GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = client.IdentifyFromLargePersonGroup(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_IdentifyFromLargePersonGroup_IdentifyFromLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response> response = await client.IdentifyFromLargePersonGroupAsync(new Guid[] { Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426") }, "your_large_person_group_id"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", + }); + Response response = client.VerifyFromLargePersonGroup(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + faceId = "c5c24a82-6845-4031-9d5d-978df9175426", + personId = "815df99c-598f-4926-930a-a734b3fd651c", + largePersonGroupId = "your_large_person_group", + }); + Response response = await client.VerifyFromLargePersonGroupAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("isIdentical").ToString()); + Console.WriteLine(result.GetProperty("confidence").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = 
client.VerifyFromLargePersonGroup(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceClient_VerifyFromLargePersonGroup_VerifyFromLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceClient client = new FaceClient(endpoint, credential); + + Response response = await client.VerifyFromLargePersonGroupAsync(Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"), "your_large_person_group", Guid.Parse("815df99c-598f-4926-930a-a734b3fd651c")); + } } } diff --git a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs index 533e4ac10132..b793cf92c5e9 100644 --- a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs +++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_FaceSessionClient.cs @@ -514,5 +514,119 @@ public async Task Example_FaceSessionClient_GetLivenessWithVerifySessionAuditEnt Response> response = await client.GetLivenessWithVerifySessionAuditEntriesAsync("b12e033e-bda7-4b83-a211-e721c661f30e"); } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", + }); + Response response = client.DetectFromSessionImage(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + using RequestContent content = RequestContent.Create(new + { + sessionImageId = "aa93ce80-9a9b-48bd-ae1a-1c7543841e92", + }); + Response response = await client.DetectFromSessionImageAsync(content); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("top").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("left").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("width").ToString()); + Console.WriteLine(result[0].GetProperty("faceRectangle").GetProperty("height").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new 
AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = client.DetectFromSessionImage("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_DetectFromSessionImage_DetectFromSessionImageId_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response> response = await client.DetectFromSessionImageAsync("aa93ce80-9a9b-48bd-ae1a-1c7543841e92"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetSessionImage_GetSessionImage() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetSessionImage_GetSessionImage_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa", null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_FaceSessionClient_GetSessionImage_GetSessionImage_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = client.GetSessionImage("3d035d35-2e01-4ed4-8935-577afde9caaa"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_FaceSessionClient_GetSessionImage_GetSessionImage_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + FaceSessionClient client = new FaceSessionClient(endpoint, credential); + + Response response = await client.GetSessionImageAsync("3d035d35-2e01-4ed4-8935-577afde9caaa"); + } } } diff --git a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargeFaceListClient.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargeFaceListClient.cs new file mode 100644 index 000000000000..f08bb21a79c9 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargeFaceListClient.cs @@ -0,0 +1,484 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.Vision.Face.Samples +{ + public partial class Samples_LargeFaceListClient + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Create_CreateLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", + }); + Response response = client.Create(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Create_CreateLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + recognitionModel = "recognition_01", + }); + Response response = await client.CreateAsync(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Create_CreateLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.Create("your_large_face_list_name"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Create_CreateLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.CreateAsync("your_large_face_list_name"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Delete_DeleteLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.Delete(); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Delete_DeleteLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.DeleteAsync(); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList() + { + Uri endpoint 
= new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.GetLargeFaceList(true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.GetLargeFaceListAsync(true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.GetLargeFaceList(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceList_GetLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.GetLargeFaceListAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Update_UpdateLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + }); + Response response = client.Update(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Update_UpdateLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_face_list_name", + userData = "your_user_data", + }); + Response response = await client.UpdateAsync(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, 
credential).GetLargeFaceListClient(null); + + Response response = client.GetLargeFaceLists("my_list_id", 20, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient(null); + + Response response = await client.GetLargeFaceListsAsync("my_list_id", 20, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largeFaceListId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient(null); + + Response> response = client.GetLargeFaceLists(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetLargeFaceLists_GetLargeFaceLists_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient(null); + + Response> response = await client.GetLargeFaceListsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.GetTrainingStatus(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.GetTrainingStatusAsync(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only 
validating compilation of examples")] + public void Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.GetTrainingStatus(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_GetTrainingStatus_GetTrainingStatusOfLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.GetTrainingStatusAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_DeleteFace_DeleteFaceFromLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.DeleteFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_DeleteFace_DeleteFaceFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.DeleteFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, 
credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.GetFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetFace_GetFaceFromLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.GetFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055")); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_UpdateFace_UpdateFaceInLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + userData = "your_user_data", + }); + Response response = client.UpdateFace(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_UpdateFace_UpdateFaceInLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + using RequestContent content = RequestContent.Create(new + { + userData = "your_user_data", + }); + Response response = await client.UpdateFaceAsync(Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = client.GetFaces("00000000-0000-0000-0000-000000000000", 20, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response response = await client.GetFacesAsync("00000000-0000-0000-0000-000000000000", 20, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("persistedFaceId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, 
credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response> response = client.GetFaces(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceListFace_GetFaces_GetFacesFromLargeFaceList_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Response> response = await client.GetFacesAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargeFaceList_Train_TrainLargeFaceList() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Operation operation = client.Train(WaitUntil.Completed); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargeFaceList_Train_TrainLargeFaceList_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargeFaceListClient client = new FaceAdministrationClient(endpoint, credential).GetLargeFaceListClient("your_large_face_list_id"); + + Operation operation = await client.TrainAsync(WaitUntil.Completed); + } + } +} diff --git a/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargePersonGroupClient.cs b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargePersonGroupClient.cs new file mode 100644 index 000000000000..8d667ade83b4 --- /dev/null +++ b/sdk/face/Azure.AI.Vision.Face/tests/Generated/Samples/Samples_LargePersonGroupClient.cs @@ -0,0 +1,660 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT License. 
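Pieced together, the `LargeFaceListClient` samples above imply the usual lifecycle: create the list, add faces, train it, then query it through `FaceClient`. A hedged end-to-end sketch under those assumptions; the add-face step is elided because its overloads do not appear in this excerpt, and all IDs are placeholders:

```C#
using System;
using Azure;
using Azure.AI.Vision.Face;

Uri endpoint = new Uri("<your-endpoint>");
AzureKeyCredential credential = new AzureKeyCredential("<your-key>");
FaceAdministrationClient admin = new FaceAdministrationClient(endpoint, credential);
LargeFaceListClient listClient = admin.GetLargeFaceListClient("your_large_face_list_id");

// Create the list, then populate it before training.
listClient.Create("your_large_face_list_name");
// ... add faces to the list here (the AddFace operations are not shown in this excerpt) ...

// Training is a long-running operation; WaitUntil.Completed blocks until it finishes.
listClient.Train(WaitUntil.Completed);

// Query the trained list with a faceId previously returned by Detect.
FaceClient faceClient = new FaceClient(endpoint, credential);
var similarFaces = faceClient.FindSimilarFromLargeFaceList(
    Guid.Parse("c5c24a82-6845-4031-9d5d-978df9175426"),
    "your_large_face_list_id");
```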
+ +// + +#nullable disable + +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading.Tasks; +using Azure.Core; +using Azure.Identity; +using NUnit.Framework; + +namespace Azure.AI.Vision.Face.Samples +{ + public partial class Samples_LargePersonGroupClient + { + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_Create_CreateLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_person_group_name", + userData = "your_user_data", + recognitionModel = "recognition_01", + }); + Response response = client.Create(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_Create_CreateLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_person_group_name", + userData = "your_user_data", + recognitionModel = "recognition_01", + }); + Response response = await client.CreateAsync(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_Create_CreateLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = client.Create("your_large_person_group_name"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_Create_CreateLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = await client.CreateAsync("your_large_person_group_name"); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_Delete_DeleteLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = client.Delete(); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_Delete_DeleteLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = await client.DeleteAsync(); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation 
of examples")] + public void Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = client.GetLargePersonGroup(true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = await client.GetLargePersonGroupAsync(true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("name").ToString()); + Console.WriteLine(result.GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = client.GetLargePersonGroup(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroup_GetLargePersonGroup_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = await client.GetLargePersonGroupAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_Update_UpdateLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_person_group_name", + userData = "your_user_data", + }); + Response response = client.Update(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_Update_UpdateLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + using RequestContent content = RequestContent.Create(new + { + name = "your_large_person_group_name", + userData = "your_user_data", + }); + Response response = await client.UpdateAsync(content); + + Console.WriteLine(response.Status); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void 
Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + + Response response = client.GetLargePersonGroups("00000000-0000-0000-0000-000000000000", 20, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + + Response response = await client.GetLargePersonGroupsAsync("00000000-0000-0000-0000-000000000000", 20, true, null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result[0].GetProperty("name").ToString()); + Console.WriteLine(result[0].GetProperty("largePersonGroupId").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups_Convenience() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + + Response> response = client.GetLargePersonGroups(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetLargePersonGroups_GetLargePersonGroups_Convenience_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient(null); + + Response> response = await client.GetLargePersonGroupsAsync(); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public void Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = client.GetTrainingStatus(null); + + JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement; + Console.WriteLine(result.GetProperty("status").ToString()); + Console.WriteLine(result.GetProperty("createdDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastActionDateTime").ToString()); + Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString()); + } + + [Test] + [Ignore("Only validating compilation of examples")] + public async Task Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup_Async() + { + Uri endpoint = new Uri(""); + AzureKeyCredential credential = new AzureKeyCredential(""); + LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id"); + + Response response = await client.GetTrainingStatusAsync(null); + + JsonElement 
+            Console.WriteLine(result.GetProperty("status").ToString());
+            Console.WriteLine(result.GetProperty("createdDateTime").ToString());
+            Console.WriteLine(result.GetProperty("lastActionDateTime").ToString());
+            Console.WriteLine(result.GetProperty("lastSuccessfulTrainingDateTime").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup_Convenience()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<FaceTrainingResult> response = client.GetTrainingStatus();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_GetTrainingStatus_GetTrainingStatusOfLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<FaceTrainingResult> response = await client.GetTrainingStatusAsync();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_person_name",
+                userData = "your_user_data",
+            });
+            Response response = client.CreatePerson(content);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("personId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_person_name",
+                userData = "your_user_data",
+            });
+            Response response = await client.CreatePersonAsync(content);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("personId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup_Convenience()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<CreatePersonResult> response = client.CreatePerson("your_large_person_group_person_name");
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_CreatePerson_CreatePersonInLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<CreatePersonResult> response = await client.CreatePersonAsync("your_large_person_group_person_name");
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_DeletePerson_DeletePersonFromLargePersonGroup()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = client.DeletePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_DeletePerson_DeletePersonFromLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = await client.DeletePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("personId").ToString());
+            Console.WriteLine(result.GetProperty("name").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("personId").ToString());
+            Console.WriteLine(result.GetProperty("name").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup_Convenience()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPerson> response = client.GetPerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_GetPerson_GetPersonFromLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPerson> response = await client.GetPersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_UpdatePerson_UpdatePersonInLargePersonGroup()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_person_name",
+                userData = "your_user_data",
+            });
+            Response response = client.UpdatePerson(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_UpdatePerson_UpdatePersonInLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                name = "your_large_person_group_person_name",
+                userData = "your_user_data",
+            });
+            Response response = await client.UpdatePersonAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = client.GetPersons("00000000-0000-0000-0000-000000000000", 20, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result[0].GetProperty("personId").ToString());
+            Console.WriteLine(result[0].GetProperty("name").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = await client.GetPersonsAsync("00000000-0000-0000-0000-000000000000", 20, null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result[0].GetProperty("personId").ToString());
+            Console.WriteLine(result[0].GetProperty("name").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup_Convenience()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<IReadOnlyList<LargePersonGroupPerson>> response = client.GetPersons();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPerson_GetPersons_GetPersonsFromLargePersonGroup_Convenience_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<IReadOnlyList<LargePersonGroupPerson>> response = await client.GetPersonsAsync();
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPersonFace_DeleteFace_DeleteFaceFromLargePersonGroupPerson()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = client.DeleteFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPersonFace_DeleteFace_DeleteFaceFromLargePersonGroupPerson_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = await client.DeleteFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("persistedFaceId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), null);
+
+            JsonElement result = JsonDocument.Parse(response.ContentStream).RootElement;
+            Console.WriteLine(result.GetProperty("persistedFaceId").ToString());
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson_Convenience()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPersonFace> response = client.GetFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPersonFace_GetFace_GetFaceFromLargePersonGroupPerson_Convenience_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Response<LargePersonGroupPersonFace> response = await client.GetFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"));
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroupPersonFace_UpdateFace_UpdateFaceInLargePersonGroupPerson()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                userData = "your_user_data",
+            });
+            Response response = client.UpdateFace(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroupPersonFace_UpdateFace_UpdateFaceInLargePersonGroupPerson_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            using RequestContent content = RequestContent.Create(new
+            {
+                userData = "your_user_data",
+            });
+            Response response = await client.UpdateFaceAsync(Guid.Parse("25985303-c537-4467-b41d-bdb45cd95ca1"), Guid.Parse("43897a75-8d6f-42cf-885e-74832febb055"), content);
+
+            Console.WriteLine(response.Status);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public void Example_LargePersonGroup_Train_TrainLargePersonGroup()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Operation operation = client.Train(WaitUntil.Completed);
+        }
+
+        [Test]
+        [Ignore("Only validating compilation of examples")]
+        public async Task Example_LargePersonGroup_Train_TrainLargePersonGroup_Async()
+        {
+            Uri endpoint = new Uri("");
+            AzureKeyCredential credential = new AzureKeyCredential("");
+            LargePersonGroupClient client = new FaceAdministrationClient(endpoint, credential).GetLargePersonGroupClient("your_large_person_group_id");
+
+            Operation operation = await client.TrainAsync(WaitUntil.Completed);
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/FaceSamplesBase.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/FaceSamplesBase.cs
index 96d8c24cc236..19f26b3ceb13 100644
--- a/sdk/face/Azure.AI.Vision.Face/tests/samples/FaceSamplesBase.cs
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/FaceSamplesBase.cs
@@ -40,6 +40,34 @@ public FaceSessionClient CreateSessionClient()
             return sessionClient;
         }
 
+        public LargePersonGroupClient CreateLargePersonGroupClient(string id)
+        {
+            #region Snippet:CreateLargePersonGroupClient
+#if SNIPPET
+            Uri endpoint = new Uri("");
+#else
+            var endpoint = TestEnvironment.GetUrlVariable("FACE_ENDPOINT");
+#endif
+            DefaultAzureCredential credential = new DefaultAzureCredential();
+            var groupClient = new LargePersonGroupClient(endpoint, credential, id);
+            #endregion
+            return groupClient;
+        }
+
+        public LargeFaceListClient CreateLargeFaceListClient(string id)
+        {
+            #region Snippet:CreateLargeFaceListClient
+#if SNIPPET
+            Uri endpoint = new Uri("");
+#else
+            var endpoint = TestEnvironment.GetUrlVariable("FACE_ENDPOINT");
+#endif
+            DefaultAzureCredential credential = new DefaultAzureCredential();
+            var listClient = new LargeFaceListClient(endpoint, credential, id);
+            #endregion
+            return listClient;
+        }
+
         public FaceClient CreateClientWithKey()
         {
             #region Snippet:CreateFaceClientWithKey
@@ -49,5 +77,16 @@ public FaceClient CreateClientWithKey()
             #endregion
             return client;
         }
+
+        public FaceClient CreateClientWithSpecifyVersion()
+        {
+            #region Snippet:CreateFaceClientWithVersion
+            Uri endpoint = new Uri("");
+            DefaultAzureCredential credential = new DefaultAzureCredential();
+            AzureAIVisionFaceClientOptions options = new AzureAIVisionFaceClientOptions(AzureAIVisionFaceClientOptions.ServiceVersion.V1_2_Preview_1);
+            FaceClient client = new FaceClient(endpoint, credential, options);
+            #endregion
+            return client;
+        }
     }
 }
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetection.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetection.cs
index 86a53d0cdd9c..fcf0d41a4c3a 100644
--- a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetection.cs
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetection.cs
@@ -37,7 +37,7 @@ public void Detect()
             {
                 Console.WriteLine($"Face Rectangle: left={detectedFace.FaceRectangle.Left}, top={detectedFace.FaceRectangle.Top}, width={detectedFace.FaceRectangle.Width}, height={detectedFace.FaceRectangle.Height}");
                 Console.WriteLine($"Head pose: pitch={detectedFace.FaceAttributes.HeadPose.Pitch}, roll={detectedFace.FaceAttributes.HeadPose.Roll}, yaw={detectedFace.FaceAttributes.HeadPose.Yaw}");
-                Console.WriteLine($"Mask: {detectedFace.FaceAttributes.Mask}");
+                Console.WriteLine($"Mask: NoseAndMouthCovered={detectedFace.FaceAttributes.Mask.NoseAndMouthCovered}, Type={detectedFace.FaceAttributes.Mask.Type}");
                 Console.WriteLine($"Quality: {detectedFace.FaceAttributes.QualityForRecognition}");
                 Console.WriteLine($"Recognition model: {detectedFace.RecognitionModel}");
                 Console.WriteLine($"Landmarks: ");
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetectionAsync.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetectionAsync.cs
index e3fb7b3e2356..5fd99ba02f32 100644
--- a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetectionAsync.cs
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample1_FaceDetectionAsync.cs
@@ -37,7 +37,7 @@ public async Task DetectAsync()
             {
                 Console.WriteLine($"Face Rectangle: left={detectedFace.FaceRectangle.Left}, top={detectedFace.FaceRectangle.Top}, width={detectedFace.FaceRectangle.Width}, height={detectedFace.FaceRectangle.Height}");
                 Console.WriteLine($"Head pose: pitch={detectedFace.FaceAttributes.HeadPose.Pitch}, roll={detectedFace.FaceAttributes.HeadPose.Roll}, yaw={detectedFace.FaceAttributes.HeadPose.Yaw}");
-                Console.WriteLine($"Mask: {detectedFace.FaceAttributes.Mask}");
+                Console.WriteLine($"Mask: NoseAndMouthCovered={detectedFace.FaceAttributes.Mask.NoseAndMouthCovered}, Type={detectedFace.FaceAttributes.Mask.Type}");
                 Console.WriteLine($"Quality: {detectedFace.FaceAttributes.QualityForRecognition}");
                 Console.WriteLine($"Recognition model: {detectedFace.RecognitionModel}");
                 Console.WriteLine($"Landmarks: ");
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSession.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSession.cs
index fbdf0e5ace8a..bdbfd2144031 100644
--- a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSession.cs
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSession.cs
@@ -20,7 +20,7 @@ public void CreateDetectLivenessWithVerifySession(bool deleteSession)
             var sessionClient = CreateSessionClient();
 
             #region Snippet:CreateLivenessWithVerifySession
-            var parameters = new CreateLivenessSessionContent(LivenessOperationMode.Passive) {
+            var parameters = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive) {
                 SendResultsToClient = true,
                 DeviceCorrelationId = Guid.NewGuid().ToString(),
             };
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.cs
index d9160dbd6b1e..39e220103bae 100644
--- a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.cs
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample3_DetectLivenessWithVerifyWithSessionAsync.cs
@@ -19,7 +19,7 @@ public async Task CreateDetectLivenessWithVerifySessionAsync(bool deleteSession)
             var sessionClient = CreateSessionClient();
 
             #region Snippet:CreateLivenessWithVerifySessionAsync
-            var parameters = new CreateLivenessSessionContent(LivenessOperationMode.Passive) {
+            var parameters = new CreateLivenessWithVerifySessionContent(LivenessOperationMode.Passive) {
                 SendResultsToClient = true,
                 DeviceCorrelationId = Guid.NewGuid().ToString(),
             };
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognition.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognition.cs
new file mode 100644
index 000000000000..7daf2a582619
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognition.cs
@@ -0,0 +1,113 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using Azure.AI.Vision.Face.Tests;
+using Azure.Core.TestFramework;
+using NUnit.Framework;
+
+namespace Azure.AI.Vision.Face.Samples
+{
+    public partial class Sample4_StatelessFaceRecognition : FaceSamplesBase
+    {
+        [Test]
+        public void Grouping()
+        {
+            var client = CreateClient();
+            #region Snippet:Group
+            var targetImages = new (string, Uri)[] {
+                ("Group image", new Uri(FaceTestConstant.UrlIdentification1Image)),
+                ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)),
+                ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)),
+                ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image))
+            };
+            var faceIds = new Dictionary<Guid, (FaceDetectionResult, string)>();
+
+            foreach (var (imageName, targetImage) in targetImages)
+            {
+                var detectResponse = client.Detect(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+                Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{imageName}'.");
+                foreach (var face in detectResponse.Value)
+                {
+                    faceIds[face.FaceId.Value] = (face, imageName);
+                }
+            }
+
+            var groupResponse = client.Group(faceIds.Keys);
+            var groups = groupResponse.Value;
+
+            Console.WriteLine($"Found {groups.Groups.Count} group(s) in the target images.");
+            foreach (var group in groups.Groups)
+            {
+                Console.WriteLine($"Group: ");
+                foreach (var faceId in group)
+                {
+                    Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}");
+                }
+            }
+
+            Console.WriteLine($"Found {groups.MessyGroup.Count} face(s) that are not in any group.");
+            foreach (var faceId in groups.MessyGroup)
+            {
+                Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}");
+            }
+            #endregion
+        }
+
+        [Test]
+        public void Verification()
+        {
+            var client = CreateClient();
+            #region Snippet:VerifyFaceToFace
+            var data = new (string Name, Uri Uri)[] {
+                ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)),
+                ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)),
+                ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image))
+            };
+            var faceIds = new List<Guid>();
+
+            foreach (var tuple in data)
+            {
+                var detectResponse = client.Detect(tuple.Uri, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+                Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{tuple.Name}'.");
+                faceIds.Add(detectResponse.Value.Single().FaceId.Value);
+            }
+
+            var verifyDad1Dad2Response = client.VerifyFaceToFace(faceIds[0], faceIds[1]);
+            Console.WriteLine($"Verification between Dad image 1 and Dad image 2: {verifyDad1Dad2Response.Value.Confidence}");
+            Console.WriteLine($"Is the same person: {verifyDad1Dad2Response.Value.IsIdentical}");
+
+            var verifyDad1SonResponse = client.VerifyFaceToFace(faceIds[0], faceIds[2]);
+            Console.WriteLine($"Verification between Dad image 1 and Son image 1: {verifyDad1SonResponse.Value.Confidence}");
+            Console.WriteLine($"Is the same person: {verifyDad1SonResponse.Value.IsIdentical}");
+            #endregion
+        }
+
+        [Test]
+        public void FindSimilar()
+        {
+            var client = CreateClient();
+            #region Snippet:FindSimilar
+            var dadImage = new Uri(FaceTestConstant.UrlFamily1Dad1Image);
+            var detectDadResponse = client.Detect(dadImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            Console.WriteLine($"Detected {detectDadResponse.Value.Count} face(s) in the Dad image.");
+            var dadFaceId = detectDadResponse.Value.Single().FaceId.Value;
+
+            var targetImage = new Uri(FaceTestConstant.UrlIdentification1Image);
+            var detectResponse = client.Detect(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image.");
+            var faceIds = detectResponse.Value.Select(face => face.FaceId.Value);
+
+            var response = client.FindSimilar(dadFaceId, faceIds);
+            var similarFaces = response.Value;
+            Console.WriteLine($"Found {similarFaces.Count} similar face(s) in the target image.");
+            foreach (var similarFace in similarFaces)
+            {
+                Console.WriteLine($"Face ID: {similarFace.FaceId}, confidence: {similarFace.Confidence}");
+            }
+            #endregion
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognitionAsync.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognitionAsync.cs
new file mode 100644
index 000000000000..f32e0dfe5e77
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample4_StatelessFaceRecognitionAsync.cs
@@ -0,0 +1,115 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Threading.Tasks;
+using Azure.AI.Vision.Face.Tests;
+using Azure.Core.TestFramework;
+using NUnit.Framework;
+
+namespace Azure.AI.Vision.Face.Samples
+{
+    public partial class Sample4_StatelessFaceRecognition : FaceSamplesBase
+    {
+        [Test]
+        public async Task GroupingAsync()
+        {
+            var client = CreateClient();
+            #region Snippet:GroupAsync
+            var targetImages = new (string, Uri)[] {
+                ("Group image", new Uri(FaceTestConstant.UrlIdentification1Image)),
+                ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)),
+                ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)),
+                ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image))
+            };
+            var faceIds = new Dictionary<Guid, (FaceDetectionResult, string)>();
+
+            foreach (var (imageName, targetImage) in targetImages)
+            {
+                var detectResponse = await client.DetectAsync(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+                Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{imageName}'.");
+                foreach (var face in detectResponse.Value)
+                {
+                    faceIds[face.FaceId.Value] = (face, imageName);
+                }
+            }
+
+            var groupResponse = await client.GroupAsync(faceIds.Keys);
+            var groups = groupResponse.Value;
+
+            Console.WriteLine($"Found {groups.Groups.Count} group(s) in the target images.");
+            foreach (var group in groups.Groups)
+            {
+                Console.WriteLine($"Group: ");
+                foreach (var faceId in group)
+                {
+                    Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}");
+                }
+            }
+
+            Console.WriteLine($"Found {groups.MessyGroup.Count} face(s) that are not in any group.");
+            foreach (var faceId in groups.MessyGroup)
+            {
+                Console.WriteLine($" {faceId} from '{faceIds[faceId].Item2}', face rectangle: {faceIds[faceId].Item1.FaceRectangle.Left}, {faceIds[faceId].Item1.FaceRectangle.Top}, {faceIds[faceId].Item1.FaceRectangle.Width}, {faceIds[faceId].Item1.FaceRectangle.Height}");
+            }
+            #endregion
+        }
+
+        [Test]
+        public async Task VerificationAsync()
+        {
+            var client = CreateClient();
+            #region Snippet:VerifyFaceToFaceAsync
+            var data = new (string Name, Uri Uri)[] {
+                ("Dad image 1", new Uri(FaceTestConstant.UrlFamily1Dad1Image)),
+                ("Dad image 2", new Uri(FaceTestConstant.UrlFamily1Dad2Image)),
+                ("Son image 1", new Uri(FaceTestConstant.UrlFamily1Son1Image))
+            };
+            var faceIds = new List<Guid>();
+
+            foreach (var tuple in data)
+            {
+                var detectResponse = await client.DetectAsync(tuple.Uri, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+                Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image '{tuple.Name}'.");
+                faceIds.Add(detectResponse.Value.Single().FaceId.Value);
+            }
+
+            var verifyDad1Dad2Response = await client.VerifyFaceToFaceAsync(faceIds[0], faceIds[1]);
+            Console.WriteLine($"Verification between Dad image 1 and Dad image 2: {verifyDad1Dad2Response.Value.Confidence}");
+            Console.WriteLine($"Is the same person: {verifyDad1Dad2Response.Value.IsIdentical}");
+
+            var verifyDad1SonResponse = await client.VerifyFaceToFaceAsync(faceIds[0], faceIds[2]);
+            Console.WriteLine($"Verification between Dad image 1 and Son image 1: {verifyDad1SonResponse.Value.Confidence}");
+            Console.WriteLine($"Is the same person: {verifyDad1SonResponse.Value.IsIdentical}");
+            #endregion
+        }
+
+        [Test]
+        public async Task FindSimilarAsync()
+        {
+            var client = CreateClient();
+            #region Snippet:FindSimilarAsync
+            var dadImage = new Uri(FaceTestConstant.UrlFamily1Dad1Image);
+            var detectDadResponse = await client.DetectAsync(dadImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            Console.WriteLine($"Detected {detectDadResponse.Value.Count} face(s) in the Dad image.");
+            var dadFaceId = detectDadResponse.Value.Single().FaceId.Value;
+
+            var targetImage = new Uri(FaceTestConstant.UrlIdentification1Image);
+            var detectResponse = await client.DetectAsync(targetImage, FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            Console.WriteLine($"Detected {detectResponse.Value.Count} face(s) in the image.");
+            var faceIds = detectResponse.Value.Select(face => face.FaceId.Value);
+
+            var response = await client.FindSimilarAsync(dadFaceId, faceIds);
+            var similarFaces = response.Value;
+            Console.WriteLine($"Found {similarFaces.Count} similar face(s) in the target image.");
+            foreach (var similarFace in similarFaces)
+            {
+                Console.WriteLine($"Face ID: {similarFace.FaceId}, confidence: {similarFace.Confidence}");
+            }
+            #endregion
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroup.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroup.cs
new file mode 100644
index 000000000000..4df6ff03d421
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroup.cs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure.AI.Vision.Face.Tests;
+using Azure.Core.TestFramework;
+using NUnit.Framework;
+
+namespace Azure.AI.Vision.Face.Samples
+{
+    public partial class Sample5_LargePersonGroup : FaceSamplesBase
+    {
+        [Test]
+        public void VerifyAndIdentifyFromLargePersonGroup()
+        {
+            var groupId = "lpg_family1";
+            var groupClient = CreateLargePersonGroupClient(groupId);
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_CreateLargePersonGroup
+
+            groupClient.Create("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04);
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_CreatePersonAndAddFaces
+            var persons = new[]
+            {
+                new { Name = "Bill", UserData = "Dad", ImageUrls = new[] { FaceTestConstant.UrlFamily1Dad1Image, FaceTestConstant.UrlFamily1Dad2Image } },
+                new { Name = "Clare", UserData = "Mom", ImageUrls = new[] { FaceTestConstant.UrlFamily1Mom1Image, FaceTestConstant.UrlFamily1Mom2Image } },
+                new { Name = "Ron", UserData = "Son", ImageUrls = new[] { FaceTestConstant.UrlFamily1Son1Image, FaceTestConstant.UrlFamily1Son2Image } }
+            };
+            var personIds = new Dictionary<string, Guid>();
+
+            foreach (var person in persons)
+            {
+                var createPersonResponse = groupClient.CreatePerson(person.Name, userData: person.UserData);
+                var personId = createPersonResponse.Value.PersonId;
+                personIds.Add(person.Name, personId);
+
+                foreach (var imageUrl in person.ImageUrls)
+                {
+                    groupClient.AddFace(personId, new Uri(imageUrl), userData: $"{person.UserData}-{imageUrl}", detectionModel: FaceDetectionModel.Detection03);
+                }
+            }
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_Train
+            var operation = groupClient.Train(WaitUntil.Completed);
+            operation.WaitForCompletionResponse();
+            #endregion
+
+            var faceClient = CreateClient();
+            var detectResponse = faceClient.Detect(new Uri(FaceTestConstant.UrlFamily1Dad3Image), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            var faceId = detectResponse.Value[0].FaceId.Value;
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_Verify
+            var verifyDadResponse = faceClient.VerifyFromLargePersonGroup(faceId, groupId, personIds["Bill"]);
+            Console.WriteLine($"Is the detected face Bill? {verifyDadResponse.Value.IsIdentical} ({verifyDadResponse.Value.Confidence})");
+
+            var verifyMomResponse = faceClient.VerifyFromLargePersonGroup(faceId, groupId, personIds["Clare"]);
+            Console.WriteLine($"Is the detected face Clare? {verifyMomResponse.Value.IsIdentical} ({verifyMomResponse.Value.Confidence})");
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_Identify
+            var identifyResponse = faceClient.IdentifyFromLargePersonGroup(new[] { faceId }, groupId);
+            foreach (var candidate in identifyResponse.Value[0].Candidates)
+            {
+                var person = groupClient.GetPerson(candidate.PersonId);
+                Console.WriteLine($"The detected face belongs to {person.Value.Name} ({candidate.Confidence})");
+            }
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_DeleteLargePersonGroup
+            groupClient.Delete();
+            #endregion
+        }
+    }
+}
\ No newline at end of file
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroupAsync.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroupAsync.cs
new file mode 100644
index 000000000000..1a610d3c1f5c
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample5_LargePersonGroupAsync.cs
@@ -0,0 +1,78 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure.AI.Vision.Face.Tests;
+using Azure.Core.TestFramework;
+using NUnit.Framework;
+
+namespace Azure.AI.Vision.Face.Samples
+{
+    public partial class Sample5_LargePersonGroup : FaceSamplesBase
+    {
+        [Test]
+        public async Task VerifyAndIdentifyFromLargePersonGroupAsync()
+        {
+            var groupId = "lpg_family1";
+            var groupClient = CreateLargePersonGroupClient(groupId);
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_CreateLargePersonGroupAsync
+
+            await groupClient.CreateAsync("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04);
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_CreatePersonAndAddFacesAsync
+            var persons = new[]
+            {
+                new { Name = "Bill", UserData = "Dad", ImageUrls = new[] { FaceTestConstant.UrlFamily1Dad1Image, FaceTestConstant.UrlFamily1Dad2Image } },
+                new { Name = "Clare", UserData = "Mom", ImageUrls = new[] { FaceTestConstant.UrlFamily1Mom1Image, FaceTestConstant.UrlFamily1Mom2Image } },
+                new { Name = "Ron", UserData = "Son", ImageUrls = new[] { FaceTestConstant.UrlFamily1Son1Image, FaceTestConstant.UrlFamily1Son2Image } }
+            };
+            var personIds = new Dictionary<string, Guid>();
+
+            foreach (var person in persons)
+            {
+                var createPersonResponse = await groupClient.CreatePersonAsync(person.Name, userData: person.UserData);
+                var personId = createPersonResponse.Value.PersonId;
+                personIds.Add(person.Name, personId);
+
+                foreach (var imageUrl in person.ImageUrls)
+                {
+                    await groupClient.AddFaceAsync(personId, new Uri(imageUrl), userData: $"{person.UserData}-{imageUrl}", detectionModel: FaceDetectionModel.Detection03);
+                }
+            }
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_TrainAsync
+            var operation = await groupClient.TrainAsync(WaitUntil.Completed);
+            await operation.WaitForCompletionResponseAsync();
+            #endregion
+
+            var faceClient = CreateClient();
+            var detectResponse = await faceClient.DetectAsync(new Uri(FaceTestConstant.UrlFamily1Dad3Image), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            var faceId = detectResponse.Value[0].FaceId.Value;
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_VerifyAsync
+            var verifyDadResponse = await faceClient.VerifyFromLargePersonGroupAsync(faceId, groupId, personIds["Bill"]);
+            Console.WriteLine($"Is the detected face Bill? {verifyDadResponse.Value.IsIdentical} ({verifyDadResponse.Value.Confidence})");
+
+            var verifyMomResponse = await faceClient.VerifyFromLargePersonGroupAsync(faceId, groupId, personIds["Clare"]);
+            Console.WriteLine($"Is the detected face Clare? {verifyMomResponse.Value.IsIdentical} ({verifyMomResponse.Value.Confidence})");
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_IdentifyAsync
+            var identifyResponse = await faceClient.IdentifyFromLargePersonGroupAsync(new[] { faceId }, groupId);
+            foreach (var candidate in identifyResponse.Value[0].Candidates)
+            {
+                var person = await groupClient.GetPersonAsync(candidate.PersonId);
+                Console.WriteLine($"The detected face belongs to {person.Value.Name} ({candidate.Confidence})");
+            }
+            #endregion
+
+            #region Snippet:VerifyAndIdentifyFromLargePersonGroup_DeleteLargePersonGroupAsync
+            await groupClient.DeleteAsync();
+            #endregion
+        }
+    }
+}
\ No newline at end of file
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceList.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceList.cs
new file mode 100644
index 000000000000..3eace9b52a6b
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceList.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure.AI.Vision.Face.Tests;
+using Azure.Core.TestFramework;
+using NUnit.Framework;
+
+namespace Azure.AI.Vision.Face.Samples
+{
+    public partial class Sample6_LargeFaceList : FaceSamplesBase
+    {
+        [Test]
+        public void FindSimilarFromLargeFaceList()
+        {
+            var listId = "lfl_family1";
+            var listClient = CreateLargeFaceListClient(listId);
+
+            #region Snippet:CreateLargeFaceList
+            listClient.Create("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04);
+            #endregion
+
+            #region Snippet:AddFacesToLargeFaceList
+            var faces = new[]
+            {
+                new { UserData = "Dad", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Dad1Image) },
+                new { UserData = "Mom", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Mom1Image) },
+                new { UserData = "Son", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Son1Image) }
+            };
+            var faceIds = new Dictionary<Guid, string>();
+
+            foreach (var face in faces)
+            {
+                var addFaceResponse = listClient.AddFace(face.ImageUrl, userData: face.UserData);
+                faceIds[addFaceResponse.Value.PersistedFaceId] = face.UserData;
+            }
+            #endregion
+
+            #region Snippet:TrainLargeFaceList
+            var operation = listClient.Train(WaitUntil.Completed);
+            operation.WaitForCompletionResponse();
+            #endregion
+
+            #region Snippet:FindSimilarFromLargeFaceList
+            var faceClient = CreateClient();
+            var detectResponse = faceClient.Detect(new Uri(FaceTestConstant.UrlFamily1Dad3Image), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            var faceId = detectResponse.Value[0].FaceId.Value;
+
+            var findSimilarResponse = faceClient.FindSimilarFromLargeFaceList(faceId, listId);
+            foreach (var similarFace in findSimilarResponse.Value)
+            {
+                Console.WriteLine($"The detected face is similar to the face with '{faceIds[similarFace.PersistedFaceId.Value]}' ID {similarFace.PersistedFaceId} ({similarFace.Confidence})");
+            }
+            #endregion
+
+            #region Snippet:DeleteLargeFaceList
+            listClient.Delete();
+            #endregion
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceListAsync.cs b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceListAsync.cs
new file mode 100644
index 000000000000..cb9f47de66fe
--- /dev/null
+++ b/sdk/face/Azure.AI.Vision.Face/tests/samples/Sample6_LargeFaceListAsync.cs
@@ -0,0 +1,63 @@
+// Copyright (c) Microsoft Corporation. All rights reserved.
+// Licensed under the MIT License.
+
+using System;
+using System.Collections.Generic;
+using System.Threading.Tasks;
+using Azure.AI.Vision.Face.Tests;
+using Azure.Core.TestFramework;
+using NUnit.Framework;
+
+namespace Azure.AI.Vision.Face.Samples
+{
+    public partial class Sample6_LargeFaceList : FaceSamplesBase
+    {
+        [Test]
+        public async Task FindSimilarFromLargeFaceListAsync()
+        {
+            var listId = "lfl_family1";
+            var listClient = CreateLargeFaceListClient(listId);
+
+            #region Snippet:CreateLargeFaceListAsync
+            await listClient.CreateAsync("Family 1", userData: "A sweet family", recognitionModel: FaceRecognitionModel.Recognition04);
+            #endregion
+
+            #region Snippet:AddFacesToLargeFaceListAsync
+            var faces = new[]
+            {
+                new { UserData = "Dad", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Dad1Image) },
+                new { UserData = "Mom", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Mom1Image) },
+                new { UserData = "Son", ImageUrl = new Uri(FaceTestConstant.UrlFamily1Son1Image) }
+            };
+            var faceIds = new Dictionary<Guid, string>();
+
+            foreach (var face in faces)
+            {
+                var addFaceResponse = await listClient.AddFaceAsync(face.ImageUrl, userData: face.UserData);
+                faceIds[addFaceResponse.Value.PersistedFaceId] = face.UserData;
+            }
+            #endregion
+
+            #region Snippet:TrainLargeFaceListAsync
+            var operation = await listClient.TrainAsync(WaitUntil.Completed);
+            await operation.WaitForCompletionResponseAsync();
+            #endregion
+
+            #region Snippet:FindSimilarFromLargeFaceListAsync
+            var faceClient = CreateClient();
+            var detectResponse = await faceClient.DetectAsync(new Uri(FaceTestConstant.UrlFamily1Dad3Image), FaceDetectionModel.Detection03, FaceRecognitionModel.Recognition04, true);
+            var faceId = detectResponse.Value[0].FaceId.Value;
+
+            var findSimilarResponse = await faceClient.FindSimilarFromLargeFaceListAsync(faceId, listId);
+            foreach (var similarFace in findSimilarResponse.Value)
+            {
+                Console.WriteLine($"The detected face is similar to the face with '{faceIds[similarFace.PersistedFaceId.Value]}' ID {similarFace.PersistedFaceId} ({similarFace.Confidence})");
+            }
+            #endregion
+
+            #region Snippet:DeleteLargeFaceListAsync
+            await listClient.DeleteAsync();
+            #endregion
+        }
+    }
+}
diff --git a/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml b/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml
index d170fcf1cfd5..aa57e9f30142 100644
--- a/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml
+++ b/sdk/face/Azure.AI.Vision.Face/tsp-location.yaml
@@ -1,5 +1,5 @@
 directory: specification/ai/Face
-commit: b9652b3e860e690c9ff53866071c591d59fed907
+commit: 4037b28c1014648f4cfa6f8c965e45f2476652e2
 repo: Azure/azure-rest-api-specs
 additionalDirectories: []