...

Source file src/github.com/Azure/azure-sdk-for-go/services/cognitiveservices/v1.0/face/face.go

Documentation: github.com/Azure/azure-sdk-for-go/services/cognitiveservices/v1.0/face

     1  package face
     2  
     3  // Copyright (c) Microsoft Corporation. All rights reserved.
     4  // Licensed under the MIT License. See License.txt in the project root for license information.
     5  //
     6  // Code generated by Microsoft (R) AutoRest Code Generator.
     7  // Changes may cause incorrect behavior and will be lost if the code is regenerated.
     8  
     9  import (
    10  	"context"
    11  	"github.com/Azure/go-autorest/autorest"
    12  	"github.com/Azure/go-autorest/autorest/azure"
    13  	"github.com/Azure/go-autorest/autorest/validation"
    14  	"github.com/Azure/go-autorest/tracing"
    15  	"io"
    16  	"net/http"
    17  )
    18  
// Client is an API for face detection, verification, and identification.
// It embeds BaseClient, so it inherits the configured Endpoint and the
// autorest send/retry pipeline used by every operation below.
type Client struct {
	BaseClient
}
    23  
    24  // NewClient creates an instance of the Client client.
    25  func NewClient(endpoint string) Client {
    26  	return Client{New(endpoint)}
    27  }
    28  
    29  // DetectWithStream detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and
    30  // attributes.<br />
    31  // * No image will be stored. Only the extracted face feature will be stored on server. The faceId is an identifier of
    32  // the face feature and will be used in [Face -
    33  // Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
    34  // Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and [Face - Find
    35  // Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). The stored face feature(s)
    36  // will expire and be deleted 24 hours after the original detection call.
    37  // * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender, headPose, smile,
    38  // facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur, exposure and noise. Some of the results
    39  // returned for specific attributes may not be highly accurate.
    40  // * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
    41  // * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.
    42  // * For optimal results when querying [Face -
    43  // Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
    44  // Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and [Face - Find
    45  // Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar) ('returnFaceId' is true),
    46  // please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).
    47  // * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with
    48  // dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
    49  // * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to
    50  // [How to specify a detection
    51  // model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
    52  // | Model | Recommended use-case(s) |
    53  // | ---------- | -------- |
    54  // | 'detection_01': | The default detection model for [Face -
    55  // Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). Recommend for near frontal
    56  // face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image
    57  // orientation, the faces in such cases may not be detected. |
    58  // | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on small, side and blurry
    59  // faces. |
    60  //
    61  // * Different 'recognitionModel' values are provided. If follow-up operations like Verify, Identify, Find Similar are
    62  // needed, please specify the recognition model with 'recognitionModel' parameter. The default value for
    63  // 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this
    64  // parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More
    65  // details, please refer to [How to specify a recognition
    66  // model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
    67  // | Model | Recommended use-case(s) |
    68  // | ---------- | -------- |
    69  // | 'recognition_01': | The default recognition model for [Face -
    70  // Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). All those faceIds created
    71  // before 2019 March are bonded with this recognition model. |
    72  // | 'recognition_02': | Recognition model released in 2019 March. |
    73  // | 'recognition_03': | Recognition model released in 2020 May. 'recognition_03' is recommended since its overall
    74  // accuracy is improved compared with 'recognition_01' and 'recognition_02'. |
    75  // Parameters:
    76  // imageParameter - an image stream.
    77  // returnFaceID - a value indicating whether the operation should return faceIds of detected faces.
    78  // returnFaceLandmarks - a value indicating whether the operation should return landmarks of the detected
    79  // faces.
    80  // returnFaceAttributes - analyze and return the one or more specified face attributes in the comma-separated
    81  // string like "returnFaceAttributes=age,gender". Supported face attributes include age, gender, headPose,
    82  // smile, facialHair, glasses and emotion. Note that each face attribute analysis has additional computational
    83  // and time cost.
    84  // recognitionModel - name of recognition model. Recognition model is used when the face features are extracted
    85  // and associated with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition model name can be
    86  // provided when performing Face - Detect or (Large)FaceList - Create or (Large)PersonGroup - Create. The
    87  // default value is 'recognition_01', if latest model needed, please explicitly specify the model you need.
    88  // returnRecognitionModel - a value indicating whether the operation should return 'recognitionModel' in
    89  // response.
    90  // detectionModel - name of detection model. Detection model is used to detect faces in the submitted image. A
    91  // detection model name can be provided when performing Face - Detect or (Large)FaceList - Add Face or
    92  // (Large)PersonGroup - Add Face. The default value is 'detection_01', if another model is needed, please
    93  // explicitly specify it.
    94  func (client Client) DetectWithStream(ctx context.Context, imageParameter io.ReadCloser, returnFaceID *bool, returnFaceLandmarks *bool, returnFaceAttributes []AttributeType, recognitionModel RecognitionModel, returnRecognitionModel *bool, detectionModel DetectionModel) (result ListDetectedFace, err error) {
    95  	if tracing.IsEnabled() {
    96  		ctx = tracing.StartSpan(ctx, fqdn+"/Client.DetectWithStream")
    97  		defer func() {
    98  			sc := -1
    99  			if result.Response.Response != nil {
   100  				sc = result.Response.Response.StatusCode
   101  			}
   102  			tracing.EndSpan(ctx, sc, err)
   103  		}()
   104  	}
   105  	req, err := client.DetectWithStreamPreparer(ctx, imageParameter, returnFaceID, returnFaceLandmarks, returnFaceAttributes, recognitionModel, returnRecognitionModel, detectionModel)
   106  	if err != nil {
   107  		err = autorest.NewErrorWithError(err, "face.Client", "DetectWithStream", nil, "Failure preparing request")
   108  		return
   109  	}
   110  
   111  	resp, err := client.DetectWithStreamSender(req)
   112  	if err != nil {
   113  		result.Response = autorest.Response{Response: resp}
   114  		err = autorest.NewErrorWithError(err, "face.Client", "DetectWithStream", resp, "Failure sending request")
   115  		return
   116  	}
   117  
   118  	result, err = client.DetectWithStreamResponder(resp)
   119  	if err != nil {
   120  		err = autorest.NewErrorWithError(err, "face.Client", "DetectWithStream", resp, "Failure responding to request")
   121  		return
   122  	}
   123  
   124  	return
   125  }
   126  
   127  // DetectWithStreamPreparer prepares the DetectWithStream request.
   128  func (client Client) DetectWithStreamPreparer(ctx context.Context, imageParameter io.ReadCloser, returnFaceID *bool, returnFaceLandmarks *bool, returnFaceAttributes []AttributeType, recognitionModel RecognitionModel, returnRecognitionModel *bool, detectionModel DetectionModel) (*http.Request, error) {
   129  	urlParameters := map[string]interface{}{
   130  		"Endpoint": client.Endpoint,
   131  	}
   132  
   133  	queryParameters := map[string]interface{}{}
   134  	if returnFaceID != nil {
   135  		queryParameters["returnFaceId"] = autorest.Encode("query", *returnFaceID)
   136  	} else {
   137  		queryParameters["returnFaceId"] = autorest.Encode("query", true)
   138  	}
   139  	if returnFaceLandmarks != nil {
   140  		queryParameters["returnFaceLandmarks"] = autorest.Encode("query", *returnFaceLandmarks)
   141  	} else {
   142  		queryParameters["returnFaceLandmarks"] = autorest.Encode("query", false)
   143  	}
   144  	if returnFaceAttributes != nil && len(returnFaceAttributes) > 0 {
   145  		queryParameters["returnFaceAttributes"] = autorest.Encode("query", returnFaceAttributes, ",")
   146  	}
   147  	if len(string(recognitionModel)) > 0 {
   148  		queryParameters["recognitionModel"] = autorest.Encode("query", recognitionModel)
   149  	} else {
   150  		queryParameters["recognitionModel"] = autorest.Encode("query", "recognition_01")
   151  	}
   152  	if returnRecognitionModel != nil {
   153  		queryParameters["returnRecognitionModel"] = autorest.Encode("query", *returnRecognitionModel)
   154  	} else {
   155  		queryParameters["returnRecognitionModel"] = autorest.Encode("query", false)
   156  	}
   157  	if len(string(detectionModel)) > 0 {
   158  		queryParameters["detectionModel"] = autorest.Encode("query", detectionModel)
   159  	} else {
   160  		queryParameters["detectionModel"] = autorest.Encode("query", "detection_01")
   161  	}
   162  
   163  	preparer := autorest.CreatePreparer(
   164  		autorest.AsContentType("application/octet-stream"),
   165  		autorest.AsPost(),
   166  		autorest.WithCustomBaseURL("{Endpoint}/face/v1.0", urlParameters),
   167  		autorest.WithPath("/detect"),
   168  		autorest.WithFile(imageParameter),
   169  		autorest.WithQueryParameters(queryParameters))
   170  	return preparer.Prepare((&http.Request{}).WithContext(ctx))
   171  }
   172  
   173  // DetectWithStreamSender sends the DetectWithStream request. The method will close the
   174  // http.Response Body if it receives an error.
   175  func (client Client) DetectWithStreamSender(req *http.Request) (*http.Response, error) {
   176  	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
   177  }
   178  
   179  // DetectWithStreamResponder handles the response to the DetectWithStream request. The method always
   180  // closes the http.Response Body.
   181  func (client Client) DetectWithStreamResponder(resp *http.Response) (result ListDetectedFace, err error) {
   182  	err = autorest.Respond(
   183  		resp,
   184  		azure.WithErrorUnlessStatusCode(http.StatusOK),
   185  		autorest.ByUnmarshallingJSON(&result.Value),
   186  		autorest.ByClosing())
   187  	result.Response = autorest.Response{Response: resp}
   188  	return
   189  }
   190  
   191  // DetectWithURL detect human faces in an image, return face rectangles, and optionally with faceIds, landmarks, and
   192  // attributes.<br />
   193  // * No image will be stored. Only the extracted face feature will be stored on server. The faceId is an identifier of
   194  // the face feature and will be used in [Face -
   195  // Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
   196  // Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and [Face - Find
   197  // Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar). The stored face feature(s)
   198  // will expire and be deleted 24 hours after the original detection call.
   199  // * Optional parameters include faceId, landmarks, and attributes. Attributes include age, gender, headPose, smile,
   200  // facialHair, glasses, emotion, hair, makeup, occlusion, accessories, blur, exposure and noise. Some of the results
   201  // returned for specific attributes may not be highly accurate.
   202  // * JPEG, PNG, GIF (the first frame), and BMP format are supported. The allowed image file size is from 1KB to 6MB.
   203  // * Up to 100 faces can be returned for an image. Faces are ranked by face rectangle size from large to small.
   204  // * For optimal results when querying [Face -
   205  // Identify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/identify), [Face -
   206  // Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface), and [Face - Find
   207  // Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar) ('returnFaceId' is true),
   208  // please use faces that are: frontal, clear, and with a minimum size of 200x200 pixels (100 pixels between eyes).
   209  // * The minimum detectable face size is 36x36 pixels in an image no larger than 1920x1080 pixels. Images with
   210  // dimensions higher than 1920x1080 pixels will need a proportionally larger minimum face size.
   211  // * Different 'detectionModel' values can be provided. To use and compare different detection models, please refer to
   212  // [How to specify a detection
   213  // model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-detection-model)
   214  // | Model | Recommended use-case(s) |
   215  // | ---------- | -------- |
   216  // | 'detection_01': | The default detection model for [Face -
   217  // Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). Recommend for near frontal
   218  // face detection. For scenarios with exceptionally large angle (head-pose) faces, occluded faces or wrong image
   219  // orientation, the faces in such cases may not be detected. |
   220  // | 'detection_02': | Detection model released in 2019 May with improved accuracy especially on small, side and blurry
   221  // faces. |
   222  //
   223  // * Different 'recognitionModel' values are provided. If follow-up operations like Verify, Identify, Find Similar are
   224  // needed, please specify the recognition model with 'recognitionModel' parameter. The default value for
   225  // 'recognitionModel' is 'recognition_01', if latest model needed, please explicitly specify the model you need in this
   226  // parameter. Once specified, the detected faceIds will be associated with the specified recognition model. More
   227  // details, please refer to [How to specify a recognition
   228  // model](https://docs.microsoft.com/azure/cognitive-services/face/face-api-how-to-topics/specify-recognition-model)
   229  // | Model | Recommended use-case(s) |
   230  // | ---------- | -------- |
   231  // | 'recognition_01': | The default recognition model for [Face -
   232  // Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl). All those faceIds created
   233  // before 2019 March are bonded with this recognition model. |
   234  // | 'recognition_02': | Recognition model released in 2019 March. |
   235  // | 'recognition_03': | Recognition model released in 2020 May. 'recognition_03' is recommended since its overall
   236  // accuracy is improved compared with 'recognition_01' and 'recognition_02'. |
   237  // Parameters:
   238  // imageURL - a JSON document with a URL pointing to the image that is to be analyzed.
   239  // returnFaceID - a value indicating whether the operation should return faceIds of detected faces.
   240  // returnFaceLandmarks - a value indicating whether the operation should return landmarks of the detected
   241  // faces.
   242  // returnFaceAttributes - analyze and return the one or more specified face attributes in the comma-separated
   243  // string like "returnFaceAttributes=age,gender". Supported face attributes include age, gender, headPose,
   244  // smile, facialHair, glasses and emotion. Note that each face attribute analysis has additional computational
   245  // and time cost.
   246  // recognitionModel - name of recognition model. Recognition model is used when the face features are extracted
   247  // and associated with detected faceIds, (Large)FaceList or (Large)PersonGroup. A recognition model name can be
   248  // provided when performing Face - Detect or (Large)FaceList - Create or (Large)PersonGroup - Create. The
   249  // default value is 'recognition_01', if latest model needed, please explicitly specify the model you need.
   250  // returnRecognitionModel - a value indicating whether the operation should return 'recognitionModel' in
   251  // response.
   252  // detectionModel - name of detection model. Detection model is used to detect faces in the submitted image. A
   253  // detection model name can be provided when performing Face - Detect or (Large)FaceList - Add Face or
   254  // (Large)PersonGroup - Add Face. The default value is 'detection_01', if another model is needed, please
   255  // explicitly specify it.
   256  func (client Client) DetectWithURL(ctx context.Context, imageURL ImageURL, returnFaceID *bool, returnFaceLandmarks *bool, returnFaceAttributes []AttributeType, recognitionModel RecognitionModel, returnRecognitionModel *bool, detectionModel DetectionModel) (result ListDetectedFace, err error) {
   257  	if tracing.IsEnabled() {
   258  		ctx = tracing.StartSpan(ctx, fqdn+"/Client.DetectWithURL")
   259  		defer func() {
   260  			sc := -1
   261  			if result.Response.Response != nil {
   262  				sc = result.Response.Response.StatusCode
   263  			}
   264  			tracing.EndSpan(ctx, sc, err)
   265  		}()
   266  	}
   267  	if err := validation.Validate([]validation.Validation{
   268  		{TargetValue: imageURL,
   269  			Constraints: []validation.Constraint{{Target: "imageURL.URL", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
   270  		return result, validation.NewError("face.Client", "DetectWithURL", err.Error())
   271  	}
   272  
   273  	req, err := client.DetectWithURLPreparer(ctx, imageURL, returnFaceID, returnFaceLandmarks, returnFaceAttributes, recognitionModel, returnRecognitionModel, detectionModel)
   274  	if err != nil {
   275  		err = autorest.NewErrorWithError(err, "face.Client", "DetectWithURL", nil, "Failure preparing request")
   276  		return
   277  	}
   278  
   279  	resp, err := client.DetectWithURLSender(req)
   280  	if err != nil {
   281  		result.Response = autorest.Response{Response: resp}
   282  		err = autorest.NewErrorWithError(err, "face.Client", "DetectWithURL", resp, "Failure sending request")
   283  		return
   284  	}
   285  
   286  	result, err = client.DetectWithURLResponder(resp)
   287  	if err != nil {
   288  		err = autorest.NewErrorWithError(err, "face.Client", "DetectWithURL", resp, "Failure responding to request")
   289  		return
   290  	}
   291  
   292  	return
   293  }
   294  
   295  // DetectWithURLPreparer prepares the DetectWithURL request.
   296  func (client Client) DetectWithURLPreparer(ctx context.Context, imageURL ImageURL, returnFaceID *bool, returnFaceLandmarks *bool, returnFaceAttributes []AttributeType, recognitionModel RecognitionModel, returnRecognitionModel *bool, detectionModel DetectionModel) (*http.Request, error) {
   297  	urlParameters := map[string]interface{}{
   298  		"Endpoint": client.Endpoint,
   299  	}
   300  
   301  	queryParameters := map[string]interface{}{}
   302  	if returnFaceID != nil {
   303  		queryParameters["returnFaceId"] = autorest.Encode("query", *returnFaceID)
   304  	} else {
   305  		queryParameters["returnFaceId"] = autorest.Encode("query", true)
   306  	}
   307  	if returnFaceLandmarks != nil {
   308  		queryParameters["returnFaceLandmarks"] = autorest.Encode("query", *returnFaceLandmarks)
   309  	} else {
   310  		queryParameters["returnFaceLandmarks"] = autorest.Encode("query", false)
   311  	}
   312  	if returnFaceAttributes != nil && len(returnFaceAttributes) > 0 {
   313  		queryParameters["returnFaceAttributes"] = autorest.Encode("query", returnFaceAttributes, ",")
   314  	}
   315  	if len(string(recognitionModel)) > 0 {
   316  		queryParameters["recognitionModel"] = autorest.Encode("query", recognitionModel)
   317  	} else {
   318  		queryParameters["recognitionModel"] = autorest.Encode("query", "recognition_01")
   319  	}
   320  	if returnRecognitionModel != nil {
   321  		queryParameters["returnRecognitionModel"] = autorest.Encode("query", *returnRecognitionModel)
   322  	} else {
   323  		queryParameters["returnRecognitionModel"] = autorest.Encode("query", false)
   324  	}
   325  	if len(string(detectionModel)) > 0 {
   326  		queryParameters["detectionModel"] = autorest.Encode("query", detectionModel)
   327  	} else {
   328  		queryParameters["detectionModel"] = autorest.Encode("query", "detection_01")
   329  	}
   330  
   331  	preparer := autorest.CreatePreparer(
   332  		autorest.AsContentType("application/json; charset=utf-8"),
   333  		autorest.AsPost(),
   334  		autorest.WithCustomBaseURL("{Endpoint}/face/v1.0", urlParameters),
   335  		autorest.WithPath("/detect"),
   336  		autorest.WithJSON(imageURL),
   337  		autorest.WithQueryParameters(queryParameters))
   338  	return preparer.Prepare((&http.Request{}).WithContext(ctx))
   339  }
   340  
   341  // DetectWithURLSender sends the DetectWithURL request. The method will close the
   342  // http.Response Body if it receives an error.
   343  func (client Client) DetectWithURLSender(req *http.Request) (*http.Response, error) {
   344  	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
   345  }
   346  
   347  // DetectWithURLResponder handles the response to the DetectWithURL request. The method always
   348  // closes the http.Response Body.
   349  func (client Client) DetectWithURLResponder(resp *http.Response) (result ListDetectedFace, err error) {
   350  	err = autorest.Respond(
   351  		resp,
   352  		azure.WithErrorUnlessStatusCode(http.StatusOK),
   353  		autorest.ByUnmarshallingJSON(&result.Value),
   354  		autorest.ByClosing())
   355  	result.Response = autorest.Response{Response: resp}
   356  	return
   357  }
   358  
   359  // FindSimilar given query face's faceId, to search the similar-looking faces from a faceId array, a face list or a
   360  // large face list. faceId array contains the faces created by [Face -
   361  // Detect](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/detectwithurl), which will expire 24 hours
   362  // after creation. A "faceListId" is created by [FaceList -
   363  // Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/facelist/create) containing persistedFaceIds that
   364  // will not expire. And a "largeFaceListId" is created by [LargeFaceList -
   365  // Create](https://docs.microsoft.com/rest/api/cognitiveservices/face/largefacelist/create) containing persistedFaceIds
   366  // that will also not expire. Depending on the input the returned similar faces list contains faceIds or
   367  // persistedFaceIds ranked by similarity.
   368  // <br/>Find similar has two working modes, "matchPerson" and "matchFace". "matchPerson" is the default mode that it
   369  // tries to find faces of the same person as possible by using internal same-person thresholds. It is useful to find a
   370  // known person's other photos. Note that an empty list will be returned if no faces pass the internal thresholds.
   371  // "matchFace" mode ignores same-person thresholds and returns ranked similar faces anyway, even the similarity is low.
   372  // It can be used in the cases like searching celebrity-looking faces.
   373  // <br/>The 'recognitionModel' associated with the query face's faceId should be the same as the 'recognitionModel'
   374  // used by the target faceId array, face list or large face list.
   375  // Parameters:
   376  // body - request body for Find Similar.
   377  func (client Client) FindSimilar(ctx context.Context, body FindSimilarRequest) (result ListSimilarFace, err error) {
   378  	if tracing.IsEnabled() {
   379  		ctx = tracing.StartSpan(ctx, fqdn+"/Client.FindSimilar")
   380  		defer func() {
   381  			sc := -1
   382  			if result.Response.Response != nil {
   383  				sc = result.Response.Response.StatusCode
   384  			}
   385  			tracing.EndSpan(ctx, sc, err)
   386  		}()
   387  	}
   388  	if err := validation.Validate([]validation.Validation{
   389  		{TargetValue: body,
   390  			Constraints: []validation.Constraint{{Target: "body.FaceID", Name: validation.Null, Rule: true, Chain: nil},
   391  				{Target: "body.FaceListID", Name: validation.Null, Rule: false,
   392  					Chain: []validation.Constraint{{Target: "body.FaceListID", Name: validation.MaxLength, Rule: 64, Chain: nil},
   393  						{Target: "body.FaceListID", Name: validation.Pattern, Rule: `^[a-z0-9-_]+$`, Chain: nil},
   394  					}},
   395  				{Target: "body.LargeFaceListID", Name: validation.Null, Rule: false,
   396  					Chain: []validation.Constraint{{Target: "body.LargeFaceListID", Name: validation.MaxLength, Rule: 64, Chain: nil},
   397  						{Target: "body.LargeFaceListID", Name: validation.Pattern, Rule: `^[a-z0-9-_]+$`, Chain: nil},
   398  					}},
   399  				{Target: "body.FaceIds", Name: validation.Null, Rule: false,
   400  					Chain: []validation.Constraint{{Target: "body.FaceIds", Name: validation.MaxItems, Rule: 1000, Chain: nil}}},
   401  				{Target: "body.MaxNumOfCandidatesReturned", Name: validation.Null, Rule: false,
   402  					Chain: []validation.Constraint{{Target: "body.MaxNumOfCandidatesReturned", Name: validation.InclusiveMaximum, Rule: int64(1000), Chain: nil},
   403  						{Target: "body.MaxNumOfCandidatesReturned", Name: validation.InclusiveMinimum, Rule: int64(1), Chain: nil},
   404  					}}}}}); err != nil {
   405  		return result, validation.NewError("face.Client", "FindSimilar", err.Error())
   406  	}
   407  
   408  	req, err := client.FindSimilarPreparer(ctx, body)
   409  	if err != nil {
   410  		err = autorest.NewErrorWithError(err, "face.Client", "FindSimilar", nil, "Failure preparing request")
   411  		return
   412  	}
   413  
   414  	resp, err := client.FindSimilarSender(req)
   415  	if err != nil {
   416  		result.Response = autorest.Response{Response: resp}
   417  		err = autorest.NewErrorWithError(err, "face.Client", "FindSimilar", resp, "Failure sending request")
   418  		return
   419  	}
   420  
   421  	result, err = client.FindSimilarResponder(resp)
   422  	if err != nil {
   423  		err = autorest.NewErrorWithError(err, "face.Client", "FindSimilar", resp, "Failure responding to request")
   424  		return
   425  	}
   426  
   427  	return
   428  }
   429  
   430  // FindSimilarPreparer prepares the FindSimilar request.
   431  func (client Client) FindSimilarPreparer(ctx context.Context, body FindSimilarRequest) (*http.Request, error) {
   432  	urlParameters := map[string]interface{}{
   433  		"Endpoint": client.Endpoint,
   434  	}
   435  
   436  	preparer := autorest.CreatePreparer(
   437  		autorest.AsContentType("application/json; charset=utf-8"),
   438  		autorest.AsPost(),
   439  		autorest.WithCustomBaseURL("{Endpoint}/face/v1.0", urlParameters),
   440  		autorest.WithPath("/findsimilars"),
   441  		autorest.WithJSON(body))
   442  	return preparer.Prepare((&http.Request{}).WithContext(ctx))
   443  }
   444  
   445  // FindSimilarSender sends the FindSimilar request. The method will close the
   446  // http.Response Body if it receives an error.
   447  func (client Client) FindSimilarSender(req *http.Request) (*http.Response, error) {
   448  	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
   449  }
   450  
   451  // FindSimilarResponder handles the response to the FindSimilar request. The method always
   452  // closes the http.Response Body.
   453  func (client Client) FindSimilarResponder(resp *http.Response) (result ListSimilarFace, err error) {
   454  	err = autorest.Respond(
   455  		resp,
   456  		azure.WithErrorUnlessStatusCode(http.StatusOK),
   457  		autorest.ByUnmarshallingJSON(&result.Value),
   458  		autorest.ByClosing())
   459  	result.Response = autorest.Response{Response: resp}
   460  	return
   461  }
   462  
   463  // Group divide candidate faces into groups based on face similarity.<br />
   464  // * The output is one or more disjointed face groups and a messyGroup. A face group contains faces that have similar
   465  // looking, often of the same person. Face groups are ranked by group size, i.e. number of faces. Notice that faces
   466  // belonging to a same person might be split into several groups in the result.
   467  // * MessyGroup is a special face group containing faces that cannot find any similar counterpart face from original
   468  // faces. The messyGroup will not appear in the result if all faces found their counterparts.
   469  // * Group API needs at least 2 candidate faces and 1000 at most. We suggest to try [Face -
   470  // Verify](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/verifyfacetoface) when you only have 2
   471  // candidate faces.
   472  // * The 'recognitionModel' associated with the query faces' faceIds should be the same.
   473  // Parameters:
   474  // body - request body for grouping.
   475  func (client Client) Group(ctx context.Context, body GroupRequest) (result GroupResult, err error) {
   476  	if tracing.IsEnabled() {
   477  		ctx = tracing.StartSpan(ctx, fqdn+"/Client.Group")
   478  		defer func() {
   479  			sc := -1
   480  			if result.Response.Response != nil {
   481  				sc = result.Response.Response.StatusCode
   482  			}
   483  			tracing.EndSpan(ctx, sc, err)
   484  		}()
   485  	}
   486  	if err := validation.Validate([]validation.Validation{
   487  		{TargetValue: body,
   488  			Constraints: []validation.Constraint{{Target: "body.FaceIds", Name: validation.Null, Rule: true,
   489  				Chain: []validation.Constraint{{Target: "body.FaceIds", Name: validation.MaxItems, Rule: 1000, Chain: nil}}}}}}); err != nil {
   490  		return result, validation.NewError("face.Client", "Group", err.Error())
   491  	}
   492  
   493  	req, err := client.GroupPreparer(ctx, body)
   494  	if err != nil {
   495  		err = autorest.NewErrorWithError(err, "face.Client", "Group", nil, "Failure preparing request")
   496  		return
   497  	}
   498  
   499  	resp, err := client.GroupSender(req)
   500  	if err != nil {
   501  		result.Response = autorest.Response{Response: resp}
   502  		err = autorest.NewErrorWithError(err, "face.Client", "Group", resp, "Failure sending request")
   503  		return
   504  	}
   505  
   506  	result, err = client.GroupResponder(resp)
   507  	if err != nil {
   508  		err = autorest.NewErrorWithError(err, "face.Client", "Group", resp, "Failure responding to request")
   509  		return
   510  	}
   511  
   512  	return
   513  }
   514  
   515  // GroupPreparer prepares the Group request.
   516  func (client Client) GroupPreparer(ctx context.Context, body GroupRequest) (*http.Request, error) {
   517  	urlParameters := map[string]interface{}{
   518  		"Endpoint": client.Endpoint,
   519  	}
   520  
   521  	preparer := autorest.CreatePreparer(
   522  		autorest.AsContentType("application/json; charset=utf-8"),
   523  		autorest.AsPost(),
   524  		autorest.WithCustomBaseURL("{Endpoint}/face/v1.0", urlParameters),
   525  		autorest.WithPath("/group"),
   526  		autorest.WithJSON(body))
   527  	return preparer.Prepare((&http.Request{}).WithContext(ctx))
   528  }
   529  
   530  // GroupSender sends the Group request. The method will close the
   531  // http.Response Body if it receives an error.
   532  func (client Client) GroupSender(req *http.Request) (*http.Response, error) {
   533  	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
   534  }
   535  
   536  // GroupResponder handles the response to the Group request. The method always
   537  // closes the http.Response Body.
   538  func (client Client) GroupResponder(resp *http.Response) (result GroupResult, err error) {
   539  	err = autorest.Respond(
   540  		resp,
   541  		azure.WithErrorUnlessStatusCode(http.StatusOK),
   542  		autorest.ByUnmarshallingJSON(&result),
   543  		autorest.ByClosing())
   544  	result.Response = autorest.Response{Response: resp}
   545  	return
   546  }
   547  
   548  // Identify 1-to-many identification to find the closest matches of the specific query person face from a person group
   549  // or large person group.
   550  // <br/> For each face in the faceIds array, Face Identify will compute similarities between the query face and all the
   551  // faces in the person group (given by personGroupId) or large person group (given by largePersonGroupId), and return
   552  // candidate person(s) for that face ranked by similarity confidence. The person group/large person group should be
   553  // trained to make it ready for identification. See more in [PersonGroup -
   554  // Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/persongroup/train) and [LargePersonGroup -
   555  // Train](https://docs.microsoft.com/rest/api/cognitiveservices/face/largepersongroup/train).
   556  // <br/>
   557  //
   558  // Remarks:<br />
   559  // * The algorithm allows more than one face to be identified independently at the same request, but no more than 10
   560  // faces.
   561  // * Each person in the person group/large person group could have more than one face, but no more than 248 faces.
   562  // * Higher face image quality means better identification precision. Please consider high-quality faces: frontal,
   563  // clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
   564  // * Number of candidates returned is restricted by maxNumOfCandidatesReturned and confidenceThreshold. If no person is
   565  // identified, the returned candidates will be an empty array.
   566  // * Try [Face - Find Similar](https://docs.microsoft.com/rest/api/cognitiveservices/face/face/findsimilar) when you
   567  // need to find similar faces from a face list/large face list instead of a person group/large person group.
   568  // * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used
   569  // by the target person group or large person group.
   570  // Parameters:
   571  // body - request body for identify operation.
   572  func (client Client) Identify(ctx context.Context, body IdentifyRequest) (result ListIdentifyResult, err error) {
   573  	if tracing.IsEnabled() {
   574  		ctx = tracing.StartSpan(ctx, fqdn+"/Client.Identify")
   575  		defer func() {
   576  			sc := -1
   577  			if result.Response.Response != nil {
   578  				sc = result.Response.Response.StatusCode
   579  			}
   580  			tracing.EndSpan(ctx, sc, err)
   581  		}()
   582  	}
   583  	if err := validation.Validate([]validation.Validation{
   584  		{TargetValue: body,
   585  			Constraints: []validation.Constraint{{Target: "body.FaceIds", Name: validation.Null, Rule: true,
   586  				Chain: []validation.Constraint{{Target: "body.FaceIds", Name: validation.MaxItems, Rule: 10, Chain: nil}}},
   587  				{Target: "body.PersonGroupID", Name: validation.Null, Rule: false,
   588  					Chain: []validation.Constraint{{Target: "body.PersonGroupID", Name: validation.MaxLength, Rule: 64, Chain: nil},
   589  						{Target: "body.PersonGroupID", Name: validation.Pattern, Rule: `^[a-z0-9-_]+$`, Chain: nil},
   590  					}},
   591  				{Target: "body.LargePersonGroupID", Name: validation.Null, Rule: false,
   592  					Chain: []validation.Constraint{{Target: "body.LargePersonGroupID", Name: validation.MaxLength, Rule: 64, Chain: nil},
   593  						{Target: "body.LargePersonGroupID", Name: validation.Pattern, Rule: `^[a-z0-9-_]+$`, Chain: nil},
   594  					}},
   595  				{Target: "body.MaxNumOfCandidatesReturned", Name: validation.Null, Rule: false,
   596  					Chain: []validation.Constraint{{Target: "body.MaxNumOfCandidatesReturned", Name: validation.InclusiveMaximum, Rule: int64(5), Chain: nil},
   597  						{Target: "body.MaxNumOfCandidatesReturned", Name: validation.InclusiveMinimum, Rule: int64(1), Chain: nil},
   598  					}}}}}); err != nil {
   599  		return result, validation.NewError("face.Client", "Identify", err.Error())
   600  	}
   601  
   602  	req, err := client.IdentifyPreparer(ctx, body)
   603  	if err != nil {
   604  		err = autorest.NewErrorWithError(err, "face.Client", "Identify", nil, "Failure preparing request")
   605  		return
   606  	}
   607  
   608  	resp, err := client.IdentifySender(req)
   609  	if err != nil {
   610  		result.Response = autorest.Response{Response: resp}
   611  		err = autorest.NewErrorWithError(err, "face.Client", "Identify", resp, "Failure sending request")
   612  		return
   613  	}
   614  
   615  	result, err = client.IdentifyResponder(resp)
   616  	if err != nil {
   617  		err = autorest.NewErrorWithError(err, "face.Client", "Identify", resp, "Failure responding to request")
   618  		return
   619  	}
   620  
   621  	return
   622  }
   623  
   624  // IdentifyPreparer prepares the Identify request.
   625  func (client Client) IdentifyPreparer(ctx context.Context, body IdentifyRequest) (*http.Request, error) {
   626  	urlParameters := map[string]interface{}{
   627  		"Endpoint": client.Endpoint,
   628  	}
   629  
   630  	preparer := autorest.CreatePreparer(
   631  		autorest.AsContentType("application/json; charset=utf-8"),
   632  		autorest.AsPost(),
   633  		autorest.WithCustomBaseURL("{Endpoint}/face/v1.0", urlParameters),
   634  		autorest.WithPath("/identify"),
   635  		autorest.WithJSON(body))
   636  	return preparer.Prepare((&http.Request{}).WithContext(ctx))
   637  }
   638  
   639  // IdentifySender sends the Identify request. The method will close the
   640  // http.Response Body if it receives an error.
   641  func (client Client) IdentifySender(req *http.Request) (*http.Response, error) {
   642  	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
   643  }
   644  
   645  // IdentifyResponder handles the response to the Identify request. The method always
   646  // closes the http.Response Body.
   647  func (client Client) IdentifyResponder(resp *http.Response) (result ListIdentifyResult, err error) {
   648  	err = autorest.Respond(
   649  		resp,
   650  		azure.WithErrorUnlessStatusCode(http.StatusOK),
   651  		autorest.ByUnmarshallingJSON(&result.Value),
   652  		autorest.ByClosing())
   653  	result.Response = autorest.Response{Response: resp}
   654  	return
   655  }
   656  
   657  // VerifyFaceToFace verify whether two faces belong to a same person or whether one face belongs to a person.
   658  // <br/>
   659  // Remarks:<br />
   660  // * Higher face image quality means better identification precision. Please consider high-quality faces: frontal,
   661  // clear, and face size is 200x200 pixels (100 pixels between eyes) or bigger.
   662  // * For the scenarios that are sensitive to accuracy please make your own judgment.
   663  // * The 'recognitionModel' associated with the query faces' faceIds should be the same as the 'recognitionModel' used
   664  // by the target face, person group or large person group.
   665  // Parameters:
   666  // body - request body for face to face verification.
   667  func (client Client) VerifyFaceToFace(ctx context.Context, body VerifyFaceToFaceRequest) (result VerifyResult, err error) {
   668  	if tracing.IsEnabled() {
   669  		ctx = tracing.StartSpan(ctx, fqdn+"/Client.VerifyFaceToFace")
   670  		defer func() {
   671  			sc := -1
   672  			if result.Response.Response != nil {
   673  				sc = result.Response.Response.StatusCode
   674  			}
   675  			tracing.EndSpan(ctx, sc, err)
   676  		}()
   677  	}
   678  	if err := validation.Validate([]validation.Validation{
   679  		{TargetValue: body,
   680  			Constraints: []validation.Constraint{{Target: "body.FaceID1", Name: validation.Null, Rule: true, Chain: nil},
   681  				{Target: "body.FaceID2", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
   682  		return result, validation.NewError("face.Client", "VerifyFaceToFace", err.Error())
   683  	}
   684  
   685  	req, err := client.VerifyFaceToFacePreparer(ctx, body)
   686  	if err != nil {
   687  		err = autorest.NewErrorWithError(err, "face.Client", "VerifyFaceToFace", nil, "Failure preparing request")
   688  		return
   689  	}
   690  
   691  	resp, err := client.VerifyFaceToFaceSender(req)
   692  	if err != nil {
   693  		result.Response = autorest.Response{Response: resp}
   694  		err = autorest.NewErrorWithError(err, "face.Client", "VerifyFaceToFace", resp, "Failure sending request")
   695  		return
   696  	}
   697  
   698  	result, err = client.VerifyFaceToFaceResponder(resp)
   699  	if err != nil {
   700  		err = autorest.NewErrorWithError(err, "face.Client", "VerifyFaceToFace", resp, "Failure responding to request")
   701  		return
   702  	}
   703  
   704  	return
   705  }
   706  
   707  // VerifyFaceToFacePreparer prepares the VerifyFaceToFace request.
   708  func (client Client) VerifyFaceToFacePreparer(ctx context.Context, body VerifyFaceToFaceRequest) (*http.Request, error) {
   709  	urlParameters := map[string]interface{}{
   710  		"Endpoint": client.Endpoint,
   711  	}
   712  
   713  	preparer := autorest.CreatePreparer(
   714  		autorest.AsContentType("application/json; charset=utf-8"),
   715  		autorest.AsPost(),
   716  		autorest.WithCustomBaseURL("{Endpoint}/face/v1.0", urlParameters),
   717  		autorest.WithPath("/verify"),
   718  		autorest.WithJSON(body))
   719  	return preparer.Prepare((&http.Request{}).WithContext(ctx))
   720  }
   721  
   722  // VerifyFaceToFaceSender sends the VerifyFaceToFace request. The method will close the
   723  // http.Response Body if it receives an error.
   724  func (client Client) VerifyFaceToFaceSender(req *http.Request) (*http.Response, error) {
   725  	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
   726  }
   727  
   728  // VerifyFaceToFaceResponder handles the response to the VerifyFaceToFace request. The method always
   729  // closes the http.Response Body.
   730  func (client Client) VerifyFaceToFaceResponder(resp *http.Response) (result VerifyResult, err error) {
   731  	err = autorest.Respond(
   732  		resp,
   733  		azure.WithErrorUnlessStatusCode(http.StatusOK),
   734  		autorest.ByUnmarshallingJSON(&result),
   735  		autorest.ByClosing())
   736  	result.Response = autorest.Response{Response: resp}
   737  	return
   738  }
   739  
   740  // VerifyFaceToPerson verify whether two faces belong to a same person. Compares a face Id with a Person Id
   741  // Parameters:
   742  // body - request body for face to person verification.
   743  func (client Client) VerifyFaceToPerson(ctx context.Context, body VerifyFaceToPersonRequest) (result VerifyResult, err error) {
   744  	if tracing.IsEnabled() {
   745  		ctx = tracing.StartSpan(ctx, fqdn+"/Client.VerifyFaceToPerson")
   746  		defer func() {
   747  			sc := -1
   748  			if result.Response.Response != nil {
   749  				sc = result.Response.Response.StatusCode
   750  			}
   751  			tracing.EndSpan(ctx, sc, err)
   752  		}()
   753  	}
   754  	if err := validation.Validate([]validation.Validation{
   755  		{TargetValue: body,
   756  			Constraints: []validation.Constraint{{Target: "body.FaceID", Name: validation.Null, Rule: true, Chain: nil},
   757  				{Target: "body.PersonGroupID", Name: validation.Null, Rule: false,
   758  					Chain: []validation.Constraint{{Target: "body.PersonGroupID", Name: validation.MaxLength, Rule: 64, Chain: nil},
   759  						{Target: "body.PersonGroupID", Name: validation.Pattern, Rule: `^[a-z0-9-_]+$`, Chain: nil},
   760  					}},
   761  				{Target: "body.LargePersonGroupID", Name: validation.Null, Rule: false,
   762  					Chain: []validation.Constraint{{Target: "body.LargePersonGroupID", Name: validation.MaxLength, Rule: 64, Chain: nil},
   763  						{Target: "body.LargePersonGroupID", Name: validation.Pattern, Rule: `^[a-z0-9-_]+$`, Chain: nil},
   764  					}},
   765  				{Target: "body.PersonID", Name: validation.Null, Rule: true, Chain: nil}}}}); err != nil {
   766  		return result, validation.NewError("face.Client", "VerifyFaceToPerson", err.Error())
   767  	}
   768  
   769  	req, err := client.VerifyFaceToPersonPreparer(ctx, body)
   770  	if err != nil {
   771  		err = autorest.NewErrorWithError(err, "face.Client", "VerifyFaceToPerson", nil, "Failure preparing request")
   772  		return
   773  	}
   774  
   775  	resp, err := client.VerifyFaceToPersonSender(req)
   776  	if err != nil {
   777  		result.Response = autorest.Response{Response: resp}
   778  		err = autorest.NewErrorWithError(err, "face.Client", "VerifyFaceToPerson", resp, "Failure sending request")
   779  		return
   780  	}
   781  
   782  	result, err = client.VerifyFaceToPersonResponder(resp)
   783  	if err != nil {
   784  		err = autorest.NewErrorWithError(err, "face.Client", "VerifyFaceToPerson", resp, "Failure responding to request")
   785  		return
   786  	}
   787  
   788  	return
   789  }
   790  
   791  // VerifyFaceToPersonPreparer prepares the VerifyFaceToPerson request.
   792  func (client Client) VerifyFaceToPersonPreparer(ctx context.Context, body VerifyFaceToPersonRequest) (*http.Request, error) {
   793  	urlParameters := map[string]interface{}{
   794  		"Endpoint": client.Endpoint,
   795  	}
   796  
   797  	preparer := autorest.CreatePreparer(
   798  		autorest.AsContentType("application/json; charset=utf-8"),
   799  		autorest.AsPost(),
   800  		autorest.WithCustomBaseURL("{Endpoint}/face/v1.0", urlParameters),
   801  		autorest.WithPath("/verify"),
   802  		autorest.WithJSON(body))
   803  	return preparer.Prepare((&http.Request{}).WithContext(ctx))
   804  }
   805  
   806  // VerifyFaceToPersonSender sends the VerifyFaceToPerson request. The method will close the
   807  // http.Response Body if it receives an error.
   808  func (client Client) VerifyFaceToPersonSender(req *http.Request) (*http.Response, error) {
   809  	return client.Send(req, autorest.DoRetryForStatusCodes(client.RetryAttempts, client.RetryDuration, autorest.StatusCodesForRetry...))
   810  }
   811  
   812  // VerifyFaceToPersonResponder handles the response to the VerifyFaceToPerson request. The method always
   813  // closes the http.Response Body.
   814  func (client Client) VerifyFaceToPersonResponder(resp *http.Response) (result VerifyResult, err error) {
   815  	err = autorest.Respond(
   816  		resp,
   817  		azure.WithErrorUnlessStatusCode(http.StatusOK),
   818  		autorest.ByUnmarshallingJSON(&result),
   819  		autorest.ByClosing())
   820  	result.Response = autorest.Response{Response: resp}
   821  	return
   822  }
   823  

View as plain text