From 66866a46a8875f3b31dc8de14352539e3678392d Mon Sep 17 00:00:00 2001
From: giakas
Date: Wed, 9 Dec 2020 14:26:15 -0800
Subject: [PATCH] =?UTF-8?q?LVA:=20move=20LVA=20swaggers=202.0=20version=20?=
 =?UTF-8?q?from=20private=20repo=20to=20public=20repo=20f=E2=80=A6=20(#119?=
 =?UTF-8?q?95)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* LVA: move LVA swaggers 2.0 version from private repo to public repo for SDK generation

* fix PR failures

* remove examples as not needed anymore.

* update readme description

* mark method name as required.

* add a suppression for RequiredReadOnlyProperties for methodName
---
 custom-words.txt                          |    1 +
 .../preview/2.0.0/LiveVideoAnalytics.json | 1128 +++++++++++++++++
 .../LiveVideoAnalyticsSdkDefinitions.json |  196 +++
 .../mediaservices/data-plane/readme.md    |  105 ++
 4 files changed, 1430 insertions(+)
 create mode 100644 specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalytics.json
 create mode 100644 specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalyticsSdkDefinitions.json
 create mode 100644 specification/mediaservices/data-plane/readme.md

diff --git a/custom-words.txt b/custom-words.txt
index 7a3b4dbe0385..727afcfe86ab 100644
--- a/custom-words.txt
+++ b/custom-words.txt
@@ -1014,6 +1014,7 @@ mbaldwin
 Mbps
 MCAS
 MDATP
+mebibytes
 mediaservices
 Mesos
 messagingplan
diff --git a/specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalytics.json b/specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalytics.json
new file mode 100644
index 000000000000..1ab16cb7b6a4
--- /dev/null
+++ b/specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalytics.json
@@ -0,0 +1,1128 @@
+{
+  "swagger": "2.0",
+  "info": {
+    "description": "Direct Methods for Live Video Analytics on IoT Edge.",
+    "version": "2.0.0",
+    "title": "Direct Methods for Live Video Analytics on IoT Edge",
+    "contact": {
+      "email": "amshelp@microsoft.com"
+    }
+  },
+  "security": [
+    {
+      "sharedAccessSignature": []
+    }
+  ],
+  "paths": {},
+  "securityDefinitions": {
+    "sharedAccessSignature": {
+      "type": "apiKey",
+      "name": "Authorization",
+      "in": "header"
+    }
+  },
+  "definitions": {
+    "MediaGraphInstance": {
+      "type": "object",
+      "required": [
+        "name"
+      ],
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The identifier for the media graph instance."
+        },
+        "systemData": {
+          "$ref": "#/definitions/MediaGraphSystemData"
+        },
+        "properties": {
+          "$ref": "#/definitions/MediaGraphInstanceProperties"
+        }
+      },
+      "description": "Represents an instance of a media graph."
+    },
+    "MediaGraphInstanceProperties": {
+      "type": "object",
+      "properties": {
+        "description": {
+          "type": "string",
+          "description": "An optional description for the instance."
+        },
+        "topologyName": {
+          "type": "string",
+          "description": "The name of the media graph topology that this instance will run. A topology with this name should already have been set in the Edge module."
+        },
+        "parameters": {
+          "type": "array",
+          "description": "List of one or more graph instance parameters.",
+          "items": {
+            "$ref": "#/definitions/MediaGraphParameterDefinition"
+          }
+        },
+        "state": {
+          "type": "string",
+          "description": "Allowed states for a graph instance.",
+          "enum": [
+            "Inactive",
+            "Activating",
+            "Active",
+            "Deactivating"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphInstanceState",
+            "values": [
+              {
+                "value": "Inactive",
+                "description": "The media graph instance is idle and not processing media."
+              },
+              {
+                "value": "Activating",
+                "description": "The media graph instance is transitioning into the active state."
+              },
+              {
+                "value": "Active",
+                "description": "The media graph instance is active and processing media."
+              },
+              {
+                "value": "Deactivating",
+                "description": "The media graph instance is transitioning into the inactive state."
+              }
+            ],
+            "modelAsString": false
+          }
+        }
+      },
+      "description": "Properties of a media graph instance."
+    },
+    "MediaGraphParameterDefinition": {
+      "type": "object",
+      "required": [
+        "name",
+        "value"
+      ],
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The name of the parameter defined in the media graph topology."
+        },
+        "value": {
+          "type": "string",
+          "description": "The value to supply for the named parameter defined in the media graph topology."
+        }
+      },
+      "description": "A key-value pair. A media graph topology allows certain values to be parameterized. When an instance is created, the parameters are supplied with arguments specific to that instance. This allows the same graph topology to be used as a blueprint for multiple graph instances with different values for the parameters."
+    },
+    "MediaGraphInstanceCollection": {
+      "type": "object",
+      "properties": {
+        "value": {
+          "type": "array",
+          "description": "A collection of media graph instances.",
+          "items": {
+            "$ref": "#/definitions/MediaGraphInstance"
+          }
+        },
+        "@continuationToken": {
+          "type": "string",
+          "description": "A continuation token to use in subsequent calls to enumerate through the graph instance collection. This is used when the collection contains too many results to return in one response."
+        }
+      },
+      "description": "A collection of media graph instances."
+    },
+    "MediaGraphTopologyCollection": {
+      "type": "object",
+      "properties": {
+        "value": {
+          "type": "array",
+          "description": "A collection of media graph topologies.",
+          "items": {
+            "$ref": "#/definitions/MediaGraphTopology"
+          }
+        },
+        "@continuationToken": {
+          "type": "string",
+          "description": "A continuation token to use in subsequent calls to enumerate through the graph topologies collection. This is used when the collection contains too many results to return in one response."
+        }
+      },
+      "description": "A collection of media graph topologies."
+    },
+    "MediaGraphTopology": {
+      "type": "object",
+      "required": [
+        "name"
+      ],
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The identifier for the media graph topology."
+        },
+        "systemData": {
+          "$ref": "#/definitions/MediaGraphSystemData"
+        },
+        "properties": {
+          "$ref": "#/definitions/MediaGraphTopologyProperties"
+        }
+      },
+      "description": "The definition of a media graph topology."
+    },
+    "MediaGraphTopologyProperties": {
+      "type": "object",
+      "properties": {
+        "description": {
+          "type": "string",
+          "description": "A description of a media graph topology. It is recommended to use this to describe the expected use of the topology."
+        },
+        "parameters": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/MediaGraphParameterDeclaration"
+          },
+          "description": "The list of parameters defined in the topology. The values for these parameters are supplied by instances of this topology."
+        },
+        "sources": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/MediaGraphSource"
+          },
+          "description": "The list of source nodes in this topology."
+        },
+        "processors": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/MediaGraphProcessor"
+          },
+          "description": "The list of processor nodes in this topology."
+        },
+        "sinks": {
+          "type": "array",
+          "items": {
+            "$ref": "#/definitions/MediaGraphSink"
+          },
+          "description": "The list of sink nodes in this topology."
+        }
+      },
+      "description": "A description of the properties of a media graph topology."
+    },
+    "MediaGraphSystemData": {
+      "type": "object",
+      "properties": {
+        "createdAt": {
+          "type": "string",
+          "format": "date-time",
+          "description": "The timestamp of resource creation (UTC)."
+        },
+        "lastModifiedAt": {
+          "type": "string",
+          "format": "date-time",
+          "description": "The timestamp of resource last modification (UTC)."
+        }
+      },
+      "description": "The system data for a resource. This is used by both topologies and instances."
+    },
+    "MediaGraphParameterDeclaration": {
+      "type": "object",
+      "required": [
+        "name",
+        "type"
+      ],
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The name of the parameter.",
+          "maxLength": 64
+        },
+        "type": {
+          "type": "string",
+          "description": "The type of the parameter.",
+          "enum": [
+            "String",
+            "SecretString",
+            "Int",
+            "Double",
+            "Bool"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphParameterType",
+            "values": [
+              {
+                "value": "String",
+                "description": "A string parameter value."
+              },
+              {
+                "value": "SecretString",
+                "description": "A string to hold sensitive information as parameter value."
+              },
+              {
+                "value": "Int",
+                "description": "A 32-bit signed integer as parameter value."
+              },
+              {
+                "value": "Double",
+                "description": "A 64-bit double-precision floating point type as parameter value."
+              },
+              {
+                "value": "Bool",
+                "description": "A boolean value that is either true or false."
+              }
+            ],
+            "modelAsString": false
+          }
+        },
+        "description": {
+          "type": "string",
+          "description": "Description of the parameter."
+        },
+        "default": {
+          "type": "string",
+          "description": "The default value for the parameter to be used if the media graph instance does not specify a value."
+        }
+      },
+      "description": "The declaration of a parameter in the media graph topology. A media graph topology can be authored with parameters. Then, during graph instance creation, the value for those parameters can be specified. This allows the same graph topology to be used as a blueprint for multiple graph instances with different values for the parameters."
+    },
+    "MediaGraphSource": {
+      "type": "object",
+      "required": [
+        "@type",
+        "name"
+      ],
+      "discriminator": "@type",
+      "properties": {
+        "@type": {
+          "type": "string",
+          "description": "The type of the source node. The discriminator for derived types."
+        },
+        "name": {
+          "type": "string",
+          "description": "The name to be used for this source node."
+        }
+      },
+      "description": "A source node in a media graph."
+    },
+    "MediaGraphRtspSource": {
+      "type": "object",
+      "properties": {
+        "transport": {
+          "type": "string",
+          "description": "Underlying RTSP transport. This is used to enable or disable HTTP tunneling.",
+          "enum": [
+            "Http",
+            "Tcp"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphRtspTransport",
+            "values": [
+              {
+                "value": "Http",
+                "description": "HTTP/HTTPS transport. This should be used when HTTP tunneling is desired."
+              },
+              {
+                "value": "Tcp",
+                "description": "TCP transport. This should be used when HTTP tunneling is NOT desired."
+              }
+            ],
+            "modelAsString": true
+          }
+        },
+        "endpoint": {
+          "description": "RTSP endpoint of the stream that is being connected to.",
+          "$ref": "#/definitions/MediaGraphEndpoint"
+        }
+      },
+      "required": [
+        "endpoint"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphSource"
+        }
+      ],
+      "description": "Enables a media graph to capture media from an RTSP server.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphRtspSource"
+    },
+    "MediaGraphIoTHubMessageSource": {
+      "type": "object",
+      "properties": {
+        "hubInputName": {
+          "type": "string",
+          "description": "Name of the input path where messages can be routed to (via routes declared in the IoT Edge deployment manifest)."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphSource"
+        }
+      ],
+      "description": "Enables a media graph to receive messages via routes declared in the IoT Edge deployment manifest.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphIoTHubMessageSource"
+    },
+    "MediaGraphIoTHubMessageSink": {
+      "type": "object",
+      "properties": {
+        "hubOutputName": {
+          "type": "string",
+          "description": "Name of the output path to which the media graph will publish messages. These messages can then be delivered to desired destinations by declaring routes referencing the output path in the IoT Edge deployment manifest."
+        }
+      },
+      "required": [
+        "@type",
+        "hubOutputName"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphSink"
+        }
+      ],
+      "description": "Enables a media graph to publish messages that can be delivered via routes declared in the IoT Edge deployment manifest.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphIoTHubMessageSink"
+    },
+    "MediaGraphEndpoint": {
+      "type": "object",
+      "required": [
+        "@type",
+        "url"
+      ],
+      "discriminator": "@type",
+      "properties": {
+        "@type": {
+          "type": "string",
+          "description": "The discriminator for derived types."
+        },
+        "credentials": {
+          "description": "Polymorphic credentials to be presented to the endpoint.",
+          "$ref": "#/definitions/MediaGraphCredentials"
+        },
+        "url": {
+          "type": "string",
+          "description": "URL for the endpoint."
+        }
+      },
+      "description": "Base class for endpoints."
+    },
+    "MediaGraphCredentials": {
+      "type": "object",
+      "required": [
+        "@type"
+      ],
+      "discriminator": "@type",
+      "properties": {
+        "@type": {
+          "type": "string",
+          "description": "The discriminator for derived types."
+        }
+      },
+      "description": "Credentials to present during authentication."
+    },
+    "MediaGraphUsernamePasswordCredentials": {
+      "type": "object",
+      "properties": {
+        "username": {
+          "type": "string",
+          "description": "Username for a username/password pair."
+        },
+        "password": {
+          "type": "string",
+          "description": "Password for a username/password pair. Please use a parameter so that the actual value is not returned on PUT or GET requests."
+        }
+      },
+      "required": [
+        "username"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphCredentials"
+        }
+      ],
+      "description": "Username/password credential pair.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphUsernamePasswordCredentials"
+    },
+    "MediaGraphHttpHeaderCredentials": {
+      "type": "object",
+      "properties": {
+        "headerName": {
+          "type": "string",
+          "description": "HTTP header name."
+        },
+        "headerValue": {
+          "type": "string",
+          "description": "HTTP header value. Please use a parameter so that the actual value is not returned on PUT or GET requests."
+        }
+      },
+      "required": [
+        "headerName",
+        "headerValue"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphCredentials"
+        }
+      ],
+      "description": "HTTP header service credentials.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphHttpHeaderCredentials"
+    },
+    "MediaGraphUnsecuredEndpoint": {
+      "type": "object",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphEndpoint"
+        }
+      ],
+      "description": "An endpoint that the media graph can connect to, with no encryption in transit.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphUnsecuredEndpoint"
+    },
+    "MediaGraphTlsEndpoint": {
+      "type": "object",
+      "properties": {
+        "trustedCertificates": {
+          "description": "Trusted certificates when authenticating a TLS connection. Null designates that Azure Media Service's source of trust should be used.",
+          "$ref": "#/definitions/MediaGraphCertificateSource"
+        },
+        "validationOptions": {
+          "description": "Validation options to use when authenticating a TLS connection. By default, strict validation is used.",
+          "$ref": "#/definitions/MediaGraphTlsValidationOptions"
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphEndpoint"
+        }
+      ],
+      "description": "A TLS endpoint for media graph external connections.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphTlsEndpoint"
+    },
+    "MediaGraphCertificateSource": {
+      "type": "object",
+      "required": [
+        "@type"
+      ],
+      "discriminator": "@type",
+      "properties": {
+        "@type": {
+          "type": "string",
+          "description": "The discriminator for derived types."
+        }
+      },
+      "description": "Base class for certificate sources."
+    },
+    "MediaGraphTlsValidationOptions": {
+      "type": "object",
+      "properties": {
+        "ignoreHostname": {
+          "type": "string",
+          "description": "Boolean value ignoring the host name (common name) during validation."
+        },
+        "ignoreSignature": {
+          "type": "string",
+          "description": "Boolean value ignoring the integrity of the certificate chain at the current time."
+        }
+      },
+      "description": "Options for controlling the authentication of TLS endpoints."
+    },
+    "MediaGraphPemCertificateList": {
+      "type": "object",
+      "properties": {
+        "certificates": {
+          "type": "array",
+          "description": "PEM-formatted public certificates, one per entry.",
+          "items": {
+            "type": "string"
+          }
+        }
+      },
+      "required": [
+        "certificates"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphCertificateSource"
+        }
+      ],
+      "description": "A list of PEM-formatted certificates.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphPemCertificateList"
+    },
+    "MediaGraphSink": {
+      "type": "object",
+      "required": [
+        "@type",
+        "inputs",
+        "name"
+      ],
+      "discriminator": "@type",
+      "properties": {
+        "@type": {
+          "type": "string",
+          "description": "The discriminator for derived types."
+        },
+        "name": {
+          "type": "string",
+          "description": "The name to be used for the media graph sink."
+        },
+        "inputs": {
+          "type": "array",
+          "description": "An array of the names of the other nodes in the media graph, the outputs of which are used as input for this sink node.",
+          "items": {
+            "$ref": "#/definitions/MediaGraphNodeInput"
+          }
+        }
+      },
+      "description": "Enables a media graph to write media data to a destination outside of the Live Video Analytics IoT Edge module."
+    },
+    "MediaGraphNodeInput": {
+      "type": "object",
+      "properties": {
+        "nodeName": {
+          "type": "string",
+          "description": "The name of another node in the media graph, the output of which is used as input to this node."
+        },
+        "outputSelectors": {
+          "type": "array",
+          "description": "Allows for the selection of particular streams from another node.",
+          "items": {
+            "$ref": "#/definitions/MediaGraphOutputSelector"
+          }
+        }
+      },
+      "description": "Represents the input to any node in a media graph."
+    },
+    "MediaGraphOutputSelector": {
+      "type": "object",
+      "properties": {
+        "property": {
+          "type": "string",
+          "description": "The stream property to compare with.",
+          "enum": [
+            "mediaType"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphOutputSelectorProperty",
+            "values": [
+              {
+                "value": "mediaType",
+                "description": "The stream's MIME type or subtype."
+              }
+            ],
+            "modelAsString": false
+          }
+        },
+        "operator": {
+          "type": "string",
+          "description": "The operator to compare streams by.",
+          "enum": [
+            "is",
+            "isNot"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphOutputSelectorOperator",
+            "values": [
+              {
+                "value": "is",
+                "description": "A media type is the same type or a subtype."
+              },
+              {
+                "value": "isNot",
+                "description": "A media type is not the same type or a subtype."
+              }
+            ],
+            "modelAsString": false
+          }
+        },
+        "value": {
+          "type": "string",
+          "description": "Value to compare against."
+        }
+      },
+      "description": "Allows for the selection of particular streams from another node."
+    },
+    "MediaGraphFileSink": {
+      "type": "object",
+      "properties": {
+        "baseDirectoryPath": {
+          "type": "string",
+          "description": "Absolute directory for all outputs to the Edge device from this sink.",
+          "example": "/var/media/output/"
+        },
+        "fileNamePattern": {
+          "type": "string",
+          "description": "File name pattern for creating new files on the Edge device. The pattern must include at least one system variable. See the documentation for available variables and additional examples.",
+          "example": "mySampleFile-${System.GraphTopologyName}-${System.GraphInstanceName}-${System.DateTime}"
+        },
+        "maximumSizeMiB": {
+          "type": "string",
+          "description": "Maximum amount of disk space that can be used for storing files from this sink."
+        }
+      },
+      "required": [
+        "fileNamePattern",
+        "baseDirectoryPath",
+        "maximumSizeMiB"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphSink"
+        }
+      ],
+      "description": "Enables a media graph to write/store media (video and audio) to a file on the Edge device.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphFileSink"
+    },
+    "MediaGraphAssetSink": {
+      "type": "object",
+      "properties": {
+        "assetNamePattern": {
+          "type": "string",
+          "description": "A name pattern when creating new assets. The pattern must include at least one system variable. See the documentation for available variables and additional examples.",
+          "example": "MySampleAsset-${System.GraphTopologyName}-${System.GraphInstanceName}-${System.DateTime}"
+        },
+        "segmentLength": {
+          "type": "string",
+          "example": "PT30S",
+          "description": "When writing media to an asset, wait until at least this duration of media has been accumulated on the Edge. Expressed in increments of 30 seconds, with a minimum of 30 seconds and a recommended maximum of 5 minutes."
+        },
+        "localMediaCachePath": {
+          "type": "string",
+          "description": "Path to a local file system directory for temporary caching of media before writing to an Asset. Used when the Edge device is temporarily disconnected from Azure.",
+          "example": "/var/lib/lva/tmp/"
+        },
+        "localMediaCacheMaximumSizeMiB": {
+          "type": "string",
+          "description": "Maximum amount of disk space that can be used for temporary caching of media."
+        }
+      },
+      "required": [
+        "@type",
+        "assetNamePattern",
+        "localMediaCachePath",
+        "localMediaCacheMaximumSizeMiB"
+      ],
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphSink"
+        }
+      ],
+      "description": "Enables a media graph to record media to an Azure Media Services asset for subsequent playback.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphAssetSink"
+    },
+    "MediaGraphProcessor": {
+      "type": "object",
+      "required": [
+        "@type",
+        "inputs",
+        "name"
+      ],
+      "discriminator": "@type",
+      "properties": {
+        "@type": {
+          "type": "string",
+          "description": "The discriminator for derived types."
+        },
+        "name": {
+          "type": "string",
+          "description": "The name for this processor node."
+        },
+        "inputs": {
+          "type": "array",
+          "description": "An array of the names of the other nodes in the media graph, the outputs of which are used as input for this processor node.",
+          "items": {
+            "$ref": "#/definitions/MediaGraphNodeInput"
+          }
+        }
+      },
+      "description": "A node that represents the desired processing of media in a graph. Takes media and/or events as inputs, and emits media and/or events as output."
+    },
+    "MediaGraphMotionDetectionProcessor": {
+      "type": "object",
+      "properties": {
+        "sensitivity": {
+          "type": "string",
+          "description": "Enumeration that specifies the sensitivity of the motion detection processor.",
+          "enum": [
+            "Low",
+            "Medium",
+            "High"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphMotionDetectionSensitivity",
+            "values": [
+              {
+                "value": "Low",
+                "description": "Low sensitivity."
+              },
+              {
+                "value": "Medium",
+                "description": "Medium sensitivity."
+              },
+              {
+                "value": "High",
+                "description": "High sensitivity."
+              }
+            ],
+            "modelAsString": true
+          }
+        },
+        "outputMotionRegion": {
+          "type": "boolean",
+          "description": "Indicates whether the processor should detect and output the regions, within the video frame, where motion was detected. Default is true."
+        },
+        "eventAggregationWindow": {
+          "type": "string",
+          "description": "Event aggregation window duration, or 0 for no aggregation."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphProcessor"
+        }
+      ],
+      "description": "A node that accepts raw video as input, and detects if there are moving objects present. If so, then it emits an event, and allows frames where motion was detected to pass through. Other frames are blocked/dropped.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphMotionDetectionProcessor"
+    },
+    "MediaGraphExtensionProcessorBase": {
+      "type": "object",
+      "required": [
+        "endpoint",
+        "image"
+      ],
+      "properties": {
+        "endpoint": {
+          "description": "Endpoint to which this processor should connect.",
+          "$ref": "#/definitions/MediaGraphEndpoint"
+        },
+        "image": {
+          "description": "Describes the parameters of the image that is sent as input to the endpoint.",
+          "$ref": "#/definitions/MediaGraphImage"
+        },
+        "samplingOptions": {
+          "description": "Describes the sampling options to be applied when forwarding samples to the extension.",
+          "$ref": "#/definitions/MediaGraphSamplingOptions"
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphProcessor"
+        }
+      ],
+      "description": "Processor that allows for extensions outside of the Live Video Analytics Edge module to be integrated into the graph. It is the base class for various kinds of extension processor types.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphExtensionProcessorBase"
+    },
+    "MediaGraphCognitiveServicesVisionExtension": {
+      "type": "object",
+      "properties": {},
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphExtensionProcessorBase"
+        }
+      ],
+      "description": "A processor that allows the media graph to send video frames to a Cognitive Services Vision extension. Inference results are relayed to downstream nodes.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphCognitiveServicesVisionExtension"
+    },
+    "MediaGraphGrpcExtension": {
+      "type": "object",
+      "required": [
+        "dataTransfer"
+      ],
+      "properties": {
+        "dataTransfer": {
+          "description": "How media should be transferred to the inference engine.",
+          "$ref": "#/definitions/MediaGraphGrpcExtensionDataTransfer"
+        },
+        "extensionConfiguration": {
+          "type": "string",
+          "description": "Optional configuration to pass to the gRPC extension."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphExtensionProcessorBase"
+        }
+      ],
+      "description": "A processor that allows the media graph to send video frames to an external inference container over a gRPC connection. This can be done using shared memory (for high frame rates), or over the network. Inference results are relayed to downstream nodes.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphGrpcExtension"
+    },
+    "MediaGraphGrpcExtensionDataTransfer": {
+      "type": "object",
+      "required": [
+        "mode"
+      ],
+      "properties": {
+        "sharedMemorySizeMiB": {
+          "type": "string",
+          "description": "The size of the buffer for all in-flight frames in mebibytes if mode is SharedMemory. Should not be specified otherwise."
+        },
+        "mode": {
+          "type": "string",
+          "description": "How frame data should be transmitted to the inference engine.",
+          "enum": [
+            "Embedded",
+            "SharedMemory"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphGrpcExtensionDataTransferMode",
+            "values": [
+              {
+                "value": "Embedded",
+                "description": "Frames are transferred embedded into the gRPC messages."
+              },
+              {
+                "value": "SharedMemory",
+                "description": "Frames are transferred through shared memory."
+              }
+            ],
+            "modelAsString": true
+          }
+        }
+      },
+      "description": "Describes how media should be transferred to the inference engine.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphGrpcExtensionDataTransfer"
+    },
+    "MediaGraphHttpExtension": {
+      "type": "object",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphExtensionProcessorBase"
+        }
+      ],
+      "description": "A processor that allows the media graph to send video frames (typically at low frame rates, e.g. <5 fps) to an external inference container over an HTTP-based RESTful API. Inference results are relayed to downstream nodes.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphHttpExtension"
+    },
+    "MediaGraphImage": {
+      "type": "object",
+      "properties": {
+        "scale": {
+          "$ref": "#/definitions/MediaGraphImageScale"
+        },
+        "format": {
+          "$ref": "#/definitions/MediaGraphImageFormat"
+        }
+      },
+      "description": "Describes the properties of an image frame."
+    },
+    "MediaGraphSamplingOptions": {
+      "type": "object",
+      "properties": {
+        "skipSamplesWithoutAnnotation": {
+          "type": "string",
+          "description": "If true, limits the samples submitted to the extension to only samples which have associated inference(s)."
+        },
+        "maximumSamplesPerSecond": {
+          "type": "string",
+          "description": "Maximum rate of samples submitted to the extension."
+        }
+      },
+      "description": "Describes the sampling options to be applied when forwarding samples to the extension."
+    },
+    "MediaGraphImageScale": {
+      "type": "object",
+      "required": [
+        "mode"
+      ],
+      "properties": {
+        "mode": {
+          "type": "string",
+          "description": "Describes the modes for scaling an input video frame into an image, before it is sent to an inference engine.",
+          "enum": [
+            "PreserveAspectRatio",
+            "Pad",
+            "Stretch"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphImageScaleMode",
+            "values": [
+              {
+                "value": "PreserveAspectRatio",
+                "description": "Use the same aspect ratio as the input frame."
+              },
+              {
+                "value": "Pad",
+                "description": "Center pad the input frame to match the given dimensions."
+              },
+              {
+                "value": "Stretch",
+                "description": "Stretch input frame to match given dimensions."
+              }
+            ],
+            "modelAsString": true
+          }
+        },
+        "width": {
+          "type": "string",
+          "description": "The desired output width of the image."
+        },
+        "height": {
+          "type": "string",
+          "description": "The desired output height of the image."
+        }
+      },
+      "description": "The scaling mode for the image."
+    },
+    "MediaGraphImageFormat": {
+      "type": "object",
+      "required": [
+        "@type"
+      ],
+      "discriminator": "@type",
+      "properties": {
+        "@type": {
+          "type": "string",
+          "description": "The discriminator for derived types."
+        }
+      },
+      "description": "Encoding settings for an image.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphImageFormat"
+    },
+    "MediaGraphImageFormatRaw": {
+      "type": "object",
+      "required": [
+        "pixelFormat"
+      ],
+      "properties": {
+        "pixelFormat": {
+          "type": "string",
+          "description": "The pixel format that will be used to encode images.",
+          "enum": [
+            "Yuv420p",
+            "Rgb565be",
+            "Rgb565le",
+            "Rgb555be",
+            "Rgb555le",
+            "Rgb24",
+            "Bgr24",
+            "Argb",
+            "Rgba",
+            "Abgr",
+            "Bgra"
+          ],
+          "x-ms-enum": {
+            "name": "MediaGraphImageFormatRawPixelFormat",
+            "values": [
+              {
+                "value": "Yuv420p",
+                "description": "Planar YUV 4:2:0, 12bpp, (1 Cr and Cb sample per 2x2 Y samples)."
+              },
+              {
+                "value": "Rgb565be",
+                "description": "Packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), big-endian."
+              },
+              {
+                "value": "Rgb565le",
+                "description": "Packed RGB 5:6:5, 16bpp, (msb) 5R 6G 5B(lsb), little-endian."
+              },
+              {
+                "value": "Rgb555be",
+                "description": "Packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), big-endian, X=unused/undefined."
+              },
+              {
+                "value": "Rgb555le",
+                "description": "Packed RGB 5:5:5, 16bpp, (msb)1X 5R 5G 5B(lsb), little-endian, X=unused/undefined."
+              },
+              {
+                "value": "Rgb24",
+                "description": "Packed RGB 8:8:8, 24bpp, RGBRGB."
+              },
+              {
+                "value": "Bgr24",
+                "description": "Packed RGB 8:8:8, 24bpp, BGRBGR."
+              },
+              {
+                "value": "Argb",
+                "description": "Packed ARGB 8:8:8:8, 32bpp, ARGBARGB."
+              },
+              {
+                "value": "Rgba",
+                "description": "Packed RGBA 8:8:8:8, 32bpp, RGBARGBA."
+              },
+              {
+                "value": "Abgr",
+                "description": "Packed ABGR 8:8:8:8, 32bpp, ABGRABGR."
+              },
+              {
+                "value": "Bgra",
+                "description": "Packed BGRA 8:8:8:8, 32bpp, BGRABGRA."
+              }
+            ],
+            "modelAsString": true
+          }
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphImageFormat"
+        }
+      ],
+      "description": "Encoding settings for raw images.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphImageFormatRaw"
+    },
+    "MediaGraphImageFormatJpeg": {
+      "type": "object",
+      "properties": {
+        "quality": {
+          "type": "string",
+          "description": "The image quality. Value must be between 0 and 100 (best quality)."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphImageFormat"
+        }
+      ],
+      "description": "Encoding settings for Jpeg images.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphImageFormatJpeg"
+    },
+    "MediaGraphImageFormatBmp": {
+      "type": "object",
+      "properties": {},
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphImageFormat"
+        }
+      ],
+      "description": "Encoding settings for Bmp images.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphImageFormatBmp"
+    },
+    "MediaGraphImageFormatPng": {
+      "type": "object",
+      "properties": {},
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphImageFormat"
+        }
+      ],
+      "description": "Encoding settings for Png images.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphImageFormatPng"
+    },
+    "MediaGraphSignalGateProcessor": {
+      "type": "object",
+      "required": [
+        "activationSignalOffset",
+        "minimumActivationTime",
+        "maximumActivationTime"
+      ],
+      "properties": {
+        "activationEvaluationWindow": {
+          "type": "string",
+          "example": "PT1.0S",
+          "description": "The period of time over which the gate gathers input events before evaluating them."
+        },
+        "activationSignalOffset": {
+          "type": "string",
+          "example": "-PT1.0S",
+          "description": "Signal offset once the gate is activated (can be negative). It is an offset between the time the event is received, and the timestamp of the first media sample (e.g. video frame) that is allowed through by the gate."
+        },
+        "minimumActivationTime": {
+          "type": "string",
+          "example": "PT1S",
+          "description": "The minimum period for which the gate remains open in the absence of subsequent triggers (events)."
+        },
+        "maximumActivationTime": {
+          "type": "string",
+          "example": "PT2S",
+          "description": "The maximum period for which the gate remains open in the presence of subsequent events."
+        }
+      },
+      "allOf": [
+        {
+          "$ref": "#/definitions/MediaGraphProcessor"
+        }
+      ],
+      "description": "A signal gate determines when to block (gate) incoming media, and when to allow it through. It gathers input events over the activationEvaluationWindow, and determines whether to open or close the gate.",
+      "x-ms-discriminator-value": "#Microsoft.Media.MediaGraphSignalGateProcessor"
+    }
+  }
+}
diff --git a/specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalyticsSdkDefinitions.json b/specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalyticsSdkDefinitions.json
new file mode 100644
index 000000000000..1c4e79356c1f
--- /dev/null
+++ b/specification/mediaservices/data-plane/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalyticsSdkDefinitions.json
@@ -0,0 +1,196 @@
+{
+  "swagger": "2.0",
+  "info": {
+    "description": "Direct Methods for Live Video Analytics on IoT Edge.",
+    "version": "2.0.0",
+    "title": "Direct Methods for Live Video Analytics on IoT Edge",
+    "contact": {
+      "email": "amshelp@microsoft.com"
+    }
+  },
+  "security": [
+    {
+      "sharedAccessSignature": []
+    }
+  ],
+  "paths": {},
+  "securityDefinitions": {
+    "sharedAccessSignature": {
+      "type": "apiKey",
+      "name": "Authorization",
+      "in": "header"
+    }
+  },
+  "definitions": {
+    "MethodRequest": {
+      "type": "object",
+      "required": [
+        "methodName"
+      ],
+      "properties": {
+        "methodName": {
+          "type": "string",
+          "description": "The direct method name.",
+          "readOnly": true
+        },
+        "@apiVersion": {
+          "type": "string",
+          "description": "The API version.",
+          "enum": [
+            "2.0"
+          ],
+          "x-ms-enum": {
+            "name": "ApiVersionEnum",
+            "modelAsString": false
+          }
+        }
+      },
+      "discriminator": "methodName"
+    },
+    "MediaGraphTopologySetRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphTopologySet",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MethodRequest"
+        }
+      ],
+      "required": [
+        "graph"
+      ],
+      "properties": {
+        "graph": {
+          "$ref": "./LiveVideoAnalytics.json#/definitions/MediaGraphTopology"
+        }
+      }
+    },
+    "MediaGraphTopologySetRequestBody": {
+      "type": "object",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MethodRequest"
+        },
+        {
+          "$ref": "./LiveVideoAnalytics.json#/definitions/MediaGraphTopology"
+        }
+      ]
+    },
+    "MediaGraphInstanceSetRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphInstanceSet",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MethodRequest"
+        }
+      ],
+      "required": [
+        "instance"
+      ],
+      "properties": {
+        "instance": {
+          "$ref": "./LiveVideoAnalytics.json#/definitions/MediaGraphInstance"
+        }
+      }
+    },
+    "MediaGraphInstanceSetRequestBody": {
+      "type": "object",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MethodRequest"
+        },
+        {
+          "$ref": "./LiveVideoAnalytics.json#/definitions/MediaGraphInstance"
+        }
+      ]
+    },
+    "ItemNonSetRequestBase": {
+      "type": "object",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MethodRequest"
+        }
+      ],
+      "required": [
+        "name"
+      ],
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "The name of the graph topology or graph instance that the request applies to."
+        }
+      }
+    },
+    "MediaGraphTopologyListRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphTopologyList",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MethodRequest"
+        }
+      ]
+    },
+    "MediaGraphTopologyGetRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphTopologyGet",
+      "allOf": [
+        {
+          "$ref": "#/definitions/ItemNonSetRequestBase"
+        }
+      ]
+    },
+    "MediaGraphTopologyDeleteRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphTopologyDelete",
+      "allOf": [
+        {
+          "$ref": "#/definitions/ItemNonSetRequestBase"
+        }
+      ]
+    },
+    "MediaGraphInstanceListRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphInstanceList",
+      "allOf": [
+        {
+          "$ref": "#/definitions/MethodRequest"
+        }
+      ]
+    },
+    "MediaGraphInstanceGetRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphInstanceGet",
+      "allOf": [
+        {
+          "$ref": "#/definitions/ItemNonSetRequestBase"
+        }
+      ]
+    },
+    "MediaGraphInstanceActivateRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphInstanceActivate",
+      "allOf": [
+        {
+          "$ref": "#/definitions/ItemNonSetRequestBase"
+        }
+      ]
+    },
+    "MediaGraphInstanceDeActivateRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphInstanceDeactivate",
+      "allOf": [
+        {
+          "$ref": "#/definitions/ItemNonSetRequestBase"
+        }
+      ]
+    },
+    "MediaGraphInstanceDeleteRequest": {
+      "type": "object",
+      "x-ms-discriminator-value": "GraphInstanceDelete",
+      "allOf": [
+        {
+          "$ref": "#/definitions/ItemNonSetRequestBase"
+        }
+      ]
+    }
+  }
+}
diff --git a/specification/mediaservices/data-plane/readme.md b/specification/mediaservices/data-plane/readme.md
new file mode 100644
index 000000000000..ad5b26baaa51
--- /dev/null
+++ b/specification/mediaservices/data-plane/readme.md
@@ -0,0 +1,105 @@
+# MediaServices - Live Video Analytics Edge
+
+> see https://aka.ms/autorest
+
+This is the AutoRest configuration file for Live Video Analytics Edge.
+
+These Swagger files are used to generate the SDKs for Live Video Analytics. The SDKs contain models only (no client); customers need to use the IoT SDK to send direct method calls to IoT Hub (a usage sketch is included at the end of this readme). The SDKs are not ARM-based and do not make any REST calls: all operations are sent as direct methods on IoT Hub.
+
+---
+
+## Getting Started
+
+To build the SDK for Live Video Analytics Edge, simply [Install AutoRest](https://aka.ms/autorest/install) and in this folder, run:
+
+> `autorest`
+
+To see additional help and options, run:
+
+> `autorest --help`
+
+---
+
+## Configuration
+
+### Basic Information
+
+These are the global settings for the Live Video Analytics API.
+
+``` yaml
+openapi-type: data-plane
+tag: package-lva-2-0-0-preview
+
+directive:
+  - where:
+      - $.definitions.MethodRequest.properties.methodName
+    suppress:
+      - RequiredReadOnlyProperties
+```
+
+### Tag: package-lva-2-0-0-preview
+
+These settings apply only when `--tag=package-lva-2-0-0-preview` is specified on the command line.
+
+``` yaml $(tag) == 'package-lva-2-0-0-preview'
+input-file:
+  - LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalytics.json
+  - LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalyticsSdkDefinitions.json
+```
+
+---
+
+# Code Generation
+
+## Swagger to SDK
+
+This section describes what SDK should be generated by the automatic system.
+This is not used by AutoRest itself.
+
+``` yaml $(swagger-to-sdk)
+swagger-to-sdk:
+  - repo: azure-sdk-for-net
+    after_scripts:
+      - bundle install && rake arm:regen_all_profiles['azure_media_lva_edge']
+```
+
+## C#
+
+These settings apply only when `--csharp` is specified on the command line.
+Please also specify `--csharp-sdks-folder=`.
+
+``` yaml $(csharp)
+csharp:
+  azure-arm: false
+  payload-flattening-threshold: 2
+  license-header: MICROSOFT_MIT_NO_VERSION
+  namespace: Microsoft.Azure.Media.LiveVideoAnalytics.Edge
+  output-folder: $(csharp-sdks-folder)/mediaservices/Microsoft.Azure.Media.LiveVideoAnalytics.Edge/src/Generated
+  clear-output-folder: true
+  use-internal-constructors: true
+  override-client-name: LiveVideoAnalyticsEdgeClient
+  use-datetimeoffset: true
+```
+
+## Multi-API/Profile support for AutoRest v3 generators
+
+AutoRest V3 generators require the use of `--tag=all-api-versions` to select API files.
+
+This block is updated by an automatic script. Edits may be lost!
+ +``` yaml $(tag) == 'all-api-versions' /* autogenerated */ +# include the azure profile definitions from the standard location +require: $(this-folder)/../../../profiles/readme.md + +# all the input files across all versions +input-file: + - $(this-folder)/LiveVideoAnalytics.Edge/preview/2.0.0/LiveVideoAnalytics.json + +``` + +If there are files that should not be in the `all-api-versions` set, +uncomment the `exclude-file` section below and add the file paths. + +``` yaml $(tag) == 'all-api-versions' +#exclude-file: +# - $(this-folder)/Microsoft.Example/stable/2010-01-01/somefile.json +```