def create_asset(client, account_name, resource_group_name, asset_name,
                 alternate_id=None, description=None, storage_account=None,
                 container=None):
    from azure.mgmt.media.models import Asset
    asset = Asset(alternate_id=alternate_id,
                  description=description,
                  storage_account_name=storage_account,
                  container=container)
    return client.create_or_update(resource_group_name, account_name,
                                   asset_name, asset)
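# Usage sketch for create_asset (assumptions: the client comes from
# azure-mgmt-media with DefaultAzureCredential; the subscription, resource
# group, and account names below are placeholders, not values from this sample).
from azure.identity import DefaultAzureCredential
from azure.mgmt.media import AzureMediaServices

ams_client = AzureMediaServices(DefaultAzureCredential(), "<subscription-id>")
# create_asset takes the assets operations object and account before resource group.
asset = create_asset(ams_client.assets, "<account-name>", "<resource-group>",
                     "myAsset", description="demo asset")
print(f"Asset container: {asset.container}")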
def _upload_and_encode(file):
    # Assumes module-level client (AzureMediaServices), blob_service, and the
    # RESOURCE_GROUP_NAME / ACCOUNT_NAME / TRANSFORM_NAME constants.
    from azure.core.exceptions import ResourceNotFoundError
    from azure.mgmt.media.models import Asset, Job, JobInputAsset, JobOutputAsset

    # Clean up inputs: replace dots and spaces in the file name.
    basename = file.filename.replace('.', '-').replace(' ', '-')
    in_asset = "in__" + basename
    print(in_asset)

    # Create a new input asset.
    client.assets.create_or_update(RESOURCE_GROUP_NAME, ACCOUNT_NAME,
                                   in_asset, Asset())

    # Get the asset's storage container and upload the file to it.
    response = client.assets.get(RESOURCE_GROUP_NAME, ACCOUNT_NAME, in_asset)
    print("Uploading to container: " + response.container)
    blob_service.create_blob_from_stream(response.container, basename,
                                         stream=file.stream)

    # Create the output asset and store the streaming locator name on it
    # as metadata (in alternate_id).
    out_asset = "out__" + basename
    locator_name = "loc__" + basename
    client.assets.create_or_update(RESOURCE_GROUP_NAME, ACCOUNT_NAME,
                                   out_asset, Asset(alternate_id=locator_name))

    # If a job with this name already exists, delete it. jobs.get raises
    # ResourceNotFoundError when the job does not exist in current
    # azure-mgmt-media versions.
    job_name = "job__" + basename
    try:
        client.jobs.get(RESOURCE_GROUP_NAME, ACCOUNT_NAME, TRANSFORM_NAME,
                        job_name)
        client.jobs.delete(RESOURCE_GROUP_NAME, ACCOUNT_NAME, TRANSFORM_NAME,
                           job_name)
    except ResourceNotFoundError:
        pass

    # Create a new job with the correct inputs and outputs.
    inputs = JobInputAsset(asset_name=in_asset)
    outputs = [JobOutputAsset(asset_name=out_asset)]
    new_job = Job(input=inputs, outputs=outputs)
    print("Creating new job")
    client.jobs.create(RESOURCE_GROUP_NAME, ACCOUNT_NAME, TRANSFORM_NAME,
                       job_name, new_job)
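# Usage sketch: _upload_and_encode expects an object with .filename and
# .stream, which matches a Werkzeug FileStorage. A Flask upload route is one
# plausible caller; the route and form field names here are assumptions, not
# part of the original sample.
from flask import Flask, request, redirect

app = Flask(__name__)

@app.route('/upload', methods=['POST'])
def upload():
    uploaded = request.files['file']  # <input type="file" name="file">
    _upload_and_encode(uploaded)
    return redirect('/')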
def _get_music(asset_list, base_url):
    from azure.core.exceptions import ResourceNotFoundError
    from azure.mgmt.media.models import Asset, StreamingLocator

    music = dict()
    for asset in asset_list:
        print(asset.name)
        if asset.name.startswith('out__'):
            basename = asset.name[5:]
            url = asset.description
            # No streaming URL stored on the asset yet.
            if not url:
                print("No streaming locator for " + asset.name)
                response = client.jobs.get(RESOURCE_GROUP_NAME, ACCOUNT_NAME,
                                           TRANSFORM_NAME, "job__" + basename)
                # No streaming locator, but encoding has finished, so create one.
                if response.state == 'Finished':
                    print("Job is finished, creating locator for " + asset.name)
                    # Check that the locator doesn't already exist.
                    # streaming_locators.get raises ResourceNotFoundError when
                    # it is missing in current azure-mgmt-media versions.
                    try:
                        client.streaming_locators.get(RESOURCE_GROUP_NAME,
                                                      ACCOUNT_NAME,
                                                      "loc__" + basename)
                    except ResourceNotFoundError:
                        client.streaming_locators.create(
                            RESOURCE_GROUP_NAME, ACCOUNT_NAME,
                            "loc__" + basename,
                            StreamingLocator(
                                asset_name=asset.name,
                                streaming_policy_name='Predefined_ClearStreamingOnly'))

                    # Generate the streaming path and store it on the asset.
                    paths = client.streaming_locators.list_paths(
                        RESOURCE_GROUP_NAME, ACCOUNT_NAME, "loc__" + basename)
                    path = ""
                    for item in paths.streaming_paths:
                        path = item.paths[0]
                    url = base_url + path
                    update = Asset(alternate_id=asset.alternate_id,
                                   description=url)
                    print("Adding url " + url + " to " + asset.name)
                    client.assets.create_or_update(RESOURCE_GROUP_NAME,
                                                   ACCOUNT_NAME, asset.name,
                                                   update)
                    print(url)
                    music[basename] = url
            # The streaming URL already exists on the asset.
            else:
                print(url)
                music[basename] = url
    return music
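# Usage sketch: list the account's assets and build the name -> URL map.
# The streaming endpoint hostname below is a placeholder (assumption).
assets = client.assets.list(RESOURCE_GROUP_NAME, ACCOUNT_NAME)
music = _get_music(assets, "https://<streaming-endpoint-hostname>")
for name, stream_url in music.items():
    print(name, "->", stream_url)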
async def main():
    async with client:
        time_start = time.perf_counter()
        client_live = await client.live_events.begin_create(
            resource_group_name=resource_group,
            account_name=account_name,
            live_event_name=live_event_name,
            parameters=live_event_create,
            auto_start=False)
        time_end = time.perf_counter()
        execution_time = time_end - time_start

        if client_live:
            print(f"Live Event Created - long running operation complete! Name: {live_event_name}")
            print(f"Execution time to create LiveEvent: {execution_time:.2f} seconds")
            print()
            poller = client_live
            print(await poller.result())
        else:
            raise ValueError('Live Event creation failed!')

        # Create an Asset for the LiveOutput to use. Think of this as the
        # "tape" that will be recorded to. The asset entity points to a
        # folder/container in your Azure Storage account.
        print(f"Creating an asset named: {asset_name}")
        print()

        out_alternate_id = f'outputALTid-{uniqueness}'
        out_description = f'outputdescription-{uniqueness}'

        # Create an output asset object
        out_asset = Asset(alternate_id=out_alternate_id,
                          description=out_description)

        # Create an output asset
        output_asset = await client.assets.create_or_update(
            resource_group, account_name, asset_name, out_asset)

        if output_asset:
            print(f"The output asset name is: {output_asset.name}")
            print()
        else:
            raise ValueError('Output Asset creation failed!')

        # Create the Live Output - think of this as the "tape recorder" for
        # the live event. Live outputs are optional, but are required if you
        # want to archive the event to storage, use the asset for on-demand
        # playback later, or enable cloud DVR time-shifting. We will use the
        # asset created above as the "tape" to record to.
        manifest_name = "output"

        # See the REST API for details on each of the settings on Live Output:
        # https://docs.microsoft.com/rest/api/media/liveoutputs/create
        print(f"Creating a live output named: {live_output_name}")
        print()

        if output_asset:
            time_start = time.perf_counter()
            live_output_create = LiveOutput(
                description="Optional description when using more than one live output",
                asset_name=output_asset.name,
                # The HLS and DASH manifest file name. Setting this is
                # recommended if you want a deterministic manifest path up front.
                manifest_name=manifest_name,
                # Sets a one-hour time-shift DVR window, as an ISO 8601 duration.
                archive_window_length=timedelta(hours=1),
                # Advanced setting, used when producing HLS TS output only.
                hls=Hls(fragments_per_ts_segment=1))
            print(f"live_output_create object is {live_output_create}")
            print()

            # Create and await the live output
            live_output_await = await client.live_outputs.begin_create(
                resource_group_name=resource_group,
                account_name=account_name,
                live_event_name=live_event_name,
                live_output_name=live_output_name,
                parameters=live_output_create)
            if live_output_await:
                print(f"Live Output created: {live_output_name}")
                poller = live_output_await
                print(await poller.result())
                time_end = time.perf_counter()
                execution_time = time_end - time_start
                print(f"Execution time to create Live Output: {execution_time:.2f} seconds")
                print()
            else:
                raise Exception("Live Output creation failed!")

        # Refresh the LiveEvent object's settings after starting it...
        live_event = await client.live_events.get(resource_group, account_name,
                                                  live_event_name)

        # Get the RTMP ingest URL to configure in OBS Studio.
        # The endpoints property is a collection of RTMP primary and secondary,
        # and RTMPS primary and secondary URLs. To get the primary secure RTMPS
        # URL, it is usually index 3, but you could add a loop here to confirm...
        if live_event.input.endpoints:
            ingest_url = live_event.input.endpoints[0].url
            print("The RTMP ingest URL to enter into OBS Studio is:")
            print(f"RTMP ingest: {ingest_url}")
            print("Make sure to enter a Stream Key into the OBS Studio "
                  "settings. It can be any value, or you can repeat the access "
                  "token used in the ingest URL path.")
            print()

        if live_event.preview.endpoints:
            # Use the preview endpoint to verify that the input from the
            # encoder is actually being received. The preview endpoint URL also
            # supports various format strings, for example HLS
            # (format=m3u8-cmaf) and DASH (format=mpd-time-cmaf). The default
            # manifest is Smooth.
            preview_endpoint = live_event.preview.endpoints[0].url
            print(f"The preview url is: {preview_endpoint}")
            print()
            print("Open the live preview in your browser and use any DASH or "
                  "HLS player to monitor the preview playback.")
            print(f"https://ampdemo.azureedge.net/?url={preview_endpoint}(format=mpd-time-cmaf)&heuristicprofile=lowlatency")
            print("You will need to refresh the player page SEVERAL times "
                  "until enough data has arrived to allow for manifest creation.")
            print("In a production player, the player can inspect the manifest "
                  "to see if it contains enough content for the player to load "
                  "and auto reload.")
            print()

        print("Start the live stream now, sending the input to the ingest url "
              "and verify that it is arriving with the preview url.")
        print("IMPORTANT TIP!: Make CERTAIN that the video is flowing to the "
              "Preview URL before continuing!")

        # Create the Streaming Locator URL for playback of the contents in the
        # Live Output recording.
        print(f"Creating a streaming locator named: {streaming_locator_name}")
        print()
        streaming_locator = StreamingLocator(
            asset_name=asset_name,
            streaming_policy_name="Predefined_ClearStreamingOnly")
        locator = await client.streaming_locators.create(
            resource_group_name=resource_group,
            account_name=account_name,
            streaming_locator_name=streaming_locator_name,
            parameters=streaming_locator)

        # Get the default streaming endpoint on the account
        streaming_endpoint = await client.streaming_endpoints.get(
            resource_group_name=resource_group,
            account_name=account_name,
            streaming_endpoint_name=streaming_endpoint_name)

        if streaming_endpoint.resource_state != "Running":
            print(f"Streaming endpoint is stopped. Starting the endpoint named {streaming_endpoint_name}...")
            poller = await client.streaming_endpoints.begin_start(
                resource_group, account_name, streaming_endpoint_name)
            client_streaming_begin = await poller.result()
            print("Streaming Endpoint started.")
            if not client_streaming_begin:
                print("Streaming Endpoint was already started.")

        # Get the URL to stream the Output
        print("The streaming URLs to stream the live output from a client player")
        print()

        host_name = streaming_endpoint.host_name
        scheme = 'https'

        # If you wish to get the streaming manifest ahead of time, make sure to
        # set the manifest name in the LiveOutput as done above. This allows
        # you to have a deterministic manifest path:
        # <streaming endpoint hostname>/<streaming locator ID>/manifestName.ism/manifest(<format string>)
        # Building the paths statically is highly recommended when you want to
        # share the stream manifests with a player application or CMS system
        # ahead of the live event.
hls_format = "format=m3u8-cmaf" dash_format = "format=mpd-time-cmaf" manifest_base = f"{scheme}://{host_name}/{locator.streaming_locator_id}/{manifest_name}.ism/manifest" hls_manifest = f'{manifest_base}({hls_format})' print(f"The HLS (MP4) manifest URL is: {hls_manifest}") print( "Open the following URL to playback the live stream in an HLS compliant player (HLS.js, Shaka, ExoPlayer) or directly in an iOS device" ) print({hls_manifest}) print() dash_manifest = f'{manifest_base}({dash_format})' print(f"The DASH manifest URL is: {dash_manifest}") print( "Open the following URL to playback the live stream from the LiveOutput in the Azure Media Player" ) print( f"https://ampdemo.azureedge.net/?url={dash_manifest}&heuristicprofile=lowlatency" ) print() # closing media client print('Closing media client') await client.close() # closing credential client print('Closing credential client') await default_credential.close()
# The file you want to upload. For this example, put the file in the same
# folder as this script. The file ignite.mp4 has been provided for you.
source_file = "ignite.mp4"

# A suffix added to resource names so that they stay distinct and you don't
# have to keep renaming things during testing.
uniqueness = "analyze-videoaudio"

# Set the attributes of the input Asset using the uniqueness suffix
in_asset_name = 'inputassetName' + uniqueness
in_alternate_id = 'inputALTid' + uniqueness
in_description = 'inputdescription' + uniqueness

# Create an Asset object. The asset's container will be used as the container
# parameter for the storage SDK after the asset is created by the AMS client.
in_asset = Asset(alternate_id=in_alternate_id, description=in_description)

# Set the attributes of the output Asset using the uniqueness suffix
out_asset_name = 'outputassetName' + uniqueness
out_alternate_id = 'outputALTid' + uniqueness
out_description = 'outputdescription' + uniqueness

# Create an output Asset object
out_asset = Asset(alternate_id=out_alternate_id, description=out_description)

# The AMS Client
print("Creating AMS Client")
client = AzureMediaServices(default_credential, subscription_id)

# Create an input Asset
print(f"Creating input asset {in_asset_name}")
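# Sketch of the steps the comments above describe: create the input asset,
# then upload source_file into the asset's storage container with the
# azure-storage-blob SDK. Assumptions: resource_group and account_name are
# defined earlier in the sample, and the storage account URL below is a
# placeholder.
from azure.storage.blob import BlobServiceClient

input_asset = client.assets.create_or_update(resource_group, account_name,
                                             in_asset_name, in_asset)
blob_service_client = BlobServiceClient(
    account_url="https://<storage-account-name>.blob.core.windows.net",
    credential=default_credential)
blob_client = blob_service_client.get_blob_client(input_asset.container,
                                                  source_file)
with open(source_file, "rb") as data:
    blob_client.upload_blob(data)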
# The AMS Client
print("Creating AMS Client")
client = AzureMediaServices(default_credential, subscription_id)

# List the Assets in the account
print("Listing assets in account:")
for existing_asset in client.assets.list(resource_group_name=resource_group,
                                         account_name=account_name):
    print(existing_asset.name)

asset_name = 'MyCustomAssetName'
# Lower case letters, numbers and dashes are OK. Check MSDN for more
# information about valid container naming.
storage_container_name = 'mycustomcontainername'

print(f"Creating a new Asset with the name: {asset_name} in storage container {storage_container_name}")

# The asset's container will be used for the container parameter for the
# storage SDK after the asset is created by the AMS client.
new_asset = Asset(alternate_id="MyCustomIdentifier",
                  description="my description",
                  container=storage_container_name)

asset = client.assets.create_or_update(resource_group_name=resource_group,
                                       account_name=account_name,
                                       asset_name=asset_name,
                                       parameters=new_asset)

if asset:
    print("Asset created")
    print(f"This asset is in storage account '{asset.storage_account_name}' in the container '{asset.container}'")
else:
    raise Exception("There was an error while creating an asset.")
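# Follow-up sketch: fetch the asset back by name, then clean it up. Deleting
# the asset is an illustrative extra step, not part of the sample above.
fetched = client.assets.get(resource_group, account_name, asset_name)
print(f"Fetched asset id: {fetched.asset_id}")
client.assets.delete(resource_group, account_name, asset_name)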
import json
import logging
import os
from datetime import datetime, timedelta

import adal
import azure.functions as func
from azure.identity import DefaultAzureCredential
from azure.mgmt.media import AzureMediaServices
from azure.mgmt.media.models import Asset, Job, JobInputHttp, JobOutputAsset
from azure.storage.filedatalake import (DataLakeServiceClient,
                                        FileSasPermissions, generate_file_sas)
from msrestazure.azure_active_directory import AdalAuthentication
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD


def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    # Parse the blob URL from the Event Grid payload.
    blob_url = event.get_json().get('url')
    logging.info('blob URL: %s', blob_url)
    blob_name = blob_url.split("/")[-1].split("?")[0]
    logging.info('blob name: %s', blob_name)
    origin_container_name = blob_url.split("/")[-2].split("?")[0]
    logging.info('container name: %s', origin_container_name)
    storage_account_name = blob_url.split("//")[1].split(".")[0]
    logging.info('storage account name: %s', storage_account_name)

    # Read account settings from the function's application settings.
    ams_account_name = os.getenv('ACCOUNTNAME')
    resource_group_name = os.getenv('RESOURCEGROUP')
    subscription_id = os.getenv('SUBSCRIPTIONID')
    client_id = os.getenv('AZURE_CLIENT_ID')
    client_secret = os.getenv('AZURE_CLIENT_SECRET')
    TENANT_ID = os.getenv('AZURE_TENANT_ID')
    storage_blob_url = 'https://' + storage_account_name + '.blob.core.windows.net/'
    transform_name = 'faceredact'

    LOGIN_ENDPOINT = AZURE_PUBLIC_CLOUD.endpoints.active_directory
    RESOURCE = AZURE_PUBLIC_CLOUD.endpoints.active_directory_resource_id
    logging.info('login_endpoint: %s', LOGIN_ENDPOINT)
    logging.info('tenant_id: %s', TENANT_ID)

    out_asset_name = 'faceblurringOutput_' + datetime.utcnow().strftime("%m-%d-%Y_%H:%M:%S")
    out_alternate_id = 'faceblurringOutput_' + datetime.utcnow().strftime("%m-%d-%Y_%H:%M:%S")
    out_description = 'Redacted video with blurred faces'

    # Authenticate the AMS client with a service principal.
    context = adal.AuthenticationContext(LOGIN_ENDPOINT + "/" + TENANT_ID)
    credentials = AdalAuthentication(
        context.acquire_token_with_client_credentials, RESOURCE, client_id,
        client_secret)
    client = AzureMediaServices(credentials, subscription_id)

    # Create the output asset that will hold the redacted video.
    output_asset = Asset(alternate_id=out_alternate_id,
                         description=out_description)
    client.assets.create_or_update(resource_group_name, ams_account_name,
                                   out_asset_name, output_asset)

    # Build a read-only SAS URL for the input blob using a user delegation key.
    token_credential = DefaultAzureCredential()
    datalake_service_client = DataLakeServiceClient(
        account_url=storage_blob_url, credential=token_credential)
    delegation_key = datalake_service_client.get_user_delegation_key(
        key_start_time=datetime.utcnow(),
        key_expiry_time=datetime.utcnow() + timedelta(hours=1))
    sas_token = generate_file_sas(account_name=storage_account_name,
                                  file_system_name=origin_container_name,
                                  directory_name="",
                                  file_name=blob_name,
                                  credential=delegation_key,
                                  permission=FileSasPermissions(read=True),
                                  expiry=datetime.utcnow() + timedelta(hours=1),
                                  protocol="https")
    sas_url = "{}?{}".format(blob_url, sas_token)
    logging.info(sas_url)

    # Submit the face-redaction job to the transform.
    job_name = 'Faceblurring-job_' + datetime.utcnow().strftime("%m-%d-%Y_%H:%M:%S")
    job_input = JobInputHttp(label="Video_asset", files=[sas_url])
    job_output = JobOutputAsset(asset_name=out_asset_name)
    job_parameters = Job(input=job_input, outputs=[job_output])
    client.jobs.create(resource_group_name, ams_account_name, transform_name,
                       job_name, parameters=job_parameters)
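# Follow-up sketch: the function above only submits the job. One way to check
# on it afterwards (for example from a console script using the same client
# and settings) is to poll jobs.get until the state is terminal. This polling
# helper is an illustration, not part of the original function.
import time


def wait_for_job(client, resource_group_name, ams_account_name,
                 transform_name, job_name, poll_seconds=10):
    while True:
        job = client.jobs.get(resource_group_name, ams_account_name,
                              transform_name, job_name)
        print(f"Job state: {job.state}")
        if job.state in ('Finished', 'Error', 'Canceled'):
            return job
        time.sleep(poll_seconds)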