# Module-level imports used by this method:
import adal
from azure.mgmt.media import AzureMediaServices
from msrestazure.azure_active_directory import AdalAuthentication
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD

def __create_media_service_client(self):
    LOGIN_ENDPOINT = AZURE_PUBLIC_CLOUD.endpoints.active_directory
    RESOURCE = AZURE_PUBLIC_CLOUD.endpoints.active_directory_resource_id
    # Authenticate against Azure AD with the service principal from Config.
    context = adal.AuthenticationContext(
        LOGIN_ENDPOINT + '/' + Config.AAD_TENANT_ID)
    credentials = AdalAuthentication(
        context.acquire_token_with_client_credentials,
        RESOURCE,
        Config.AAD_CLIENT_ID,
        Config.AAD_SECRET)
    # Cache the Media Services management client on the instance.
    self.media_service_client = AzureMediaServices(
        credentials, Config.SUBSCRIPTION_ID)
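The adal/AdalAuthentication flow above is deprecated. For comparison, a minimal sketch of the same client setup with the current azure-identity package, assuming the same Config fields and a recent azure-mgmt-media:

from azure.identity import ClientSecretCredential
from azure.mgmt.media import AzureMediaServices

def __create_media_service_client(self):
    # ClientSecretCredential replaces the adal + AdalAuthentication pair.
    credential = ClientSecretCredential(
        tenant_id=Config.AAD_TENANT_ID,
        client_id=Config.AAD_CLIENT_ID,
        client_secret=Config.AAD_SECRET)
    self.media_service_client = AzureMediaServices(
        credential, Config.SUBSCRIPTION_ID)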
import json
import logging

import adal
import azure.functions as func
from azure.mgmt.media import AzureMediaServices
from msrestazure.azure_active_directory import AdalAuthentication

# LOGIN_ENDPOINT, TENANT_ID, RESOURCE, CLIENT, KEY, SUBSCRIPTION_ID,
# RESOURCE_GROUP_NAME and ACCOUNT_NAME are module-level settings defined elsewhere.

def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    context = adal.AuthenticationContext(LOGIN_ENDPOINT + '/' + TENANT_ID)
    credentials = AdalAuthentication(
        context.acquire_token_with_client_credentials, RESOURCE, CLIENT, KEY)
    # You can now use this client to perform operations against your AMS account.
    client = AzureMediaServices(credentials, SUBSCRIPTION_ID)
    logging.info('signed in to AMS')
    # assets.list returns a paged iterator, not a list, so log the first item
    # rather than indexing into it.
    assets = client.assets.list(RESOURCE_GROUP_NAME, ACCOUNT_NAME)
    logging.info('first asset: %s', next(iter(assets), None))
    logging.info('Python EventGrid trigger processed an event: %s', result)
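If more than the first asset is needed, the pager returned by assets.list can simply be iterated; it fetches additional pages transparently as the loop advances. A small sketch using the same client and settings:

for asset in client.assets.list(RESOURCE_GROUP_NAME, ACCOUNT_NAME):
    logging.info('asset: %s (id: %s)', asset.name, asset.asset_id)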
import os
import random

from azure.identity import DefaultAzureCredential
from azure.mgmt.media import AzureMediaServices

# Get the default Azure credential from the environment variables
# AZURE_CLIENT_ID, AZURE_CLIENT_SECRET and AZURE_TENANT_ID
default_credential = DefaultAzureCredential()

# Get the environment variables SUBSCRIPTIONID, RESOURCEGROUP,
# STORAGEACCOUNTNAME and AZURE_USER_ASSIGNED_IDENTITY
subscription_id = os.getenv('SUBSCRIPTIONID')
resource_group = os.getenv('RESOURCEGROUP')
storage_account_name = os.getenv('STORAGEACCOUNTNAME')
managed_identity_name = os.getenv('AZURE_USER_ASSIGNED_IDENTITY')

# A random suffix added to resource names so repeated test runs don't collide.
uniqueness = random.randint(0, 9999)

# The AMS client
print("Creating AMS client")
client = AzureMediaServices(default_credential, subscription_id)

# The new storage account name and the resource ID of the user-assigned
# managed identity that will be used
account_name = f'testaccount{uniqueness}'
managed_identity_resource = (
    f"/subscriptions/{subscription_id}/resourceGroups/{resource_group}"
    f"/providers/Microsoft.ManagedIdentity/userAssignedIdentities/{managed_identity_name}")

# Set this to one of the available region names, for example: japanwest, japaneast,
# eastasia, southeastasia, westeurope, northeurope, eastus, westus, australiaeast,
# australiasoutheast, eastus2, centralus, brazilsouth, centralindia, westindia,
# southindia, northcentralus, southcentralus, uksouth, ukwest, canadacentral,
# canadaeast, westcentralus, westus2, koreacentral, koreasouth, francecentral,
# francesouth, southafricanorth, southafricawest, uaecentral, uaenorth,
# germanywestcentral, germanynorth, switzerlandwest, switzerlandnorth, norwayeast
account_location = 'westus'

# Check that an existing storage account name was provided
if storage_account_name is None:
    raise Exception("No storage account name provided in .env file.")
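The sample presumably goes on to create the new storage account named above. A hedged sketch of that step with azure-mgmt-storage (an assumed dependency; the begin_create API is from the v16+ management SDK):

from azure.mgmt.storage import StorageManagementClient
from azure.mgmt.storage.models import Sku, StorageAccountCreateParameters

storage_client = StorageManagementClient(default_credential, subscription_id)
poller = storage_client.storage_accounts.begin_create(
    resource_group,
    account_name,
    StorageAccountCreateParameters(
        sku=Sku(name='Standard_LRS'),   # assumed SKU for a test account
        kind='StorageV2',
        location=account_location))
print(f"Created storage account {poller.result().name}")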
import json
import logging
import os
from datetime import datetime, timedelta

import adal
import azure.functions as func
from azure.identity import DefaultAzureCredential
from azure.mgmt.media import AzureMediaServices
from azure.mgmt.media.models import Asset, Job, JobInputHttp, JobOutputAsset
from azure.storage.filedatalake import (
    DataLakeServiceClient, FileSasPermissions, generate_file_sas)
from msrestazure.azure_active_directory import AdalAuthentication
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD

def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    # Parse the blob URL out of the event payload.
    blob_url = event.get_json().get('url')
    logging.info('blob URL: %s', blob_url)
    blob_name = blob_url.split("/")[-1].split("?")[0]
    logging.info('blob name: %s', blob_name)
    origin_container_name = blob_url.split("/")[-2].split("?")[0]
    logging.info('container name: %s', origin_container_name)
    storage_account_name = blob_url.split("//")[1].split(".")[0]
    logging.info('storage account name: %s', storage_account_name)

    ams_account_name = os.getenv('ACCOUNTNAME')
    resource_group_name = os.getenv('RESOURCEGROUP')
    subscription_id = os.getenv('SUBSCRIPTIONID')
    client_id = os.getenv('AZURE_CLIENT_ID')
    client_secret = os.getenv('AZURE_CLIENT_SECRET')
    TENANT_ID = os.getenv('AZURE_TENANT_ID')
    storage_blob_url = 'https://' + storage_account_name + '.blob.core.windows.net/'
    transform_name = 'faceredact'

    LOGIN_ENDPOINT = AZURE_PUBLIC_CLOUD.endpoints.active_directory
    RESOURCE = AZURE_PUBLIC_CLOUD.endpoints.active_directory_resource_id
    logging.info('login_endpoint: %s', LOGIN_ENDPOINT)
    logging.info('tenant_id: %s', TENANT_ID)

    out_asset_name = 'faceblurringOutput_' + datetime.utcnow().strftime(
        "%m-%d-%Y_%H:%M:%S")
    out_alternate_id = 'faceblurringOutput_' + datetime.utcnow().strftime(
        "%m-%d-%Y_%H:%M:%S")
    out_description = 'Redacted video with blurred faces'

    # Sign in to AMS with the service principal.
    context = adal.AuthenticationContext(LOGIN_ENDPOINT + "/" + TENANT_ID)
    credentials = AdalAuthentication(
        context.acquire_token_with_client_credentials,
        RESOURCE, client_id, client_secret)
    client = AzureMediaServices(credentials, subscription_id)

    # Create the output asset that the redacted video will land in.
    output_asset = Asset(alternate_id=out_alternate_id,
                         description=out_description)
    client.assets.create_or_update(
        resource_group_name, ams_account_name, out_asset_name, output_asset)

    # Build a user-delegation SAS URL for the input blob.
    token_credential = DefaultAzureCredential()
    datalake_service_client = DataLakeServiceClient(
        account_url=storage_blob_url, credential=token_credential)
    delegation_key = datalake_service_client.get_user_delegation_key(
        key_start_time=datetime.utcnow(),
        key_expiry_time=datetime.utcnow() + timedelta(hours=1))
    sas_token = generate_file_sas(
        account_name=storage_account_name,
        file_system_name=origin_container_name,
        directory_name="",
        file_name=blob_name,
        credential=delegation_key,
        permission=FileSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
        protocol="https")
    sas_url = "{}?{}".format(blob_url, sas_token)
    logging.info(sas_url)

    # Submit the face-redaction job against the 'faceredact' transform.
    job_name = 'Faceblurring-job_' + datetime.utcnow().strftime(
        "%m-%d-%Y_%H:%M:%S")
    job_input = JobInputHttp(label="Video_asset", files=[sas_url])
    job_output = JobOutputAsset(asset_name=out_asset_name)
    job_parameters = Job(input=job_input, outputs=[job_output])
    client.jobs.create(
        resource_group_name, ams_account_name, transform_name, job_name,
        parameters=job_parameters)
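The function returns as soon as the job is submitted. Event Grid notifications are the recommended way to track completion, but for a quick test you can poll client.jobs.get; a minimal sketch reusing the names above:

import time

job = client.jobs.get(resource_group_name, ams_account_name,
                      transform_name, job_name)
while job.state not in ('Finished', 'Error', 'Canceled'):
    time.sleep(10)  # JobState values compare equal to their string names
    job = client.jobs.get(resource_group_name, ams_account_name,
                          transform_name, job_name)
logging.info('job ended in state: %s', job.state)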
import uuid

import adal
from azure.mgmt.media import AzureMediaServices
from azure.mgmt.media.models import (
    JobInputAsset, JobInputHttp, JobState, VideoAnalyzerPreset)
from msrestazure.azure_active_directory import AdalAuthentication
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD

# get_or_create_transform, create_input_asset, create_output_asset, submit_job,
# wait_for_job_to_finish and download_output_asset are helpers defined
# elsewhere in this sample.

def video_analyze():
    """VideoAnalyze.

    Analyze a video and download the insights produced by the analysis.
    """
    # Your configuration for your AMS account
    account_name = 'ams account'
    resource_group_name = 'ResourceGroup'
    subscription_id = '00000000-0000-0000-0000-000000000000'
    aad_client_id = '00000000-0000-0000-0000-000000000000'
    aad_secret = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
    tenant_id = 'tenant domain or tenant id'

    # Your input file name and output folder name for encoding
    input_mp4_file_name = 'ignite.mp4'
    input_mp4_files_baseurl = 'https://shigeyfampdemo.azurewebsites.net/videos/'
    input_mp4_files = ['ignite.mp4']
    output_folder_name = 'output'
    transform_name = 'MyVideoAnalyzerTransformName'

    # The Azure Media Services (AMS) client
    # You can use this object to perform operations against your AMS account.
    login_endpoint = AZURE_PUBLIC_CLOUD.endpoints.active_directory
    resource = AZURE_PUBLIC_CLOUD.endpoints.active_directory_resource_id
    context = adal.AuthenticationContext(login_endpoint + '/' + tenant_id)
    credentials = AdalAuthentication(
        context.acquire_token_with_client_credentials,
        resource, aad_client_id, aad_secret)
    client = AzureMediaServices(credentials, subscription_id)

    # Create a unique suffix so that names don't collide if you run the
    # sample multiple times without cleaning up.
    uniqueness = str(uuid.uuid1())
    job_name = 'job-{}'.format(uniqueness)
    output_asset_name = 'output-{}'.format(uniqueness)
    input_asset_name = 'input-{}'.format(uniqueness)

    # Ensure that you have the desired video analyzer Transform. This is
    # really a one-time setup operation.
    get_or_create_transform(client, resource_group_name, account_name,
                            transform_name,
                            VideoAnalyzerPreset(audio_language='en-US'))

    # Create a new job input.
    # Option 1) Create a new job input with an Asset and upload the specified
    # local video file into it.
    # create_input_asset(client, resource_group_name, account_name,
    #                    input_asset_name, input_mp4_file_name)
    # job_input = JobInputAsset(asset_name=input_asset_name)

    # Option 2) Create a new job input with HTTP
    job_input = JobInputHttp(base_uri=input_mp4_files_baseurl,
                             files=input_mp4_files)

    # Output from the encoding Job must be written to an Asset, so create one
    output_asset = create_output_asset(client, resource_group_name,
                                       account_name, output_asset_name)

    job = submit_job(client, resource_group_name, account_name,
                     transform_name, job_name, job_input, output_asset.name)

    # This demo code polls for Job status. Polling is not a recommended
    # practice for production applications because of the latency it
    # introduces, and overuse of this API may trigger throttling; use
    # Event Grid instead.
    job = wait_for_job_to_finish(client, resource_group_name, account_name,
                                 transform_name, job_name)

    if job.state == JobState.finished:
        print('Job finished.')
        download_output_asset(client, resource_group_name, account_name,
                              output_asset.name, output_folder_name)

    print('Done.')
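The helpers this sample calls are defined elsewhere in it. As an illustration, get_or_create_transform might look roughly like this under the same adal-era azure-mgmt-media SDK, where create_or_update is idempotent, so no existence check is needed:

from azure.mgmt.media.models import TransformOutput

def get_or_create_transform(client, resource_group_name, account_name,
                            transform_name, preset):
    # create_or_update either creates the transform or overwrites it in place.
    return client.transforms.create_or_update(
        resource_group_name, account_name, transform_name,
        outputs=[TransformOutput(preset=preset)])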
input_asset = Asset(alternate_id=in_alternate_id, description=in_description)

# Set the attributes of the output Asset using the random number
out_asset_name = 'outputassetName' + str(thisRandom)
out_alternate_id = 'outputALTid' + str(thisRandom)
out_description = 'outputdescription' + str(thisRandom)

# From the SDK:
# Asset(*, alternate_id: str = None, description: str = None,
#       container: str = None, storage_account_name: str = None, **kwargs) -> None
output_asset = Asset(alternate_id=out_alternate_id, description=out_description)

# The AMS client
print("Creating AMS client")
# From the SDK:
# AzureMediaServices(credentials, subscription_id, base_url=None)
client = AzureMediaServices(credentials, subscription_id)

# Create an input Asset
print("Creating input asset " + in_asset_name)
# From the SDK:
# create_or_update(resource_group_name, account_name, asset_name, parameters,
#                  custom_headers=None, raw=False, **operation_config)
inputAsset = client.assets.create_or_update(
    resource_group_name, account_name, in_asset_name, input_asset)

# An AMS asset is a container with a specific id that has "asset-" prepended
# to the GUID. So, you need to build that id to identify the container where
# Storage will upload the video (as a block blob).
in_container = 'asset-' + inputAsset.asset_id

# Create an output Asset
print("Creating output asset " + out_asset_name)
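To actually land the video in in_container, the adal-era samples typically use the legacy v2 azure-storage-blob SDK. A sketch, assuming an account key in AZURE_STORAGE_KEY; storage_account_name and video_file_name are hypothetical names not defined in the snippet above:

import os
from azure.storage.blob import BlockBlobService  # azure-storage-blob v2.x

blob_service = BlockBlobService(
    account_name=storage_account_name,           # hypothetical
    account_key=os.environ['AZURE_STORAGE_KEY'])
blob_service.create_blob_from_path(
    in_container, video_file_name, video_file_name)  # hypothetical file name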
import os

import adal
from azure.mgmt.media import AzureMediaServices
from azure.mgmt.media.models import BuiltInStandardEncoderPreset, TransformOutput
from azure.storage.blob import BlockBlobService  # legacy v2 storage SDK
from flask import Flask
from msrestazure.azure_active_directory import AdalAuthentication
from msrestazure.azure_cloud import AZURE_PUBLIC_CLOUD

# TENANT_ID, CLIENT, KEY and SUBSCRIPTION_ID are service-principal settings
# defined elsewhere (for example, loaded from environment variables).

ACCOUNT_NAME = 'blueplayermedia'
RESOURCE_GROUP_NAME = 'BluePlayer'
AZURE_STORAGE_KEY = os.environ['AZURE_STORAGE_KEY']
AZURE_ACCOUNT = 'blueplayerstorage'
DEFAULT_CONTAINER = "blue-player"
TRANSFORM_NAME = "blue-player-transform"

LOGIN_ENDPOINT = AZURE_PUBLIC_CLOUD.endpoints.active_directory
RESOURCE = AZURE_PUBLIC_CLOUD.endpoints.active_directory_resource_id
context = adal.AuthenticationContext(LOGIN_ENDPOINT + '/' + TENANT_ID)
credentials = AdalAuthentication(
    context.acquire_token_with_client_credentials, RESOURCE, CLIENT, KEY)

# The AMS client
client = AzureMediaServices(credentials, SUBSCRIPTION_ID)

# Blob service client
blob_service = BlockBlobService(
    account_name=AZURE_ACCOUNT, account_key=AZURE_STORAGE_KEY)

ALLOWED_EXTENSIONS = set(['mp3'])

app = Flask(__name__)
app.secret_key = os.environ['FLASK_SECRET']
container = DEFAULT_CONTAINER


@app.route("/")
def hello():
    transforms = [
        TransformOutput(preset=BuiltInStandardEncoderPreset(
            # The source snippet is truncated here; 'AdaptiveStreaming' is an
            # assumed preset name used only to close the expression.
            preset_name='AdaptiveStreaming'))
    ]
input_asset = Asset(alternate_id=in_alternate_id, description=in_description)

# Set the attributes of the output Asset using the random number
out_asset_name = 'outputassetName' + str(uniqueness)
out_alternate_id = 'outputALTid' + str(uniqueness)
out_description = 'outputdescription' + str(uniqueness)

# From the SDK:
# Asset(*, alternate_id: str = None, description: str = None,
#       container: str = None, storage_account_name: str = None, **kwargs) -> None
output_asset = Asset(alternate_id=out_alternate_id, description=out_description)

# The AMS client
print("Creating AMS client")
# From the SDK:
# AzureMediaServices(credentials, subscription_id, base_url=None)
client = AzureMediaServices(default_credential, os.getenv('SUBSCRIPTIONID'))

# Create an input Asset
print("Creating input asset " + in_asset_name)
# From the SDK:
# create_or_update(resource_group_name, account_name, asset_name, parameters,
#                  custom_headers=None, raw=False, **operation_config)
inputAsset = client.assets.create_or_update(
    os.getenv("RESOURCEGROUP"), os.getenv("ACCOUNTNAME"),
    in_asset_name, input_asset)

# An AMS asset is a container with a specific id that has "asset-" prepended
# to the GUID. So, you need to build that id to identify the container where
# Storage will upload the video (as a block blob).
in_container = 'asset-' + inputAsset.asset_id

# Create an output Asset, mirroring the input-asset call above
outputAsset = client.assets.create_or_update(
    os.getenv("RESOURCEGROUP"), os.getenv("ACCOUNTNAME"),
    out_asset_name, output_asset)
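Because this variant authenticates with DefaultAzureCredential, the matching upload step would use the v12 azure-storage-blob SDK rather than the legacy one. A sketch in which the account URL is built from the STORAGEACCOUNTNAME variable and the local file name 'ignite.mp4' is an assumption; the credential needs a data-plane role such as Storage Blob Data Contributor on the account:

from azure.storage.blob import BlobServiceClient  # azure-storage-blob v12

blob_service_client = BlobServiceClient(
    account_url=f"https://{os.getenv('STORAGEACCOUNTNAME')}.blob.core.windows.net",
    credential=default_credential)
blob_client = blob_service_client.get_blob_client(
    container=in_container, blob='ignite.mp4')  # blob name assumed
with open('ignite.mp4', 'rb') as data:
    blob_client.upload_blob(data, overwrite=True)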