Ejemplo n.º 1
0
def trigger_renditions_bucket_event(data, context):
    """Background Cloud Function to be triggered by Cloud Storage.
       This function retrieves a source video and triggers
       the generation of renditions by means of an http asynchronous
       call to the create_renditions_http function

    Args:
        data (dict): The Cloud Functions event payload.
        context (google.cloud.functions.Context): Metadata of triggering event.
    Returns:
        None, the renditions cloud function are triggered asynchronously
    """

    # Object name of the uploaded source video, e.g. 'folder/video.mp4'.
    name = data['name']

    # Create the folder for the renditions
    params_folder = '/tmp/{}'.format(os.path.dirname(name))
    if not os.path.exists(params_folder):
        os.makedirs(params_folder)

    resolutions = [1080, 720, 480, 384, 288, 144]
    crfs = [45, 40, 32, 25, 21, 18, 14]

    # One empty parameters file per (resolution, crf) pair; uploading it to
    # PARAMETERS_BUCKET is what triggers the rendition-generating function.
    for resolution in resolutions:
        for crf in crfs:
            # Stripping dirname(name) from params_folder leaves '/tmp/', so the
            # local path becomes '/tmp//{name}-{resolution}-{crf}.json' — the
            # subfolder inside `name` was already created above.
            local_file = '{}/{}-{}-{}.json'.format(
                params_folder.replace(os.path.dirname(name), ''), name,
                resolution, crf)
            remote_file = '{}/{}-{}.json'.format(name, resolution, crf)
            # Context manager guarantees the handle is closed even if the
            # write path is bad (original open()/close() pair could leak).
            with open(local_file, "w"):
                pass
            gce_utils.upload_blob(PARAMETERS_BUCKET, local_file, remote_file)

    return 'Renditions triggered for {}'.format(name)
Ejemplo n.º 2
0
def create_source_http(request):
    """HTTP Cloud Function.
    Args:
        request (flask.Request): The request object.
        <http://flask.pocoo.org/docs/1.0/api/#flask.Request>
    Returns:
        The status message if successful
    """
    request_json = request.get_json(silent=True)
    request_args = request.args

    # Accept parameters either as a JSON body or as query-string arguments;
    # the JSON body takes precedence when both are present.
    payload = request_json if request_json else request_args
    if not payload:
        return 'Unable to read request'

    playlist_url = payload['playlist_url']
    video_id = payload['video_id']
    extension = payload['extension']
    duration = payload['duration']

    print(playlist_url, video_id, extension)

    # Make the ffmpeg binary available in the function's environment.
    ffmpeg_installer.install()

    local_file = '/tmp/{}.{}'.format(video_id, extension)
    destination_blob_name = '{}.{}'.format(video_id, extension)

    # Skip the download/upload round-trip when the source is already stored.
    if gce_utils.check_blob(SOURCES_BUCKET, destination_blob_name):
        print('Video already uploaded, skipping')
    elif download_video_from_url(playlist_url, duration, local_file,
                                 extension):
        gce_utils.upload_blob(SOURCES_BUCKET, local_file,
                              destination_blob_name)

    return 'FINISHED Processing source: {}'.format(video_id)
Ejemplo n.º 3
0
def create_renditions_bucket_event(data, context):
    """
    HTTP Cloud Function to generate video assets. Triggered by files
    deposited in PARAMETERS_BUCKET
    Args:
        data: The triggering object, containing name, resolution and quantization parameter
    Returns:
        The status message if successful
    """

    # data['name'] looks like '<source_name>/<resolution>-<crf>.json'
    # (format produced by trigger_renditions_bucket_event) — TODO confirm.
    source_name = os.path.dirname(data['name'])
    # Stripping the dirname leaves '/<resolution>-<crf>.json'; [1:] drops
    # the leading '/'.
    params_name = data['name'].replace(source_name, '')
    resolution = params_name.split('-')[0][1:]
    crf_value = params_name.split('-')[1].replace('.json', '')

    print('Processing source: {} at resolution {}'.format(
        source_name, resolution))

    # Locate the ffmpeg binary
    ffmpeg_installer.install()

    # Create the folder for the source asset
    source_folder = '/tmp/source'

    # Create the folder for the renditions
    renditions_folder = '/tmp/renditions'
    if not os.path.exists(renditions_folder):
        os.makedirs(renditions_folder)

    # Get the file that has been uploaded to GCS
    asset_path = {'path': '{}/{}'.format(source_folder, source_name)}

    # Check if the source is not already in the path
    if not os.path.exists(asset_path['path']):
        print('Retrieving video from {}'.format(asset_path['path']))
        gce_utils.download_to_local(SOURCES_BUCKET, asset_path['path'],
                                    source_name)

    print('Processing resolution', resolution)

    bucket_path = '{}_{}/{}'.format(resolution, crf_value, source_name)
    print('Bucket path:', bucket_path)

    # BUGFIX: the original only guarded the folder creation with check_blob,
    # then unconditionally ran renditions_worker / upload_blob / os.remove —
    # regenerating and re-uploading renditions that already exist (and
    # failing on os.remove when the output folder was never created).
    # The whole pipeline now runs only when the rendition is missing.
    if not gce_utils.check_blob(RENDITIONS_BUCKET, bucket_path):
        # Create folder for each rendition
        crf_path = '{}/{}_{}/{}'.format(renditions_folder,
                                        resolution, crf_value,
                                        os.path.dirname(source_name))
        if not os.path.exists(crf_path):
            print('Creating rendition folder:', crf_path)
            os.makedirs(crf_path)

        # Generate renditions with ffmpeg
        renditions_worker(asset_path['path'], source_folder, CODEC_TO_USE,
                          resolution, crf_value, renditions_folder)

        #compute_metrics(asset_path, renditions_paths)

        # Upload renditions to GCE storage bucket
        local_path = '{}/{}_{}/{}'.format(renditions_folder, resolution,
                                          crf_value, source_name)
        gce_utils.upload_blob(RENDITIONS_BUCKET, local_path, bucket_path)
        # Free /tmp space — Cloud Functions share it with the runtime memory.
        os.remove(local_path)
    else:
        print('Rendition already exists, skipping')

    return 'FINISHED Processing source: {} at resolution {}'.format(
        source_name, resolution)