    def encode(self, input_name, outputs):
        encoder = elastictranscoder.connect_to_region(
            self.aws_region,
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key)

        self.message = encoder.create_job(self.pipeline_id, input_name, outputs=outputs)
Example #2
def start_transcoding_job(input, other, outputs):
    transcoder = elastictranscoder.connect_to_region(AWS_REGION)
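    # create_job returns the parsed API response; hand back the new job's id on success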
    job = transcoder.create_job(settings.AWS_PIPELINE_ID, input, other,
                                outputs)

    if job:
        return job['Job']['Id']
    else:
        return False
Example #3
    def encode(self, input_name, outputs):
        encoder = elastictranscoder.connect_to_region(
            self.aws_region,
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key)

        self.message = encoder.create_job(self.pipeline_id,
                                          input_name,
                                          outputs=outputs)
Example #4
def check_connection():
    try:
        # connect
        t = elastictranscoder.connect_to_region(AWS_REGION)

        # check the pipeline we have defined in the settings
        t.read_pipeline(settings.AWS_PIPELINE_ID)
        # check the presets we have defined
        t.read_preset(settings.AWS_TRANSCODE_PRESET_ID)
        t.read_preset(settings.AWS_TRANSCODE_PRESET_ID2)
        return True
    except Exception as e:
        return str(e)
Example #5
def index():
    transcode = elastictranscoder.connect_to_region(AWS_REGION, aws_access_key_id=AWS_ACCESS_KEY_ID,
                                                    aws_secret_access_key=AWS_SECRET_KEY)

    import pprint

    pp = pprint.PrettyPrinter()

    presets = []
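    # Collect the id, name, description and a pretty-printed dump of every available preset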
    for preset in transcode.list_presets()['Presets']:
        presets.append({'Id': preset['Id'], 'Name': preset['Name'], 'Description': preset['Description'], 'Data': pp.pformat(preset)})

    return render_template("index.html", AWS_ACCESS_KEY_ID=AWS_ACCESS_KEY_ID, S3_BASE_URL=S3_BASE_URL, S3_BUCKET_NAME=S3_BUCKET_NAME, S3_BUCKET_URL=S3_BUCKET_URL,
                           S3_FOLDER=S3_FOLDER, presets=sorted(presets, key=lambda k: k['Name']))
Example #6
    def handle_noargs(self, *args, **options):
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID,
                            settings.AWS_SECRET_ACCESS_KEY)
        self.bucket = conn.get_bucket("smallslivevid")
        self.transcoder = elastictranscoder.connect_to_region('us-east-1')
        self.files_transcoded = 0
        count = 0
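        # Events before this cutoff get the 4:3 360p preset; later ones get the 16:9 preset (selected below)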
        cutoff_date = timezone.datetime(2013, 11, 1).date()

        self.params_in = {
            'AspectRatio': 'auto',
            'Container': 'auto',
            'FrameRate': 'auto',
            'Interlaced': 'auto',
            'Key': '',
            'Resolution': 'auto'
        }
        self.params_out = {
            'Key': '',
            'PresetId': '1351620000001-000050',  # 360p preset
            'Rotate': 'auto',
            'ThumbnailPattern': ''
        }

        videos = Recording.objects.filter(
            media_file__media_type='video',
            media_file__sd_video_file="").order_by('event__start')
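        # Queue a 360p transcode for every video that does not yet have an SD file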
        for video in videos:
            original_file = str(video.media_file.file)
            folder, file = original_file.split('/')
            file_name, ext = os.path.splitext(file)
            filename_360p = os.path.join(folder, '360p',
                                         '{0}_360p{1}'.format(file_name, ext))
            thumbnail_filename = os.path.join(
                folder, 'thumbnails', '{0}_{{count}}'.format(file_name))
            if video.event.listing_date() < cutoff_date:
                self.params_out[
                    'PresetId'] = '1351620000001-000050'  # 360p 4:3 preset
            else:
                self.params_out[
                    'PresetId'] = '1351620000001-000040'  # 360p 16:9 preset
            self.transcode_video(original_file, filename_360p,
                                 thumbnail_filename)
            count += 1
            if count % 50 == 0:
                print count

        self.stdout.write("{0} files transcoded".format(self.files_transcoded))
Example #7
def transcode_video(upload_result):
    """
    If the uploaded file is a video, then trigger a transcoding job.

    :param upload_result: The result of the previous celery task (upload_to_s3)
    """
    aws_key = upload_result['aws_key']
    aws_secret_key = upload_result['aws_secret_key']
    source_file = upload_result['dest_filename']
    pipeline_name = '%s-pipeline' % upload_result['pipeline_name']

    transcoder = elastictranscoder.connect_to_region(
        'eu-west-1',
        aws_access_key_id=aws_key,
        aws_secret_access_key=aws_secret_key
    )
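    # Build a name -> id map of the account's pipelines so the target pipeline can be looked up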
    pipelines = dict((
        (x.get('Name'), x.get('Id'))
        for x in transcoder.list_pipelines().get('Pipelines', [])
    ))
    if pipeline_name in pipelines:
        pipeline_id = pipelines[pipeline_name]
    else:
        logger.error('Pipeline %s does not exist. Please contact sysadmin',
                     pipeline_name)
        return

    logger.info('Creating transcoding job for %s', source_file)
    transcode_input = {
        'Key': source_file,
        'FrameRate': 'auto',
        'Resolution': 'auto',
        'AspectRatio': 'auto',
        'Interlaced': 'auto',
        'Container': 'auto'
    }
    transcode_output = {
        # TODO: Configurable preset
        'PresetId': '1351620000001-000020',
        'Key': source_file
    }
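    # Submit the job; the input and output dicts are passed straight through to boto's create_job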
    transcoder.create_job(
        pipeline_id=pipeline_id,
        input_name=transcode_input,
        output=transcode_output
    )
    return upload_result
Example #8
def submit_transcode_job(obj):
    if len(obj.video_url) > 12:
        s3_url = obj.video_url
    else:
        return False
    try:
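        # Pull the object key out of the S3 URL and build an .mp4 name for the encoded output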
        full_name = s3_url.split('.com/')[-1]
        name = '/'.join(full_name.split('/')[1:])
        name = name.split('.')[0] + '.mp4'
    except IndexError:
        return False

    print name, full_name

    if isinstance(obj, Race):
        pipeline = app.config['RACE_PIPELINE']
    elif isinstance(obj, Session):
        pipeline = app.config['SESS_PIPELINE']
    else:
        return False

    print pipeline

    prefix = 'encoded/'
    inputs = {
        'Key': full_name,
    }
    outputs = [{
        'Key': name,
        'PresetId': app.config['PRESET']
    }]
    et = elastictranscoder.connect_to_region('us-east-1')

    bucket = 'race-videos' if isinstance(obj, Race) else 'session-videos'
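    # Public URL where the encoded file should appear, assuming the pipeline writes to this bucket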
    processed_url = 'https://s3.amazonaws.com/%s/encoded/%s' % (bucket, name)

    if app.config['TRANSCODE']:
        try:
            et.create_job(pipeline, input_name=inputs,
                    outputs=outputs, output_key_prefix=prefix)
            obj.video_processed_url = processed_url
        except:
            return False

    return True
Example #9
    def handle_noargs(self, *args, **options):
        conn = S3Connection(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
        self.bucket = conn.get_bucket("smallslivevid")
        self.transcoder = elastictranscoder.connect_to_region('us-east-1')
        self.files_transcoded = 0
        count = 0
        cutoff_date = timezone.datetime(2013, 11, 1).date()

        self.params_in = {'AspectRatio': 'auto',
                          'Container': 'auto',
                          'FrameRate': 'auto',
                          'Interlaced': 'auto',
                          'Key': '',
                          'Resolution': 'auto'}
        self.params_out = {'Key': '',
                           'PresetId': '1351620000001-000050',  # 360p preset
                           'Rotate': 'auto',
                           'ThumbnailPattern': ''}

        videos = Recording.objects.filter(media_file__media_type='video', media_file__sd_video_file="").order_by('event__start')
        for video in videos:
            original_file = str(video.media_file.file)
            folder, file = original_file.split('/')
            file_name, ext = os.path.splitext(file)
            filename_360p = os.path.join(folder, '360p', '{0}_360p{1}'.format(file_name, ext))
            thumbnail_filename = os.path.join(folder, 'thumbnails', '{0}_{{count}}'.format(file_name))
            if video.event.listing_date() < cutoff_date:
                self.params_out['PresetId'] = '1351620000001-000050'  # 360p 4:3 preset
            else:
                self.params_out['PresetId'] = '1351620000001-000040'  # 360p 16:9 preset
            self.transcode_video(original_file, filename_360p, thumbnail_filename)
            count += 1
            if count % 50 == 0:
                print count

        self.stdout.write("{0} files transcoded".format(self.files_transcoded))
Example #10
def transcode():
    media_url = request.form['media_url']
    preset_id = request.form['preset_id']

    media_file = urllib.unquote(media_url).decode('utf8').rsplit('/', 1)[-1]

    transcode_input = {
        'Key': S3_FOLDER + '/' + media_file,
        'Container': 'auto',
        'AspectRatio': 'auto',
        'FrameRate': 'auto',
        'Resolution': 'auto',
        'Interlaced': 'auto'
    }

    transcode_outputs = [
        {
            'Key': path.splitext(media_file)[0],
            'PresetId': preset_id,
            'Rotate': 'auto',
            'ThumbnailPattern': 'thumbnail-{count}'
        }
    ]

    transcode = elastictranscoder.connect_to_region(AWS_REGION, aws_access_key_id=AWS_ACCESS_KEY_ID,
                                                    aws_secret_access_key=AWS_SECRET_KEY)
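    # Look up the id of the pipeline named in the app config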
    pipeline_id = None
    for pipeline in transcode.list_pipelines()['Pipelines']:
        if pipeline['Name'] == app.config['ELASTIC_TRANSCODER_PIPELINE']:
            pipeline_id = pipeline['Id']

    job = transcode.create_job(pipeline_id=pipeline_id, input_name=transcode_input, outputs=transcode_outputs, output_key_prefix=S3_FOLDER + '/')

    pp = pprint.PrettyPrinter(indent=4)

    return render_template('transcode.html', job=pp.pformat(job))
Example #11
    def get_et(self):
        return elastictranscoder.connect_to_region(
            self.aws_region,
            aws_access_key_id=self.aws_access_key_id,
            aws_secret_access_key=self.aws_secret_access_key)