Code Example #1
File: app.py Project: wiwa1978/heroku-aws
def startTranscodingjob(input_filename, output_filename):
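    # Connect to Elastic Transcoder; with no arguments, boto reads AWS credentials from its config file or environment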
    conn_transcode = boto.connect_elastictranscoder()
    #conn_transcode.create_pipeline("pipelinewymediatest","wymedia.transcoding.in","wymedia.transcoding.out","arn:aws:iam::745665279123:role/Elastic_Transcoder_Default_Role",{'Progressing': '', 'Completed': '', 'Warning': '', 'Error': ''})
    
    transcode_input = {
        'Container': 'auto',
        'Key': input_filename,
        'Resolution': 'auto',
        'AspectRatio': 'auto',
        'FrameRate': 'auto',
        'Interlaced': 'auto'
    }

    transcode_output = {
        'Rotate': 'auto',
        'PresetId': '1351620000001-100080',
        'Key': output_filename
    }

    
    job = conn_transcode.create_job("1395751910303-92ulfo", transcode_input, transcode_output)
    

    if job:
        return 0
    else:
        return -1
Code Example #2
File: models.py Project: coryg/django-video-platform
	def push_to_aws(self):
		et = boto.connect_elastictranscoder(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)

		renders = self.render_set.all()

		transInput = {
			'Key': self.input_filename,
			'FrameRate': 'auto',
			'Resolution': 'auto',
			'AspectRatio': 'auto',
			'Interlaced': 'auto',
			'Container': 'auto'
		}
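		# Build one Elastic Transcoder output definition for each render attached to this video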
		outputs = []
		for r in renders:
			outputs.append(r.get_aws_output())

		et.create_job(
			pipeline_id=self.pipeline.aws_pipeline_id,
			input_name=transInput,
			outputs=outputs,
			)

		self.status = 'submitted'
		self.save()
Code Example #3
File: models.py Project: coryg/django-video-platform
	def import_videos(self):
		s3 = boto.connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
		et = boto.connect_elastictranscoder(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
		b = s3.get_bucket(self.input_bucket)

		default_presets = RenderPreset.objects.filter(default=True)

		keys = b.list()

		for key in keys:
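			# Skip folder placeholder keys (names ending in '/'); create a Video for everything else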
			if not key.key.endswith('/'):
				Video.create_video(key.key, self, default_presets)
Code Example #4
File: models.py Project: coryg/django-video-platform
	def sync_with_aws():
		#LOG THIS WITH THE SYNC DB
		AWSSyncHistory.create_sync('PIPELINE')

		et = boto.connect_elastictranscoder(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
		pipelines = et.list_pipelines()
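		# Upsert: create a local Pipeline record for any AWS pipeline not seen before, then refresh its fields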
		for p in pipelines['Pipelines']:
			id = p['Id']
			found_object = Pipeline.objects.filter(aws_pipeline_id=id)
			if found_object.count() == 0:
				found_object = Pipeline()
				found_object.aws_pipeline_id = id
				found_object.auto_process_inputs = False
			else:
				found_object = found_object[0]

			found_object.name = p['Name']
			found_object.input_bucket = p['InputBucket']
			found_object.output_bucket = p['ContentConfig']['Bucket']
			found_object.save()
Code Example #5
File: models.py Project: coryg/django-video-platform
	def sync_with_aws():
		#LOG THIS WITH THE SYNC DB
		AWSSyncHistory.create_sync('RENDER_PRESET')

		et = boto.connect_elastictranscoder(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY)
		presets = et.list_presets()
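		# Upsert: mirror each AWS preset as a local RenderPreset record and refresh its fields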
		for p in presets['Presets']:
			id = p['Id']
			found_object = RenderPreset.objects.filter(aws_preset_id=id)
			if found_object.count() == 0:
				found_object = RenderPreset()
				found_object.aws_preset_id = id
				found_object.default = False
			else:
				found_object = found_object[0]

			found_object.name = p['Name']
			found_object.description = p['Description']
			found_object.container = p['Container']
			found_object.save()
Code Example #6
File: tasks.py Project: neuman/indiepen
def poll_elastic_transcoder(media_id, job_id, update_media=True):
    """Watch an ET job and update the media status when it's done.

    args:
        media_id: Media object we're updating
        job_id: ET job ID to monitor
        update_media: if False, no changes will be made to the Media object
    """
    logger.warning("Polling Now for: %s", media_id)
    media = Media.objects.get(id=media_id)
    media.save()

    try:
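        # Build the Elastic Transcoder connection with the explicit credentials from settings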
        transcoder = boto.connect_elastictranscoder(
            settings.AWS_ACCESS_KEY_ID,
            settings.AWS_SECRET_ACCESS_KEY,
        )
    except boto.exception.BotoServerError as exc:
        logger.error("Unable to connect to Elastic Transcoder!", exc_info=exc)
        if update_media:
            media.status = 'E'
            media.save()
        raise
Code Example #7
    def connect(self):
        """
        Connects to AWS Elastic Transcoder and returns a connection handle.
        """
        return boto.connect_elastictranscoder()
Code Example #8
File: transinterface.py Project: divcon/intermediate
    def __init__(self):
        # Region-specific Elastic Transcoder connection (Tokyo), using the AuthentiCation credentials
        self.aws_transcoder = boto.elastictranscoder.connect_to_region(
            'ap-northeast-1',
            aws_access_key_id=AuthentiCation.access_key,
            aws_secret_access_key=AuthentiCation.secret_key,
        )
        self.pipeline_id = '1437294072599-2bekkw'
        self.presetId = '1351620000001-000020'
Code Example #9
File: tasks.py Project: neuman/indiepen
    # No point in starting another transcode for the same media
    if media.status in ('I', 'Q'):
        logger.warning("Transcode already in progress, %s, did not start transcoding.",
                       media_id)
        return

    '''
    # Ensure we have the original key saved
    if update_media:
        media.update(set__original_s3_key=media.s3_key)
    '''

    try:
        transcoder = boto.connect_elastictranscoder(
            settings.AWS_ACCESS_KEY_ID,
            settings.AWS_SECRET_ACCESS_KEY,
        )
    except boto.exception.BotoServerError as exc:
        logger.error("Unable to connect to Elastic Transcoder!", exc_info=exc)
        if update_media:
            media.status = 'E'
            media.save()
        raise

    # Get the Pipeline ID for the named pipeline
    pipeline_id = get_pipeline_id(transcoder, pipeline_name)
    if not pipeline_id:
        raise KeyError("Elastic Transcoder pipeline %s not found!"
                       % pipeline_name)

    input_name = {