Example #1
0
    def get(self):
        """Handle GET: report the status of an AWS transcription job.

        Requires a ``job_name`` query argument; responds 400 with a JSON
        error payload when it is missing.  When the job status is
        ``COMPLETED``, downloads the result JSON from S3 and includes the
        transcript text in the response.
        """
        job_name = ''
        try:
            job_name = self.get_query_argument('job_name')
        except MissingArgumentError:
            # Missing required argument -> 400 with a JSON message.
            json_response = json.dumps(
                {'message': 'Not enough argument \'job_name\''},
                ensure_ascii=False)
            self.set_status(400)
            self.write(json_response)
            return

        response = aws.get_transcription_job(job_name)
        status = response['TranscriptionJob']['TranscriptionJobStatus']
        transcript = ''
        if status == 'COMPLETED':
            job_file_name = job_name + '.json'
            file_path = os.path.join('/tmp', job_file_name)
            aws.download_file(s3_bucket_name, job_file_name, file_path)

            # Use a context manager so the file handle is always closed;
            # the original open() leaked the handle.
            with open(file_path, 'r', encoding="utf-8") as f:
                json_dict = json.load(f)
            transcript = json_dict['results']['transcripts'][0]['transcript']

        json_response = json.dumps({
            'status': status,
            'transcript': transcript
        },
                                   ensure_ascii=False)
        self.write(json_response)
Example #2
0
def initialize():
    """One-time service setup.

    Optionally assumes an S3 role, verifies no lock is held, adjusts log
    levels, and restores the internal structure from the S3 backup when the
    local config directory is missing.

    Raises:
        FileExistsError: propagated from check_lock() when another instance
            already holds the lock.
    """
    if env['s3_role'] is not None:
        aws.change_s3_role(env['s3_role'])
    else:
        logger.info('not changing s3 role')

    # check_lock() raises FileExistsError when a lock already exists.  The
    # original wrapped this in a try/except that only re-raised -- a no-op --
    # so let the exception propagate naturally.
    check_lock()

    logger.info('Changing log level to {}\nnow {}'.format(
        env['log_level'], logger.level))
    change_log_level(env['log_level'])
    aws.change_log_level(env['log_level'])

    # Check if we still have the internal structure cached, if not then download backup from S3
    if not os.path.exists(
            env['config_dir']):  # If destination dir does not exists

        logger.debug('Creating dir {}'.format(env['config_dir']))
        os.makedirs(env['config_dir'])  # Create dir

        logger.info('Attempting to restore internal structure from backup')
        try:
            aws.download_file(env['bucket_dir'] + '/' + BACKUP_FILE,
                              '/tmp/' + BACKUP_FILE, env['bucket'])
            # NOTE(review): extractall on an archive from S3 can write outside
            # config_dir via path traversal; consider tarfile's filter= member
            # sanitization.  Preserved as-is to keep behavior unchanged.
            with tarfile.open('/tmp/' + BACKUP_FILE, 'r:gz') as archive:
                archive.extractall(path=env['config_dir'])
        except FileNotFoundError:
            # Best-effort restore: a missing backup is not an error.
            logger.info('No backup found in S3, continuing')
    else:
        logger.info(
            'Internal structure still available, skipping restore from backup')
Example #3
0
def check_lock():
    """Fail fast if a remote lock file exists; otherwise acquire the lock.

    Attempts to download the lock file from S3.  If the download succeeds a
    lock is already held and FileExistsError is raised; if the file is not
    found, no lock exists and lock() is called to take it.

    Raises:
        FileExistsError: when the lock file is already present in S3.
    """
    logger.debug('Checking if lock exists')
    try:
        aws.download_file(env['bucket_dir'] + '/' + LOCK_FILE,
                          '/tmp/' + LOCK_FILE, env['bucket'])
    except FileNotFoundError:
        logger.debug('No lock found, continuing')
        lock()
    else:
        # Moved out of the try body: the raise was originally inside the
        # try, which works only because FileExistsError is not caught by
        # the FileNotFoundError handler; `else` makes the intent explicit
        # and keeps the try body minimal.
        raise FileExistsError
Example #4
0
File: app.py Project: r1w1k/TUNApy
def download_file(email, filepath):
    """Delegate the download of *filepath* for *email* to the aws helper."""
    result = aws.download_file(email, filepath)
    return result
Example #5
0
def fetch_report(email, filename):
    """Download the credit report *filename* for *email* and parse it."""
    report_key = "credit/{}".format(filename)
    raw_report = aws.download_file(email, report_key, True)
    return parse_response(raw_report)