    def generate_file(self, agency_code=None):
        """ Generates a file based on the FileGeneration object and updates any Jobs referencing it """
        raw_filename = (GEN_FILENAMES[self.file_type] if not self.file_generation else
                        GEN_FILENAMES[self.file_type].format(self.file_generation.agency_type))
        file_name = S3Handler.get_timestamped_filename(raw_filename)
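        # Local generation writes under the broker_files directory; non-local files are keyed under "None/" for S3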
        if self.is_local:
            file_path = "".join([CONFIG_BROKER['broker_files'], file_name])
        else:
            file_path = "".join(["None/", file_name])

        # Generate the file and upload to S3
        log_data = {
            'message': 'Finished file {} generation'.format(self.file_type),
            'message_type': 'ValidatorInfo',
            'file_type': self.file_type,
            'file_path': file_path
        }
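        # A FileGeneration object is only present for D1/D2 generations (see the error raised below)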
        if self.file_generation:
            self.generate_d_file(file_path)

            log_data.update({
                'agency_code': self.file_generation.agency_code,
                'agency_type': self.file_generation.agency_type,
                'start_date': self.file_generation.start_date,
                'end_date': self.file_generation.end_date,
                'file_generation_id': self.file_generation.file_generation_id
            })
        elif self.job.file_type.letter_name in ['A', 'E', 'F']:
            log_data['job_id'] = self.job.job_id
            mark_job_status(self.job.job_id, 'running')

            if self.job.file_type.letter_name == 'A':
                if not agency_code:
                    raise ResponseException(
                        'Agency code not provided for an A file generation')

                self.generate_a_file(agency_code, file_path)
            else:
                # Call self.generate_%s_file() where %s is e or f based on the Job's file_type
                file_type_lower = self.job.file_type.letter_name.lower()
                getattr(self, 'generate_%s_file' % file_type_lower)()

            mark_job_status(self.job.job_id, 'finished')
        else:
            e = 'No FileGeneration object for D file generation.' if self.file_type in ['D1', 'D2'] else \
                'Cannot generate file for {} file type.'.format(self.file_type if self.file_type else 'empty')
            raise ResponseException(e)

        logger.info(log_data)

    def generate_from_job(self):
        """ Generates a file for a specified job """
        # Mark Job as running
        mark_job_status(self.job.job_id, 'running')

        # Ensure this is a file generation job
        job_type = self.job.job_type.name
        if job_type != 'file_upload':
            raise ResponseException(
                'Job ID {} is not a file generation job (job type is {})'.format(self.job.job_id, job_type),
                StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

        # Ensure there is an available agency_code
        if not self.agency_code:
            raise ResponseException(
                'An agency_code must be provided to generate a file (Job ID {})'.format(self.job.job_id),
                StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

        # Retrieve any FileRequest that may have started since the Broker sent the request to SQS
        skip_generation = None
        if self.job.file_type.letter_name in ['D1', 'D2']:
            skip_generation = retrieve_cached_file_request(self.job, self.agency_type, self.agency_code, self.is_local)

        if not skip_generation:
            # Generate timestamped file names
            raw_filename = CONFIG_BROKER["".join([str(self.job.file_type.name), "_file_name"])]
            self.job.original_filename = S3Handler.get_timestamped_filename(raw_filename)
            if self.is_local:
                self.job.filename = "".join([CONFIG_BROKER['broker_files'], self.job.original_filename])
            else:
                self.job.filename = "".join([str(self.job.submission_id), "/", self.job.original_filename])
            self.sess.commit()

            # Generate the file, and upload to S3
            if self.job.file_type.letter_name in ['D1', 'D2']:
                # Update the validation Job if necessary
                update_validation_job_info(self.sess, self.job)

                self.generate_d_file()
            elif self.job.file_type.letter_name == 'A':
                self.generate_a_file()
            elif self.job.file_type.letter_name == 'E':
                self.generate_e_file()
            else:
                self.generate_f_file()

            mark_job_status(self.job.job_id, 'finished')

        logger.info({
            'message': 'Finished file {} generation'.format(self.job.file_type.letter_name),
            'message_type': 'ValidatorInfo', 'job_id': self.job.job_id, 'agency_code': self.agency_code,
            'file_type': self.job.file_type.letter_name, 'start_date': self.job.start_date,
            'end_date': self.job.end_date, 'filename': self.job.original_filename
        })

    def generate_from_job(self, job_id, agency_code):
        """ Generates a file for a specified job

            Args:
                job_id: ID of the upload Job
                agency_code: FREC or CGAC code to generate data from
        """
        mark_job_status(job_id, 'running')

        with job_context(job_id, self.is_local) as context:
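            # The job context provides a database session and the upload Job to generate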
            sess, job = context

            # Ensure this is a file generation job
            if job.job_type.name != 'file_upload':
                raise ResponseException(
                    'Job ID {} is not a file generation job (job type is {})'.format(job.job_id, job.job_type.name),
                    StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

            # Ensure there is an available agency_code
            if not agency_code:
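                # Fall back to the submission's FREC code (or CGAC code) when no agency_code was passed in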
                if job.submission_id:
                    agency_code = job.submission.frec_code if job.submission.frec_code else job.submission.cgac_code
                else:
                    raise ResponseException(
                        'An agency_code must be provided to generate a file (Job ID {})'.format(job.job_id),
                        StatusCode.CLIENT_ERROR, None, ValidationError.jobError)

            # Generate timestamped file names
            old_filename = job.original_filename
            job.original_filename = S3Handler.get_timestamped_filename(
                CONFIG_BROKER["".join([str(job.file_type.name), "_file_name"])])
            if self.is_local:
                job.filename = "".join([CONFIG_BROKER['broker_files'], job.original_filename])
            else:
                job.filename = "".join([str(job.submission_id), "/", job.original_filename])

            # Generate the file and upload to S3
            if job.file_type.letter_name in ['D1', 'D2']:
                # Update the validation Job if necessary
                if job.submission_id:
                    self.update_validation_job_info(job)

                generate_d_file(sess, job, agency_code, self.is_local, old_filename)
            elif job.file_type.letter_name == 'E':
                generate_e_file(sess, job, self.is_local)
            else:
                generate_f_file(sess, job, self.is_local)