def test_start_d_generation_submission_change_request(database, monkeypatch):
    """ In-submission generations that change their requested start or end dates must actually generate files based on
        the new dates.
    """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   cgac_code='123',
                                   frec_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    # Cached generation covering 01/01-01/31; the request below asks for a different end date,
    # so this cache entry must NOT be reused.
    file_gen = FileGenerationFactory(request_date=datetime.now().date(),
                                     start_date='2017-01-01',
                                     end_date='2017-01-31',
                                     file_type='D1',
                                     agency_code='123',
                                     agency_type='awarding',
                                     is_cached_file=True,
                                     file_path=file_path,
                                     file_generation_id=1000,
                                     file_format='csv')
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        error_message=None,
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        file_type_id=FILE_TYPE_DICT['award_procurement'],
                        filename=None,
                        submission_id=submission.submission_id,
                        file_generation_id=file_gen.file_generation_id,
                        original_filename=original_filename)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award_procurement'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         submission_id=submission.submission_id,
                         original_filename=original_filename)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    # Request an end date (01/30) and agency type (funding) that differ from the cached generation
    start_d_generation(up_job, '01/01/2017', '01/30/2017', 'funding')

    # A brand-new generation must be attached to the job instead of the stale cached one
    assert up_job.file_generation_id != file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 30)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)
    # NOTE(review): a second assertion block here compared up_job fields to themselves (always true) and
    # repeated the date checks above; it was removed. If the intent was to also verify val_job was updated,
    # add explicit val_job assertions — TODO confirm.
# Example 2
def test_start_d_generation_submission_cached(database, monkeypatch):
    """ Cached D files must update the upload Job with the FileGeneration data. """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   frec_code='1234',
                                   cgac_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    # Cached generation exactly matching the request below (dates, agency, type), so it should be reused
    file_gen = FileGenerationFactory(request_date=datetime.now().date(),
                                     start_date='2017-01-01',
                                     end_date='2017-01-31',
                                     file_type='D2',
                                     agency_code='1234',
                                     agency_type='awarding',
                                     is_cached_file=True,
                                     file_path=file_path)
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        file_type_id=FILE_TYPE_DICT['award'],
                        error_message=None,
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        filename=None,
                        original_filename=None,
                        submission_id=submission.submission_id)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         original_filename=None,
                         submission_id=submission.submission_id)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The upload job should be wired to the cached generation and marked finished without regenerating
    assert up_job.file_generation_id == file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename == original_filename
    assert up_job.filename == gen_file_path_from_submission(up_job.submission_id, original_filename)
    assert up_job.job_status_id == JOB_STATUS_DICT['finished']
    # NOTE(review): a second block repeating these exact assertions (plus a weaker `!= waiting` status check)
    # was removed as redundant. If val_job was meant to be covered, assert on val_job explicitly.
def test_start_d_generation_submission_new(database, monkeypatch):
    """ A new file generation must update the upload Job and create a new FileGeneration object. """
    sess = database.session
    original_filename = 'D2_test_gen.csv'

    submission = SubmissionFactory(submission_id=1000,
                                   reporting_start_date='2017-01-01',
                                   reporting_end_date='2017-01-31',
                                   cgac_code='123',
                                   frec_code=None,
                                   is_quarter_format=False,
                                   publishable=False,
                                   reporting_fiscal_year='2017')
    # No FileGeneration exists yet, so start_d_generation must create one
    up_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                        error_message=None,
                        file_type_id=FILE_TYPE_DICT['award'],
                        job_type_id=JOB_TYPE_DICT['file_upload'],
                        filename=None,
                        submission_id=submission.submission_id,
                        original_filename=original_filename,
                        file_generation_id=None)
    val_job = JobFactory(job_status_id=JOB_STATUS_DICT['waiting'],
                         error_message=None,
                         file_type_id=FILE_TYPE_DICT['award'],
                         job_type_id=JOB_TYPE_DICT['csv_record_validation'],
                         filename=None,
                         submission_id=submission.submission_id,
                         original_filename=original_filename)
    sess.add_all([submission, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The upload job gets a generation id and freshly generated file info
    assert up_job.file_generation_id is not None
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)
    # NOTE(review): a redundant block of self-comparisons (`x == x`, always true) was removed here.

    # A matching FileGeneration row must have been created
    file_gen = sess.query(FileGeneration).filter_by(file_generation_id=up_job.file_generation_id).one_or_none()
    assert file_gen is not None
    assert file_gen.request_date == datetime.now().date()
    assert file_gen.start_date == date(2017, 1, 1)
    assert file_gen.end_date == date(2017, 1, 31)
    assert file_gen.file_type == 'D2'
    assert file_gen.file_path != gen_file_path_from_submission('None', original_filename)
def test_start_d_generation_submission_new(database, monkeypatch):
    """ A new file generation must update the upload Job and create a new FileGeneration object. """
    sess = database.session
    original_filename = 'D2_test_gen.csv'

    submission = SubmissionFactory(
        submission_id=1000, reporting_start_date='2017-01-01', reporting_end_date='2017-01-31', cgac_code='123',
        frec_code=None, is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    # No FileGeneration exists yet, so start_d_generation must create one
    up_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award'],
        job_type_id=JOB_TYPE_DICT['file_upload'], filename=None, submission_id=submission.submission_id,
        original_filename=original_filename, file_generation_id=None)
    val_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award'],
        job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None, submission_id=submission.submission_id,
        original_filename=original_filename)
    sess.add_all([submission, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The upload job gets a generation id and freshly generated file info
    assert up_job.file_generation_id is not None
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)
    # NOTE(review): a redundant block of self-comparisons (`x == x`, always true) was removed here.

    # A matching FileGeneration row must have been created
    file_gen = sess.query(FileGeneration).filter_by(file_generation_id=up_job.file_generation_id).one_or_none()
    assert file_gen is not None
    assert file_gen.request_date == datetime.now().date()
    assert file_gen.start_date == date(2017, 1, 1)
    assert file_gen.end_date == date(2017, 1, 31)
    assert file_gen.file_type == 'D2'
    assert file_gen.file_path != gen_file_path_from_submission('None', original_filename)
def test_start_d_generation_submission_change_request(database, monkeypatch):
    """ In-submission generations that change their requested start or end dates must actually generate files based on
        the new dates.
    """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(
        submission_id=1000, reporting_start_date='2017-01-01', reporting_end_date='2017-01-31', cgac_code='123',
        frec_code=None, is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    # Cached generation covering 01/01-01/31; the request below asks for a different end date,
    # so this cache entry must NOT be reused.
    file_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D1',
        agency_code='123', agency_type='awarding', is_cached_file=True, file_path=file_path, file_generation_id=1000)
    up_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, job_type_id=JOB_TYPE_DICT['file_upload'],
        file_type_id=FILE_TYPE_DICT['award_procurement'], filename=None, submission_id=submission.submission_id,
        file_generation_id=file_gen.file_generation_id, original_filename=original_filename)
    val_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award_procurement'],
        job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None, submission_id=submission.submission_id,
        original_filename=original_filename)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    # Request an end date (01/30) and agency type (funding) that differ from the cached generation
    start_d_generation(up_job, '01/01/2017', '01/30/2017', 'funding')

    # A brand-new generation must be attached to the job instead of the stale cached one
    assert up_job.file_generation_id != file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 30)
    assert up_job.original_filename != original_filename
    assert up_job.filename != gen_file_path_from_submission(up_job.submission_id, original_filename)
    # NOTE(review): a second assertion block here compared up_job fields to themselves (always true) and
    # repeated the date checks above; it was removed.
def test_start_d_generation_submission_cached(database, monkeypatch):
    """ Cached D files must update the upload Job with the FileGeneration data. """
    sess = database.session
    original_filename = 'D1_test_gen.csv'
    file_path = gen_file_path_from_submission('None/', original_filename)

    submission = SubmissionFactory(
        submission_id=1000, reporting_start_date='2017-01-01', reporting_end_date='2017-01-31', frec_code='1234',
        cgac_code=None, is_quarter_format=False, publishable=False, reporting_fiscal_year='2017')
    # Cached generation exactly matching the request below (dates, agency, type), so it should be reused
    file_gen = FileGenerationFactory(
        request_date=datetime.now().date(), start_date='2017-01-01', end_date='2017-01-31', file_type='D2',
        agency_code='1234', agency_type='awarding', is_cached_file=True, file_path=file_path)
    up_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], file_type_id=FILE_TYPE_DICT['award'], error_message=None,
        job_type_id=JOB_TYPE_DICT['file_upload'], filename=None, original_filename=None,
        submission_id=submission.submission_id)
    val_job = JobFactory(
        job_status_id=JOB_STATUS_DICT['waiting'], error_message=None, file_type_id=FILE_TYPE_DICT['award'],
        job_type_id=JOB_TYPE_DICT['csv_record_validation'], filename=None, original_filename=None,
        submission_id=submission.submission_id)
    sess.add_all([submission, file_gen, up_job, val_job])
    sess.commit()

    monkeypatch.setattr(generation_helper, 'g', Mock(return_value={'is_local': CONFIG_BROKER['local']}))
    start_d_generation(up_job, '01/01/2017', '01/31/2017', 'awarding')

    # The upload job should be wired to the cached generation and marked finished without regenerating
    assert up_job.file_generation_id == file_gen.file_generation_id
    assert up_job.start_date == date(2017, 1, 1)
    assert up_job.end_date == date(2017, 1, 31)
    assert up_job.original_filename == original_filename
    assert up_job.filename == gen_file_path_from_submission(up_job.submission_id, original_filename)
    assert up_job.job_status_id == JOB_STATUS_DICT['finished']
    # NOTE(review): a second block repeating these exact assertions (plus a weaker `!= waiting` status check)
    # was removed as redundant.
def generate_file(submission, file_type, start, end, agency_type):
    """ Start a file generation job for the specified file type within a submission

        Args:
            submission: submission for which we're generating the file
            file_type: type of file to generate the job for
            start: the start date for the file to generate
            end: the end date for the file to generate
            agency_type: The type of agency (awarding or funding) to generate the file for (only used for D file
                generation)

        Returns:
            Results of check_generation or JsonResponse object containing an error if the prerequisite job isn't
            complete.
    """
    # Validate the request; the first failing check wins
    error_message = None
    if submission.d2_submission:
        # FABS submissions never have generated files
        error_message = "Cannot generate files for FABS submissions."
    elif file_type in ['D1', 'D2']:
        if not start or not end:
            # D file generation requires start and end date
            error_message = "Must have a start and end date for D file generation."
        elif agency_type not in ['awarding', 'funding']:
            # D files can only be generated by awarding or funding agency
            error_message = "agency_type must be either awarding or funding for D file generation."
    elif file_type not in ['E', 'F']:
        # Only D1, D2, E, and F files can be generated
        error_message = "File type must be either D1, D2, E, or F"

    if error_message is not None:
        return JsonResponse.error(ValueError(error_message), StatusCode.CLIENT_ERROR)

    # Locate the upload job this generation belongs to
    sess = GlobalDB.db().session
    upload_job = sess.query(Job).filter(
        Job.submission_id == submission.submission_id,
        Job.file_type_id == lookups.FILE_TYPE_DICT_LETTER_ID[file_type],
        Job.job_type_id == lookups.JOB_TYPE_DICT['file_upload']).one()

    logger.info({
        'message': 'Starting {} file generation within submission {}'.format(file_type, submission.submission_id),
        'message_type': 'BrokerInfo',
        'submission_id': submission.submission_id,
        'job_id': upload_job.job_id,
        'file_type': file_type
    })

    # Generation cannot start until the prerequisite validation job has completed
    if not generation_helper.check_generation_prereqs(submission.submission_id, file_type):
        return JsonResponse.error(
            ResponseException("Must wait for completion of prerequisite validation job", StatusCode.CLIENT_ERROR),
            StatusCode.CLIENT_ERROR)

    try:
        if file_type in ['D1', 'D2']:
            generation_helper.start_d_generation(upload_job, start, end, agency_type)
        else:
            generation_helper.start_e_f_generation(upload_job)
    except Exception as e:
        # Flag the job as failed and surface the underlying error to the client
        mark_job_status(upload_job.job_id, 'failed')
        upload_job.error_message = str(e)
        sess.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Respond exactly as the check-generation route would
    return check_generation(submission, file_type)
def generate_detached_file(file_type, cgac_code, frec_code, start, end, quarter, agency_type):
    """ Start a file generation job for the specified file type not connected to a submission

        Args:
            file_type: type of file to be generated
            cgac_code: the code of a CGAC agency if generating for a CGAC agency
            frec_code: the code of a FREC agency if generating for a FREC agency
            start: start date in a string, formatted MM/DD/YYYY
            end: end date in a string, formatted MM/DD/YYYY
            quarter: quarter to generate for, formatted Q#/YYYY
            agency_type: The type of agency (awarding or funding) to generate the file for

        Returns:
            JSONResponse object with keys job_id, status, file_type, url, message, start, and end.

        Raises:
            ResponseException: if the start and end Strings cannot be parsed into dates
    """
    # An agency code of some kind is mandatory
    if not (cgac_code or frec_code):
        return JsonResponse.error(ValueError("Detached file generation requires CGAC or FR Entity Code"),
                                  StatusCode.CLIENT_ERROR)

    if file_type in ['D1', 'D2']:
        # D generation needs explicit, parseable MM/DD/YYYY dates and a valid agency type
        if not start or not end:
            return JsonResponse.error(ValueError("Must have a start and end date for D file generation."),
                                      StatusCode.CLIENT_ERROR)
        if not (StringCleaner.is_date(start) and StringCleaner.is_date(end)):
            raise ResponseException('Start or end date cannot be parsed into a date', StatusCode.CLIENT_ERROR)
        if agency_type not in ('awarding', 'funding'):
            return JsonResponse.error(ValueError("agency_type must be either awarding or funding."),
                                      StatusCode.CLIENT_ERROR)
    else:
        # Non-D generation derives its date range from the requested quarter
        if not quarter:
            return JsonResponse.error(ValueError("Must have a quarter for A file generation."),
                                      StatusCode.CLIENT_ERROR)
        try:
            start, end = generic_helper.quarter_to_dates(quarter)
        except ResponseException as e:
            return JsonResponse.error(e, StatusCode.CLIENT_ERROR)

    # Create the job record before kicking anything off
    file_type_name = lookups.FILE_TYPE_DICT_LETTER_NAME[file_type]
    new_job = generation_helper.add_generation_job_info(file_type_name=file_type_name, start_date=start, end_date=end)

    agency_code = frec_code if frec_code else cgac_code
    logger.info({'message': 'Starting detached {} file generation'.format(file_type), 'message_type': 'BrokerInfo',
                 'job_id': new_job.job_id, 'file_type': file_type, 'agency_code': agency_code, 'start_date': start,
                 'end_date': end})

    try:
        if file_type in ['D1', 'D2']:
            generation_helper.start_d_generation(new_job, start, end, agency_type, agency_code=agency_code)
        else:
            generation_helper.start_a_generation(new_job, start, end, agency_code)
    except Exception as e:
        # Flag the job as failed and surface the underlying error to the client
        mark_job_status(new_job.job_id, 'failed')
        new_job.error_message = str(e)
        GlobalDB.db().session.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Respond exactly as the check-generation route would
    return check_detached_generation(new_job.job_id)
def generate_detached_file(file_type, cgac_code, frec_code, start_date, end_date, year, period, agency_type,
                           file_format):
    """ Start a file generation job for the specified file type not connected to a submission

        Args:
            file_type: type of file to be generated
            cgac_code: the code of a CGAC agency if generating for a CGAC agency
            frec_code: the code of a FREC agency if generating for a FREC agency
            start_date: start date in a string, formatted MM/DD/YYYY
            end_date: end date in a string, formatted MM/DD/YYYY
            year: year to generate for, integer 4 digits
            period: period to generate for, integer (2-12)
            agency_type: The type of agency (awarding or funding) to generate the file for
            file_format: determines if the file generated is a txt or a csv (only used for D file generation)

        Returns:
            JSONResponse object with keys job_id, status, file_type, url, message, start_date, and end_date.

        Raises:
            ResponseException: if the start_date and end_date Strings cannot be parsed into dates
    """
    # An agency code of some kind is mandatory
    if not cgac_code and not frec_code:
        return JsonResponse.error(ValueError("Detached file generation requires CGAC or FR Entity Code"),
                                  StatusCode.CLIENT_ERROR)

    if file_type in ['D1', 'D2']:
        # D files need parseable MM/DD/YYYY dates, a valid agency type, and a supported output format
        if not start_date or not end_date:
            return JsonResponse.error(ValueError('Must have a start and end date for D file generation.'),
                                      StatusCode.CLIENT_ERROR)
        if not (StringCleaner.is_date(start_date) and StringCleaner.is_date(end_date)):
            raise ResponseException('Start or end date cannot be parsed into a date', StatusCode.CLIENT_ERROR)
        if agency_type not in ['awarding', 'funding']:
            return JsonResponse.error(ValueError('agency_type must be either awarding or funding.'),
                                      StatusCode.CLIENT_ERROR)
        if file_format not in ['csv', 'txt']:
            return JsonResponse.error(ValueError('file_format must be either csv or txt.'), StatusCode.CLIENT_ERROR)
    else:
        # A file generation is driven by fiscal year/period rather than explicit dates
        if not (year and period):
            return JsonResponse.error(ValueError("Must have a year and period for A file generation."),
                                      StatusCode.CLIENT_ERROR)
        try:
            # Convert to real start and end dates
            start_date, end_date = generic_helper.year_period_to_dates(year, period)
        except ResponseException as e:
            return JsonResponse.error(e, StatusCode.CLIENT_ERROR)

    # Create the job record before kicking anything off
    file_type_name = lookups.FILE_TYPE_DICT_LETTER_NAME[file_type]
    new_job = generation_helper.create_generation_job(file_type_name, start_date, end_date)

    agency_code = frec_code if frec_code else cgac_code
    logger.info({'message': 'Starting detached {} file generation'.format(file_type), 'message_type': 'BrokerInfo',
                 'job_id': new_job.job_id, 'file_type': file_type, 'agency_code': agency_code,
                 'start_date': start_date, 'end_date': end_date})

    try:
        if file_type in ['D1', 'D2']:
            generation_helper.start_d_generation(new_job, start_date, end_date, agency_type, agency_code=agency_code,
                                                 file_format=file_format)
        else:
            generation_helper.start_a_generation(new_job, start_date, end_date, agency_code)
    except Exception as e:
        # Flag the job as failed and surface the underlying error to the client
        mark_job_status(new_job.job_id, 'failed')
        new_job.error_message = str(e)
        GlobalDB.db().session.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Respond exactly as the check-generation route would
    return check_detached_generation(new_job.job_id)
def generate_file(submission, file_type, start, end, agency_type):
    """ Start a file generation job for the specified file type within a submission

        Args:
            submission: submission for which we're generating the file
            file_type: type of file to generate the job for
            start: the start date for the file to generate
            end: the end date for the file to generate
            agency_type: The type of agency (awarding or funding) to generate the file for (only used for D file
                generation)

        Returns:
            Results of check_generation or JsonResponse object containing an error if the prerequisite job isn't
            complete.
    """
    # Guard clauses: reject invalid requests immediately with a CLIENT_ERROR
    if submission.d2_submission:
        # FABS submissions never have generated files
        return JsonResponse.error(ValueError("Cannot generate files for FABS submissions."), StatusCode.CLIENT_ERROR)
    if file_type in ['D1', 'D2']:
        # D file generation requires start/end dates and an awarding or funding agency type
        if not start or not end:
            return JsonResponse.error(ValueError("Must have a start and end date for D file generation."),
                                      StatusCode.CLIENT_ERROR)
        if agency_type not in ['awarding', 'funding']:
            return JsonResponse.error(ValueError("agency_type must be either awarding or funding for D file "
                                                 "generation."), StatusCode.CLIENT_ERROR)
    elif file_type not in ['E', 'F']:
        # Only D1, D2, E, and F files can be generated
        return JsonResponse.error(ValueError("File type must be either D1, D2, E, or F"), StatusCode.CLIENT_ERROR)

    # Locate the upload job this generation belongs to
    sess = GlobalDB.db().session
    upload_job = sess.query(Job).filter(Job.submission_id == submission.submission_id,
                                        Job.file_type_id == lookups.FILE_TYPE_DICT_LETTER_ID[file_type],
                                        Job.job_type_id == lookups.JOB_TYPE_DICT['file_upload']).one()

    log_payload = {
        'message': 'Starting {} file generation within submission {}'.format(file_type, submission.submission_id),
        'message_type': 'BrokerInfo',
        'submission_id': submission.submission_id,
        'job_id': upload_job.job_id,
        'file_type': file_type
    }
    logger.info(log_payload)

    # Generation cannot start until the prerequisite validation job has completed
    if not generation_helper.check_generation_prereqs(submission.submission_id, file_type):
        return JsonResponse.error(ResponseException("Must wait for completion of prerequisite validation job",
                                                    StatusCode.CLIENT_ERROR), StatusCode.CLIENT_ERROR)

    try:
        if file_type in ['D1', 'D2']:
            generation_helper.start_d_generation(upload_job, start, end, agency_type)
        else:
            generation_helper.start_e_f_generation(upload_job)
    except Exception as e:
        # Flag the job as failed and surface the underlying error to the client
        mark_job_status(upload_job.job_id, 'failed')
        upload_job.error_message = str(e)
        sess.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Respond exactly as the check-generation route would
    return check_generation(submission, file_type)
def generate_detached_file(file_type, cgac_code, frec_code, start_date, end_date, year, period, agency_type):
    """ Start a file generation job for the specified file type not connected to a submission

        Args:
            file_type: type of file to be generated
            cgac_code: the code of a CGAC agency if generating for a CGAC agency
            frec_code: the code of a FREC agency if generating for a FREC agency
            start_date: start date in a string, formatted MM/DD/YYYY
            end_date: end date in a string, formatted MM/DD/YYYY
            year: year to generate for, integer 4 digits
            period: period to generate for, integer (2-12)
            agency_type: The type of agency (awarding or funding) to generate the file for

        Returns:
            JSONResponse object with keys job_id, status, file_type, url, message, start_date, and end_date.

        Raises:
            ResponseException: if the start_date and end_date Strings cannot be parsed into dates
    """
    # FREC takes precedence over CGAC; one of the two is mandatory
    agency_code = frec_code if frec_code else cgac_code
    if not agency_code:
        return JsonResponse.error(ValueError("Detached file generation requires CGAC or FR Entity Code"),
                                  StatusCode.CLIENT_ERROR)

    is_d_file = file_type in ['D1', 'D2']
    if is_d_file:
        # D generation needs explicit, parseable MM/DD/YYYY dates and a valid agency type
        if not (start_date and end_date):
            return JsonResponse.error(ValueError("Must have a start and end date for D file generation."),
                                      StatusCode.CLIENT_ERROR)
        if not StringCleaner.is_date(start_date) or not StringCleaner.is_date(end_date):
            raise ResponseException('Start or end date cannot be parsed into a date', StatusCode.CLIENT_ERROR)
        if agency_type not in ('awarding', 'funding'):
            return JsonResponse.error(ValueError("agency_type must be either awarding or funding."),
                                      StatusCode.CLIENT_ERROR)
    else:
        # A file generation is driven by fiscal year/period rather than explicit dates
        if not year or not period:
            return JsonResponse.error(ValueError("Must have a year and period for A file generation."),
                                      StatusCode.CLIENT_ERROR)
        try:
            # Convert to real start and end dates
            start_date, end_date = generic_helper.year_period_to_dates(year, period)
        except ResponseException as e:
            return JsonResponse.error(e, StatusCode.CLIENT_ERROR)

    # Create the job record before kicking anything off
    new_job = generation_helper.create_generation_job(lookups.FILE_TYPE_DICT_LETTER_NAME[file_type], start_date,
                                                      end_date)

    logger.info({'message': 'Starting detached {} file generation'.format(file_type), 'message_type': 'BrokerInfo',
                 'job_id': new_job.job_id, 'file_type': file_type, 'agency_code': agency_code,
                 'start_date': start_date, 'end_date': end_date})

    try:
        if is_d_file:
            generation_helper.start_d_generation(new_job, start_date, end_date, agency_type, agency_code=agency_code)
        else:
            generation_helper.start_a_generation(new_job, start_date, end_date, agency_code)
    except Exception as e:
        # Flag the job as failed and surface the underlying error to the client
        mark_job_status(new_job.job_id, 'failed')
        new_job.error_message = str(e)
        GlobalDB.db().session.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Respond exactly as the check-generation route would
    return check_detached_generation(new_job.job_id)