def start_a_generation(job, start_date, end_date, agency_code):
    """ Validates the start and end dates of the generation and sends the job information to SQS.

        Args:
            job: File generation job to start
            start_date: String to parse as the start date of the generation
            end_date: String to parse as the end date of the generation
            agency_code: Agency code for A file generations
    """
    # Both dates must parse as MM/DD/YYYY before anything is persisted
    dates_valid = StringCleaner.is_date(start_date) and StringCleaner.is_date(end_date)
    if not dates_valid:
        raise ResponseException("Start or end date cannot be parsed into a date of format MM/DD/YYYY",
                                StatusCode.CLIENT_ERROR)

    # Persist the validated window on the Job
    sess = GlobalDB.db().session
    job.start_date = start_date
    job.end_date = end_date
    sess.commit()

    mark_job_status(job.job_id, "waiting")

    letter = job.file_type.letter_name
    log_data = {
        'message': 'Sending {} file generation job {} to Validator in SQS'.format(letter, job.job_id),
        'message_type': 'BrokerInfo',
        'job_id': job.job_id,
        'file_type': letter,
    }
    logger.info(log_data)

    # The Validator reads the agency code from the SQS message attributes
    message_attr = {'agency_code': {'DataType': 'String', 'StringValue': agency_code}}

    # Queue the job by ID for the Validator to pick up
    msg_response = sqs_queue().send_message(MessageBody=str(job.job_id), MessageAttributes=message_attr)

    log_data['message'] = 'SQS message response: {}'.format(msg_response)
    logger.debug(log_data)
def start_a_generation(job, start_date, end_date, agency_code):
    """ Validate generation dates and queue the A-file generation job on SQS.

        Args:
            job: File generation job to start
            start_date: String to parse as the start date of the generation
            end_date: String to parse as the end date of the generation
            agency_code: Agency code for A file generations
    """
    # Reject the request up front if either date fails to parse as MM/DD/YYYY
    if not StringCleaner.is_date(start_date) or not StringCleaner.is_date(end_date):
        raise ResponseException("Start or end date cannot be parsed into a date of format MM/DD/YYYY",
                                StatusCode.CLIENT_ERROR)

    # Store the validated dates on the job
    sess = GlobalDB.db().session
    job.start_date = start_date
    job.end_date = end_date
    sess.commit()

    mark_job_status(job.job_id, "waiting")

    file_type = job.file_type.letter_name
    log_data = {
        'message': 'Sending {} file generation job {} to Validator in SQS'.format(file_type, job.job_id),
        'message_type': 'BrokerInfo',
        'job_id': job.job_id,
        'file_type': file_type,
    }
    logger.info(log_data)

    # Pass the agency code along as an SQS message attribute
    message_attr = {'agency_code': {'DataType': 'String', 'StringValue': agency_code}}

    # Queue the job ID for the Validator
    queue = sqs_queue()
    msg_response = queue.send_message(MessageBody=str(job.job_id), MessageAttributes=message_attr)

    log_data['message'] = 'SQS message response: {}'.format(msg_response)
    logger.debug(log_data)
# Code example #3 (scraper artifact — not executable source)
# 0
def start_d_generation(job, start_date, end_date, agency_type, agency_code=None):
    """ Validates the start and end dates of the generation, updates the submission's publish status and progress (if
        its not detached generation), and sends the job information to SQS.

        Args:
            job: File generation job to start
            start_date: String to parse as the start date of the generation
            end_date: String to parse as the end date of the generation
            agency_type: Type of Agency to generate files by: "awarding" or "funding"
            agency_code: Agency code for detached D file generations

        Returns:
            SQS send_message response when a new generation is queued, or None when a cached FileRequest was
            reused and no SQS message needed to be sent

        Raises:
            ResponseException: if start_date or end_date cannot be parsed as MM/DD/YYYY dates
    """
    if not (StringCleaner.is_date(start_date) and StringCleaner.is_date(end_date)):
        raise ResponseException("Start or end date cannot be parsed into a date of format MM/DD/YYYY",
                                StatusCode.CLIENT_ERROR)

    # Update the Job's start and end dates
    sess = GlobalDB.db().session
    job.start_date = start_date
    job.end_date = end_date
    sess.commit()

    # Update submission; submission-attached jobs take their agency code from the submission
    if job.submission_id:
        agency_code = update_generation_submission(sess, job)

    mark_job_status(job.job_id, "waiting")

    log_data = {'message': 'Sending {} file generation job {} to SQS'.format(job.file_type.letter_name, job.job_id),
                'message_type': 'BrokerInfo', 'submission_id': job.submission_id, 'job_id': job.job_id,
                'file_type': job.file_type.letter_name}
    logger.info(log_data)

    # Reuse a previously generated file matching this request when one is cached
    file_request = retrieve_cached_file_request(job, agency_type, agency_code, g.is_local)
    if file_request:
        log_data['message'] = 'No new file generated, used FileRequest with ID {}'.format(file_request.file_request_id)
        logger.info(log_data)
        return None

    # Set SQS message attributes; detached generations must carry their own agency code
    message_attr = {'agency_type': {'DataType': 'String', 'StringValue': agency_type}}
    if not job.submission_id:
        message_attr['agency_code'] = {'DataType': 'String', 'StringValue': agency_code}

    # Add job_id to the SQS job queue
    queue = sqs_queue()
    msg_response = queue.send_message(MessageBody=str(job.job_id), MessageAttributes=message_attr)

    log_data['message'] = 'SQS message response: {}'.format(msg_response)
    logger.debug(log_data)
    # Fixed: the docstring advertised the SQS response as the return value, but the original
    # always returned None; now the response is actually returned (backward-compatible).
    return msg_response
# Code example #4 (scraper artifact — not executable source)
# 0
def start_d_generation(job,
                       start_date,
                       end_date,
                       agency_type,
                       agency_code=None,
                       file_format='csv'):
    """ Validates the start and end dates of the generation, updates the submission's publish status and progress (if
        its not detached generation), and sends the job information to SQS.

        Args:
            job: File generation job to start
            start_date: String to parse as the start date of the generation
            end_date: String to parse as the end date of the generation
            agency_type: Type of Agency to generate files by: "awarding" or "funding"
            agency_code: Agency code for detached D file generations
            file_format: determines if the file generated is a txt or a csv

        Raises:
            ResponseException: if start_date or end_date cannot be parsed as MM/DD/YYYY dates
    """
    if not (StringCleaner.is_date(start_date)
            and StringCleaner.is_date(end_date)):
        raise ResponseException(
            "Start or end date cannot be parsed into a date of format MM/DD/YYYY",
            StatusCode.CLIENT_ERROR)

    # Update the Job's start and end dates
    sess = GlobalDB.db().session
    job.start_date = start_date
    job.end_date = end_date
    sess.commit()

    # Update submission; submission-attached jobs take their agency code from the submission
    if job.submission_id:
        agency_code = update_generation_submission(sess, job)

    mark_job_status(job.job_id, 'waiting')

    # Reuse a previously generated file matching this request, if one is cached
    file_generation = retrieve_cached_file_generation(job, agency_type,
                                                      agency_code, file_format)
    if file_generation:
        try:
            copy_file_generation_to_job(job, file_generation, g.is_local)
        except Exception as e:
            logger.error(traceback.format_exc())

            # Copying the cached file failed: fail the job and record the reason
            mark_job_status(job.job_id, 'failed')
            job.error_message = str(e)
            sess.commit()
    else:
        # Create new FileGeneration and reset Jobs
        file_generation = FileGeneration(request_date=datetime.now().date(),
                                         start_date=job.start_date,
                                         end_date=job.end_date,
                                         file_type=job.file_type.letter_name,
                                         agency_code=agency_code,
                                         agency_type=agency_type,
                                         file_format=file_format,
                                         is_cached_file=True)
        sess.add(file_generation)
        sess.commit()

        try:
            # Link the job to the new FileGeneration before queueing it
            job.file_generation_id = file_generation.file_generation_id
            sess.commit()
            reset_generation_jobs(sess, job)
            logger.info({
                'message':
                'Sending new FileGeneration {} to SQS'.format(
                    file_generation.file_generation_id),
                'message_type':
                'BrokerInfo',
                'file_type':
                job.file_type.letter_name,
                'job_id':
                job.job_id,
                'submission_id':
                job.submission_id,
                'file_generation_id':
                file_generation.file_generation_id
            })

            # Add file_generation_id to the SQS job queue
            queue = sqs_queue()
            message_attr = {
                "validation_type": {
                    "DataType": "String",
                    "StringValue": "generation"
                }
            }
            queue.send_message(MessageBody=str(
                file_generation.file_generation_id),
                               MessageAttributes=message_attr)
        except Exception as e:
            logger.error(traceback.format_exc())

            # Queueing failed: fail the job and stop advertising this generation as cached
            mark_job_status(job.job_id, 'failed')
            job.error_message = str(e)
            file_generation.is_cached_file = False
            sess.commit()
# Code example #5 (scraper artifact — not executable source)
# 0
def start_generation_job(job, start_date, end_date, agency_code=None):
    """ Validates the dates for a D file generation job and passes the Job ID to SQS

        Args:
            job: File generation job to start
            start_date: Start date of the file generation
            end_date: End date of the file generation
            agency_code: Agency code for detached D file generations

        Returns:
            Tuple of boolean indicating successful start, and error response if False
    """
    sess = GlobalDB.db().session
    file_type = job.file_type.letter_name
    try:
        if file_type in ['D1', 'D2']:
            # D files carry a date window: validate it before persisting
            if not (StringCleaner.is_date(start_date) and StringCleaner.is_date(end_date)):
                raise ResponseException("Start or end date cannot be parsed into a date", StatusCode.CLIENT_ERROR)
            job.start_date = start_date
            job.end_date = end_date
            sess.commit()
        elif file_type not in ["E", "F"]:
            raise ResponseException("File type must be either D1, D2, E or F", StatusCode.CLIENT_ERROR)
    except ResponseException as e:
        # Validation problems are reported back to the caller rather than raised
        return False, JsonResponse.error(e, e.status, file_type=file_type, status='failed')

    mark_job_status(job.job_id, "waiting")

    # Hand the job off to the Validator via SQS
    logger.info({'message_type': 'ValidatorInfo', 'job_id': job.job_id,
                 'message': 'Sending file generation job {} to Validator in SQS'.format(job.job_id)})
    job_queue = sqs_queue()

    message_attr = {'agency_code': {'DataType': 'String', 'StringValue': agency_code}} if agency_code else {}
    response = job_queue.send_message(MessageBody=str(job.job_id), MessageAttributes=message_attr)
    logger.debug({'message_type': 'ValidatorInfo', 'job_id': job.job_id,
                  'message': 'Send message response: {}'.format(response)})

    return True, None
# Code example #6 (scraper artifact — not executable source)
# 0
def generate_detached_file(file_type, cgac_code, frec_code, start, end,
                           quarter, agency_type):
    """ Start a file generation job for the specified file type not connected to a submission

        Args:
            file_type: type of file to be generated
            cgac_code: the code of a CGAC agency if generating for a CGAC agency
            frec_code: the code of a FREC agency if generating for a FREC agency
            start: start date in a string, formatted MM/DD/YYYY
            end: end date in a string, formatted MM/DD/YYYY
            quarter: quarter to generate for, formatted Q#/YYYY
            agency_type: The type of agency (awarding or funding) to generate the file for

        Returns:
            JSONResponse object with keys job_id, status, file_type, url, message, start, and end.

        Raises:
            ResponseException: if the start and end Strings cannot be parsed into dates
    """
    # An agency identifier is required regardless of file type
    if not cgac_code and not frec_code:
        return JsonResponse.error(ValueError("Detached file generation requires CGAC or FR Entity Code"),
                                  StatusCode.CLIENT_ERROR)

    if file_type in ['D1', 'D2']:
        # D1/D2 need an explicit MM/DD/YYYY date window
        if not start or not end:
            return JsonResponse.error(ValueError("Must have a start and end date for D file generation."),
                                      StatusCode.CLIENT_ERROR)
        if not (StringCleaner.is_date(start) and StringCleaner.is_date(end)):
            raise ResponseException('Start or end date cannot be parsed into a date', StatusCode.CLIENT_ERROR)
        if agency_type not in ('awarding', 'funding'):
            return JsonResponse.error(ValueError("agency_type must be either awarding or funding."),
                                      StatusCode.CLIENT_ERROR)
    else:
        # A files are generated for a fiscal quarter instead of an explicit date range
        if not quarter:
            return JsonResponse.error(ValueError("Must have a quarter for A file generation."),
                                      StatusCode.CLIENT_ERROR)
        try:
            start, end = generic_helper.quarter_to_dates(quarter)
        except ResponseException as e:
            return JsonResponse.error(e, StatusCode.CLIENT_ERROR)

    # Add job info
    file_type_name = lookups.FILE_TYPE_DICT_LETTER_NAME[file_type]
    new_job = generation_helper.add_generation_job_info(file_type_name=file_type_name, start_date=start, end_date=end)

    agency_code = frec_code if frec_code else cgac_code
    logger.info({'message': 'Starting detached {} file generation'.format(file_type), 'message_type': 'BrokerInfo',
                 'job_id': new_job.job_id, 'file_type': file_type, 'agency_code': agency_code, 'start_date': start,
                 'end_date': end})

    try:
        if file_type in ['D1', 'D2']:
            generation_helper.start_d_generation(new_job, start, end, agency_type, agency_code=agency_code)
        else:
            generation_helper.start_a_generation(new_job, start, end, agency_code)
    except Exception as e:
        # Record the failure on the job so its status is queryable, then surface the error
        mark_job_status(new_job.job_id, 'failed')
        new_job.error_message = str(e)
        GlobalDB.db().session.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Return same response as check generation route
    return check_detached_generation(new_job.job_id)
def start_d_generation(job, start_date, end_date, agency_type, agency_code=None):
    """ Validates the start and end dates of the generation, updates the submission's publish status and progress (if
        its not detached generation), and sends the job information to SQS.

        Args:
            job: File generation job to start
            start_date: String to parse as the start date of the generation
            end_date: String to parse as the end date of the generation
            agency_type: Type of Agency to generate files by: "awarding" or "funding"
            agency_code: Agency code for detached D file generations

        Raises:
            ResponseException: if start_date or end_date cannot be parsed as MM/DD/YYYY dates
    """
    if not (StringCleaner.is_date(start_date) and StringCleaner.is_date(end_date)):
        raise ResponseException("Start or end date cannot be parsed into a date of format MM/DD/YYYY",
                                StatusCode.CLIENT_ERROR)

    # Update the Job's start and end dates
    sess = GlobalDB.db().session
    job.start_date = start_date
    job.end_date = end_date
    sess.commit()

    # Update submission; submission-attached jobs take their agency code from the submission
    if job.submission_id:
        agency_code = update_generation_submission(sess, job)

    mark_job_status(job.job_id, 'waiting')

    # Reuse a previously generated file matching this request, if one is cached
    file_generation = retrieve_cached_file_generation(job, agency_type, agency_code)
    if file_generation:
        try:
            copy_file_generation_to_job(job, file_generation, g.is_local)
        except Exception as e:
            logger.error(traceback.format_exc())

            # Copying the cached file failed: fail the job and record the reason
            mark_job_status(job.job_id, 'failed')
            job.error_message = str(e)
            sess.commit()
    else:
        # Create new FileGeneration and reset Jobs
        file_generation = FileGeneration(
            request_date=datetime.now().date(), start_date=job.start_date, end_date=job.end_date,
            file_type=job.file_type.letter_name, agency_code=agency_code, agency_type=agency_type, is_cached_file=True)
        sess.add(file_generation)
        sess.commit()

        try:
            # Link the job to the new FileGeneration before queueing it
            job.file_generation_id = file_generation.file_generation_id
            sess.commit()
            reset_generation_jobs(sess, job)
            logger.info({'message': 'Sending new FileGeneration {} to SQS'.format(file_generation.file_generation_id),
                         'message_type': 'BrokerInfo', 'file_type': job.file_type.letter_name, 'job_id': job.job_id,
                         'submission_id': job.submission_id, 'file_generation_id': file_generation.file_generation_id})

            # Add file_generation_id to the SQS job queue
            queue = sqs_queue()
            message_attr = {"validation_type": {"DataType": "String", "StringValue": "generation"}}
            queue.send_message(MessageBody=str(file_generation.file_generation_id), MessageAttributes=message_attr)
        except Exception as e:
            logger.error(traceback.format_exc())

            # Queueing failed: fail the job and stop advertising this generation as cached
            mark_job_status(job.job_id, 'failed')
            job.error_message = str(e)
            file_generation.is_cached_file = False
            sess.commit()
# Code example #8 (scraper artifact — not executable source)
# 0
def generate_detached_file(file_type, cgac_code, frec_code, start_date, end_date, year, period, agency_type,
                           file_format):
    """ Start a file generation job for the specified file type not connected to a submission

        Args:
            file_type: type of file to be generated
            cgac_code: the code of a CGAC agency if generating for a CGAC agency
            frec_code: the code of a FREC agency if generating for a FREC agency
            start_date: start date in a string, formatted MM/DD/YYYY
            end_date: end date in a string, formatted MM/DD/YYYY
            year: year to generate for, integer 4 digits
            period: period to generate for, integer (2-12)
            agency_type: The type of agency (awarding or funding) to generate the file for
            file_format: determines if the file generated is a txt or a csv (only used for D file generation)

        Returns:
            JSONResponse object with keys job_id, status, file_type, url, message, start_date, and end_date.

        Raises:
            ResponseException: if the start_date and end_date Strings cannot be parsed into dates
    """
    # An agency identifier is required regardless of file type
    if not cgac_code and not frec_code:
        return JsonResponse.error(ValueError("Detached file generation requires CGAC or FR Entity Code"),
                                  StatusCode.CLIENT_ERROR)

    if file_type in ['D1', 'D2']:
        # D1/D2 require an explicit MM/DD/YYYY date window plus valid agency type and format
        if not start_date or not end_date:
            return JsonResponse.error(ValueError('Must have a start and end date for D file generation.'),
                                      StatusCode.CLIENT_ERROR)
        if not (StringCleaner.is_date(start_date) and StringCleaner.is_date(end_date)):
            raise ResponseException('Start or end date cannot be parsed into a date', StatusCode.CLIENT_ERROR)
        if agency_type not in ['awarding', 'funding']:
            return JsonResponse.error(ValueError('agency_type must be either awarding or funding.'),
                                      StatusCode.CLIENT_ERROR)
        if file_format not in ['csv', 'txt']:
            return JsonResponse.error(ValueError('file_format must be either csv or txt.'), StatusCode.CLIENT_ERROR)
    else:
        # A files derive their window from a fiscal year/period pair instead of explicit dates
        if not (year and period):
            return JsonResponse.error(ValueError("Must have a year and period for A file generation."),
                                      StatusCode.CLIENT_ERROR)
        try:
            start_date, end_date = generic_helper.year_period_to_dates(year, period)
        except ResponseException as e:
            return JsonResponse.error(e, StatusCode.CLIENT_ERROR)

    # Add job info
    file_type_name = lookups.FILE_TYPE_DICT_LETTER_NAME[file_type]
    new_job = generation_helper.create_generation_job(file_type_name, start_date, end_date)

    agency_code = frec_code if frec_code else cgac_code
    logger.info({'message': 'Starting detached {} file generation'.format(file_type), 'message_type': 'BrokerInfo',
                 'job_id': new_job.job_id, 'file_type': file_type, 'agency_code': agency_code,
                 'start_date': start_date, 'end_date': end_date})

    try:
        if file_type in ['D1', 'D2']:
            generation_helper.start_d_generation(new_job, start_date, end_date, agency_type, agency_code=agency_code,
                                                 file_format=file_format)
        else:
            generation_helper.start_a_generation(new_job, start_date, end_date, agency_code)
    except Exception as e:
        # Record the failure on the job before surfacing the error to the caller
        mark_job_status(new_job.job_id, 'failed')
        new_job.error_message = str(e)
        GlobalDB.db().session.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Return same response as check generation route
    return check_detached_generation(new_job.job_id)
def generate_detached_file(file_type, cgac_code, frec_code, start_date, end_date, year, period, agency_type):
    """ Kick off a standalone (non-submission) file generation job for the given file type.

        Args:
            file_type: type of file to be generated
            cgac_code: the code of a CGAC agency if generating for a CGAC agency
            frec_code: the code of a FREC agency if generating for a FREC agency
            start_date: start date in a string, formatted MM/DD/YYYY
            end_date: end date in a string, formatted MM/DD/YYYY
            year: year to generate for, integer 4 digits
            period: period to generate for, integer (2-12)
            agency_type: The type of agency (awarding or funding) to generate the file for

        Returns:
            JSONResponse object with keys job_id, status, file_type, url, message, start_date, and end_date.

        Raises:
            ResponseException: if the start_date and end_date Strings cannot be parsed into dates
    """
    # Either a CGAC or a FREC identifier must be supplied
    if not (cgac_code or frec_code):
        return JsonResponse.error(ValueError("Detached file generation requires CGAC or FR Entity Code"),
                                  StatusCode.CLIENT_ERROR)

    is_d_file = file_type in ('D1', 'D2')
    if is_d_file:
        # D files need an explicit, parseable MM/DD/YYYY date window
        if not (start_date and end_date):
            return JsonResponse.error(ValueError("Must have a start and end date for D file generation."),
                                      StatusCode.CLIENT_ERROR)
        if not StringCleaner.is_date(start_date) or not StringCleaner.is_date(end_date):
            raise ResponseException('Start or end date cannot be parsed into a date', StatusCode.CLIENT_ERROR)
        if agency_type not in ('awarding', 'funding'):
            return JsonResponse.error(ValueError("agency_type must be either awarding or funding."),
                                      StatusCode.CLIENT_ERROR)
    else:
        # A files derive their window from a fiscal year/period pair
        if not (year and period):
            return JsonResponse.error(ValueError("Must have a year and period for A file generation."),
                                      StatusCode.CLIENT_ERROR)
        try:
            start_date, end_date = generic_helper.year_period_to_dates(year, period)
        except ResponseException as e:
            return JsonResponse.error(e, StatusCode.CLIENT_ERROR)

    # Create the generation job record
    file_type_name = lookups.FILE_TYPE_DICT_LETTER_NAME[file_type]
    new_job = generation_helper.create_generation_job(file_type_name, start_date, end_date)

    agency_code = frec_code if frec_code else cgac_code
    logger.info({
        'message': 'Starting detached {} file generation'.format(file_type),
        'message_type': 'BrokerInfo',
        'job_id': new_job.job_id,
        'file_type': file_type,
        'agency_code': agency_code,
        'start_date': start_date,
        'end_date': end_date,
    })

    try:
        if is_d_file:
            generation_helper.start_d_generation(new_job, start_date, end_date, agency_type, agency_code=agency_code)
        else:
            generation_helper.start_a_generation(new_job, start_date, end_date, agency_code)
    except Exception as e:
        # Persist the failure so the job's status reflects it, then report the error
        mark_job_status(new_job.job_id, 'failed')
        new_job.error_message = str(e)
        GlobalDB.db().session.commit()
        return JsonResponse.error(e, StatusCode.INTERNAL_ERROR)

    # Return same response as check generation route
    return check_detached_generation(new_job.job_id)