def delete_records(sess, delete_list, delete_dict):
    """ Delete the listed records and create a delete file for website cleanup.

        Args:
            sess: database session
            delete_list: list of detached_award_procurement_id values to delete
            delete_dict: mapping of detached_award_procurement_id to detached_award_proc_unique
    """

    # only need to delete values if there's something to delete
    if delete_list:
        sess.query(DetachedAwardProcurement). \
            filter(DetachedAwardProcurement.detached_award_procurement_id.in_(delete_list)). \
            delete(synchronize_session=False)

    # writing the file
    seconds = int((datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds())
    now = datetime.datetime.now()
    file_name = now.strftime('%m-%d-%Y') + "_delete_records_IDV_" + str(seconds) + ".csv"
    headers = ["detached_award_procurement_id", "detached_award_proc_unique"]
    if CONFIG_BROKER["use_aws"]:
        s3client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
        # add headers
        contents = bytes((",".join(headers) + "\n").encode())
        for key, value in delete_dict.items():
            contents += bytes('{},{}\n'.format(key, value).encode())
        s3client.put_object(Bucket=CONFIG_BROKER['fpds_delete_bucket'],
                            Key=file_name,
                            Body=contents)
    else:
        with CsvLocalWriter(file_name, headers) as writer:
            for key, value in delete_dict.items():
                writer.write([key, value])
            writer.finish_batch()
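
# A minimal usage sketch for delete_records, assuming the broker's GlobalDB
# session factory; the import path and the sample IDs/keys below are
# illustrative assumptions, not values taken from the source.
from dataactcore.interfaces.db import GlobalDB  # assumed import path


def example_delete_idv_records():
    """ Hypothetical driver showing how delete_records is wired together. """
    sess = GlobalDB.db().session
    # Map detached_award_procurement_id -> detached_award_proc_unique (made up)
    delete_dict = {123456: 'CONT_IDV_FAKE0001_9700',
                   654321: 'CONT_IDV_FAKE0002_9700'}
    delete_records(sess, list(delete_dict.keys()), delete_dict)
    sess.commit()  # required: the bulk delete ran with synchronize_session=False
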
def get_write_csv_writer(file_name, upload_name, is_local, header):
    """Derive the relevant location.

        Args:
            file_name - pathless file name
            upload_name - file name to be used as S3 key
            is_local - True if in local development, False otherwise
            header - value to write as the first line of the file

        Return:
            the writer object
    """
    if is_local:
        file_name = CONFIG_BROKER['broker_files'] + file_name
        csv_writer = CsvLocalWriter(file_name, header)
        message = 'Writing file locally...'
    else:
        bucket = CONFIG_BROKER['aws_bucket']
        region = CONFIG_BROKER['aws_region']
        csv_writer = CsvS3Writer(region, bucket, upload_name, header)
        message = 'Writing file to S3...'

    logger.debug({
        'message': message,
        'message_type': 'ValidatorDebug'
    })

    return csv_writer
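
# A short usage sketch for the derived writer; file names, header, and row
# values below are hypothetical.
def example_write_job_status():
    """ Hypothetical driver: stream two rows through the derived writer. """
    headers = ['job_id', 'status']
    csv_writer = get_write_csv_writer('job_status.csv', 'errors/job_status.csv',
                                      is_local=True, header=headers)
    with csv_writer as writer:
        writer.write(['17', 'finished'])
        writer.write(['18', 'failed'])
        writer.finish_batch()
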
def write_file_level_error(bucket_name, filename, header, error_content, is_local):
    """ Writes file-level errors to an error file

        Args:
            bucket_name: Name of the S3 bucket to write to if not local
            filename: Name (including path) of the file to write
            header: The header line for the file
            error_content: list of lines representing content for the error file
            is_local: boolean indicating if the file is to be written locally or to S3
    """
    if is_local:
        with CsvLocalWriter(filename, header) as writer:
            for line in error_content:
                if isinstance(line, str):
                    writer.write([line])
                else:
                    writer.write(line)
            writer.finish_batch()
    else:
        s3client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
        # add headers
        contents = bytes((",".join(header) + "\n").encode())
        for line in error_content:
            if isinstance(line, str):
                contents += bytes((line + "\n").encode())
            else:
                contents += bytes((",".join(line) + "\n").encode())
        s3client.put_object(Bucket=bucket_name, Key=filename, Body=contents)
    def getWriter(self, regionName, bucketName, fileName, header):
        """ Gets the writer type based on whether it is a local install or not. """
        if self.isLocal:
            return CsvLocalWriter(fileName, header)
        return CsvS3Writer(regionName, bucketName, fileName, header)

def write_idvs_to_file(sess):
    """ Get a list of all IDVs and write them to a delete file in the fpds_delete_bucket. This is done because the
        website's IDV records need to be cleaned out so everything is new and not duplicated when we pull it in.

        Args:
            sess: database session used to query DetachedAwardProcurement
    """
    start = time.time()
    logger.info("Writing IDV delete file for website")
    # Get all IDVs (only the ID and unique key)
    all_idvs = sess.query(DetachedAwardProcurement.detached_award_procurement_id,
                          DetachedAwardProcurement.detached_award_proc_unique).filter_by(pulled_from="IDV")
    now = datetime.datetime.now()
    seconds = int((datetime.datetime.utcnow() - datetime.datetime(1970, 1, 1)).total_seconds())
    file_name = now.strftime('%m-%d-%Y') + "_delete_records_IDV_" + str(seconds) + ".csv"
    headers = ["detached_award_procurement_id", "detached_award_proc_unique"]
    # Writing files
    if CONFIG_BROKER["use_aws"]:
        s3client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
        # add headers
        contents = bytes((",".join(headers) + "\n").encode())
        for idv in all_idvs:
            contents += bytes('{},{}\n'.format(idv.detached_award_procurement_id,
                                               idv.detached_award_proc_unique).encode())
        s3client.put_object(Bucket=CONFIG_BROKER['fpds_delete_bucket'], Key=file_name, Body=contents)
    else:
        with CsvLocalWriter(file_name, headers) as writer:
            for idv in all_idvs:
                writer.write([idv.detached_award_procurement_id, idv.detached_award_proc_unique])
            writer.finish_batch()

    logger.info("Wrote IDV delete file for website, took {} seconds"
                .format(time.time() - start))

    # Deleting to free up space in case it doesn't auto-delete after this function closes
    del all_idvs
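
# A minimal call sketch for write_idvs_to_file, again assuming the GlobalDB
# session factory (an assumption, not shown in the source).
def example_dump_idvs():
    """ Hypothetical driver for write_idvs_to_file. """
    sess = GlobalDB.db().session  # assumed session factory
    write_idvs_to_file(sess)
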
def get_writer(bucket_name, filename, header, is_local, region=None):
    """ Gets the writer type based on whether it is a local install or not. """
    if is_local:
        return CsvLocalWriter(filename, header)
    if region is None:
        region = CONFIG_BROKER["aws_region"]
    return CsvS3Writer(region, bucket_name, filename, header)
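
# A sketch contrasting the two branches of get_writer; the bucket and file
# names are placeholders.
def example_pick_writer(is_local):
    """ Hypothetical driver: the local branch ignores bucket and region. """
    header = ['detached_award_procurement_id', 'detached_award_proc_unique']
    # Passing region=None falls back to CONFIG_BROKER['aws_region'] on S3 path
    return get_writer('my-bucket', 'idv_delete.csv', header, is_local)
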
    def get_writer(self, region_name, bucket_name, file_name, header):
        """ Gets the write type based on if its a local install or not.

        Args:
            region_name - AWS region to write to, not used for local
            bucket_name - AWS bucket to write to, not used for local
            file_name - File to be written
            header - Column headers for file to be written
        """
        if self.is_local:
            return CsvLocalWriter(file_name, header)
        return CsvS3Writer(region_name, bucket_name, file_name, header)

def write_csv(file_name, upload_name, is_local, header, body):
    """ Derive the relevant location (local disk or S3) and write a CSV to it. """
    if is_local:
        file_name = CONFIG_BROKER['broker_files'] + file_name
        csv_writer = CsvLocalWriter(file_name, header)
        message = 'DEBUG: Writing file locally...'
    else:
        bucket = CONFIG_BROKER['aws_bucket']
        region = CONFIG_BROKER['aws_region']
        csv_writer = CsvS3Writer(region, bucket, upload_name, header)
        message = 'DEBUG: Writing file to S3...'

    CloudLogger.log(message, log_type="debug", file_name='smx_request.log')

    with csv_writer as writer:
        for line in body:
            writer.write(line)
        writer.finish_batch()

def write_csv(file_name, upload_name, is_local, header, body):
    """ Derive the relevant location (local disk or S3) and write a CSV to it. """
    if is_local:
        file_name = CONFIG_BROKER['broker_files'] + file_name
        csv_writer = CsvLocalWriter(file_name, header)
        message = 'Writing file locally...'
    else:
        bucket = CONFIG_BROKER['aws_bucket']
        region = CONFIG_BROKER['aws_region']
        csv_writer = CsvS3Writer(region, bucket, upload_name, header)
        message = 'Writing file to S3...'

    logging.getLogger('deprecated.smx').debug(message)

    with csv_writer as writer:
        for line in body:
            writer.write(line)
        writer.finish_batch()
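
# A hypothetical call to write_csv; header and body rows are made-up values.
# Each inner list of body becomes one CSV line after the header row.
def example_write_awards_csv():
    """ Hypothetical driver for write_csv. """
    write_csv('awards.csv', 'uploads/awards.csv', is_local=True,
              header=['award_id', 'amount'],
              body=[['A-1', '1000'], ['A-2', '2500']])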