Example 1
def export_audit_csv(settings, audit_json):
    """
    Save audit CSV to disk.
    :param settings:    Settings from command line and configuration file
    :param audit_json:  Audit JSON
    """
    csv_exporter = csvExporter.CsvExporter(
        audit_json, settings[EXPORT_INACTIVE_ITEMS_TO_CSV])
    csv_export_filename = audit_json['template_id']
    csv_exporter.append_converted_audit_to_bulk_export_file(
        os.path.join(settings[EXPORT_PATH], csv_export_filename + '.csv'))
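
A possible way to call this exporter, as a minimal sketch: the settings keys and the audit file name below are placeholders for illustration only; the real constants come from the surrounding exporter module's configuration handling.

# Hypothetical usage sketch -- not part of the original module.
# EXPORT_PATH and EXPORT_INACTIVE_ITEMS_TO_CSV stand in for the module's real
# settings keys, and 'audit_123.json' is a placeholder audit document.
import json

EXPORT_PATH = 'export_path'                                     # assumed settings key
EXPORT_INACTIVE_ITEMS_TO_CSV = 'export_inactive_items_to_csv'   # assumed settings key

settings = {
    EXPORT_PATH: '/tmp/exports',
    EXPORT_INACTIVE_ITEMS_TO_CSV: False,
}

with open('audit_123.json') as fp:
    audit_json = json.load(fp)

export_audit_csv(settings, audit_json)

Note that the CSV file is named after the audit's template_id and rows are appended, so all audits from the same template accumulate in one bulk export file.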
Example 2
def export_audit_sql(logger, settings, audit_json):
    """
    Save audit CSV to disk.
    :param settings:    Settings from command line and configuration file
    :param audit_json:  Audit JSON
    """
    connection_string = '{}://{}:{}@{}:{}/{}'.format(
        settings[DB_TYPE], settings[DB_USER], settings[DB_PWD],
        settings[DB_SERVER], settings[DB_PORT], settings[DB_NAME])
    engine = create_engine(connection_string)
    logger.debug('Making connection to ' + str(engine))
    csv_exporter = csvExporter.CsvExporter(audit_json, settings[EXPORT_INACTIVE_ITEMS_TO_CSV])
    # Convert the flattened audit rows into a DataFrame matching the SQL header row
    df = pd.DataFrame.from_records(csv_exporter.audit_table, columns=SQL_HEADER_ROW)
    # Empty score strings cannot be written to numeric columns, so replace them with NaN
    df.replace({'ItemScore': '', 'ItemMaxScore': '', 'ItemScorePercentage': ''}, np.nan, inplace=True)
    # Default missing coordinates to 0 so the row still inserts
    df.fillna(value={'Latitude': 0, 'Longitude': 0}, inplace=True)
    meta = MetaData()
    table = settings[SQL_TABLE]
    logger.info('Checking for table called ' + table)
    if not engine.dialect.has_table(engine, settings[SQL_TABLE]):
        logger.info(table + ' not found. Creating table')
        table = Table(
            table, meta,
            Column('index', Integer),
            Column('ItemType', String),
            Column('Label', String),
            Column('Response', String),
            Column('Comment', String),
            Column('MediaHypertextReference', String),
            Column('Latitude', String),
            Column('Longitude', String),
            Column('ItemScore', Float),
            Column('ItemMaxScore', Float),
            Column('ItemScorePercentage', Float),
            Column('Mandatory', Boolean),
            Column('FailedResponse', Boolean),
            Column('Inactive', Boolean),
            Column('ItemID', String, primary_key=True),
            Column('ResponseID', String),
            Column('ParentID', String),
            Column('AuditOwner', String),
            Column('AuditAuthor', String),
            Column('AuditName', String),
            Column('AuditScore', Float),
            Column('AuditMaxScore', Float),
            Column('AuditScorePercentage', Float),
            Column('AuditDuration', Float),
            Column('DateStarted', DateTime),
            Column('DateCompleted', DateTime),
            Column('DateModified', DateTime),
            Column('AuditID', String, primary_key=True),
            Column('TemplateID', String),
            Column('TemplateName', String),
            Column('TemplateAuthor', String),
            Column('ItemCategory', String),
            Column('DocumentNo', String),
            Column('ConductedOn', String),
            Column('PreparedBy', String),
            Column('Location', String),
            Column('Personnel', String),
            Column('ClientSite', String),
            Column('AuditSite', String),
            Column('AuditArea', String),
            Column('AuditRegion', String)
        )
        meta.create_all(engine)
        logger.info('Table created successfully. Writing first row')
        df.to_sql(settings[SQL_TABLE], con=engine, if_exists='append', method='multi')
    else:
        logger.info('Table exists, writing row')
        try:
            df.to_sql(settings[SQL_TABLE], con=engine, if_exists='append', method='multi')
        except Exception:
            logger.exception('Error occurred writing to SQL')
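
A minimal calling sketch, under the assumption that the DB_* and SQL_TABLE settings keys are plain string constants in the surrounding module; all key names and connection details below are placeholders, not the module's actual values.

# Hypothetical usage sketch -- not part of the original module. Key names and
# connection details are illustrative; the real constants live in the exporter
# module's configuration handling.
import json
import logging

DB_TYPE, DB_USER, DB_PWD = 'database_type', 'database_user', 'database_pwd'
DB_SERVER, DB_PORT, DB_NAME = 'database_server', 'database_port', 'database_name'
SQL_TABLE = 'database_table'
EXPORT_INACTIVE_ITEMS_TO_CSV = 'export_inactive_items_to_csv'

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('exporter')

settings = {
    DB_TYPE: 'postgresql',
    DB_USER: 'audit_user',
    DB_PWD: 'secret',
    DB_SERVER: 'localhost',
    DB_PORT: '5432',
    DB_NAME: 'audits',
    SQL_TABLE: 'audit_items',
    EXPORT_INACTIVE_ITEMS_TO_CSV: False,
}

with open('audit_123.json') as fp:
    audit_json = json.load(fp)

export_audit_sql(logger, settings, audit_json)

Because the table is created with a composite primary key on ItemID and AuditID, the same checklist item can appear once per audit, and each call appends that audit's rows to the existing table via if_exists='append'.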