コード例 #1
0
def export_audit_csv(settings, audit_json):
    """
    Save audit CSV to disk.

    :param settings:    Settings from command line and configuration file
    :param audit_json:  Audit JSON
    """
    csv_exporter = csvExporter.CsvExporter(audit_json, settings[EXPORT_INACTIVE_ITEMS_TO_CSV])

    use_real_name = settings[USE_REAL_TEMPLATE_NAME]
    if use_real_name is True:
        # Template names may contain path separators; replace them so the
        # result is a valid single filename.
        csv_export_filename = audit_json['template_data']['metadata']['name'] + ' - ' + audit_json['template_id']
        csv_export_filename = csv_export_filename.replace('/', ' ').replace('\\', ' ')
    elif isinstance(use_real_name, str) and use_real_name.startswith('single_file'):
        # BUG FIX: the original tested `settings[USE_REAL_TEMPLATE_NAME] is str`,
        # which compares the value against the `str` type object and is always
        # False, so this branch was unreachable. isinstance() is the correct test.
        csv_export_filename = settings[CONFIG_NAME]
    else:
        # Covers both the explicit False setting and any unrecognised value.
        csv_export_filename = audit_json['template_id']

    # Number the rows sequentially in the first column, starting at 1.
    for count, row in enumerate(csv_exporter.audit_table, start=1):
        row[0] = count

    csv_exporter.append_converted_audit_to_bulk_export_file(
        os.path.join(settings[EXPORT_PATH], csv_export_filename + '.csv'))
コード例 #2
0
def export_audit_pandas(logger, settings, audit_json, get_started):
    """
    Export an audit to each configured pandas-backed format (SQL or pickle).

    :param logger:      The logger
    :param settings:    Settings from command line and configuration file
    :param audit_json:  Audit JSON
    :param get_started: Tuple containing database connection settings
    """
    for export_format in settings[EXPORT_FORMATS]:
        if export_format == 'sql':
            export_audit_sql(logger, settings, audit_json, get_started)

        elif export_format == 'pickle':
            logger.info('Writing to Pickle')
            csv_exporter = csvExporter.CsvExporter(
                audit_json, settings[EXPORT_INACTIVE_ITEMS_TO_CSV])
            df = pd.DataFrame.from_records(csv_exporter.audit_table,
                                           columns=SQL_HEADER_ROW)
            # Empty score strings become NaN so the score columns stay numeric.
            df.replace(
                {
                    'ItemScore': '',
                    'ItemMaxScore': '',
                    'ItemScorePercentage': ''
                },
                np.nan,
                inplace=True)
            # Missing coordinates default to 0 instead of NaN.
            df.fillna(value={'Latitude': 0, 'Longitude': 0}, inplace=True)
            df.to_pickle('{}.pkl'.format(settings[SQL_TABLE]))
コード例 #3
0
 def test_Rugby_Union_Contest_Self_Assessment_(self):
     """null_date_test.json should convert to the expected CSV output."""
     # BUG FIX: all file handles are now closed via context managers, and the
     # temp CSV is removed even when the assertion fails.
     with open(os.path.join(self.path_to_test_files,
                            'null_date_test.json'), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 32.csv',
                                               allow_overwrite=True)
     try:
         with open('test 32.csv', 'r') as actual, \
                 open(os.path.join(self.path_to_test_files,
                                   'null_date_test_expected_output.csv'),
                      'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 32.csv')
コード例 #4
0
def export_audit_sql(logger, settings, audit_json, get_started):
    """
    Save audit to a database.

    :param logger:      The logger
    :param settings:    Settings from command line and configuration file
    :param audit_json:  Audit JSON
    :param get_started: Tuple containing settings (engine at index 1,
                        database model class at index 4)
    """
    engine = get_started[1]
    database = get_started[4]

    csv_exporter = csvExporter.CsvExporter(audit_json, settings[EXPORT_INACTIVE_ITEMS_TO_CSV])
    df = pd.DataFrame.from_records(csv_exporter.audit_table, columns=SQL_HEADER_ROW)
    # Millisecond epoch of the modification date, used as a date primary key.
    df['DatePK'] = pd.to_datetime(df['DateModified']).values.astype(np.int64) // 10 ** 6
    if settings[DB_TYPE].startswith(('mysql', 'postgres')):
        # These backends reject empty-string datetimes; use a sentinel date.
        df.replace({'DateCompleted': ''}, '1900-01-01 00:00:00', inplace=True)
        df.replace({'ConductedOn': ''}, '1900-01-01 00:00:00', inplace=True)
        for date_column in ('DateStarted', 'DateCompleted', 'DateModified', 'ConductedOn'):
            df[date_column] = pd.to_datetime(df[date_column])
    # Empty score strings become NaN, then every remaining NaN becomes 0.
    df.replace({'ItemScore': '', 'ItemMaxScore': '', 'ItemScorePercentage': ''}, np.nan, inplace=True)
    df.fillna(0, inplace=True)
    df['SortingIndex'] = range(1, len(df) + 1)
    df_dict = df.to_dict(orient='records')
    Session = sessionmaker(bind=engine)
    session = Session()

    try:
        session.bulk_insert_mappings(database, df_dict)
        session.commit()
    except KeyboardInterrupt:
        logger.warning('Interrupted by user, exiting.')
        session.rollback()
        sys.exit(0)
    except OperationalError as ex:
        session.rollback()
        logger.warning('Something went wrong. Here are the details: {}'.format(ex))
    except IntegrityError:
        # If the bulk insert fails, we do a slower merge
        logger.warning('Duplicate found, attempting to update')
        session.rollback()
        for row in df_dict:
            session.merge(database(**row))
        logger.debug('Row successfully updated.')
        session.commit()
    finally:
        # BUG FIX: the session was created locally but never closed,
        # leaking a database connection on every call.
        session.close()
コード例 #5
0
 def test_failed_response_test_(self):
     """unit_test_failed_response_test_.json should match the expected CSV."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(self.path_to_test_files,
                            'unit_test_failed_response_test_.json'),
               'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 33.csv',
                                               allow_overwrite=True)
     try:
         with open('test 33.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_failed_response_test__expected_output.csv'),
                      'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 33.csv')
コード例 #6
0
 def test_single_checkbox_checked(self):
     """A single checked checkbox should convert to the expected CSV."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(self.path_to_test_files,
                            'unit_test_single_checkbox_checked.json'),
               'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 2.csv',
                                               allow_overwrite=True)
     try:
         with open('test 2.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_checkbox_checked_expected_output.csv'),
                      'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 2.csv')
コード例 #7
0
 def test_do_not_export_inactive_fields(self):
     """With inactive-item export disabled, output should omit inactive fields."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(self.path_to_test_files,
                            'do_not_export_inactive_fields.json'),
               'r') as source:
         # False disables exporting of inactive items.
         csv_exporter = csv.CsvExporter(json.load(source), False)
     csv_exporter.save_converted_audit_to_file('test 37.csv',
                                               allow_overwrite=True)
     try:
         with open('test 37.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'do_not_export_inactive_fields_expected_output.csv'),
                      'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 37.csv')
コード例 #8
0
 def test_single_date_time_field_input_date_using_now_button(self):
     """A date/time field set via the 'now' button should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_date_time_field_input_date_using_now_button.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 3.csv',
                                               allow_overwrite=True)
     try:
         with open('test 3.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_date_time_field_input_date_using_now_button_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 3.csv')
コード例 #9
0
 def test_single_text_single_line_mathematical_symbols_in_text(self):
     """A text field containing mathematical symbols should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_text_single_line_mathematical_symbols_in_text.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 14.csv',
                                               allow_overwrite=True)
     try:
         with open('test 14.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_text_single_line_mathematical_symbols_in_text_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 14.csv')
コード例 #10
0
 def test_SafetyCulture_iAuditor___The_Smartest_Checklist_App_(self):
     """The SafetyCulture sample audit should convert to the expected CSV."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_SafetyCulture_iAuditor___The_Smartest_Checklist_App_.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 12.csv',
                                               allow_overwrite=True)
     try:
         with open('test 12.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_SafetyCulture_iAuditor___The_Smartest_Checklist_App__expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 12.csv')
コード例 #11
0
 def test_single_question_safe___at_risk___na_answered_at_risk(self):
     """A safe/at-risk/n.a. question answered 'at risk' should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_question_safe___at_risk___na_answered_at_risk.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 11.csv',
                                               allow_overwrite=True)
     try:
         with open('test 11.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_question_safe___at_risk___na_answered_at_risk_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 11.csv')
コード例 #12
0
 def test_single_question_custom_response_set_answered_california(self):
     """A custom response set answered 'california' should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_question_custom_response_set_answered_california.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 8.csv',
                                               allow_overwrite=True)
     try:
         with open('test 8.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_question_custom_response_set_answered_california_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 8.csv')
コード例 #13
0
 def test_single_media_field_Single_image_taken_with_camera(self):
     """A media field with a single camera image should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_media_field_Single_image_taken_with_camera.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 7.csv',
                                               allow_overwrite=True)
     try:
         with open('test 7.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_media_field_Single_image_taken_with_camera_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 7.csv')
コード例 #14
0
 def test_single_address_field_textually_input_address(self):
     """An address field with a typed-in address should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_address_field_textually_input_address.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 6.csv',
                                               allow_overwrite=True)
     try:
         with open('test 6.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_address_field_textually_input_address_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 6.csv')
コード例 #15
0
 def test_single_information_field___weblink__Nothing_to_do_here(self):
     """An information field containing a weblink should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_information_field___weblink__Nothing_to_do_here.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 4.csv',
                                               allow_overwrite=True)
     try:
         with open('test 4.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_information_field___weblink__Nothing_to_do_here_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 4.csv')
コード例 #16
0
 def test_single_drawing_field_Drawing_done_in_iOS_device(self):
     """A drawing field created on an iOS device should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_drawing_field_Drawing_done_in_iOS_device.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 5.csv',
                                               allow_overwrite=True)
     try:
         with open('test 5.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_drawing_field_Drawing_done_in_iOS_device_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 5.csv')
コード例 #17
0
 def test_single_dynamic_field_3_instances(self):
     """A dynamic field repeated three times should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_dynamic_field_3_instances.json'),
               'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 30.csv',
                                               allow_overwrite=True)
     try:
         with open('test 30.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_dynamic_field_3_instances_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 30.csv')
コード例 #18
0
 def test_single_question_as_mandatory_field_answered_mandatory_field(self):
     """An answered mandatory question should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_question_as_mandatory_field_answered_mandatory_field.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 28.csv',
                                               allow_overwrite=True)
     try:
         with open('test 28.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_question_as_mandatory_field_answered_mandatory_field_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 28.csv')
コード例 #19
0
 def test_single_media_field_24_images_from_gallery(self):
     """A media field with 24 gallery images should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_media_field_24_images_from_gallery.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 27.csv',
                                               allow_overwrite=True)
     try:
         with open('test 27.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_media_field_24_images_from_gallery_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 27.csv')
コード例 #20
0
 def test_single_multiple_choice___multiple_selection_8_answers_chosen(self):
     """A multi-selection field with 8 answers chosen should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_multiple_choice___multiple_selection_8_answers_chosen.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 25.csv',
                                               allow_overwrite=True)
     try:
         with open('test 25.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_multiple_choice___multiple_selection_8_answers_chosen_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 25.csv')
コード例 #21
0
 def test_single_question_yes___no___na_comment_and_images_added_to_question(self):
     """A yes/no/n.a. question with comment and images should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_question_yes___no___na_comment_and_images_added_to_question.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 35.csv',
                                               allow_overwrite=True)
     try:
         with open('test 35.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_question_yes___no___na_comment_and_images_added_to_question_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 35.csv')
コード例 #22
0
 def test_single_multiple_choice_field___single_response_Answered_frog(self):
     """A single-response choice field answered 'frog' should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_multiple_choice_field___single_response_Answered_frog.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 24.csv',
                                               allow_overwrite=True)
     try:
         with open('test 24.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_multiple_choice_field___single_response_Answered_frog_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 24.csv')
コード例 #23
0
 def test_single_address_field_used_map___15_Gilbert_St__Dover_Heights_NSW_2030__Australia(self):
     """An address field filled from the map picker should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_address_field_used_map___15_Gilbert_St__Dover_Heights_NSW_2030__Australia.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 31.csv',
                                               allow_overwrite=True)
     try:
         with open('test 31.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_address_field_used_map___15_Gilbert_St__Dover_Heights_NSW_2030__Australia_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 31.csv')
コード例 #24
0
 def test_single_multi_line_text_field_500_unicode_characters__on_5_lines(self):
     """A multi-line field of 500 unicode characters should convert correctly."""
     # BUG FIX: close file handles with context managers; clean up on failure.
     with open(os.path.join(
             self.path_to_test_files,
             'unit_test_single_multi_line_text_field_500_unicode_characters__on_5_lines.json'
     ), 'r') as source:
         csv_exporter = csv.CsvExporter(json.load(source))
     csv_exporter.save_converted_audit_to_file('test 26.csv',
                                               allow_overwrite=True)
     try:
         with open('test 26.csv', 'r') as actual, \
                 open(os.path.join(
                     self.path_to_test_files,
                     'unit_test_single_multi_line_text_field_500_unicode_characters__on_5_lines_expected_output.csv'
                 ), 'r') as expected:
             self.assertEqual(actual.read(), expected.read())
     finally:
         os.remove('test 26.csv')
コード例 #25
0
def sync_exports(logger, sc_client, settings):
    """
    Perform sync, exporting documents modified since last execution

    :param logger:    the logger
    :param sc_client: instance of SDK object
    :param settings:  dict containing settings values
    """
    export_formats = settings['export_formats']
    export_profiles = settings['export_profiles']
    filename_item_id = settings['filename_item_id']
    export_path = settings['export_path']
    timezone = settings['timezone']
    export_inactive_items_to_csv = settings['export_inactive_items_to_csv']
    media_sync_offset = settings['media_sync_offset_in_seconds']
    last_successful = get_last_successful(logger)
    results = sc_client.discover_audits(modified_after=last_successful)

    if results is not None:
        logger.info(str(results['total']) + ' audits discovered')
        export_count = 1
        export_total = results['total']

        for audit in results['audits']:
            logger.info('Processing audit (' + str(export_count) + '/' +
                        str(export_total) + ')')
            modified_at = dateutil.parser.parse(audit['modified_at'])
            now = datetime.datetime.utcnow()
            elapsed_time_difference = (pytz.utc.localize(now) - modified_at)
            if elapsed_time_difference > datetime.timedelta(
                    seconds=media_sync_offset):
                export_count += 1
                audit_id = audit['audit_id']
                logger.info('downloading ' + audit_id)
                audit_json = sc_client.get_audit(audit_id)
                template_id = audit_json['template_id']
                if template_id in export_profiles.keys():
                    export_profile_id = export_profiles[template_id]
                else:
                    export_profile_id = None

                if filename_item_id is not None:
                    export_filename = parse_export_filename(
                        audit_json['header_items'], filename_item_id)
                    if export_filename is None:
                        export_filename = audit_id
                else:
                    export_filename = audit_id
                for export_format in export_formats:
                    if export_format in ['pdf', 'docx']:
                        export_doc = sc_client.get_export(
                            audit_id, timezone, export_profile_id,
                            export_format)
                        save_exported_document(logger, export_path, export_doc,
                                               export_filename, export_format)
                    elif export_format == 'json':
                        export_doc = json.dumps(audit_json, indent=4)
                        save_exported_document(logger, export_path, export_doc,
                                               export_filename, export_format)
                    elif export_format == 'csv':
                        csv_exporter = csvExporter.CsvExporter(
                            audit_json, export_inactive_items_to_csv)
                        csv_export_filename = audit_json['template_id']
                        csv_exporter.append_converted_audit_to_bulk_export_file(
                            os.path.join(export_path,
                                         csv_export_filename + '.csv'))
                    elif export_format == 'media':
                        media_export_path = os.path.join(
                            export_path, 'media', export_filename)
                        extension = 'jpg'
                        media_id_list = get_media_from_audit(
                            logger, audit_json)
                        for media_id in media_id_list:
                            logger.info(
                                "Saving media_{0} to disc.".format(media_id))
                            media_file = sc_client.get_media(
                                audit_id, media_id)
                            media_export_filename = media_id
                            save_exported_media_to_file(
                                logger, media_export_path, media_file,
                                media_export_filename, extension)
                    elif export_format == 'web-report-link':
                        web_report_link = sc_client.get_web_report(audit_id)
                        web_report_data = [
                            template_id,
                            csvExporter.get_json_property(
                                audit_json, 'template_data', 'metadata',
                                'name'), audit_id,
                            csvExporter.get_json_property(
                                audit_json, 'audit_data', 'name'),
                            web_report_link
                        ]
                        save_web_report_link_to_file(logger, export_path,
                                                     web_report_data)
                logger.debug('setting last modified to ' +
                             audit['modified_at'])
                update_sync_marker_file(audit['modified_at'])
            else:
                logger.info(
                    'Audit\'s modified_at value is less than {0} seconds in the past, skipping for now!'
                    .format(media_sync_offset))
コード例 #26
0
ファイル: sql.py プロジェクト: eddible/iauditor-exporter
def export_audit_sql(logger, settings, audit_json, get_started):
    """
    Save audit to a database.
    :param logger:      The logger
    :param settings:    Settings from command line and configuration file
    :param audit_json:  Audit JSON
    :get_started:       Tuple containing settings
    """
    # NOTE(review): tuple layout differs from other versions of this function
    # (elsewhere index 1 is an engine and index 4 the model) — confirm against
    # the caller that index 2 is the model class and index 1 a live session.
    database = get_started[2]
    session = get_started[1]

    csv_exporter = csvExporter.CsvExporter(
        audit_json, settings[EXPORT_INACTIVE_ITEMS_TO_CSV])
    df = csv_exporter.audit_table
    df = pd.DataFrame.from_records(df, columns=SQL_HEADER_ROW)
    # Millisecond epoch of the modification date, used as a date primary key.
    df['DatePK'] = pd.to_datetime(df['DateModified']).values.astype(
        np.int64) // 10**6
    if settings[DB_TYPE].startswith('postgres'):
        # Postgres accepts NULL datetimes: empty strings become NaT/NaN.
        df.replace({'DateCompleted': ''}, np.datetime64(None), inplace=True)
        df.replace({'ConductedOn': ''}, np.datetime64(None), inplace=True)
        empty_value = np.nan
        empty_score = empty_value
    elif settings[DB_TYPE].startswith('mysql'):
        # MySQL: empty scores become 0.0 and empty datetimes become a
        # just-after-epoch sentinel (MySQL rejects '' and pre-1970 values).
        df.replace(
            {
                'ItemScore': '',
                'ItemMaxScore': '',
                'ItemScorePercentage': ''
            },
            0.0,
            inplace=True)
        empty_value = '1970-01-01T00:00:01'
        df.replace({'DateCompleted': ''}, empty_value, inplace=True)
        df.replace({'ConductedOn': ''}, empty_value, inplace=True)
        df['DateStarted'] = pd.to_datetime(df['DateStarted'])
        df['DateCompleted'] = pd.to_datetime(df['DateCompleted'])
        df['DateModified'] = pd.to_datetime(df['DateModified'])
        df['ConductedOn'] = pd.to_datetime(df['ConductedOn'],
                                           format='%Y-%m-%d %H:%M:%S',
                                           utc=False)
        # Strip any timezone so MySQL receives naive datetimes.
        df['ConductedOn'] = df['ConductedOn'].dt.tz_localize(None)
        empty_value = None
        empty_score = 0.0
    else:
        empty_value = None
        empty_score = empty_value

    # Normalise remaining empty score strings to the backend's empty marker.
    df.replace({
        'ItemScore': '',
        'ItemMaxScore': '',
        'ItemScorePercentage': ''
    },
               empty_score,
               inplace=True)
    # Whitespace-only cells also count as empty.
    df.replace(r'^\s*$', empty_value, regex=True, inplace=True)
    df['SortingIndex'] = range(1, len(df) + 1)
    df_dict = df.to_dict(orient='records')
    try:
        # Fast path: insert every row in one bulk statement.
        session.bulk_insert_mappings(database, df_dict)
    except KeyboardInterrupt:
        logger.warning('Interrupted by user, exiting.')
        session.rollback()
        session.close()
        sys.exit(0)
    except OperationalError as ex:
        session.rollback()
        session.close()
        logger.warning(
            'Something went wrong. Here are the details: {}'.format(ex))
    except IntegrityError as ex:
        # If the bulk insert fails, we do a slower merge
        logger.warning('Duplicate found, attempting to update')
        session.rollback()
        for row in df_dict:
            row_to_dict = database(**row)
            session.merge(row_to_dict)
        logger.debug('Row successfully updated.')
    session.commit()