def _delete_col(self, file_name, col_name):
    """Remove column `col_name` from the input csv `file_name`.

    Rewrites the file through a `.tmp` sibling, then swaps it into place.
    If the column is absent, the file is rewritten unchanged.
    """
    logging.info(f'Deleting column `{col_name}` in `{file_name}`')
    reader = PathManager.open_input_file(f'{file_name}', 'r')
    # Strip both newline styles so the last header name is clean
    # (matches the header handling in `_add_col`).
    text_line = reader.readline().replace('\n', '').replace('\r', '')
    old_cols = text_line.split(',')
    cols = text_line.split(',')
    if cols == ['']:
        cols = []
    if col_name in cols:
        cols.remove(col_name)
    reader.seek(0)
    writer = PathManager.open_input_file(f'{file_name}.tmp', 'w+')
    dict_reader = csv.DictReader(reader, fieldnames=old_cols)
    dict_writer = csv.DictWriter(
        writer, fieldnames=cols, dialect='unix', quoting=csv.QUOTE_MINIMAL)
    dict_writer.writeheader()
    for row in dict_reader:
        if dict_reader.line_num == 1:
            # First physical line is the old header; the new header was
            # already written above.
            continue
        # pop with default: row may not contain the column at all.
        row.pop(col_name, None)
        dict_writer.writerow(row)
    reader.close()
    writer.close()
    # BUG FIX: resolve through PathManager like `_add_col` does. The tmp
    # file was created inside the input directory, so removing/renaming by
    # the bare file name only worked when that directory was the cwd.
    original_name = PathManager.get_input_path(file_name)
    os.remove(original_name)
    os.rename(f'{original_name}.tmp', original_name)
    return
def _add_col(self, file_name, col_name, default_val):
    """Append column `col_name` to csv `file_name`, filling `default_val`.

    The file is rewritten via a `.tmp` sibling and atomically swapped in.
    """
    logging.info(f'Adding column `{col_name}` to `{file_name}`')
    source = PathManager.open_input_file(f'{file_name}', 'r')
    header = source.readline().replace('\n', '').replace('\r', '')
    field_names = header.split(',')
    if field_names == ['']:
        field_names = []
    if col_name not in field_names:
        field_names.append(col_name)
    source.seek(0)
    destination = PathManager.open_input_file(f'{file_name}.tmp', 'w+')
    dict_writer = csv.DictWriter(
        destination, fieldnames=field_names, dialect='unix', quoting=0)
    dict_reader = csv.DictReader(source, fieldnames=field_names)
    dict_writer.writeheader()
    for record in dict_reader:
        if dict_reader.line_num == 1:
            # Skip the original header row; a fresh one was written above.
            continue
        # The new column shows up as None in every row; backfill it.
        filled = {key: (default_val if value is None else value)
                  for key, value in record.items()}
        dict_writer.writerow(filled)
    source.close()
    destination.close()
    original_name = PathManager.get_input_path(file_name)
    temp_name = f'{original_name}.tmp'
    os.remove(original_name)
    os.rename(temp_name, original_name)
    return
def test_validate_files_exist(self):
    """When every expected input file exists, validation reports no errors."""
    for name in ('input.csv', 'digital_contacts.csv', 'dmr_id.csv',
                 'zones.csv', 'user.csv'):
        PathManager.open_input_file(name, 'w+').close()
    errors = Validator.validate_files_exist()
    self.assertEqual(0, len(errors))
def test_path_manager(self):
    """A write through PathManager should land in the default `./in` dir."""
    handle = PathManager.open_input_file('x.tst', 'w+')
    handle.write('test success')
    handle.close()
    # Read back directly from disk to prove where the file was written.
    with open('./in/x.tst', 'r') as verify:
        line = verify.readline()
    self.assertEqual(line, 'test success')
def test_migrate_four(self):
    """After migration four, input.csv must no longer have a `number` column."""
    for step in (self.manager._migrate_one, self.manager._migrate_two,
                 self.manager._migrate_three, self.manager._migrate_four):
        step()
    csv_file = PathManager.open_input_file('input.csv', 'r')
    self.assertFalse('number' in DictReader(csv_file).fieldnames)
    csv_file.close()
def test_sets_different_path(self):
    """Redirecting the input path makes PathManager write into the new dir."""
    os.mkdir('./whoa_different')
    try:
        PathManager.set_input_path('whoa_different')
        w = PathManager.open_input_file('x.tst', 'w+')
        w.write('different test success')
        w.close()
        with open('./whoa_different/x.tst', 'r') as f:
            line = f.readline()
        self.assertEqual(line, 'different test success')
    finally:
        # BUG FIX: cleanup previously ran only on success; a failing
        # assertion left the scratch directory behind for later tests.
        FileUtil.safe_delete_dir('./whoa_different')
        # NOTE(review): the input path is left pointing at `whoa_different`
        # after this test — confirm a fixture resets it between tests.
def test_migrate_five(self):
    """Migration five adds latitude and longitude columns to input.csv."""
    for step in (self.manager._migrate_one, self.manager._migrate_two,
                 self.manager._migrate_three, self.manager._migrate_four,
                 self.manager._migrate_five):
        step()
    csv_file = PathManager.open_input_file('input.csv', 'r')
    field_names = DictReader(csv_file).fieldnames
    self.assertTrue('latitude' in field_names)
    self.assertTrue('longitude' in field_names)
    csv_file.close()
def _migration_check(self, input_file, needed_cols):
    """Return the columns found in `input_file` that `needed_cols` lacks."""
    source = PathManager.open_input_file(input_file, 'r')
    provided_fields = csv.DictReader(source).fieldnames
    source.close()
    needed_fields = needed_cols.keys()
    return [column for column in provided_fields if column not in needed_fields]
def test_three_does_not_stomp(self):
    """Migration three must back up a pre-existing input.csv, not overwrite it."""
    logging.getLogger().setLevel(logging.CRITICAL)
    self.manager._migrate_one()
    self.manager._migrate_two()
    # Seed input.csv with known content before running migration three.
    seeded = PathManager.open_input_file('input.csv', 'w+')
    seeded.write('foo\nspecial')
    seeded.close()
    self.manager._migrate_three()
    # The original content should survive in the .bak file…
    backup = PathManager.open_input_file('input.csv.bak', 'r')
    self.assertEqual('foo\nspecial', backup.read())
    backup.close()
    # …and input.csv itself should still parse with the seeded header/value.
    current = PathManager.open_input_file('input.csv', 'r')
    first_row = next(DictReader(current))
    self.assertTrue('foo' in first_row.keys())
    self.assertEqual(first_row['foo'], 'special')
    current.close()
def _migrate_two(self):
    """Migration step 2: ensure every expected input csv exists.

    Missing files are created empty; `user.csv` is then seeded with its
    header columns before `dmr_id.csv` is checked (order preserved from
    the original implementation).
    """
    logging.info('Running migration step 2: Creating in.csv')

    def _ensure_exists(name):
        # Touch the file through PathManager so it lands in the input dir.
        if not PathManager.input_path_exists(name):
            PathManager.open_input_file(name, 'w+').close()

    for name in ('input.csv', 'digital_contacts.csv', 'zones.csv', 'user.csv'):
        _ensure_exists(name)
    user_columns = ['RADIO_ID', 'CALLSIGN', 'FIRST_NAME', 'LAST_NAME',
                    'CITY', 'STATE', 'COUNTRY', 'REMARKS']
    self._add_cols_to_file(PathManager.get_input_path('user.csv'), user_columns)
    _ensure_exists('dmr_id.csv')
def _generate_zone_data(self):
    """Load zones.csv into a dict of RadioZone keyed by zone number.

    Returns (zones, errors); rows that fail validation are skipped and
    their errors accumulated.
    """
    logging.info('Processing zones')
    feed = PathManager.open_input_file('zones.csv', 'r')
    zones = dict()
    errors = []
    for line_num, line in enumerate(csv.DictReader(feed), start=1):
        logging.debug(f'Processing line {line_num}: `{line}`')
        line_errors = self._validator.validate_radio_zone(
            line, line_num, feed.name)
        errors += line_errors
        if line_errors:
            continue
        zone = RadioZone(line)
        zones[zone.number.fmt_val()] = zone
    feed.close()
    return zones, errors
def _generate_dmr_id_data(self):
    """Load dmr_id.csv into a dict of DmrId keyed by 1-based line number.

    Returns (dmr_ids, errors); rows that fail validation are skipped and
    their errors accumulated.
    """
    logging.info('Processing dmr ids')
    feed = PathManager.open_input_file('dmr_id.csv', 'r')
    dmr_ids = dict()
    errors = []
    # FIX: log the same 1-based line number that is passed to the
    # validator; previously the debug log reported the 0-based count,
    # off by one versus the sibling loaders.
    for line_num, line in enumerate(csv.DictReader(feed), start=1):
        logging.debug(f'Processing line {line_num}: `{line}`')
        line_errors = self._validator.validate_dmr_id(
            line, line_num, feed.name)
        errors += line_errors
        if line_errors:
            continue
        dmr_id = DmrId(line)
        dmr_ids[line_num] = dmr_id
    feed.close()
    return dmr_ids, errors
def _generate_digital_contact_data(self):
    """Load digital_contacts.csv into a dict of DmrContact keyed by digital id.

    Returns (digital_contacts, errors); rows that fail validation are
    skipped and their errors accumulated.
    """
    logging.info('Processing digital contacts')
    feed = PathManager.open_input_file('digital_contacts.csv', 'r')
    digital_contacts = dict()
    errors = []
    for line_num, line in enumerate(csv.DictReader(feed), start=1):
        logging.debug(f'Processing line {line_num}: `{line}`')
        # BUG FIX: the validator was called with the constant 1, so every
        # validation error reported line 1 regardless of the actual row.
        line_errors = self._validator.validate_digital_contact(
            line, line_num, feed.name)
        errors += line_errors
        if line_errors:
            continue
        contact = DmrContact(line)
        digital_contacts[contact.digital_id.fmt_val()] = contact
    feed.close()
    return digital_contacts, errors
def _generate_user_data(self):
    """Load user.csv into a dict of DmrUser keyed by radio id.

    Returns (users, errors); invalid rows are skipped and their errors
    accumulated. Progress is logged periodically since the file is large.
    """
    logging.info('Processing dmr IDs. This step can take a while.')
    feed = PathManager.open_input_file('user.csv', 'r')
    users = dict()
    errors = []
    rows_processed = 0
    for line in csv.DictReader(feed):
        line_errors = self._validator.validate_dmr_user(
            line, rows_processed + 1, feed.name)
        errors += line_errors
        rows_processed += 1
        if line_errors:
            continue
        user = DmrUser(line)
        users[user.radio_id.fmt_val()] = user
        logging.debug(f'Writing user row {rows_processed}')
        if rows_processed % file_util.USER_LINE_LOG_INTERVAL == 0:
            logging.info(f'Processed {rows_processed} DMR users')
    feed.close()
    return users, errors
def test_only_some_files_exist(self):
    """With only input.csv present, the four other missing files are flagged."""
    PathManager.open_input_file('input.csv', 'w+').close()
    errors = Validator.validate_files_exist()
    self.assertEqual(4, len(errors))
def generate_all_declared(self):
    """Validate all input csvs and generate per-radio output files.

    Returns False if any file is missing or any row fails validation;
    returns True after all radio outputs have been written to `out/`.
    """
    # Abort early if any expected input file is missing.
    file_errors = self._validator.validate_files_exist()
    self._validator.flush_names()
    if len(file_errors) > 0:
        return False
    # Warn (but continue) if the input files look like a pre-migration layout.
    results = self._migrations.check_migrations_needed()
    if len(results.keys()) > 0:
        logging.warning(
            'You may be using an old version of the input files. Have you run migrations?'
        )
        logging.warning("Migrations check is under the 'File' menu.")
        sleep(1)
    # Wipe any previous per-radio output folders.
    for radio in radio_types.radio_choices():
        radio_folder = PathManager.get_output_path(radio)
        if not os.path.exists(radio_folder):
            continue
        logging.info(f'Deleting old output folder `{radio}`')
        FileUtil.safe_delete_dir(radio_folder)
    # Load and validate all supporting data sets up front.
    digital_contacts, digi_contact_errors = self._generate_digital_contact_data(
    )
    dmr_ids, dmr_id_errors = self._generate_dmr_id_data()
    zones, zone_errors = self._generate_zone_data()
    user, user_data_errors = self._generate_user_data()
    preload_errors = digi_contact_errors + dmr_id_errors + zone_errors + user_data_errors
    # Parse the main channel list, attaching channels to their zones.
    feed = PathManager.open_input_file('input.csv', 'r')
    csv_reader = csv.DictReader(feed)
    radio_channel_errors = []
    radio_channels = []
    line_num = 1
    for line in csv_reader:
        line_errors = self._validator.validate_radio_channel(
            line, line_num, feed.name, digital_contacts, zones)
        radio_channel_errors += line_errors
        line_num += 1
        if len(line_errors) > 0:
            continue
        radio_channel = RadioChannel(line, digital_contacts, dmr_ids)
        radio_channels.append(radio_channel)
        if radio_channel.zone_id.fmt_val(None) is not None:
            zones[radio_channel.zone_id.fmt_val()].add_channel(
                radio_channel)
    feed.close()
    # Any validation error anywhere aborts generation with a full report.
    all_errors = preload_errors + radio_channel_errors
    if len(all_errors) > 0:
        logging.error('--- VALIDATION ERRORS, CANNOT CONTINUE ---')
        for err in all_errors:
            logging.error(
                f'\t\tfile: `{err.file_name}` line:{err.line_num} validation error: {err.message}'
            )
        return False
    else:
        logging.info(
            'File validation complete, no obvious formatting errors found')
    # Open one channels-csv writer per radio type (unless the radio uses a
    # special output style and opts out of the shared csv format).
    radio_files = dict()
    headers_gen = RadioChannel.create_empty()
    FileUtil.safe_create_dir('out')
    channel_numbers = dict()
    for radio in self.radio_list:
        radio_casted = RadioChannelBuilder.casted(headers_gen, radio)
        FileUtil.safe_create_dir(f'out/{radio}')
        logging.info(f'Generating for radio type `{radio}`')
        if radio_casted.skip_radio_csv():
            logging.info(
                f'`{radio}` uses special output style. Skipping channels csv'
            )
            continue
        output = RadioWriter.output_writer(f'{radio}/{radio}_channels.csv', '\r\n')
        file_headers = radio_casted.headers()
        output.writerow(file_headers)
        radio_files[radio] = output
        channel_numbers[radio] = 1
    # Write every channel, per radio, keeping an independent channel
    # counter for each radio type.
    logging.info('Processing radio channels')
    line = 1
    for radio_channel in radio_channels:
        logging.debug(f'Processing radio line {line}')
        if line % file_util.RADIO_LINE_LOG_INTERVAL == 0:
            logging.info(f'Processing radio line {line}')
        line += 1
        for radio in self.radio_list:
            if radio not in radio_files.keys():
                continue
            # Skip digital channels for radios without DMR support.
            if not radio_types.supports_dmr(
                    radio) and radio_channel.is_digital():
                continue
            casted_channel = RadioChannelBuilder.casted(
                radio_channel, radio)
            input_data = casted_channel.output(channel_numbers[radio])
            radio_files[radio].writerow(input_data)
            channel_numbers[radio] += 1
    # Emit per-radio additional data; note this runs for every radio in
    # the list, including those that skipped the shared channels csv.
    additional_data = RadioAdditional(radio_channels, dmr_ids, digital_contacts, zones, user)
    for radio in self.radio_list:
        if radio in radio_files.keys():
            radio_files[radio].close()
        casted_additional_data = RadioAdditionalBuilder.casted(
            additional_data, radio)
        casted_additional_data.output()
    logging.info(f'''Radio generator complete. Your output files are in `{os.path.abspath('out')}` The next step is to import these files into your radio programming application. (e.g. CHiRP)''')
    return True