def _delete_col(self, file_name, col_name):
	"""Remove column `col_name` from the CSV file `file_name`.

	The file is rewritten through a `.tmp` sibling which is then swapped
	into place. If `col_name` is not present the file is still rewritten
	with its columns unchanged.
	"""
	logging.info(f'Deleting column `{col_name}` in `{file_name}`')
	reader = PathManager.open_input_file(f'{file_name}', 'r')
	text_line = reader.readline().replace('\n', '')
	old_cols = text_line.split(',')
	cols = text_line.split(',')
	if cols == ['']:
		cols = []
	if col_name in cols:
		cols.remove(col_name)
	reader.seek(0)
	writer = PathManager.open_input_file(f'{file_name}.tmp', 'w+')
	dict_reader = csv.DictReader(reader, fieldnames=old_cols)
	# csv.QUOTE_MINIMAL spelled explicitly instead of the magic number 0.
	dict_writer = csv.DictWriter(writer, fieldnames=cols, dialect='unix', quoting=csv.QUOTE_MINIMAL)
	dict_writer.writeheader()
	for row in dict_reader:
		# fieldnames was supplied explicitly, so the header line comes back
		# as a data row; skip it.
		if dict_reader.line_num == 1:
			continue
		if col_name in row.keys():
			row.pop(col_name)
		dict_writer.writerow(row)
	reader.close()
	writer.close()
	# BUG FIX: resolve through PathManager the way `_add_col` does, instead
	# of removing/renaming a bare name relative to the CWD. The two paths
	# differ whenever the input directory is not the working directory.
	original_name = PathManager.get_input_path(file_name)
	os.remove(original_name)
	os.rename(f'{original_name}.tmp', original_name)
	return
def _add_col(self, file_name, col_name, default_val):
	"""Append column `col_name`, filled with `default_val`, to the CSV file
	`file_name`, rewriting it through a `.tmp` sibling."""
	logging.info(f'Adding column `{col_name}` to `{file_name}`')
	source = PathManager.open_input_file(f'{file_name}', 'r')
	header = source.readline().replace('\n', '').replace('\r', '')
	fieldnames = header.split(',')
	if fieldnames == ['']:
		fieldnames = []
	if col_name not in fieldnames:
		fieldnames.append(col_name)
	source.seek(0)
	target = PathManager.open_input_file(f'{file_name}.tmp', 'w+')
	csv_out = csv.DictWriter(target, fieldnames=fieldnames, dialect='unix', quoting=0)
	csv_in = csv.DictReader(source, fieldnames=fieldnames)
	csv_out.writeheader()
	for row in csv_in:
		# Header row re-appears as data because fieldnames was supplied.
		if csv_in.line_num == 1:
			continue
		# The new column reads back as None for every pre-existing row;
		# substitute the requested default.
		for key in row.keys():
			if row[key] is None:
				row[key] = default_val
		csv_out.writerow(row)
	source.close()
	target.close()
	original_name = PathManager.get_input_path(file_name)
	os.remove(original_name)
	os.rename(f'{original_name}.tmp', original_name)
	return
def test_imports_tone_repeater(self):
	"""Importing a CHIRP file with TSQL and DTCS rows maps the first row's
	fields onto the default channel model."""
	with open('in/import_chirp.csv', 'w+') as import_file:
		import_file.write(
			"""Location,Name,Frequency,Duplex,Offset,Tone,rToneFreq,cToneFreq,DtcsCode,DtcsPolarity,Mode,TStep,Skip,Comment,URCALL,RPT1CALL,RPT2CALL,DVCODE
0,TSQLRP,449.825000,-,5.000000,TSQL,103.5,103.5,023,NN,FM,5.00,,,,,,
0,DCSRPT,447.075000,-,5.000000,DTCS,67.0,67.0,165,NN,FM,5.00,,,,,,""")
	PathManager.set_import_file('in/import_chirp.csv', radio_types.CHIRP)
	channel = self.importer.run_import(radio_types.CHIRP, 'in/import_chirp.csv')[0]
	# All three name widths receive the same imported value.
	self.assertEqual('TSQLRP', channel.name.fmt_val())
	self.assertEqual('TSQLRP', channel.medium_name.fmt_val())
	self.assertEqual('TSQLRP', channel.short_name.fmt_val())
	self.assertEqual(449.825, channel.rx_freq.fmt_val())
	self.assertEqual(103.5, channel.rx_ctcss.fmt_val())
	self.assertIsNone(channel.rx_dcs.fmt_val())
	self.assertIsNone(channel.rx_dcs_invert.fmt_val())
	self.assertEqual('High', channel.tx_power.fmt_val())
	self.assertEqual(-5, channel.tx_offset.fmt_val())
	self.assertEqual(103.5, channel.tx_ctcss.fmt_val())
	self.assertIsNone(channel.tx_dcs.fmt_val())
	self.assertIsNone(channel.tx_dcs_invert.fmt_val())
	self.assertIsNone(channel.digital_timeslot.fmt_val())
	self.assertIsNone(channel.digital_color.fmt_val())
	self.assertIsNone(channel.digital_contact_id.fmt_val())
	self.assertIsNone(channel.latitude.fmt_val())
	self.assertIsNone(channel.longitude.fmt_val())
def setUp(self):
	"""Start every test from fresh, empty `in`/`out` directories."""
	for directory in ('in', 'out'):
		FileUtil.safe_delete_dir(directory)
		FileUtil.safe_create_dir(directory)
	PathManager.set_input_path('./in')
	PathManager.set_output_path('./out')
def check_migrations_needed(self):
	"""Return {file_name: [extra column, ...]} for every input CSV that
	still carries columns the current schema does not use."""
	checks = (
		('input.csv', RadioChannelDefault),
		('digital_contacts.csv', DmrContactDefault),
		('zones.csv', RadioZoneDefault),
	)
	not_needed_cols = dict()
	for file_name, template_cls in checks:
		file_path = PathManager.get_input_path(file_name)
		expected_cols = dict(template_cls.create_empty().__dict__)
		extras = self._migration_check(file_path, expected_cols)
		if extras:
			not_needed_cols[file_name] = extras
	return not_needed_cols
def remove_backups(self):
	"""Delete every `.bak` file in the input directory, if the directory
	exists."""
	if not PathManager.input_path_exists(''):
		return
	for file_name in os.listdir(PathManager.get_input_path()):
		if not file_name.endswith('.bak'):
			continue
		file_path = PathManager.get_input_path(file_name)
		logging.info(f'Removing backup `{file_path}`')
		os.remove(file_path)
def test_sets_different_path(self):
	"""open_input_file writes into whatever directory was last set as the
	input path."""
	os.mkdir('./whoa_different')
	PathManager.set_input_path('whoa_different')
	with PathManager.open_input_file('x.tst', 'w+') as out_file:
		out_file.write('different test success')
	with open('./whoa_different/x.tst', 'r') as in_file:
		contents = in_file.readline()
	self.assertEqual(contents, 'different test success')
	FileUtil.safe_delete_dir('./whoa_different')
def test_migration_three(self):
	"""After migration three, input.csv holds exactly the full header and
	no data rows."""
	self.manager._migrate_one()
	self.manager._migrate_two()
	self.manager._migrate_three()
	expected_header = (
		'number,name,medium_name,short_name,zone_id,'
		'rx_freq,rx_ctcss,rx_dcs,rx_dcs_invert,'
		'tx_power,tx_offset,tx_ctcss,tx_dcs,tx_dcs_invert,'
		'digital_timeslot,digital_color,digital_contact_id\n'
	)
	f = PathManager._open_file('in/input.csv', 'r')
	self.assertEqual(expected_header, f.readline())
	self.assertEqual('', f.readline())
	f.close()
def validate_files_exist(cls):
	"""Check that every required input CSV exists.

	Returns a list of ValidationError; empty when all files are present.
	Logs a summary either way.
	"""
	required_files = [
		'input.csv',
		'digital_contacts.csv',
		'dmr_id.csv',
		'zones.csv',
		'user.csv',
	]
	errors = [
		ValidationError(f"Cannot open file: `{file_name}`", None, file_name)
		for file_name in required_files
		if not PathManager.input_path_exists(file_name)
	]
	if errors:
		logging.error("--- FILE MISSING ERRORS, CANNOT CONTINUE ---")
		logging.info(f"Checked `{PathManager.get_input_path()}`")
		for err in errors:
			logging.error(f"\t\t{err.message}")
		logging.info(
			"Have you run `Wizard (new)` or `Migrations (update)` under `Dangerous Operations`?"
		)
	else:
		logging.info("All necessary files found")
	return errors
def _headers(self):
	"""Write the D710 column-header line to a fresh `.hmk` output file.

	FIX: removed a dead trailing `pass` statement and switched to a
	`with` block so the file handle is closed even if a write fails.
	"""
	with PathManager.open_output_file(
			f'{radio_types.D710}/{radio_types.D710}.hmk', 'w+') as f:
		# NOTE(review): header is taken from self._channels[1] — presumably
		# channels are 1-indexed; confirm against the container's layout.
		f.write(self._channels[1].headers() + "\n")
def test_validate_files_exist(self):
	"""Creating every required file makes the validator report no errors."""
	required = ['input.csv', 'digital_contacts.csv', 'dmr_id.csv', 'zones.csv', 'user.csv']
	for file_name in required:
		PathManager.open_input_file(file_name, 'w+').close()
	errors = Validator.validate_files_exist()
	self.assertEqual(0, len(errors))
def test_path_manager(self):
	"""open_input_file writes into the configured `./in` directory."""
	with PathManager.open_input_file('x.tst', 'w+') as writer:
		writer.write('test success')
	with open('./in/x.tst', 'r') as reader:
		line = reader.readline()
	self.assertEqual(line, 'test success')
def test_migrate_four(self):
	"""Migration four drops the `number` column from input.csv."""
	for migration in (self.manager._migrate_one, self.manager._migrate_two,
						self.manager._migrate_three, self.manager._migrate_four):
		migration()
	f = PathManager.open_input_file('input.csv', 'r')
	self.assertFalse('number' in DictReader(f).fieldnames)
	f.close()
def _add_cols_to_file(self, file_name, cols):
	"""Append each column in `cols` (blank default) to `file_name`,
	preserving the original file as a `.bak` backup."""
	source_path = PathManager.get_input_path(file_name)
	backup_path = f'{source_path}.bak'
	working_path = f'{source_path}.tmp'
	# Keep the untouched original as the backup; edit a copy in place.
	os.rename(source_path, backup_path)
	shutil.copyfile(backup_path, working_path)
	for col in cols:
		self._add_col(f'{file_name}.tmp', col, '')
	os.rename(working_path, source_path)
def _output(self):
	"""Append every analog channel to the D710 `.hmk` file.

	Digital channels are skipped entirely and consume no channel number.
	"""
	f = PathManager.open_output_file(
		f'{radio_types.D710}/{radio_types.D710}.hmk', 'a')
	analog_channels = (ch for ch in self._channels if not ch.is_digital())
	for number, channel in enumerate(analog_channels, start=1):
		f.writelines(channel.output(number) + "\n")
	f.close()
def _create_input(self):
	"""Create the input directory and seed every starter CSV."""
	FileUtil.safe_create_dir(PathManager.get_input_path())
	creators = (
		self._create_channel_file,   # channels
		self._create_dmr_data,       # digital_contacts
		self._create_zone_data,      # zones
		self._create_dmr_user_data,  # user
	)
	for create in creators:
		create()
def _import_file_async(self):
	"""Run the CHIRP import for the configured import path; write the
	resulting channels out only if the import produced any."""
	path = PathManager.get_import_path()
	channels = []
	try:
		channels = self._radio_importer.run_import(radio_types.CHIRP, path)
	except Exception as e:
		# Best-effort: a failed import is logged, not propagated.
		logging.error(f'Failed to import {path}. Exception: {e.__class__}')
	if channels:
		self._radio_importer.channels_to_file(channels)
def migrate(self):
	"""Run all migrations in order, refusing to start while `.bak` files
	from a previous run are still present."""
	backup_names = []
	if PathManager.input_path_exists(''):
		backup_names = [name for name in os.listdir(PathManager.get_input_path())
						if re.search('\\.bak$', name)]
	for name in backup_names:
		logging.warning(f'Existing backup file: `{PathManager.get_input_path(name)}`')
	if backup_names:
		logging.info('Backup files still exist. Please delete before continuing.')
		logging.info('MIGRATIONS HAVE NOT BEEN RUN')
		return
	self._migrate_one()
	self._migrate_two()
	self._migrate_three()
	self._migrate_four()
	self._migrate_five()
	logging.info('Migrations are complete. Your original files have been renamed to have a `.bak` extension.')
def test_migrate_five(self):
	"""Migration five adds latitude/longitude columns to input.csv."""
	for migration in (self.manager._migrate_one, self.manager._migrate_two,
						self.manager._migrate_three, self.manager._migrate_four,
						self.manager._migrate_five):
		migration()
	f = PathManager.open_input_file('input.csv', 'r')
	fieldnames = DictReader(f).fieldnames
	self.assertTrue('latitude' in fieldnames)
	self.assertTrue('longitude' in fieldnames)
	f.close()
def _bind_console_log(self, layout):
	"""Wire the on-screen log widget into the `radio_sync` logger and set
	the default input/output/import paths."""
	self.text_log = layout.ids[LayoutIds.log_output]
	PathManager.input_folder_label = layout.ids[LayoutIds.input_folder]
	PathManager.output_folder_label = layout.ids[LayoutIds.output_folder]
	PathManager.set_input_path('./in')
	PathManager.set_output_path('./out')
	PathManager.set_import_file('./in/import.csv', radio_types.CHIRP)
	logger = logging.getLogger('radio_sync')
	# Route log records into the text widget via a stream-shaped adapter.
	handler = logging.StreamHandler(stream=TextBoxHandler(self.text_log))
	handler.setFormatter(GlobalConstants.logging_formatter)
	logger.setLevel(logging.DEBUG if self.force_debug else logging.INFO)
	logger.addHandler(handler)
def _migration_check(self, input_file, needed_cols):
	"""Return the header columns of `input_file` that are absent from the
	keys of `needed_cols`."""
	f = PathManager.open_input_file(input_file, 'r')
	provided_fields = csv.DictReader(f).fieldnames
	f.close()
	needed_fields = needed_cols.keys()
	return [col for col in provided_fields if col not in needed_fields]
def test_three_does_not_stomp(self):
	"""Migration three backs up and preserves a user-modified input.csv
	rather than overwriting its contents."""
	logging.getLogger().setLevel(logging.CRITICAL)
	self.manager._migrate_one()
	self.manager._migrate_two()
	with PathManager.open_input_file('input.csv', 'w+') as f:
		f.write('foo\nspecial')
	self.manager._migrate_three()
	with PathManager.open_input_file('input.csv.bak', 'r') as f:
		self.assertEqual('foo\nspecial', f.read())
	with PathManager.open_input_file('input.csv', 'r') as f:
		first_row = next(DictReader(f))
		self.assertTrue('foo' in first_row.keys())
		self.assertEqual(first_row['foo'], 'special')
def _output_zones(self):
	"""Write every zone to the default-format zone CSV."""
	if self._zones is None:
		logging.error(f"No zones list found for {radio_types.DEFAULT}.")
		return
	writer = PathManager.open_output_file(
		f'{radio_types.DEFAULT}/{radio_types.DEFAULT}_zone.csv', 'w+')
	zone_file = csv.writer(writer, lineterminator='\n')
	zone_file.writerow(RadioZoneDefault.create_empty().headers())
	for zone in self._zones.values():
		# NOTE(review): reaches into zone._associated_channels (private);
		# confirm whether a public accessor exists.
		casted = RadioZoneBuilder.casted(zone.cols, zone._associated_channels, radio_types.DEFAULT)
		zone_file.writerow(casted.output())
	writer.close()
def _output_user(self):
	"""Write every DMR user to the default-format user CSV."""
	if self._users is None:
		# BUG FIX: the message previously said "No zones list found"
		# (copy-paste from _output_zones); it now names the users list.
		logging.error(f"No users list found for {radio_types.DEFAULT}.")
		return
	writer = PathManager.open_output_file(
		f'{radio_types.DEFAULT}/{radio_types.DEFAULT}_user.csv', 'w+')
	users_file = csv.writer(writer, lineterminator='\n')
	users_file.writerow(DmrUserDefault.create_empty().headers())
	for user in self._users.values():
		casted_user = DmrUserBuilder.casted(user.cols, radio_types.DEFAULT)
		users_file.writerow(casted_user.output(None))
	writer.close()
def channels_to_file(self, channels):
	"""Write imported channels, cast to the default format, to
	`import_result.csv` in the output directory."""
	logging.info('Writing imported files.')
	writer = RadioWriter.output_writer('import_result.csv', '\r\n')
	writer.writerow(RadioChannelDefault.create_empty().headers())
	for channel_num, chan in enumerate(channels, start=1):
		channel_default = RadioChannelBuilder.casted(chan, radio_types.DEFAULT)
		writer.writerow(channel_default.output(channel_num))
	writer.close()
	result_path = PathManager.get_output_path('import_result.csv')
	logging.info(
		f'Import complete! Your imported file is in `{result_path}`')
def _output_contacts(self):
	"""Write every DMR contact to the default-format contacts CSV."""
	if self._digital_contacts is None:
		logging.error(f"No digital contacts found for {radio_types.DEFAULT}.")
		return
	writer = PathManager.open_output_file(
		f'{radio_types.DEFAULT}/{radio_types.DEFAULT}_contacts.csv', 'w+')
	contact_file = csv.writer(writer, lineterminator='\n')
	contact_file.writerow(DmrContactDefault.create_empty().headers())
	for contact in self._digital_contacts.values():
		casted = DmrContactBuilder.casted(contact, radio_types.DEFAULT)
		contact_file.writerow(casted.output(None))
	writer.close()
def bootstrap(self):
	"""Create the input and output trees, then tell the user what to do
	next and where the input CSVs live."""
	self._create_input()
	self._create_output()
	input_dir = PathManager.get_input_path()
	# NOTE(review): line breaks in this message were reconstructed from a
	# whitespace-mangled source — confirm against the original layout.
	logging.info(
		f'''Wizard is complete! You may now open `input.csv` and add your radio channels.
Input CSVs are located in `{input_dir}`

What each file does:
input.csv: your radio channels. For best results, ONLY FILL OUT THE COLUMNS YOU NEED
zones.csv: preset group that you would like your channel in (if radio supports multiple zones)

DMR-ONLY FILES safe to ignore for analog radios:
digital_contacts.csv: DMR contact IDs (e.g. Talkgroups)
dmr_id.csv: Set your DMR id (from radioid.net)

Be sure to check the 'help' menu for more guidance! Sample data has been added to each file as an example.''')
def _output_radioids(self):
	"""Write every DMR radio ID to the default-format radioid CSV.

	FIX: removed the dead `number` counter the original incremented on
	every iteration but never read, plus the redundant trailing `return`.
	"""
	if self._dmr_ids is None:
		logging.error(f"No DMR ids found for {radio_types.DEFAULT}.")
		return
	writer = PathManager.open_output_file(
		f'{radio_types.DEFAULT}/{radio_types.DEFAULT}_radioid.csv', 'w+')
	radio_id_file = csv.writer(writer, lineterminator='\n')
	radio_id_file.writerow(DmrIdDefault.create_empty().headers())
	for dmr_id in self._dmr_ids.values():
		casted_id = DmrIdBuilder.casted(dmr_id, radio_types.DEFAULT)
		radio_id_file.writerow(casted_id.output(None))
	writer.close()
def _generate_zone_data(self):
	"""Parse `zones.csv` into {zone_number: RadioZone}.

	Returns (zones, errors); rows with validation errors are collected in
	`errors` and excluded from the result dict.
	"""
	logging.info('Processing zones')
	feed = PathManager.open_input_file('zones.csv', 'r')
	csv_feed = csv.DictReader(feed)
	zones = dict()
	errors = []
	for line_num, line in enumerate(csv_feed, start=1):
		logging.debug(f'Processing line {line_num}: `{line}`')
		line_errors = self._validator.validate_radio_zone(line, line_num, feed.name)
		errors += line_errors
		if line_errors:
			continue
		zone = RadioZone(line)
		zones[zone.number.fmt_val()] = zone
	feed.close()
	return zones, errors
def _generate_dmr_id_data(self):
	"""Parse `dmr_id.csv` into {line_number: DmrId}.

	Returns (dmr_ids, errors); rows with validation errors are collected
	in `errors` and excluded from the result dict.

	FIX: the debug log previously used a 0-based counter while validation
	(and the sibling _generate_zone_data) numbered rows from 1; logging
	and validation now agree on 1-based line numbers.
	"""
	logging.info('Processing dmr ids')
	feed = PathManager.open_input_file('dmr_id.csv', 'r')
	csv_feed = csv.DictReader(feed)
	dmr_ids = dict()
	errors = []
	for line_num, line in enumerate(csv_feed, start=1):
		logging.debug(f'Processing line {line_num}: `{line}`')
		line_errors = self._validator.validate_dmr_id(line, line_num, feed.name)
		errors += line_errors
		if line_errors:
			continue
		dmr_ids[line_num] = DmrId(line)
	feed.close()
	return dmr_ids, errors