def __init__(self):
    """Construct the client and wire up its sub-components.

    Builds, in order: configuration store, HTTP transport, websocket
    client, connection/auth manager, and the high-level API facade
    (which talks through the HTTP transport).
    """
    LOG.debug("JellyfinClient initializing...")
    self.config = Config()
    self.http = HTTP(self)
    self.wsc = WSClient(self)
    self.auth = ConnectionManager(self)
    # The API facade issues its requests via the transport created above.
    self.jellyfin = api.API(self.http)
    # Both event sinks start out pointing at the same module-level handler.
    self.callback_ws = self.callback = callback
def test_graph_and_lines(self):
    """Tests simulation with graph and lines"""
    # Patch the three Config data properties so the simulation runs on a
    # fixed in-memory network instead of data loaded from files.
    config_name = get_full_class_name(Config)
    with patch(config_name+'.graph_dict', new_callable=PropertyMock) as mock_graph_dict:
        with patch(config_name+'.lines_dict', new_callable=PropertyMock) as mock_lines_dict:
            with patch(config_name+'.traffic_data_dict', new_callable=PropertyMock) as mock_traffic_dict:
                # Adjacency list of the stop graph; tuple = (neighbour, travel time).
                mock_graph_dict.return_value = {'A': [('B', 7), ('D', 2)],
                                                'B': [('A', 7), ('C', 1), ('E', 2)],
                                                'C': [('B', 1), ('D', 3)],
                                                'D': [('A', 2), ('C', 3)],
                                                'E': [('B', 2), ('F', 2)],
                                                'F': [('E', 2)]}
                # One line (id 0) with a forward route A..F and a return route F..A.
                mock_lines_dict.return_value = {
                    0: {'id': 0, 'bus_capacity': 20, 'frequency1': 17, 'frequency2': 17,
                        'route1': ['A', 'D', 'C', 'B', 'E', 'F'],
                        'route2': ['F', 'E', 'B', 'A']}}
                config = Config(["A", "B", "C", "D", "E", "F"], {}, {}, {}, 1.0)
                # All-zero traffic matrix: no passengers are generated, so the
                # test observes pure bus movement along the routes.
                mock_traffic_dict.return_value = {'E': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'F': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'D': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'A': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'C': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'B': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0}}
                simulation = Simulation(config)

                def mocked_update(mocked_self):
                    """Mocked update """
                    # For every bus on route 0, record either the stop it is
                    # standing at (time_to_next_stop == 0) or the edge it is
                    # currently travelling, encoded as "<current><next>".
                    for bus in mocked_self.buses:
                        if bus.route == 0:
                            if bus.id not in mocked_self.mocked_dict.keys():
                                mocked_self.mocked_dict[bus.id] = []
                            if bus.time_to_next_stop == 0:
                                mocked_self.mocked_dict[bus.id].append(bus.current_stop_name)
                            else:
                                mocked_self.mocked_dict[bus.id].append(bus.current_stop_name + bus.next_stop_name)

                def finished(mocked_self):
                    # Property hook queried each refresh(): piggy-back the trace
                    # recording on it and never report the simulation finished.
                    mocked_self.mocked_update()
                    return False

                add_property(simulation, "finished", finished)
                from types import MethodType
                simulation.mocked_update = MethodType(mocked_update, simulation)
                add_variable(simulation, "count_finished", 0)
                add_variable(simulation, "mocked_dict", {})
                count = 0
                # Drive the simulation for a fixed number of steps.
                while count < 35:
                    count += 1
                    simulation.refresh()
                # Expected per-bus trace on route 0.
                # NOTE(review): the leading 'PA' suggests buses start on an edge
                # from a depot stop 'P' toward 'A' — confirm against Simulation.
                paths = ['PA', 'A', 'AD', 'AD', 'D', 'DC', 'DC', 'DC',
                         'C', 'CB', 'B', 'BE', 'BE', 'E', 'EF', 'EF', 'F']
                self.assertEqual(len(simulation.mocked_dict), 2)
                for path in simulation.mocked_dict.values():
                    self.assertEqual(path, paths)
def test_graph_and_lines_transfer_2(self):
    """Tests simulation with graph and lines - duplication"""
    # Patch the three Config data properties so the simulation runs on a
    # fixed in-memory network instead of data loaded from files.
    config_name = get_full_class_name(Config)
    with patch(config_name+'.graph_dict', new_callable=PropertyMock) as mock_graph_dict:
        with patch(config_name+'.lines_dict', new_callable=PropertyMock) as mock_lines_dict:
            with patch(config_name+'.traffic_data_dict', new_callable=PropertyMock) as mock_traffic_dict:
                class MockedGenerator:
                    # Deterministic passenger generator: emits exactly one
                    # C -> F passenger on the first matching call, then nothing.
                    def __init__(self, empty_argument):
                        self.done = False

                    def generate(self, src, dest):
                        if not self.done and src == 'C' and dest == 'F':
                            self.done = True
                            return 1
                        return 0

                # Adjacency list of the stop graph; all edges take 2 time units.
                mock_graph_dict.return_value = {'A': [('B', 2), ('D', 2)],
                                                'B': [('A', 2), ('C', 2), ('E', 2)],
                                                'C': [('B', 2), ('D', 2)],
                                                'D': [('A', 2), ('C', 2)],
                                                'E': [('B', 2), ('F', 2)],
                                                'F': [('E', 2)]}
                # Two lines; huge frequencies ensure only the initial buses run.
                mock_lines_dict.return_value = {
                    0: {'id': 0, 'bus_capacity': 20, 'frequency1': 1000, 'frequency2': 1000,
                        'route1': ['B', 'A', 'D', 'C'],
                        'route2': ['C', 'D', 'A', 'B']},
                    1: {'id': 1, 'bus_capacity': 20, 'frequency1': 1000, 'frequency2': 1000,
                        'route1': ['C', 'B', 'E', 'F'],
                        'route2': ['F', 'E', 'B', 'C']}}
                config = Config(["A", "B", "C", "D", "E", "F"], {}, {}, {}, 1.0)
                # Traffic matrix: a single C -> F demand; everything else zero.
                mock_traffic_dict.return_value = {'E': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'F': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'D': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'A': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'C': {'E': 0, 'F': 1, 'D': 0, 'A': 0, 'C': 0, 'B': 0},
                                                  'B': {'E': 0, 'F': 0, 'D': 0, 'A': 0, 'C': 0, 'B': 0}}
                simulation = Simulation(config, MockedGenerator)
                # Step 1: the passenger group appears at stop C.
                simulation.refresh()
                self.are_lists_equal(simulation.stops['C'].passengers,
                                     [PassengersGroup('F', 1)],
                                     passenger_group_equality)
                # Step 2: the group has boarded a bus — stop C is empty again,
                # and exactly one passenger is on board across all buses
                # (i.e. the group was not duplicated between the two lines).
                simulation.refresh()
                self.are_lists_equal(simulation.stops['C'].passengers, [],
                                     passenger_group_equality)
                k = 0
                for bus in simulation.buses:
                    k += bus.count
                self.are_equal(k, 1)
def noninteractive_run(fa, args):
    """Run the FLIM analysis pipeline end-to-end without user interaction.

    Imports raw data files, renames/drops columns, reorders headers,
    computes derived columns, and applies range filters, logging progress
    at each stage.

    Parameters:
        fa: application facade providing get_importer(), get_preprocessor()
            and get_analyzer().
        args: parsed CLI namespace with input, exclude, config and parser.

    Returns:
        None. Returns early if no files are found, the parser cannot be
        instantiated, or no data could be imported.
    """
    impo = fa.get_importer()
    a, s = impo.add_files(args.input, exclude=args.exclude)
    logging.debug("\nFound %d file(s), skipping %d file(s)." % (a, s))
    if len(impo.get_files()) == 0:
        return

    # Load configuration from JSON when provided; otherwise use defaults.
    # BUGFIX: the original left 'config' unbound when args.config pointed to
    # a nonexistent file, raising NameError below — now falls back to the
    # default configuration after logging the problem.
    if args.config and os.path.isfile(args.config):
        config = Config()
        config.read_from_json(args.config)
    else:
        if args.config:
            logging.debug("Configuration file %s does not exist." % args.config)
        config = Config()
        config.create_default()
    config.update({CONFIG_PARSERCLASS: args.parser})

    hparser = cp.instantiate_parser(config.get(CONFIG_PARSERCLASS))
    if hparser is None:
        logging.debug("Error instantiating filename parser %s" % args.parser)
        return

    # BUGFIX: the original passed the formatted message as a lazy %-arg to a
    # "\n" format string with no conversion specifier, which makes logging's
    # msg % args fail and the message never appear.
    logging.debug("\nImporting raw data from %d file(s)..." % len(impo.get_files()))
    data, _, fheaders = impo.import_data(delimiter="\t", hparser=hparser)
    if data is None:
        logging.debug("No data")
        return
    logging.debug("Raw data contains %d rows, %d columns" % (data.shape[0], data.shape[1]))

    # Normalize column headers and drop bookkeeping columns.
    pp = fa.get_preprocessor()
    pp.set_replacementheaders({'Exc1_-Ch1-_': 'trp ',
                               'Exc1_-Ch2-_': 'NAD(P)H ',
                               'Exc2_-Ch3-_': 'FAD '})
    data, ch = pp.rename_headers(data)
    data, dc = pp.drop_columns(data, [' ', 'Exc1', 'Exc2'], func='startswith')
    logging.debug("\nRenamed %d column header(s)" % len(ch))
    logging.debug(ch)
    logging.debug("\nDropped %d columns: data contains %d rows, %d columns"
                  % (len(dc), data.shape[0], data.shape[1]))
    logging.debug(dc)

    # Reorder headers: filename-parsed headers first, remaining ones sorted.
    logging.debug("\nColumn headers parsed from file name(s): %s" % fheaders)
    logging.debug("Other headers: %s" % (set(data.columns.values) - fheaders))
    nh = list(fheaders)
    nh.extend(sorted(set(data.columns.values) - fheaders))
    if len(nh) == len(data.columns):
        data = data.reindex(nh, axis=1)

    analyzer = fa.get_analyzer()
    # NOTE(review): the last column name mixes '{' and ')' brackets — confirm
    # it matches the key the analyzer's calculation functions expect.
    analyzer.add_columns([
        'NAD(P)H tm', 'NAD(P)H a2[%]/a1[%]', 'NAD(P)H %', 'NADH %',
        'NAD(P)H/NADH',
        'trp tm', 'trp E%1', 'trp E%2', 'trp E%3', 'trp a1[%]/a2[%]',
        'FAD tm', 'FAD a1[%]/a2[%]', 'FAD photons/NAD(P)H photons',
        'NAD(P)H tm/FAD tm',
        'FLIRR {NAD(P)H a2[%]/FAD a1[%])',
    ])
    analyzer.add_rangefilters({
        'NAD(P)H tm': [0, 5000],
        'NAD(P)H chi': [0, 7],
        'FAD tm': [0, 5000],
        'FAD chi': [0, 7],
        'trp tm': [0, 5000],
        'trp chi': [0, 7],
        'trp E%1': [0, 100],
        'trp E%2': [0, 100],
        'trp E%3': [0, 100],
    })

    logging.info("\nCalculating values for added columns...")
    data, capplied, cskipped = analyzer.calculate(data)
    logging.debug("Applied %d calculation functions, skipped %d: data contains %d rows, %d columns"
                  % (len(capplied), len(cskipped), data.shape[0], data.shape[1]))
    for afunc in capplied:
        logging.debug("\tcalculated %s" % afunc)
    for sfunc in cskipped:
        logging.debug("\tskipped %s" % sfunc)

    logging.info("\nFiltering values...")
    data, fapplied, fskipped, droppedrows = analyzer.apply_filter(data)
    logging.info("Applied %d filters, skipped %d filters, dropped %d rows: data contains %d rows, %d columns"
                 % (len(fapplied), len(fskipped), droppedrows, data.shape[0], data.shape[1]))
    for afunc in fapplied:
        logging.debug("\tapplied: %s (%s), dropped %d rows" % (afunc[0], afunc[1], afunc[2],))
    for sfunc in fskipped:
        logging.debug("\tskipped %s" % sfunc)