class MultiSampleSequencer:

    def __init__(self, settings: dict):
        self.transport = Sequencer(state=settings)
        self.interface = ConsoleInterface(self.transport)

    def run(self) -> None:
        self.transport.run()
def test_sequencer_compare():
    """Test an old output to ensure we don't regress"""
    input_dir = "data/sumaila/input"
    csv_file = os.path.join(input_dir, "metrics-local.csv")
    shp_file = os.path.join(input_dir, "networks-proposed.shp")
    nwp = NetworkPlan.from_files(shp_file, csv_file, prioritize='Population')
    model = Sequencer(nwp, 'Demand...Projected.nodal.demand.per.year')
    model.sequence()

    expected_dir = "data/sumaila/expected_output"
    exp_csv_file = os.path.join(expected_dir, "sequenced-results.csv")
    exp_df = pd.read_csv(exp_csv_file)
    # exp_shp_file = os.path.join(expected_dir, "edges.shp")
    # expected_nwp = NetworkPlan(shp_file, exp_csv_file, prioritize='Population')

    # now compare results to expected
    # expected_net = expected_nwp.network
    compare_fields = ['Sequence..Vertex.id', 'Sequence..Far.sighted.sequence']
    # exp_node_dict = expected_net.nodes(data=True)
    # exp_node_tups = [tuple(map(d.get, compare_fields)) for d in exp_node_dict]
    exp_node_tups = map(tuple, exp_df[compare_fields].values)
    seq_node_tups = map(tuple, model.output_frame[compare_fields].values)
    exp_node_tups = filter(lambda tup: tup[0] > 0, exp_node_tups)
    seq_node_tups = filter(lambda tup: tup[0] > 0, seq_node_tups)
    seq_node_tups = map(lambda tup: tuple(map(int, tup)), seq_node_tups)
    # seq_node_tups = [tuple(map(seq_node_dict[d].get, compare_fields)) for d in seq_node_dict]
    assert sorted(exp_node_tups, key=lambda tup: tup[0]) == \
        sorted(seq_node_tups, key=lambda tup: tup[0]), \
        "expected nodes do not match sequenced"
def sequence_total_grid_mv_line_network(target_folder, infrastructure_graph):
    drafts_folder = make_folder(join(target_folder, 'drafts'))
    graph = infrastructure_graph
    if not graph.edges():
        return {}  # The network is empty and there is nothing to sequence
    node_table = get_table_from_graph(graph, [
        'longitude', 'latitude', 'population', 'peak_demand_in_kw'])
    node_table = node_table.rename(columns={'longitude': 'X', 'latitude': 'Y'})
    node_table_path = join(drafts_folder, 'nodes-sequencer.csv')
    node_table.to_csv(node_table_path)
    edge_shapefile_path = join(drafts_folder, 'edges.shp')
    nwp = NetworkPlan.from_files(
        edge_shapefile_path, node_table_path, prioritize='population',
        proj='+proj=longlat +datum=WGS84 +no_defs')
    model = Sequencer(nwp, 'peak.demand.in.kw')
    model.sequence()
    order_series = model.output_frame['Sequence..Far.sighted.sequence']
    for index, order in order_series.iteritems():
        node_id = model.output_frame['Unnamed..0'][index]
        graph.node[node_id]['grid_mv_network_connection_order'] = order
    for node1_id, node2_id, edge_d in graph.cycle_edges():
        node1_d = infrastructure_graph.node[node1_id]
        node2_d = infrastructure_graph.node[node2_id]
        edge_d['grid_mv_network_connection_order'] = min(
            node1_d.get('grid_mv_network_connection_order', float('inf')),
            node2_d.get('grid_mv_network_connection_order', float('inf')))
    return {'infrastructure_graph': graph}
def main():
    sequencer = Sequencer()
    sequencer.add_steps(KICK, [0, 3, 7])
    sequencer.add_steps(SNARE, [2, 6])
    sequencer.add_steps(HAT, [0, 1, 2, 3, 4, 5, 6, 7])
    while True:
        tempo = sequencer.get_tempo()
        step = sequencer.get_step()
        if len(step) == 0:
            LED_Action((25, 25, 25))
        elif len(step) == 1:
            play_sample(step[0][0])
            LED_Action(step[0][1])
        else:
            color = (0, 0, 0)
            i = 0
            for sample in step:
                if i == 0:
                    audio = sample[0]
                else:
                    audio = audio.overlay(sample[0])
                color = tuple(map(operator.add, color, sample[1]))
                i += 1
            play_sample(audio)
            LED_Action(color)
        time.sleep(tempo)
def test_set_note_for_step_range_sets_value_and_hold_state_correctly_on_all_steps(
        self):
    sequencer = Sequencer(0, 1, 4, 4)
    sequencer.set_note_for_step_range(0, 3, 12)

    self.assertEquals(sequencer.steps[0].value, 12)
    self.assertFalse(sequencer.steps[0].is_hold)
    self.assertEquals(sequencer.steps[1].value, 12)
    self.assertTrue(sequencer.steps[1].is_hold)
    self.assertEquals(sequencer.steps[2].value, 12)
    self.assertTrue(sequencer.steps[2].is_hold)
    self.assertEquals(sequencer.steps[3].value, 12)
    self.assertTrue(sequencer.steps[3].is_hold)
def test_sequencer_follows_topology():
    """Tests that the sequencer doesn't skip nodes in the network"""
    nwp = get_network_plan()
    model = Sequencer(nwp, 'Demand')
    results = model.sequence()
    fnodes = results['Sequence..Upstream.id']
    node_seq_num = {node: seq_num
                    for seq_num, node in results['Sequence..Vertex.id'].iteritems()}
    # For each from_node, assert that the sequencer has already pointed to it or it's a root
    eq_(np.all([fnode in nwp.roots or node_seq_num[fnode] < seq_num
                for seq_num, fnode in fnodes.iteritems()]), True)
def __init__(self, midiController, midiInput, midiOutput, debug):
    serial = spi(port=0, device=0, gpio=noop())
    self.device = max7219(serial, cascaded=2, rotate=1)
    print("Created device")
    self.printMsg("ESID", font=TINY_FONT)
    self.launchpad['in'] = mido.open_input(midiController)
    self.launchpad['out'] = mido.open_output(midiController)
    if debug:
        self.interface['in'] = mido.open_input()
        self.interface['out'] = mido.open_output()
    else:
        self.interface['in'] = mido.open_input(midiInput)
        self.interface['out'] = mido.open_output(midiOutput)
    self.sequences = []
    new = False
    if new:
        self.sequences.append(Sequencer(36, "KK", self.launchpad, self.interface,
                                        outgoingCC=[10, 12], silent=False, new=True))
        self.sequences.append(Sequencer(38, "SD", self.launchpad, self.interface,
                                        outgoingCC=[18, 19], silent=True, new=True))
        self.sequences.append(Sequencer(39, "OH", self.launchpad, self.interface,
                                        outgoingCC=[58, 59], silent=True, new=True))
        self.sequences.append(Sequencer(46, "CY", self.launchpad, self.interface,
                                        outgoingCC=[50, 51], silent=True, new=True))
        for seq in self.sequences:
            self.saveSequence(seq)
    else:
        self.loadSequences()
def test_init_calculates_step_count_correctly(self):
    sequencer = Sequencer(0, 1, 4, 4)
    self.assertEquals(sequencer.bars, 1)
    self.assertEquals(sequencer.beats_per_bar, 4)
    self.assertEquals(sequencer.step_subdivision, 4)
    self.assertEquals(sequencer.step_count, 16)
    self.assertEquals(len(sequencer.steps.keys()), 16)

    sequencer = Sequencer(0, 4, 4, 4)
    self.assertEquals(sequencer.bars, 4)
    self.assertEquals(sequencer.beats_per_bar, 4)
    self.assertEquals(sequencer.step_subdivision, 4)
    self.assertEquals(sequencer.step_count, 64)
    self.assertEquals(len(sequencer.steps.keys()), 64)
def generate_beat():
    # Print a newline
    print("")
    # Print the header
    print(colorama.Fore.BLACK + colorama.Back.GREEN +
          "Enter the settings for the new beat (press ENTER for the default value)")

    # Ask for the time signature to use
    while True:
        timeSignatureInput = input(" Enter the time signature (%s): " % defaults["timeSignature"])
        timeSignature = time_signature(timeSignatureInput or defaults["timeSignature"])
        if timeSignature:
            break
        else:
            print(colorama.Fore.RED +
                  " Invalid time signature, make sure you use the format '7/8'. Please try again.")

    # Ask for the bpm to use
    while True:
        try:
            bpmInput = input(" Enter the desired BPM, between 30 and 200 (%d): " % defaults["bpm"])
            bpm = int(bpmInput or defaults["bpm"])
            if bpm < 30 or bpm > 200:
                raise ValueError
            break
        except ValueError:
            print(colorama.Fore.RED + " Invalid number, please try again.")

    # Create a new sequencer and return it
    return Sequencer.generate_irregular_beat(bpm, timeSignature[0], timeSignature[1])
def opt_fun(params_values):
    sequencer = Sequencer(channels, channels_awg, awg_info, channels_delay)
    params = {}
    list_index_acc = 0
    for ii, params_key in enumerate(params_init.keys()):
        index = list_index_acc + ii
        if 'list' in params_key:
            params[params_key] = params_values[index:index + len(params_init[params_key])]
            list_index_acc += len(params_init[params_key]) - 1
        else:
            params[params_key] = params_values[index]
    print("params: %s" % params)
    multiple_sequences, readout_time_list = arb_optimization_neldermead(
        sequencer, params, plot=True)
    awg_readout_time_list = get_awg_readout_time(readout_time_list)
    data, measured_data, dt = run_qutip_experiment(
        multiple_sequences, awg_readout_time_list['m8195a'], plot=True)
    Pe_list = measured_data[:, 1]
    print("Current value: %s" % Pe_list[0])
    return (1 - Pe_list[0])
def __init__(self, midi_in, midi_out):
    super(Sequency, self).__init__()
    self._midi_out = midi_out
    self._midi_in = midi_in
    # Clock messages are ignored by default
    midi_in.ignore_types(timing=False)
    midi_in.set_callback(self._handle_input)
    self._lp = LaunchpadMini(midi_in, midi_out)
    self._controller = MidiToControl()
    self._boot_combo = []
    self._reboot_set = False
    self._control_mode = CONTROL_MODE.DEFAULT
    sequence_states = self._load_from_state()
    self._clock = Clock()
    self._sequences = []
    self._active_sequence = 0
    for i in range(0, 8):
        self._sequences.append(
            Sequencer(i, self._on_trigger, sequence_states[i]))
    self.start()
def loadSequences(self):
    fileCount = 0
    for seqfile in sorted(os.listdir(self.sequenceDir)):
        print(seqfile)
        if fileCount == 4:
            return
        fileCount += 1
        with open(os.path.join(self.sequenceDir, seqfile), 'r', encoding='utf-8') as fp:
            data = json.load(fp)
        sequence = []
        for step in data['sequence']:
            sequence.append(Step(step['note'], step['led'], step['incommingCC'],
                                 self.launchpad['out'], step['active']))
            for cc in step['cc']:
                sequence[-1].addCc(cc["cc"], cc["value"])
        temp = {}
        self.sequences.append(Sequencer(data['note'], data['name'], self.launchpad,
                                        self.interface, outgoingCC=data['outgoingCC'],
                                        silent=data['silent']))
        self.sequences[-1].sequence = sequence
def test_accumulate_demand():
    """Tests that the accumulated demand is correct"""
    nwp = get_network_plan()
    # Build dictionary of accumulated values for each node
    acc_dicts = {node: Sequencer(nwp, 'Demand').accumulate(node)
                 for node in nwp.network.node.keys()}
    # Dictionary of known accumulated demand computed manually
    demands = {
        0: (100 + 50 + 25 + 12 + 6 + 3),
        1: (100 + 25 + 12),
        2: (50 + 6 + 3),
        3: 25,
        4: 12,
        5: 6,
        6: 3
    }
    # Assert that accumulate method and manual computation are equal
    eq_(np.all([acc_dicts[node]['demand'] == demands[node]
                for node in nwp.network.node.keys()]), True)
def test_sequencer_with_fakes():
    """Make sure we work with fake nodes"""
    # for now, just make sure it runs without exceptions
    metrics, network, node_rank, edge_rank = gen_data_with_fakes()
    nwp = NetworkPlan(network, metrics, prioritize='Population', proj='wgs4')
    model = Sequencer(nwp, 'Demand...Projected.nodal.demand.per.year')
    results = model.sequence()
    node_ids = results['Sequence..Vertex.id']
    sequence_ids = results['Sequence..Far.sighted.sequence']
    actual_node_rank = dict(zip(node_ids, sequence_ids))
    actual_edge_rank = {k: v['rank']
                        for k, v in model.networkplan.network.edge.iteritems()}
    assert node_rank == actual_node_rank, \
        "Node sequencing is not what was expected"
    assert edge_rank == actual_edge_rank, \
        "Edge sequencing is not what was expected"
def __init__(self, platform):
    platform.add_extension(ttl_extension)

    sys_clock_pads = platform.request("clk156")
    self.clock_domains.cd_sys = ClockDomain(reset_less=True)
    self.specials += Instance("IBUFGDS",
                              i_I=sys_clock_pads.p, i_IB=sys_clock_pads.n,
                              o_O=self.cd_sys.clk)
    sys_clk_freq = 156000000

    self.comb += platform.request("sfp_tx_disable_n").eq(1)
    gtx = GTXReceiver(
        clock_pads=platform.request("sgmii_clock"),
        rx_pads=platform.request("sfp_rx"),
        sys_clk_freq=sys_clk_freq)
    self.submodules += gtx

    # clean up GTX clock using Si5324
    i2c_master = I2CMaster(platform.request("i2c"))
    sequencer = Sequencer(get_i2c_program(sys_clk_freq))
    si5324_clock_router = Si5324ClockRouter(platform, sys_clk_freq)
    self.submodules += i2c_master, sequencer, si5324_clock_router
    self.comb += sequencer.bus.connect(i2c_master.bus)

    # decode frames
    back_buffer = Signal(32)
    front_buffer = Signal(32)
    frame_hi = Signal()
    self.sync.rx_clean += [
        If(gtx.decoders[0].k,
            front_buffer.eq(back_buffer),
            frame_hi.eq(0)
        ).Else(
            If(frame_hi,
                back_buffer[16:].eq(
                    Cat(gtx.decoders[0].d, gtx.decoders[1].d))
            ).Else(
                back_buffer[:16].eq(
                    Cat(gtx.decoders[0].d, gtx.decoders[1].d))
            ),
            frame_hi.eq(1)
        )
    ]

    # drive TTLs
    self.comb += [
        platform.request("user_sma_gpio_p").eq(front_buffer[0]),
        platform.request("user_sma_gpio_n").eq(front_buffer[1])
    ]
    for i in range(8):
        self.comb += platform.request("user_led").eq(front_buffer[2 + i])
    for i in range(22):
        self.comb += platform.request("ttl").eq(front_buffer[10 + i])
def test_process_step_only_starts_current_note_when_previous_note_is_empty(
        self, mock_send_message, mock_get_previous_step):
    mock_get_previous_step.return_value = Note(None, is_hold=False)
    sequencer = Sequencer(0, 1, 4, 4)
    sequencer.set_midi_channel(0)
    sequencer.process_step(Note(1, is_hold=False))
    self.assertEquals(mock_send_message.call_count, 1)
    self.assertEquals(mock_send_message.call_args[0], ('NoteOn', 0, 1))
def fit(fold_nr, train_set, test_set, img_rows=96, img_cols=96,
        n_imgs=10 ** 4, batch_size=32, workers=1):
    X_train, y_train, X_test, y_test = extract_and_normalize_data(train_set, test_set)
    # Done with preprocessing! :)

    x, y = np.meshgrid(np.arange(img_rows), np.arange(img_cols), indexing='ij')
    elastic = partial(elastic_transform, x=x, y=y,
                      alpha=img_rows * 1.5, sigma=img_rows * 0.07)
    # we create two instances with the same arguments
    data_gen_args = dict(
        featurewise_center=False,
        featurewise_std_normalization=False,
        rotation_range=10.,
        width_shift_range=0.1,
        height_shift_range=0.1,
        horizontal_flip=True,
        vertical_flip=True,
        zoom_range=[1, 1.2],
        fill_mode='constant',
        preprocessing_function=elastic)
    training_sequence = Sequencer(X_train, y_train, sequence_size=n_imgs,
                                  batch_size=batch_size, data_gen_args=data_gen_args)

    raw_model = UNet((img_rows, img_cols, 1), start_ch=8, depth=7,
                     batchnorm=True, dropout=0.5, maxpool=True, residual=True)
    model = ModelMGPU(raw_model, 2)
    model.summary(print_fn=logging.info)

    model_checkpoint = ModelCheckpoint(
        '../data/weights-' + str(fold_nr) + '.h5',
        monitor='val_loss', save_best_only=True)
    metrics_callback = MetricsCallback(X_train, y_train, X_test, y_test, test_set)
    c_backs = [model_checkpoint, LoggingWriter(), metrics_callback]

    model.compile(optimizer=Adam(lr=0.001), loss=binary_crossentropy, metrics=[dice_coef])

    history = model.fit_generator(
        training_sequence,
        epochs=50,
        verbose=1,
        shuffle=True,
        validation_data=(X_test, y_test),
        callbacks=c_backs,
        workers=workers,
        use_multiprocessing=True)

    logging.info(history.history)
    plot_learning_performance(history, 'loss-' + str(fold_nr) + '.png')
    metrics_callback.save('metrics-' + str(fold_nr) + '.png')
def resultsA(self):
    records = self.getRecords(self.basefile)
    if len(records) > 1:
        raise ImportError('FORMATTING ERROR in base text file')

    sequencer = Sequencer()
    results = []
    for record in records:
        record['sequence'] = record['sequence'].replace("\n", "")
        frames = list()
        for i in range(0, 3):
            sequence = sequencer.getExtrons(record, i)
            frames.append(sequence)
        result = {
            'name': record['name'],
            'extrons': frames.copy()
        }
        results.append(result)
        frames.clear()

    file = open("woosh_output_a.txt", 'w')
    print('=============================== R E S U L T S ===============================')
    file.write('=============================== R E S U L T S ===============================\n')
    for result in results:
        print(result['name'])
        file.write(result['name'])
        i = 1
        for frame in result['extrons']:
            print('\n ------------ Frame ', i, ' ------------')
            file.write('\n ------------ Frame ' + str(i) + ' ------------')
            for sequence in frame:
                print(sequence)
                file.write(('\n' + sequence))
            i += 1
        print('**********************************************')
        file.write('\n**********************************************\n\n')
    file.close()
def test01_instant(self):
    """test01_instant()"""
    # sequ: Sequencer = Sequencer(0.1)  # sequencer with an interval of .1 sec
    # sequencer with an interval of 15.0 sec
    sequ: Sequencer = Sequencer(5.0)  # init for 5 sec delay
    self.assertAlmostEqual(5.0, sequ.delayseconds)
    self.assertEqual(20, len(sequ._to_do_sched))
    self.assertFalse(sequ.do_it_now())
    self.assertEqual(0, sequ.skipped)
    initiallastsched = sequ._to_do_sched[-1]
    cnt: int = 0
    starttime = monotonic()
    while not sequ.do_it_now():
        Sleep(0.25)
        self.assertEqual(0, sequ.skipped)
        cnt += 1
    endtime = monotonic()
    toloop = endtime - starttime
    self.assertAlmostEqual(10.0, round(toloop, 0))
    currentlastsched = sequ._to_do_sched[-1]
    delay1 = currentlastsched - initiallastsched
    self.assertAlmostEqual(5.0, delay1)
    self.assertTrue(38 < cnt < 41)
    sched: List[float] = list(sequ._to_do_sched)
    ll: List[float] = []
    for i in range(1, len(sched)):
        ll.append(sched[i] - sched[i - 1])
    self.assertEqual(5.0, mean(ll))

    sequ = Sequencer(1)  # init for 1 sec delay
    Sleep(20.5)
    self.assertEqual(20, len(sequ._to_do_sched))
    self.assertTrue(sequ.do_it_now())
    self.assertEqual(20, len(sequ._to_do_sched))
    self.assertEqual(11, sequ.skipped)

    sequ = Sequencer(5.0)  # init for 5 sec delay
    waittime: float = sequ.get_nxt_wait()
    Sleep(waittime - 0.5)
    waittime = sequ.get_nxt_wait()
    self.assertAlmostEqual(0.5, waittime, places=1)
    a = 0
def test_process_steps_stops_previous_note_when_previous_note_is_hold_and_current_note_is_empty(
        self, mock_send_message, mock_get_previous_step):
    mock_get_previous_step.return_value = Note(99, is_hold=True)
    sequencer = Sequencer(0, 1, 4, 4)
    sequencer.set_midi_channel(0)
    sequencer.process_step(Note(None, is_hold=False))
    self.assertEquals(mock_send_message.call_count, 1)
    self.assertEquals(mock_send_message.call_args_list[0][0], ('NoteOff', 0, 99))
def test():
    seq = Sequencer()
    bass = Drum(BassDrum())
    snare = Drum(SnareDrum())
    bass.get_beat(0).set_enabled(True)
    bass.get_beat(8).set_enabled(True)
    snare.get_beat(4).set_enabled(True)
    snare.get_beat(12).set_enabled(True)
    seq.add_drum(bass)
    seq.add_drum(snare)
    seq.start()
    sleep(3)
def test_accumulate_cost():
    """Tests that the accumulated costs are correct"""
    nwp = get_network_plan()
    # Build dictionary of accumulated values for each node
    acc_dicts = {node: Sequencer(nwp, 'Demand').accumulate(node)
                 for node in nwp.network.node.keys()}

    def get_distance(f, t):
        return nwp._distance(f, t)

    # Manually compute downstream distances
    costs = {
        0: sum([get_distance(0, 1), get_distance(0, 2), get_distance(1, 3),
                get_distance(1, 4), get_distance(2, 5), get_distance(2, 6)]),
        1: sum([get_distance(0, 1), get_distance(1, 3), get_distance(1, 4)]),
        2: sum([get_distance(0, 2), get_distance(2, 5), get_distance(2, 6)]),
        3: get_distance(1, 3),
        4: get_distance(1, 4),
        5: get_distance(2, 5),
        6: get_distance(2, 6)
    }
    costs = {node: (acc_dicts[node]['cost'], costs[node])
             for node in nwp.network.node.keys()}
    eq_(np.all(map(lambda tup: np.allclose(*tup), costs.values())), True)
def main():
    d = {}
    for word in open(utils.dict_file):
        d[word.strip()] = 0
    for word, ac in Sequencer(utils.train_file).Generator():
        if ac:
            d[word] += 1
    sd = set()
    for word in open('simple1000.txt'):
        sd.add(word.strip())
    arr = sorted(d.values())
    n = sum(arr)
    print 'n =', n
    chisq, p = stats.chisquare(map(lambda x: float(x) / n, arr))
    print chisq, p
    print 'avg =', float(n) / len(d)
    print 'savg = ', float(sum(d[x] for x in d.keys() if x in sd)) / 1000
def build(self):
    self.initialize_app_state()
    self.sequencers = [
        Sequencer(id=_id, bars=1, beats_per_bar=4, steps_per_beat=4, midi_channel=0)
        for _id in range(8)
    ]
    self.active_sequencer = self.sequencers[0]
    self.sequencer_view = SequencerView()
    self.sequencer_view.menu.sequencer_spinner.values = [
        'Sequencer #{}'.format(sequencer_id)
        for sequencer_id in range(len(self.sequencers))
    ]
    self.sequencer_view.update_ui(None)
    return self.sequencer_view
class Test_getReadingFrame(unittest.TestCase):
    seq = Sequencer()

    def testNotFound(self):
        start = self.seq.getReadingFrame("AGGATACACACA")
        self.assertEqual(start, -1)

    def testFirstCodonStart(self):
        start = self.seq.getReadingFrame("ATGATACACACA")
        self.assertEqual(start, 0)

    def testLastCodonStart(self):
        start = self.seq.getReadingFrame("AGGATACACATG")
        self.assertEqual(start, 9)

    def testMiddleCodonStart(self):
        start = self.seq.getReadingFrame("AGATGCACACA")
        self.assertEqual(start, 2)
def test_set_note_for_step_range_wont_override_a_step_that_is_already_set(
        self):
    sequencer = Sequencer(0, 1, 4, 4)
    sequencer.set_note_for_step(2, 12)
    sequencer.set_note_for_step_range(0, 3, 99)

    self.assertEquals(sequencer.steps[0].value, 99)
    self.assertFalse(sequencer.steps[0].is_hold)
    self.assertEquals(sequencer.steps[1].value, 99)
    self.assertTrue(sequencer.steps[1].is_hold)
    # Test previous step hasn't been overridden
    self.assertEquals(sequencer.steps[2].value, 12)
    self.assertFalse(sequencer.steps[2].is_hold)
    # Test next step after non-hold step is blank
    self.assertIsNone(sequencer.steps[3].value)
    self.assertFalse(sequencer.steps[3].is_hold)
def opt_fun(params_values):
    sequencer = Sequencer(channels, channels_awg, awg_info, channels_delay)
    params = {}
    for ii, params_key in enumerate(params_init.keys()):
        params[params_key] = params_values[ii]
    print("params: %s" % params)
    multiple_sequences, readout_time_list = sideband_optimization_neldermead(
        sequencer, params, plot=True)
    awg_readout_time_list = get_awg_readout_time(readout_time_list)
    data, measured_data, dt = run_qutip_experiment(
        multiple_sequences, awg_readout_time_list['m8195a'], plot=True)
    Pe_list = measured_data[:, 1]
    print("Current value: %s" % Pe_list[0])
    return (Pe_list[0])
def main():
    # Check command-line usage
    if len(sys.argv) != 2:
        print("Utilisation : python3 wirefish.py <nom du fichier>")
        exit(1)

    # Resolve the input and output files
    dirname = os.path.dirname(__file__)
    if not os.path.exists(os.path.join(dirname, '../inputs/')):
        os.makedirs(os.path.join(dirname, '../inputs/'))
    if not os.path.exists(os.path.join(dirname, '../outputs/')):
        os.makedirs(os.path.join(dirname, '../outputs/'))
    inputname = sys.argv[1]
    inputpath = os.path.join(dirname, '../inputs/' + inputname)
    outputname = sys.argv[1].replace('/', '_') + '.txt'
    outputpath = os.path.join(dirname, '../outputs/analyse_' + outputname)
    if not os.path.isfile(inputpath):
        print("{} n'existe pas ".format(inputpath))
        exit(1)

    # Build the trace dictionary
    trace_dict = Sequencer(get_trames_list(inputpath)).sequence()

    # From trace_dict, generate the text file summarizing the analysis of the file
    with open(outputpath, 'w') as f:
        f.write('Analyse du fichier : ' + sys.argv[1] + '\n\n' + get_text_output(trace_dict))

    # Build the trace tree from trace_dict
    retval = {"name": "Fichier analysé : " + inputname, "children": []}
    tree = get_trace_tree(trace_dict, retval)

    # Display the interface
    Interface(tree).main()
class Test_translateDNA(unittest.TestCase):
    seq = Sequencer()
    dnaSeq = {'name': 'Test Sequence', 'sequence': ""}

    def test1_basic(self):
        self.dnaSeq['sequence'] = "ATGATAATCTTTGTTGTGTAA"
        start = self.seq.getReadingFrame(self.dnaSeq['sequence'])
        protein = self.seq.translate_dna(self.dnaSeq)
        self.assertEqual(protein, "MIIFVV")

    def test2_middleStart(self):
        self.dnaSeq['sequence'] = "ATAATCTTTATGGTTGTGTAG"
        protein = self.seq.translate_dna(self.dnaSeq)
        self.assertEqual(protein, "MVV")

    def test3_earlyStop(self):
        self.dnaSeq['sequence'] = "ATGATAATCTAATTGTTGTGTAA"
        protein = self.seq.translate_dna(self.dnaSeq)
        self.assertEqual(protein, "MII")

    def test3_midStartEarlyStop(self):
        self.dnaSeq['sequence'] = "ATAATCTTTATGGTTTGAGTGTAG"
        protein = self.seq.translate_dna(self.dnaSeq)
        self.assertEqual(protein, "MV")
from nmigen import *
from nmigen.asserts import *
from nmigen.hdl.ast import *
from nmigen.back import pysim

from sequencer import Sequencer
from arch import Registers
from incdec import IncDec
from mcycler import MCycler
from muxing import *

if __name__ == "__main__":
    m = Module()
    m.submodules.sequencer = sequencer = Sequencer()
    m.submodules.registers = registers = Registers()
    m.submodules.mcycler = mcycler = MCycler()
    m.submodules.incdec = incdec = IncDec(16)

    addrBus = Signal(16)
    dataBus = Signal(8)

    m.d.comb += [
        mcycler.addr.eq(addrBus),
        mcycler.refresh_addr.eq(addrBus),
        mcycler.cycle.eq(sequencer.cycle),
        mcycler.extend.eq(sequencer.extend),
        mcycler.busreq.eq(0),
    ]

    m.d.comb += [
        incdec.input.eq(addrBus),
MAG_PAN = -1 * STEREO_WIDTH
HANNABIELL_PAN = 1 * STEREO_WIDTH

MAG_AGE_TYPE = "decay"
HANNABIELL_AGE_TYPE = "rejuvinate"

from sequencer import Sequencer
import logging

logging.basicConfig(filename="play.log", level=logging.DEBUG, filemode="w",
                    format="%(asctime)s - %(name)s - %(levelname)s - %(message)s")

sequencer = Sequencer()
sequencer.load_sounds("sound/*/*.wav")

# Buses
sequencer.add_bus("bubbles")
sequencer.set_bus_params("bubbles", {
    "reverb_room": 0.5,
    "reverb_mix": 0.6
})

sequencer.add_bus("long_reverb")
sequencer.set_bus_params("long_reverb", {
    "reverb_room": 0.85,
    "reverb_mix": 0.75
        n = len(word)
        for i in xrange(27):
            if appear[i]:
                score *= self.appear_prob[n][i]
            else:
                score *= 1.0 - self.appear_prob[n][i]
        return score

    def Predict(self, word):
        if not self.predict_ready:
            raise Exception('prediction is not ready')
        if len(word) not in utils.train_range:
            return False
        return self.Score(word) > self.threshold[len(word)]


if __name__ == '__main__':
    from sequencer import Sequencer
    maid = AppearSep()
    maid.Train(Sequencer(utils.train_file).Generator())
    ac = [0] * 40
    cnt = [0] * 40
    for w, tar in Sequencer(utils.valid_file).Generator():
        if len(w) in utils.train_range:
            cnt[len(w)] += 1
            if maid.Predict(w) == tar:
                ac[len(w)] += 1
    for i in utils.train_range:
        print 'Validation', i, float(ac[i] * 100) / cnt[i]
    print 'Validation', float(sum(ac) * 100) / sum(cnt)