def test_timeline_default_output_device():
    """A track scheduled on a default Timeline should output to a MIDI device."""
    timeline = iso.Timeline()
    try:
        track = timeline.schedule({"note": 0})
        # isinstance() is the idiomatic (and equivalent) form of
        # issubclass(type(x), T), and also handles virtual subclasses.
        assert isinstance(track.output_device, MidiOutputDevice)
    except iso.DeviceNotFoundException:
        # Ignore exception on machines without a MIDI device
        pass
def test_timeline_stop_when_done():
    """An empty timeline keeps ticking by default; with stop_when_done set,
    the next tick on empty raises StopIteration."""
    timeline = iso.Timeline(output_device=DummyOutputDevice())

    # Without stop_when_done, ticking an empty timeline simply advances time.
    timeline.tick()
    expected = 1.0 / iso.DEFAULT_TICKS_PER_BEAT
    assert timeline.current_time == pytest.approx(expected)

    # Once stop_when_done is set, running on empty must stop the timeline.
    timeline.stop_when_done = True
    with pytest.raises(StopIteration):
        timeline.tick()
def __init__(self, name='mysong', bpm=132, device=None):
    """Initialise the player thread and build its isobar timeline.

    Args:
        name: Thread/song name.
        bpm: Tempo in beats per minute.
        device: MIDI output device name (None selects the default).
    """
    threading.Thread.__init__(self)
    self.name = name
    self.bpm = bpm
    self.quit = False
    # Route all timeline output through the requested MIDI device.
    midi_output = iso.io.midi.MidiOut(device)
    self.t = iso.Timeline(bpm,
                          ticks_per_beat=96,
                          division=4,
                          debug=False,
                          device=midi_output)
def test_timeline_background():
    """A timeline run in a background thread should fire scheduled actions."""
    timeline = iso.Timeline(60, output_device=iso.io.DummyOutputDevice())
    timeline.ticks_per_beat = 100

    executed = 0

    def bump_counter():
        nonlocal executed
        executed += 1

    timeline.background()
    timeline.schedule({"action": bump_counter, "duration": 0.05}, delay=0.01)
    time.sleep(0.2)
    timeline.stop()

    # With a 0.05-beat duration at 60bpm over ~0.19s of playback, the
    # action is expected to fire exactly 4 times.
    assert executed == 4
def test_timeline_schedule_real_clock():
    """Events scheduled against the real clock should fire ~0.1s apart."""
    timeline = iso.Timeline(60, output_device=DummyOutputDevice())
    timeline.stop_when_done = True
    times = []

    def record_time():
        times.append(time.time())

    timeline.schedule({
        iso.EVENT_NOTE: iso.PSequence([1, 1], 1),
        iso.EVENT_ACTION: record_time,
        iso.EVENT_DURATION: 0.1
    }, delay=0.1)
    # (Removed unused local `t0 = time.time()` — it was never read.)
    timeline.run()

    # Two notes 0.1 beats apart at 60bpm should be 0.1 seconds apart,
    # within one clock tick of tolerance.
    diff = times[1] - times[0]
    assert diff == pytest.approx(0.1, abs=timeline.tick_duration)
def save(self, filename):
    """
    Save pattern data to a MIDI file.

    Renders this pattern through a MidiFileOutputDevice, driven by a
    DummyClock (presumably a free-running clock so the render does not
    happen in real time — confirm), then writes the result to disk.

    Args:
        filename (str): Filename to write to (.mid)
    """
    from isobar.io.midifile import MidiFileOutputDevice
    writer = MidiFileOutputDevice(filename)
    clock = isobar.DummyClock()
    # NOTE(review): `self` is passed as the Timeline's first positional
    # argument (tempo slot) as well as being scheduled below — confirm
    # this is intended and not a leftover.
    timeline = isobar.Timeline(self, output_device=writer, clock_source=clock)
    timeline.schedule(self)
    timeline.stop_when_done = True
    try:
        # clock.run() is expected to raise StopIteration once the
        # timeline has emptied; that is the normal completion path.
        clock.run()
    except StopIteration:
        pass
    writer.write()
def test_timeline_ticks_per_beat(ticks_per_beat):
    """Scheduled events should be evenly spaced at any tick resolution."""
    delay_time = 0.1
    timeline = iso.Timeline(120, output_device=iso.DummyOutputDevice())
    timeline.stop_when_done = True
    timeline.ticks_per_beat = ticks_per_beat
    timeline.event_times = []

    # Schedule a single event whose duration corresponds to delay_time
    # seconds at the current tempo.
    timeline.schedule({
        iso.EVENT_ACTION: lambda: timeline.event_times.append(time.time()),
        iso.EVENT_DURATION: iso.PSequence([timeline.seconds_to_beats(delay_time)], 2)
    })
    timeline.run()

    # Consecutive events should be delay_time apart, within 5%.
    for earlier, later in zip(timeline.event_times, timeline.event_times[1:]):
        interval = later - earlier
        assert (delay_time * 0.95) < interval < (delay_time * 1.05)
def test_io_midi():
    """
    Send a MIDI message through a virtual loopback device.
    Note that virtual=True is not needed for subsequent calls, as it has
    already been created so is visible to rtmidi as an existing device.
    """
    events = []

    def log_event(message):
        events.append(message)

    midi_in = iso.MidiInputDevice(VIRTUAL_DEVICE_NAME, virtual=True)
    midi_in.callback = log_event
    midi_out = iso.MidiOutputDevice(VIRTUAL_DEVICE_NAME)

    timeline = iso.Timeline(120, midi_out)
    timeline.stop_when_done = True
    timeline.schedule({"note": iso.PSequence([60], 1), "duration": 0.1})
    timeline.run()

    # Exactly one note event should have arrived via the loopback.
    assert len(events) == 1
def test_timeline_clock_accuracy():
    """
    480 ticks per beat @ 125bpm = 1 tick per 1ms.
    Check that event timing is accurate to within roughly +/- 1ms.
    """
    timeline = iso.Timeline(125, output_device=iso.DummyOutputDevice())
    timeline.stop_when_done = True
    timeline.ticks_per_beat = 480
    timeline.event_times = []
    timeline.schedule({
        iso.EVENT_ACTION: lambda: timeline.event_times.append(time.time()),
        iso.EVENT_DURATION: iso.PSequence([0.001], 50)
    })
    timeline.run()

    # Every consecutive pair of events should land within the tolerance band.
    for earlier, later in zip(timeline.event_times, timeline.event_times[1:]):
        assert 0.0002 < (later - earlier) < 0.002
# # Simple example of writing to a MIDI file in real time. #------------------------------------------------------------------------ import isobar as iso from isobar.io import MidiFileOutputDevice import logging logging.basicConfig(level=logging.DEBUG, format="[%(asctime)s] %(message)s") key = iso.Key("C", "major") filename = "output.mid" output = MidiFileOutputDevice(filename) timeline = iso.Timeline(iso.MAX_CLOCK_RATE, output_device=output) timeline.stop_when_done = True timeline.schedule({ "note": iso.PDegree(iso.PSequence([ 0, 1, 2, 4 ], 4), key), "octave": 5, "gate": iso.PSequence([ 0.5, 1, 2, 1 ]), "amplitude": iso.PSequence([ 100, 80, 60, 40], 4), "duration": 1.0 }) timeline.schedule({ "note": iso.PDegree(iso.PSequence([ 7, 6, 4, 2 ], 4), key), "octave": 6, "gate": 1, "amplitude": iso.PSequence([ 80, 70, 60, 50], 4),
import isobar as iso

import logging
logging.basicConfig(level=logging.INFO, format="[%(asctime)s] %(message)s")

#------------------------------------------------------------------------
# Melody line
#------------------------------------------------------------------------
sequence = iso.PSequence([-7, -5, 0, 2, 3, -5, -7, 2, 0, -5, 3, 2])

#------------------------------------------------------------------------
# Create a timeline at 160BPM
#------------------------------------------------------------------------
timeline = iso.Timeline(160)

#------------------------------------------------------------------------
# Schedule two identical melodies.
# We must copy the note sequence or else the position will be stepped
# by two every note... try removing the .copy() and see what happens!
#------------------------------------------------------------------------
timeline.schedule({"note": sequence.copy() + 60, "duration": 0.5})
# The second voice plays 1% slower, so the two lines gradually drift apart.
timeline.schedule({"note": sequence.copy() + 72, "duration": 0.5 * 1.01})

#------------------------------------------------------------------------
# Start playing via default MIDI out, and block forever.
# Alternatively, use timeline.background() to retain foreground control.
# NOTE(review): the except clause of this try block lies outside this chunk.
#------------------------------------------------------------------------
try:
    timeline.run()
#!/usr/bin/env python3
#------------------------------------------------------------------------
# isobar: ex-static-pattern
#
# Bind a chord sequence to a static sequence, which is then referenced
# in other patterns.
#------------------------------------------------------------------------

import isobar as iso

# Cycle through four keys; PStaticPattern holds each value (period 4)
# so every scheduled track sees the same current key.
key_sequence = iso.PSequence([
    iso.Key("C", "minor"),
    iso.Key("G", "minor"),
    iso.Key("Bb", "major"),
    iso.Key("F", "major"),
])
key = iso.PStaticPattern(key_sequence, 4)

timeline = iso.Timeline(120)

# Drone on the root of the current key.
timeline.schedule({"degree": 0, "key": key, "octave": 3})

# Creeping random line over the same key.
timeline.schedule({
    "degree": iso.PCreep(iso.PWhite(0, 6), 2, 2, 3),
    "key": key,
    "octave": 6,
    "duration": 0.25
})

timeline.run()
def dummy_timeline():
    """Build a Timeline wired to dummy I/O that stops once it runs empty."""
    timeline = iso.Timeline(
        output_device=iso.io.DummyOutputDevice(),
        clock_source=iso.DummyClock()
    )
    timeline.stop_when_done = True
    return timeline
#!/usr/bin/env python3
#------------------------------------------------------------------------
# isobar: ex-lsystem-stochastic
#
# Generates a stochastic L-system arpeggio
#------------------------------------------------------------------------

import isobar as iso

import logging
logging.basicConfig(level=logging.INFO, format="[%(asctime)s] %(message)s")

# Expand the L-system, map its steps onto a major pentatonic scale, and
# fold the result into a playable note range.
notes = iso.PLSystem("N[+N--?N]+N[+?N]", depth=4)
notes = iso.PDegree(notes, iso.Scale.majorPenta)
notes = notes % 36 + 52

timeline = iso.Timeline(180)
timeline.schedule({"note": notes, "duration": 0.25})
timeline.run()
def test_timeline_seconds_to_beats(dummy_timeline):
    """seconds_to_beats should scale linearly with the current tempo."""
    # NOTE(review): the dummy_timeline fixture is unused — this test builds
    # its own timeline. Consider dropping the parameter.
    timeline = iso.Timeline(120)

    # At 120bpm, one second spans two beats; zero seconds is zero beats.
    assert timeline.seconds_to_beats(1) == pytest.approx(2)
    assert timeline.seconds_to_beats(0) == pytest.approx(0.0)

    # Raising the tempo raises the beat count proportionally.
    timeline.tempo = 180
    assert timeline.seconds_to_beats(1) == pytest.approx(3)
#!/usr/bin/python

import isobar as iso

# Repeating four-note sequence with scalar transposition:
# [ 36, 38, 43, 39, ... ]
a = iso.PSeq([0, 2, 7, 3]) + 36

# Pattern-wise transposition, alternating between octaves:
# [ 36, 50, 43, 51, ... ]
a = a + iso.PSeq([0, 12])

# Geometric chromatic series, bounced back and forth and looped forever.
b = iso.PSeries(0, 1, 12) + 72
b = iso.PPingPong(b)
b = iso.PLoop(b)

# Velocity series with an accent every 4th note, plus a random walk
# for gradual dynamic changes.
amp = iso.PSeq([50, 35, 25, 35]) + iso.PBrown(0, 1, -20, 20)

# A Timeline schedules events at a given BPM, here sent to a named
# MIDI output device.
output_device = iso.io.midi.MidiOut("IAC Driver IAC Bus 1")
timeline = iso.Timeline(120, device=output_device, debug=True)

# Bind each pattern to an event property and play.
timeline.sched({'note': a, 'dur': 1, 'gate': 2})
timeline.run()
import isobar as iso

import logging
logging.basicConfig(level=logging.INFO, format="[%(asctime)s] %(message)s")

#------------------------------------------------------------------------
# Walk up and down a minor scale.
#------------------------------------------------------------------------
scale = iso.Scale([0, 2, 3, 7, 9, 11])
degree = iso.PBrown(0, 2, -8, 16)
notes = iso.PDegree(degree, scale) + 60

#------------------------------------------------------------------------
# Add a slight 4/4 emphasis and moderate variation in velocity.
#------------------------------------------------------------------------
amp = iso.PSequence([40, 30, 20, 25]) + iso.PBrown(0, 2, -10, 10)

timeline = iso.Timeline(170)
timeline.schedule({
    "note": notes,
    "duration": 0.25,
    "gate": 0.9,
    "amplitude": amp
})

try:
    timeline.run()
except KeyboardInterrupt:
    # Silence any hanging notes before exiting.
    timeline.output_device.all_notes_off()
#------------------------------------------------------------------------ bassline = iso.PSequence([0, 2, 7, 3]) + 36 #------------------------------------------------------------------------ # Repeat each note 3 times, and transpose each into a different octave # [ 36, 48, 60, 38, 50, 62, ... ] #------------------------------------------------------------------------ bassline = iso.PStutter(bassline, 3) + iso.PSequence([0, 12, 24]) #------------------------------------------------------------------------ # A Timeline schedules events at a specified tempo. By default, events # are send to the system's default MIDI output. #------------------------------------------------------------------------ output = iso.MidiOutputDevice() timeline = iso.Timeline(120, output) #------------------------------------------------------------------------ # Schedule events, with properties generated by the Pattern objects. #------------------------------------------------------------------------ timeline.schedule({ "note": arpeggio, "duration": 0.25, "amplitude": amplitude }) timeline.schedule({ "note": bassline, "duration": 1 }) try:
def test_timeline_output_device():
    """A track scheduled on a timeline inherits that timeline's output device."""
    dummy = DummyOutputDevice()
    timeline = iso.Timeline(output_device=dummy)
    track = timeline.schedule({"note": 0})
    assert track.output_device == dummy
        # NOTE(review): this chunk starts mid-loop; the enclosing try and the
        # definitions of learner / message / velocity / dur / clock / clock0 /
        # midi_out lie outside this view. Indentation here is reconstructed.
        learner.register([message.note, velocity, dur])
        clock0 = clock
except KeyboardInterrupt:
    pass

print("----------------------------------------------------")
print("Ctrl-C detected, now playing back")
print("----------------------------------------------------")

# Pull the learned Markov chains: pitch, amplitude, duration.
chains = learner.chains()
pitch = chains[0]
amp = chains[1]
dur = chains[2]

if len(pitch.nodes) == 0:
    print("No notes detected")
else:
    # Play back the learned material over MIDI until interrupted.
    timeline = iso.Timeline(120, midi_out)
    timeline.schedule({
        "note": pitch,
        "duration": dur,
        "amplitude": amp,
        "channel": 0
    })
    try:
        timeline.run()
    except KeyboardInterrupt:
        timeline.output_device.all_notes_off()
#!/usr/bin/env python3 #------------------------------------------------------------------------ # isobar: ex-euclidean # # Uses Euclidean rhythms to generate multiple polyrhythmic voices. #------------------------------------------------------------------------ import isobar as iso import logging logging.basicConfig(level=logging.INFO, format="[%(asctime)s] %(message)s") timeline = iso.Timeline(100) timeline.schedule({ "note": 60 * iso.PEuclidean(5, 8), "duration": 0.25 }, delay=0.0) timeline.schedule({ "note": 62 * iso.PEuclidean(5, 13), "duration": 0.5 }, delay=0.25) timeline.schedule({ "note": 64 * iso.PEuclidean(7, 15), "duration": 0.5 }, delay=0.5) timeline.schedule({
#!/usr/bin/env python3
#------------------------------------------------------------------------
# MIDI clock sync input example.
# Start an external MIDI clock with this device as the clock target.
# The MidiInputDevice object estimates the input tempo via a moving average.
#------------------------------------------------------------------------

import isobar as iso

midi_in = iso.MidiInputDevice()

def print_tempo():
    # Only report once a tempo estimate is available (truthy).
    if midi_in.tempo:
        print("Estimated tempo: %.3f" % midi_in.tempo)

# Slave the timeline to the external MIDI clock, reporting the tempo
# estimate on every scheduled event.
timeline = iso.Timeline(120, clock_source=midi_in)
timeline.schedule({
    "action": print_tempo
})

print("Awaiting MIDI clock signal from %s..." % midi_in.device_name)

try:
    timeline.run()
except KeyboardInterrupt:
    timeline.output_device.all_notes_off()
def test_timeline_tempo():
    """The tempo passed to a Timeline should be reflected by its clock source."""
    timeline = iso.Timeline(100)
    assert timeline.clock_source.tempo == pytest.approx(100)
#!/usr/bin/env python3
#------------------------------------------------------------------------
# ex-osc-send
#
# Send OSC messages with a specified pattern.
#------------------------------------------------------------------------

import isobar as iso

# Send OSC to localhost on port 8010.
osc_device = iso.OSCOutputDevice("127.0.0.1", 8010)
timeline = iso.Timeline(120, output_device=osc_device)

# Alternate /freq messages between 440 and 880.
timeline.schedule({
    "osc_address": "/freq",
    "osc_params": [iso.PSequence([440, 880])]
})

try:
    timeline.run()
except KeyboardInterrupt:
    timeline.output_device.all_notes_off()
    # NOTE(review): this chunk starts mid-script; the enclosing try and the
    # definitions of midi_in / notes / durations / last_note_time lie outside
    # this view. Indentation here is reconstructed.
    while True:
        note = midi_in.receive()
        if note is not None:
            print(" - Read note: %s" % note.note)
            notes.append(note)
            # Record the gap since the previous note as its duration.
            if last_note_time is not None:
                durations.append(time.time() - last_note_time)
            last_note_time = time.time()
except KeyboardInterrupt:
    pass

if last_note_time:
    # Close off the final note's duration.
    durations.append(time.time() - last_note_time)

    print()
    print("----------------------------------------------------")
    print("Ctrl-C detected, now playing back")
    print("----------------------------------------------------")

    # Replay the captured notes with their recorded timings, once through.
    timeline = iso.Timeline(60)
    timeline.stop_when_done = True
    timeline.schedule({
        "note": iso.PSequence([note.note for note in notes], 1),
        "duration": iso.PSequence(durations, 1)
    })
    timeline.run()
else:
    print()
    print("No notes detected")
#------------------------------------------------------------------------
# Example of reading from a MIDI file in real time.
#------------------------------------------------------------------------

import isobar as iso
from isobar.io import MidiFileInputDevice

import argparse
import logging

logging.basicConfig(level=logging.INFO, format="[%(asctime)s] %(message)s")

parser = argparse.ArgumentParser(description="Read and play a .mid file")
parser.add_argument("filename", type=str, help="File to load (.mid)")
args = parser.parse_args()

#--------------------------------------------------------------------------------
# Read a MIDI file into a pattern.
# The resulting pattern is a PDict, with keys containing patterns for each
# of the event properties (note, duration, amplitude)
#--------------------------------------------------------------------------------
pattern = MidiFileInputDevice(args.filename).read()
print("Read pattern containing %d note events" % len(pattern["note"]))

timeline = iso.Timeline()
timeline.schedule(pattern)

try:
    timeline.run()
except KeyboardInterrupt:
    timeline.output_device.all_notes_off()
#------------------------------------------------------------------------
# Learn note, duration and amplitude series separately.
#------------------------------------------------------------------------
note_learner = iso.MarkovLearner()
note_learner.learn_pattern(pattern["note"])

dur_learner = iso.MarkovLearner()
dur_learner.learn_pattern(pattern["duration"])

#------------------------------------------------------------------------
# Quantize velocities to the nearest 10 to make chains easier to
# learn with a small sample set.
#------------------------------------------------------------------------
amp_learner = iso.MarkovLearner()
quantized_amplitudes = iso.PInt(iso.PRound(iso.PScalar(pattern["amplitude"]), -1))
amp_learner.learn_pattern(quantized_amplitudes)

#------------------------------------------------------------------------
# The markov property of a learner is a PMarkov, which generates
# outputs by traversing the Markov chain stochastically.
#------------------------------------------------------------------------
timeline = iso.Timeline(90)
timeline.schedule({
    "note": note_learner.markov,
    "duration": dur_learner.markov,
    "amplitude": amp_learner.markov
})

try:
    timeline.run()
except KeyboardInterrupt:
    timeline.output_device.all_notes_off()
import isobar as iso

'''
Volca Beats
Four-on-the-floor
'''

# Step sequences (None = rest); BD/CL/CH presumably map to kick, clap
# and closed hat on the Volca's MIDI note layout.
BD = iso.PSeq([36, None, None, None])
CL = iso.PSeq([None, 39])
CH = iso.PSeq([42, None, 42, None])

# A Timeline schedules events at a given BPM; by default, events go to
# the first MIDI output.
timeline = iso.Timeline(
    136,
    ticks_per_beat=96,
    division=4,
    debug=False
)

# With division=4, a duration of 1 now corresponds to a 1/16 note.
timeline.sched({'note': BD, 'channel': 9, 'dur': 1})
timeline.sched({'note': CL, 'channel': 9, 'dur': 4})
timeline.sched({'note': CH, 'channel': 9, 'dur': 2})

timeline.background()

# Drop into a debugger so the patterns can be tweaked live.
import ipdb; ipdb.set_trace()

'''
Try to update the patterns on the fly
'''