Code example #1
0
def main(argv):
    """Dump basic information from a MIDI file named on the command line.

    Prints the file header fields, then for every track: each NoteOn
    event's absolute time, note number and velocity, plus the text of
    any TrackName, CuePoint and Lyric meta events.

    Fix: the original mixed a Python 2 print statement (for the header)
    with print() calls; all output now uses uniform print() calls.
    """
    midi = midiparser.File(argv[1])
    # Header: filename, SMF format (0/1/2), track count, division
    # (ticks per quarter note).
    print(midi.file, midi.format, midi.num_tracks, midi.division)
    for track in midi.tracks:
        for event in track.events:
            if event.type == midiparser.voice.NoteOn:
                print(event.absolute)
                print(event.detail.note_no, event.detail.velocity)
            if event.type == midiparser.meta.TrackName:
                print(event.detail.text.strip())
            if event.type == midiparser.meta.CuePoint:
                print(event.detail.text.strip())
            if event.type == midiparser.meta.Lyric:
                print(event.detail.text.strip())
Code example #2
0
    def execute(self, app):
        """Generate G-code blocks that "play" a MIDI file on the steppers.

        Reads the plugin's variables (axes to use, pulses-per-unit, safe
        travel limits), parses the MIDI file named by the "File" variable,
        sorts all NoteOn/NoteOff events by absolute time, and emits one
        Block of linear moves whose per-axis feed rates reproduce up to
        len(AxisUsed) simultaneous note frequencies.  Silent periods
        become G4 dwells.  The result is inserted into app's G-code.
        """
        try:
            import midiparser as midiparser
        except:
            # NOTE(review): bare except also hides unrelated failures
            # (e.g. a syntax error inside midiparser.py), reporting them
            # all as "module missing".
            app.setStatus(_("Error: This plugin requires midiparser.py"))
            return

        # Block name from the plugin's "name" variable, with a fallback.
        n = self["name"]
        if not n or n == "default": n = "Midi2CNC"

        fileName = self["File"]

        # Current position and travel direction per axis; a direction
        # flips whenever the next move would leave the safe envelope.
        x = 0.0
        y = 0.0
        z = 0.0

        x_dir = 1.0
        y_dir = 1.0
        z_dir = 1.0

        # List of MIDI channels (instruments) to import.
        # Channel 10 is percussion, so better to omit it
        channels = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]

        # String of axis letters (e.g. "XYZ"); its length caps how many
        # notes can sound at once.
        axes = self["AxisUsed"]
        active_axes = len(axes)

        # Per-axis semitone transpose (currently all zero).
        transpose = (0, 0, 0)
        # Pulses (steps) per unit for each axis: converts a frequency in
        # Hz (steps/second) into a feed rate (units/minute).
        ppu = [200, 200, 200]
        ppu[0] = self["ppu_X"]
        # NOTE(review): the next two lines also read "ppu_X" — looks like
        # a copy-paste bug; presumably "ppu_Y" and "ppu_Z" were intended.
        # TODO confirm against the plugin's declared variables.
        ppu[1] = self["ppu_X"]
        ppu[2] = self["ppu_X"]

        # Safe working envelope per axis: [min, max].
        safemin = [0, 0, 0]
        safemax = [100, 100, 50]
        safemax[0] = self["max_X"]
        safemax[1] = self["max_Y"]
        safemax[2] = self["max_Z"]

        try:
            midi = midiparser.File(fileName)
        except:
            # NOTE(review): bare except — any parse or I/O error lands here.
            app.setStatus(_("Error: Sorry can't parse the Midi file."))
            return

        # Flat list of [absolute_time, on(1)/off(0), note_no, velocity]
        # collected from every track.
        noteEventList = []
        all_channels = set()

        for track in midi.tracks:
            #channels=set()
            for event in track.events:
                if event.type == midiparser.meta.SetTempo:
                    # Tempo in microseconds per quarter note.
                    # NOTE(review): "tempo" stays unbound if the file has
                    # no SetTempo event, yet it is read in the note loop
                    # below — TODO confirm the parser guarantees one.
                    tempo = event.detail.tempo

                # filter undesired instruments
                if ((event.type == midiparser.voice.NoteOn)
                        and (event.channel in channels)):

                    # NOTE(review): unreachable — the guard above already
                    # requires event.channel in channels (and "channels"
                    # is a list, which has no .add()).  Relic of the
                    # per-track set commented out above.
                    if event.channel not in channels:
                        channels.add(event.channel)

                    # NB: looks like some use "note on (vel 0)" as equivalent to note off, so check for vel=0 here and treat it as a note-off.
                    if event.detail.velocity > 0:
                        noteEventList.append([
                            event.absolute, 1, event.detail.note_no,
                            event.detail.velocity
                        ])
                    else:
                        noteEventList.append([
                            event.absolute, 0, event.detail.note_no,
                            event.detail.velocity
                        ])

                if (event.type == midiparser.voice.NoteOff) and (event.channel
                                                                 in channels):
                    # NOTE(review): same unreachable branch as above.
                    if event.channel not in channels:
                        channels.add(event.channel)
                    noteEventList.append([
                        event.absolute, 0, event.detail.note_no,
                        event.detail.velocity
                    ])

            # Finished with this track
            # NOTE(review): "channels" here is still the full 16-entry
            # filter list (the per-track set is commented out), so this
            # condition is always true and every channel number is
            # unioned regardless of actual track content.
            if len(channels) > 0:
                msg = ', '.join(['%2d' % ch for ch in sorted(channels)])
                #print 'Processed track %d, containing channels numbered: [%s ]' % (track.number, msg)
                all_channels = all_channels.union(channels)

        # List all channels encountered
        if len(all_channels) > 0:
            msg = ', '.join(['%2d' % ch for ch in sorted(all_channels)])
            #print 'The file as a whole contains channels numbered: [%s ]' % msg

        # We now have entire file's notes with abs time from all channels
        # We don't care which channel/voice is which, but we do care about having all the notes in order
        # so sort event list by abstime to dechannelify

        noteEventList.sort()
        # print noteEventList
        # print len(noteEventList)

        last_time = -0
        active_notes = {
        }  # make this a dict so we can add and remove notes by name

        # Start the output
        #Init blocks
        blocks = []
        block = Block(self.name)
        block.append("(Midi2CNC)")
        block.append("(Midi:%s)" % fileName)
        block.append(CNC.zsafe())
        block.append(CNC.grapid(0, 0))
        block.append(CNC.zenter(0))

        for note in noteEventList:
            # note[timestamp, note off/note on, note_no, velocity]
            if last_time < note[0]:
                # Time advanced: emit the move (or dwell) for the chord
                # that sounded during [last_time, note[0]) BEFORE applying
                # this event's note-on/off update.

                freq_xyz = [0, 0, 0]
                feed_xyz = [0, 0, 0]
                distance_xyz = [0, 0, 0]
                duration = 0

                # "i" ranges from 0 to "the number of active notes *or* the number of active axes,
                # whichever is LOWER". Note that the range operator stops
                # short of the maximum, so this means 0 to 2 at most for a 3-axis machine.
                # E.g. only look for the first few active notes to play despite what
                # is going on in the actual score.

                for i in range(0, min(len(active_notes.values()),
                                      active_axes)):

                    # Which axis are should we be writing to?
                    #
                    j = self.axes_dict.get(axes)[i]

                    # Debug
                    # print"Axes %s: item %d is %d" % (axes_dict.get(args.axes), i, j)

                    # Sound higher pitched notes first by sorting by pitch then indexing by axis
                    #
                    nownote = sorted(active_notes.values(), reverse=True)[i]

                    # MIDI note 69 = A4 (440Hz); equal-temperament scaling:
                    # 2 to the power (69-69) / 12 * 440 = A4 440Hz
                    # 2 to the power (64-69) / 12 * 440 = E4 329.627Hz
                    #
                    freq_xyz[j] = pow(
                        2.0, (nownote - 69 + transpose[j]) / 12.0) * 440.0

                    # Here is where we need smart per-axis feed conversions
                    # to enable use of X/Y *and* Z on a Makerbot
                    #
                    # feed_xyz[0] = X; feed_xyz[1] = Y; feed_xyz[2] = Z;
                    #
                    # Feed rate is expressed in mm / minutes so 60 times
                    # scaling factor is required.

                    feed_xyz[j] = (freq_xyz[j] * 60.0) / ppu[j]

                    # Get the duration in seconds from the MIDI values in divisions, at the given tempo
                    duration = (((note[0] - last_time) + 0.0) /
                                (midi.division + 0.0) * (tempo / 1000000.0))

                    # Get the actual relative distance travelled per axis in mm
                    distance_xyz[j] = (feed_xyz[j] * duration) / 60.0

                # Now that axes can be addressed in any order, need to make sure
                # that all of them are silent before declaring a rest is due.
                if distance_xyz[0] + distance_xyz[1] + distance_xyz[2] > 0:
                    # At least one axis is playing, so process the note into
                    # movements
                    combined_feedrate = math.sqrt(feed_xyz[0]**2 +
                                                  feed_xyz[1]**2 +
                                                  feed_xyz[2]**2)

                    # Turn around BEFORE crossing the limits of the
                    # safe working envelope
                    if self.reached_limit(x, distance_xyz[0], x_dir,
                                          safemin[0], safemax[0]):
                        x_dir = x_dir * -1
                    x = (x + (distance_xyz[0] * x_dir))

                    if self.reached_limit(y, distance_xyz[1], y_dir,
                                          safemin[1], safemax[1]):
                        y_dir = y_dir * -1
                    y = (y + (distance_xyz[1] * y_dir))

                    if self.reached_limit(z, distance_xyz[2], z_dir,
                                          safemin[2], safemax[2]):
                        z_dir = z_dir * -1
                    z = (z + (distance_xyz[2] * z_dir))

                    v = (x, y, z)
                    block.append(CNC.glinev(1, v, combined_feedrate))

                else:
                    # Handle 'rests' in addition to notes.
                    duration = (((note[0] - last_time) + 0.0) /
                                (midi.division + 0.0)) * (tempo / 1000000.0)
                    block.append(CNC.gcode(4, [("P", duration)]))

                # finally, set this absolute time as the new starting time
                last_time = note[0]

            if note[1] == 1:  # Note on
                if active_notes.has_key(note[2]):
                    pass
                else:
                    # key and value are the same, but we don't really care.
                    active_notes[note[2]] = note[2]
            elif note[1] == 0:  # Note off
                if (active_notes.has_key(note[2])):
                    active_notes.pop(note[2])

        # Insert the generated block into the editor after the active block.
        blocks.append(block)
        active = app.activeBlock()
        if active == 0: active = 1
        app.gcode.insBlocks(active, blocks, "Midi2CNC")
        app.refresh()
        app.setStatus(_("Generated Midi2CNC, ready to play?"))
Code example #3
0
def main(argv):
    """Convert a MIDI file into a shell script that plays it on motors.

    Parses the module-level ``midifile`` and writes commands to
    ``outfile``.  Behaviour depends on the module-level ``mode``:
    1 = Raspberry Pi Mouse /dev/rtmotor* commands, 2 = debug echo/sleep
    output.  Only events on ``imported_channels`` are used.

    NOTE(review): relies on globals defined elsewhere in the file
    (outfile, midifile, args, imported_channels, mode); FILE is never
    explicitly closed.  Python 2 code (print statements, has_key).
    """

    # Open the output script for writing.
    FILE = open(outfile, "w")

    # Parse the MIDI file.
    midi = midiparser.File(midifile)

    noteEventList = []

    # Show basic information about the MIDI file
    # (labels: track count / format / ticks per quarter note).
    print "FileName:" + args[1]
    print "トラック数:%d" % midi.num_tracks
    print "フォーマット形式:%d" % midi.format
    print "4分音符基準の分解能:%d" % midi.division
    print "===================================="
    # BPM scale factor relative to a 480 ticks-per-quarter division.
    division_level = float(midi.division) / 480

    # Walk every track...
    for track in midi.tracks:

        # ...and every event within it.
        for event in track.events:

            # Tempo change (SetTempo meta event).
            if event.type == midiparser.meta.SetTempo:

                # Tempo in microseconds per quarter note.
                # NOTE(review): "tempo" stays unbound if the file has no
                # SetTempo event, yet it is read below — TODO confirm.
                tempo = event.detail.tempo
                #print "4分音符の長さ(µs): " + str(event.detail.tempo)

                # BPM = 60 / (quarter-note length in seconds), adjusted
                # by the division scale factor.
                midi_bpm = float(60) / (division_level * tempo * 0.001 * 0.001)
                #print "BPM:%d" % midi_bpm
                #print "===================================="

            # NoteOn (start sounding a note) on an imported channel.
            if ((event.type == midiparser.voice.NoteOn)
                    and (event.channel in imported_channels)):
                # Velocity 0 is commonly used as an implicit note-off.
                if event.detail.velocity > 0:
                    noteEventList.append([
                        event.absolute, 1, event.detail.note_no,
                        event.detail.velocity
                    ])
                else:
                    noteEventList.append([
                        event.absolute, 0, event.detail.note_no,
                        event.detail.velocity
                    ])

            # NoteOff (stop sounding a note) on an imported channel.
            if (event.type
                    == midiparser.voice.NoteOff) and (event.channel
                                                      in imported_channels):
                noteEventList.append([
                    event.absolute, 0, event.detail.note_no,
                    event.detail.velocity
                ])

    # Chronological order across all tracks/channels.
    noteEventList.sort()
    nowtimecount = 0
    print "イベントの長さ:%d" % len(noteEventList)
    print "===================================="
    last_time = -0
    active_notes = {}
    for note in noteEventList:
        # note = [absolute_time, on(1)/off(0), note_no, velocity]
        if last_time < note[0]:
            freq_xyz = [0, 0, 0]
            for i in range(0,
                           min(len(active_notes.values()),
                               2)):  # number of axes for which to build notes
                # Highest pitches first; MIDI note 69 = A4 = 440 Hz.
                nownote = sorted(active_notes.values(), reverse=True)[i]
                freq_xyz[i] = pow(2.0, (nownote - 69) / 12.0) * 440.0
                #print "チャンネル:%d  ノート:%d  周波数:%f  %d" % (i,nownote,freq_xyz[i],note[0]-last_time)

            if mode == 1:
                ### Commands to drive a Raspberry Pi Mouse robot.
                # Motor power on.
                FILE.write("echo 1 > /dev/rtmotoren0\n")
                # Read the distance sensor (purely for show).
                FILE.write("cat < /dev/rtlightsensor0\n")
                # Motor command: left Hz, right Hz, duration in ms.
                FILE.write(
                    "echo %d %d %d > /dev/rtmotor0 && " %
                    (freq_xyz[0] * int(args[4]), freq_xyz[1] * int(args[4]),
                     ((float(note[0] - last_time) / midi.division) * tempo *
                      0.001)))
                # Motor power off.
                FILE.write("echo 0 > /dev/rtmotoren0\n")

                # Alternative approach:
                #FILE.write ("echo %d > /dev/rtmotor_raw_l0 && echo %d > /dev/rtmotor_raw_r0\n" % (freq_xyz[0],freq_xyz[1]))
                #FILE.write ("sleep %f\n" % ((float(note[0] - last_time)/midi.division)*tempo*0.001*0.001))

            elif mode == 2:
                ### Print frequencies and wait times on screen (debugging).
                nowtimecount += ((float(note[0] - last_time) / midi.division) *
                                 tempo * 0.001 * 0.001)
                FILE.write("echo -------------------------------\n")
                FILE.write("echo MusicTime[s]:%f\n" % nowtimecount)
                # NOTE(review): "%" binds at the same level as "*" and
                # evaluates left-to-right, so this formats the string with
                # freq_xyz[0] and then REPEATS it int(args[4]) times —
                # presumably (freq_xyz[0] * int(args[4])) was intended.
                # Same on the RightMotor line.  TODO confirm and fix.
                FILE.write("echo LeftMotor[Hz]:%d\n" % freq_xyz[0] *
                           int(args[4]))
                FILE.write("echo RightMotor[Hz]:%d\n" % freq_xyz[1] *
                           int(args[4]))
                FILE.write("echo DeltaTime[s]:%f\n" %
                           ((float(note[0] - last_time) / midi.division) *
                            tempo * 0.001 * 0.001))
                #FILE.write ("echo left:%7dRight:%-5d\tTime:%0.6f\n" % (freq_xyz[0]*int(args[4]),freq_xyz[1]*int(args[4]),((float(note[0] - last_time)/midi.division)*tempo*0.001*0.001)))
                # Wait for the note's duration.
                FILE.write("sleep %f\n" %
                           ((float(note[0] - last_time) / midi.division) *
                            tempo * 0.001 * 0.001))

            #print freq_xyz[1],freq_xyz[0]
            #print "経過時間:%d  周波数:%d  時間差:%d" % (last_time,freq_xyz[0],(note[0] - last_time))

            last_time = note[0]
        if note[1] == 1:  # Note on
            if active_notes.has_key(note[2]):
                pass
                #print "Warning: tried to turn on note already on!"
            else:
                active_notes[note[2]] = note[
                    2]  # key and value are the same, but we don't really care.
        elif note[1] == 0:  # Note off
            if (active_notes.has_key(note[2])):
                active_notes.pop(note[2])
            else:
                pass
                #print "Warning: tried to turn off note that wasn't on!"
    print("書き出し完了")
Code example #4
0
def main(argv):

    x = 0.0
    y = 0.0
    z = 0.0

    x_dir = 1.0
    y_dir = 1.0
    z_dir = 1.0

    FILE = open(outfile, "w")

    #midi = midiparser.File(argv[1])
    midi = midiparser.File(midifile)

    print midi.file, midi.format, midi.num_tracks, midi.division

    noteEventList = []

    for track in midi.tracks:
        for event in track.events:
            if event.type == midiparser.meta.SetTempo:
                tempo = event.detail.tempo
                print "Tempo change: " + str(event.detail.tempo)
            if ((event.type == midiparser.voice.NoteOn) and
                (event.channel
                 in imported_channels)):  # filter undesired instruments
                #print event.absolute,
                #print event.detail.note_no, event.detail.velocity
                # NB: looks like some use "note on (vel 0)" as equivalent to note off, so check for vel=0 here and treat it as a note-off.
                if event.detail.velocity > 0:
                    noteEventList.append([
                        event.absolute, 1, event.detail.note_no,
                        event.detail.velocity
                    ])
                else:
                    noteEventList.append([
                        event.absolute, 0, event.detail.note_no,
                        event.detail.velocity
                    ])
            if (event.type
                    == midiparser.voice.NoteOff) and (event.channel
                                                      in imported_channels):
                #print event.absolute,
                #print event.detail.note_no, event.detail.velocity
                noteEventList.append([
                    event.absolute, 0, event.detail.note_no,
                    event.detail.velocity
                ])
            if event.type == midiparser.meta.TrackName:
                print event.detail.text.strip()
            if event.type == midiparser.meta.CuePoint:
                print event.detail.text.strip()
            if event.type == midiparser.meta.Lyric:
                print event.detail.text.strip()
                #if event.type == midiparser.meta.KeySignature:
                # ...

    # We now have entire file's notes with abs time from all channels
    # We don't care which channel/voice is which, but we do care about having all the notes in order
    # so sort event list by abstime to dechannelify

    noteEventList.sort()
    #print noteEventList

    print len(noteEventList)
    last_time = -0
    active_notes = {
    }  # make this a dict so we can add and remove notes by name

    # Start the file...
    # It would be nice to add some metadata here, such as who/what generated the output, what the input file was,
    # and important playback parameters (such as steps/in assumed and machine envelope).
    # Unfortunately G-code comments are not 100% standardized...

    if suppress_comments == 0:
        FILE.write(
            "( File created with mid2cnc.py - http://tim.cexx.org/?p=633 )\n")
        FILE.write("( Input file was " + midifile + " )\n")

        FILE.write("( Steps per inch: " + str(machine_ppi) + " )\n")
        FILE.write("( Machine envelope: )\n")
        FILE.write("( x = " + str(machine_limit_x) + " )\n")
        FILE.write("( y = " + str(machine_limit_y) + " )\n")
        FILE.write("( z = " + str(machine_limit_z) + " )\n")

    FILE.write("G20\n")  # Set units to Imperial
    FILE.write("G00 X0 Y0 Z0\n")  # Home

    # General description of what follows: going through the chronologically-sorted list of note events, (in big outer loop) adding
    # or removing them from a running list of active notes (active_notes{}). Generally all the notes of a chord will turn on at the
    # same time, so nothing further needs to be done. If the delta time changes since the last note, though, we know how long the
    # last chord should play for, so dump out the running list as a linear move and continue collecting note events until the next
    # delta change...

    for note in noteEventList:
        #print note # [event.absolute, 0, event.detail.note_no, event.detail.velocity]
        if last_time < note[0]:
            # New time, so dump out current noteset for the time between last_time and the present, BEFORE processing new updates.
            # Whatever changes at this time (delta=0) will be handled when the next new time (nonzero delta) appears.

            freq_xyz = [0, 0, 0]
            feed_xyz = [0, 0, 0]
            distance_xyz = [0, 0, 0]

            for i in range(0,
                           min(len(active_notes.values()),
                               3)):  # number of axes for which to build notes
                # If there are more notes than axes, use the highest of the available notes, since they seem to sound the best
                # (lowest frequencies just tend to sound like growling and not musical at all)
                nownote = sorted(active_notes.values(), reverse=True)[i]
                freq_xyz[i] = pow(
                    2.0, (nownote - 69) / 12.0
                ) * 440.0  # convert note numbers to frequency for each axis in Hz
                feed_xyz[i] = freq_xyz[
                    i] * 60.0 / machine_ppi  # feedrate in IPM for each axis individually
                distance_xyz[i] = feed_xyz[i] * ((
                    (note[0] - last_time) + 0.0) / (midi.division + 0.0)) * (
                        tempo / 60000000.0)  #distance in inches for each axis
                # Also- what on earth were they smoking when they made precision of a math operation's output dependent on its undeclared-types value at any given moment?
                # (adding 0.0 to numbers above forces operations involving them to be computed with floating-point precision in case the number they contain happens to be an integer once in a while)

            print "Chord: [%.3f, %.3f, %.3f] for %d deltas" % (
                freq_xyz[0], freq_xyz[0], freq_xyz[0], (note[0] - last_time))

            # So, we now know the frequencies assigned to each axis and how long to play them, thus the distance.
            # So write it out as a linear move...

            # Feedrate from frequency: f*60/machine_ppi
            # Distance (move length): feedrate/60 (seconds); feedrate/60000 (ms)

            # And for the combined (multi-axis) feedrate... arbitrarily select one note as the reference, and the ratio of the
            # final (unknown) feedrate to the reference feedrate should equal the ratio of the 3D vector length (known) to the
            # reference length (known). That sounds too easy.

            # First, an ugly bit of logic to reverse directions if approaching the machine's limits

            x = x + (distance_xyz[0] * x_dir)
            if x > (machine_limit_x - machine_safety):
                x_dir = -1
            if x < machine_safety:
                x_dir = 1

            y = y + (distance_xyz[1] * y_dir)
            if y > (machine_limit_y - machine_safety):
                y_dir = -1
            if y < machine_safety:
                y_dir = 1

            z = z + (distance_xyz[2] * z_dir)
            if z > (machine_limit_z - machine_safety):
                z_dir = -1
            if z < machine_safety:
                z_dir = 1

            if distance_xyz[
                    0] > 0:  # handle 'rests' in addition to notes. How standard is this pause gcode, anyway?
                vector_length = math.sqrt(distance_xyz[0]**2 +
                                          distance_xyz[1]**2 +
                                          distance_xyz[2]**2)
                combined_feedrate = (vector_length /
                                     distance_xyz[0]) * feed_xyz[0]
                FILE.write("G01 X%.10f Y%.10f Z%.10f F%.10f\n" %
                           (x, y, z, combined_feedrate))
            else:
                temp = int((((note[0] - last_time) + 0.0) /
                            (midi.division + 0.0)) * (tempo / 1000.0))
                FILE.write("G04 P%0.4f\n" % (temp / 1000.0))

            # finally, set this absolute time as the new starting time
            last_time = note[0]

        if note[1] == 1:  # Note on
            if active_notes.has_key(note[2]):
                print "Warning: tried to turn on note already on!"
            else:
                active_notes[note[2]] = note[
                    2]  # key and value are the same, but we don't really care.
        elif note[1] == 0:  # Note off
            if (active_notes.has_key(note[2])):
                active_notes.pop(note[2])
            else:
                print "Warning: tried to turn off note that wasn't on!"