def initialization(dataset_path=None, metadatas=None, voice_ids=voice_ids_default, BACH_DATASET=BACH_DATASET):
    """Build and pickle a dataset, either from a custom directory of
    .mid/.xml files or (when ``dataset_path`` is None) from the music21
    Bach chorale corpus.

    :param dataset_path: directory containing .mid/.xml chorales, or None
    :param metadatas: metadata objects forwarded to ``make_dataset``
    :param voice_ids: indices of the voices to extract
    :param BACH_DATASET: pickle path used for the Bach corpus case
    """
    from glob import glob
    print('Creating dataset')
    if dataset_path:
        candidate_files = glob(dataset_path + '/*.mid') + glob(dataset_path + '/*.xml')
        chorale_list = filter_file_list(candidate_files, num_voices=NUM_VOICES)
        pickled_dataset = ('datasets/custom_dataset/'
                           + dataset_path.split('/')[-1] + '.pickle')
    else:
        chorale_list = filter_file_list(
            corpus.getBachChorales(fileExtensions='xml'))
        pickled_dataset = BACH_DATASET
    # remove wrong chorales:
    min_pitches, max_pitches = compute_min_max_pitches(chorale_list,
                                                       voices=voice_ids)
    make_dataset(chorale_list, pickled_dataset, voice_ids=voice_ids,
                 transpose=True, metadatas=metadatas)
def demoBachSearchBrief():
    """Collect, for every minor-mode Bach chorale whose final sonority is a
    (possibly incomplete) minor triad, that final chord transposed up an
    octave, and display all of them in one score."""
    found = stream.Stream()
    for path in corpus.getBachChorales():
        name = os.path.split(path)[1]
        title = name.replace('.xml', '')
        score = converter.parse(path)
        print(name)
        analyzed = score.analyze('key')
        if analyzed.mode != 'minor':
            continue
        finalChord = chord.Chord([part.flat.pitches[-1] for part in score.parts])
        finalChord.duration.type = "whole"
        finalChord.transpose("P8", inPlace=True)
        # strict `is False` checks kept: only a literal False skips the chord
        if finalChord.isMinorTriad() is False and finalChord.isIncompleteMinorTriad() is False:
            continue
        finalChord.lyric = title
        measure = stream.Measure()
        measure.keySignature = score.flat.getElementsByClass('KeySignature')[0]
        measure.append(finalChord)
        found.append(measure.makeAccidentals(inPlace=True))
    found.show()
def demoBachSearchBrief():
    """Show the (octave-transposed) final chord of each minor-mode Bach
    chorale whose closing sonority is a minor triad, complete or incomplete.
    """
    choraleList = corpus.getBachChorales()
    results = stream.Stream()
    for filePath in choraleList:
        fileName = os.path.split(filePath)[1]
        pieceName = fileName.replace('.xml', '')
        chorale = converter.parse(filePath)
        print(fileName)  # fixed: was a Python 2 print statement
        key = chorale.analyze('key')
        if key.mode == 'minor':
            lastChordPitches = []
            for part in chorale.parts:
                lastChordPitches.append(part.flat.pitches[-1])
            lastChord = chord.Chord(lastChordPitches)
            lastChord.duration.type = "whole"
            lastChord.transpose("P8", inPlace=True)
            if lastChord.isMinorTriad() is False and lastChord.isIncompleteMinorTriad() is False:
                continue
            lastChord.lyric = pieceName
            m = stream.Measure()
            m.keySignature = chorale.flat.getElementsByClass('KeySignature')[0]
            m.append(lastChord)
            results.append(m.makeAccidentals(inPlace=True))
    results.show()
def get_random_choral(log=True):
    """Return a random choral parsed from the J. S. Bach chorals corpus
    (in Music21).

    :param log: when True, print the chosen choral's title
    """
    # random.choice draws uniformly over the whole corpus; the original
    # randint(0, 399) hard-coded an assumed corpus size of 400 and would
    # raise IndexError on a smaller corpus
    choral_file = random.choice(corpus.getBachChorales())
    choral = corpus.parse(choral_file)
    if log:
        print("Chosen choral:", choral.metadata.title)
    return choral
def initialization(dataset_path=None, metadatas=None, voice_ids=voice_ids_default, BACH_DATASET=BACH_DATASET):
    """Create a pickled dataset from a custom directory when ``dataset_path``
    is given, otherwise from the music21 Bach chorale corpus."""
    from glob import glob
    print('Creating dataset')
    if dataset_path:
        source_files = glob(dataset_path + '/*.mid') + glob(dataset_path + '/*.xml')
        chorale_list = filter_file_list(source_files, num_voices=NUM_VOICES)
        # NOTE(review): output path is deliberately hard-coded rather than
        # derived from dataset_path -- the derived form reportedly failed on
        # this setup; confirm before generalizing
        pickled_dataset = 'datasets/custom_dataset/custom_dataset.pickle'
    else:
        chorale_list = filter_file_list(
            corpus.getBachChorales(fileExtensions='xml'))
        pickled_dataset = BACH_DATASET
    # remove wrong chorales:
    min_pitches, max_pitches = compute_min_max_pitches(chorale_list,
                                                       voices=voice_ids)
    make_dataset(chorale_list, pickled_dataset, voice_ids=voice_ids,
                 transpose=True, metadatas=metadatas)
def demoBachSearch():
    """Display the opening and closing chords (transposed up an octave,
    annotated with file name, key and triad quality) of up to 40 randomly
    chosen minor-mode Bach chorales.
    """
    import os
    import random
    from music21 import corpus

    fpList = corpus.getBachChorales('.xml')
    random.shuffle(fpList)
    results = stream.Stream()
    for fp in fpList[:40]:
        fn = os.path.split(fp)[1]
        print(fn)  # fixed: was a Python 2 print statement
        s = converter.parse(fp)
        # get key, mode; renamed from `key` to avoid shadowing the music21
        # `key` module the original imported (that import was never used)
        tonic, mode = s.analyze('key')[:2]
        if mode == 'minor':
            pFirst = []
            pLast = []
            for pStream in s.parts:
                # clear accidental display status
                pFirst.append(pStream.flat.getElementsByClass('Note')[0].pitch)
                pLast.append(pStream.flat.getElementsByClass('Note')[-1].pitch)
            cFirst = chord.Chord(pFirst)
            cFirst.quarterLength = 2
            cFirst.transpose(12, inPlace=True)
            cFirst.addLyric(fn)
            cFirst.addLyric('%s %s' % (tonic, mode))
            cLast = chord.Chord(pLast)
            cLast.quarterLength = 2
            cLast.transpose(12, inPlace=True)
            if cLast.isMajorTriad():
                cLast.addLyric('M')
            elif cLast.isMinorTriad():
                cLast.addLyric('m')
            else:
                cLast.addLyric('?')
            m = stream.Measure()
            m.keySignature = s.flat.getElementsByClass('KeySignature')[0]
            print('got', m.keySignature)  # fixed: Python 2 print statement
            m.append(cFirst)
            m.append(cLast)
            results.append(m.makeAccidentals(inPlace=True))
    results.show()
def demoBachSearch():
    """Display the opening and closing chords (transposed up an octave,
    annotated with file name, key and triad quality) of up to 40 randomly
    chosen minor-mode Bach chorales.
    """
    import random

    fpList = corpus.getBachChorales('.xml')
    random.shuffle(fpList)
    results = stream.Stream()
    for fp in fpList[:40]:
        fn = os.path.split(fp)[1]
        print(fn)
        s = converter.parse(fp)
        # get key, mode; renamed from `key` to avoid shadowing the music21
        # `key` module the original imported (that import was never used)
        tonic, mode = s.analyze('key')[:2]
        if mode == 'minor':
            pFirst = []
            pLast = []
            for pStream in s.parts:
                # clear accidental display status
                pFirst.append(pStream.flat.getElementsByClass('Note')[0].pitch)
                pLast.append(pStream.flat.getElementsByClass('Note')[-1].pitch)
            cFirst = chord.Chord(pFirst)
            cFirst.quarterLength = 2
            cFirst.transpose(12, inPlace=True)
            cFirst.addLyric(fn)
            cFirst.addLyric('%s %s' % (tonic, mode))
            cLast = chord.Chord(pLast)
            cLast.quarterLength = 2
            cLast.transpose(12, inPlace=True)
            if cLast.isMajorTriad():
                cLast.addLyric('M')
            elif cLast.isMinorTriad():
                cLast.addLyric('m')
            else:
                cLast.addLyric('?')
            m = stream.Measure()
            m.keySignature = s.flat.getElementsByClass('KeySignature')[0]
            print('got', m.keySignature)
            m.append(cFirst)
            m.append(cLast)
            results.append(m.makeAccidentals(inPlace=True))
    results.show()
def demoBachSearch():
    """Show the first and last chords (octave-up, lyric-annotated with file
    name, key, and major/minor/other triad quality) of up to 40 randomly
    picked minor-mode Bach chorales."""
    import random
    from music21 import key

    paths = corpus.getBachChorales(".xml")
    random.shuffle(paths)
    collected = stream.Stream()
    for path in paths[:40]:
        baseName = os.path.split(path)[1]
        print(baseName)
        score = converter.parse(path)
        # get key, mode
        key, mode = score.analyze("key")[:2]
        if mode != "minor":
            continue
        firstPitches = []
        lastPitches = []
        for part in score.parts:
            # clear accidental display status
            firstPitches.append(part.flat.getElementsByClass("Note")[0].pitch)
            lastPitches.append(part.flat.getElementsByClass("Note")[-1].pitch)
        opening = chord.Chord(firstPitches)
        opening.quarterLength = 2
        opening.transpose(12, inPlace=True)
        opening.addLyric(baseName)
        opening.addLyric("%s %s" % (key, mode))
        closing = chord.Chord(lastPitches)
        closing.quarterLength = 2
        closing.transpose(12, inPlace=True)
        if closing.isMajorTriad():
            closing.addLyric("M")
        elif closing.isMinorTriad():
            closing.addLyric("m")
        else:
            closing.addLyric("?")
        measure = stream.Measure()
        measure.keySignature = score.flat.getElementsByClass("KeySignature")[0]
        print("got", measure.keySignature)
        measure.append(opening)
        measure.append(closing)
        collected.append(measure.makeAccidentals(inPlace=True))
    collected.show()
def testEx03(self):
    # What is the most common closing soprano scale degree by key signature
    # in the bach chorales?  (only the first 2 chorales are scanned here)
    results = {}
    for fn in corpus.getBachChorales()[:2]:
        s = corpus.parse(fn)
        # scale implied by the piece's first key signature
        ksScale = s.flat.getElementsByClass('KeySignature')[0].getScale()
        for p in s.parts:
            if p.id.lower() == 'soprano':
                n = s.parts['soprano'].flat.getElementsByClass('Note')[-1]
                degree = ksScale.getScaleDegreeFromPitch(n.pitch)
                # dict.get replaces the needless `not in results.keys()`
                # membership test + two-step increment
                results[degree] = results.get(degree, 0) + 1
def initialize_transposition_dataset(dataset_dir=None, metadatas: List[Metadata] = None):
    """
    Create 'datasets/transpose/bach_sop.pickle' or
    'datasets/transpose/custom_dataset.pickle'
    :param dataset_dir: use Bach chorales if None
    :param metadatas: metadata objects forwarded to the dataset builder;
        defaults to an empty list
    :type metadatas: List[Metadata]
    :return:
    :rtype:
    """
    from glob import glob
    # fixed: the original used a mutable default argument (metadatas=[]),
    # which is shared across calls; None + in-body default is safe
    if metadatas is None:
        metadatas = []
    PACKAGE_DIR = os.path.dirname(__file__)
    NUM_VOICES = 1
    voice_ids = [SOP_INDEX]
    print('Creating dataset')
    if dataset_dir:
        chorale_list = filter_file_list(
            glob(dataset_dir + '/*.mid') + glob(dataset_dir + '/*.xml'),
            num_voices=NUM_VOICES)
        dataset_name = dataset_dir.split('/')[-1]
        pickled_dataset = os.path.join(
            PACKAGE_DIR, 'datasets/transpose/' + dataset_name + '.pickle')
    else:
        chorale_list = filter_file_list(
            corpus.getBachChorales(fileExtensions='xml'))
        pickled_dataset = os.path.join(PACKAGE_DIR,
                                       'datasets/transpose/bach_sop.pickle')
    # remove wrong chorales:
    min_pitches, max_pitches = compute_min_max_pitches(chorale_list,
                                                       voices=voice_ids)
    make_transposition_dataset(chorale_list, pickled_dataset,
                               voice_ids=voice_ids, metadatas=metadatas)
def bachParallels():
    '''
    find all instances of parallel fifths or octaves in Bach chorales.

    Checking the work of George Fitsioris and Darrell Conklin,
    "Parallel successions of perfect fifths in the Bach chorales"
    Proceedings of the fourth Conference on Interdisciplinary Musicology
    (CIM08) Thessaloniki, Greece, 3-6 July 2008, http://web.auth.gr/cim08/
    '''
    from music21 import corpus
    for fn in corpus.getBachChorales():
        print(fn)
        c = corpus.parse(fn)
        displayMe = False  # set True once any parallel P5/P8 is found in this chorale
        # compare each part against every later part, restricted to SATB-named parts
        for i in range(len(c.parts) - 1):
            iName = c.parts[i].id
            if iName.lower() not in ['soprano', 'alto', 'tenor', 'bass']:
                continue
            ifn = c.parts[i].flat.notesAndRests
            omi = ifn.offsetMap  # per-note records with 'element', 'offset', 'endTime'
            for j in range(i + 1, len(c.parts)):
                jName = c.parts[j].id
                if jName.lower() not in ['soprano', 'alto', 'tenor', 'bass']:
                    continue
                jfn = c.parts[j].flat.notesAndRests
                # walk consecutive note pairs (n1pi, n2pi) in part i and find the
                # simultaneous notes (n1pj, n2pj) in part j
                for k in range(len(omi) - 1):
                    n1pi = omi[k]['element']
                    n2pi = omi[k + 1]['element']
                    # the -.001 epsilon queries just before n1pi ends so a note in
                    # part j still sounding there is matched (mustBeginInSpan=False
                    # allows held notes that started earlier)
                    n1pj = jfn.getElementsByOffset(
                        offsetStart=omi[k]['endTime'] - .001,
                        offsetEnd=omi[k]['endTime'] - .001,
                        mustBeginInSpan=False)[0]
                    n2pj = jfn.getElementsByOffset(
                        offsetStart=omi[k + 1]['offset'],
                        offsetEnd=omi[k + 1]['offset'],
                        mustBeginInSpan=False)[0]
                    if n1pj is n2pj:
                        continue  # no oblique motion
                    # only plain note-against-note motion is classified
                    if n1pi.isRest or n2pi.isRest or n1pj.isRest or n2pj.isRest:
                        continue
                    if n1pi.isChord or n2pi.isChord or n1pj.isChord or n2pj.isChord:
                        continue
                    vlq = voiceLeading.VoiceLeadingQuartet(
                        n1pi, n2pi, n1pj, n2pj)
                    if vlq.parallelMotion(
                            'P8') is False and vlq.parallelMotion(
                            'P5') is False:
                        continue
                    displayMe = True
                    # annotate the offending notes directly in the score
                    n1pi.addLyric('par ' + str(vlq.vIntervals[0].name))
                    n2pi.addLyric(' w/ ' + jName)
                    # m1 = stream.Measure()
                    # m1.append(n1pi)
                    # m1.append(n2pi)
                    # r1 = note.Rest()
                    # r1.duration.quarterLength = 8 - m1.duration.quarterLength
                    # m1.append(r1)
                    # m2 = stream.Measure()
                    # m2.append(n1pj)
                    # m2.append(n2pj)
                    # r2 = note.Rest()
                    # r2.duration.quarterLength = 8 - m2.duration.quarterLength
                    # m2.append(r2)
                    #
                    # p1.append(m1)
                    # p2.append(m2)
                    # sc.append(p1)
                    # sc.append(p2)
                    # sc.show()
        # show the annotated chorale only when something was found
        if displayMe:
            c.show()
def getMusicDataset():
    """Return alto-line token sequences for (up to) 100 Bach chorales.

    The result is cached in 'music_dataset.p'; when the cache exists it is
    loaded and returned directly.  Otherwise the chorales are parsed, the
    alto parts extracted, and every note/rest expanded into pitch-name
    tokens ('R' for rests, 'H' for held continuations) quantized to the
    shortest duration found in the data.
    """
    if path.isfile("music_dataset.p"):
        print("Loading input data")
        # `with` guarantees the cache file is closed even if load() raises
        # (the original opened/closed the file by hand)
        with open("music_dataset.p", "rb") as music_file:
            return load(music_file)

    bach_files = getBachChorales(fileExtensions='xml')
    chorale_files = bach_files[:100]
    chorales = list()
    for chorale_file in tqdm(chorale_files, desc="Parsing chorale files"):
        chorales.append(parse(chorale_file))
    alto_chorales = []
    for chorale in tqdm(chorales, desc="Extracting alto parts"):
        for part in chorale.parts:
            if part.partName == "Alto":
                alto_chorales.append(part.flat.notesAndRests)
    chorale_data = []
    for part in tqdm(alto_chorales, desc="Creating note/rest lists"):
        data = []
        for nor in part:
            if nor.isNote or nor.isRest:
                data.append((nor.name, nor.duration.quarterLength / 4))
        chorale_data.append(data)
    print("Finding Minimum Duration")
    min_dur = min([nor[1] for chorale in chorale_data for nor in chorale])
    input_data = []

    # music21 spells flats with '-'; fold flats (and E#) onto sharp/natural
    # equivalents so the token vocabulary stays small -- a mapping replaces
    # the original eight-branch elif chain
    enharmonic = {"rest": "R", "D-": "C#", "F-": "E", "G-": "F#",
                  "B-": "A#", "E-": "D#", "A-": "G#", "E#": "F"}

    def extendNor(p, d):
        # expand one (pitch, duration) pair into min_dur-sized tokens:
        # the pitch itself, then an "H" for each held continuation step
        results = list()
        p = enharmonic.get(p, p)
        if d > min_dur:
            num_notes = int(d / min_dur)
            results.append(p)
            for i in range(num_notes - 1):
                results.append("H")
        else:
            results.append(p)
        return results

    for chorale in tqdm(chorale_data, desc="Creating regularized note data"):
        new_input = []
        for (pitch, dur) in chorale:
            new_notes = extendNor(pitch, dur)
            new_input.extend(new_notes)
        input_data.append(new_input)
    print("Dumping input data to pickle file")
    with open("music_dataset.p", "wb") as music_file:
        dump(input_data, music_file)
    return input_data
def bachParallels():
    '''
    find all instances of parallel fifths or octaves in Bach chorales.

    Checking the work of George Fitsioris and Darrell Conklin,
    "Parallel successions of perfect fifths in the Bach chorales"
    Proceedings of the fourth Conference on Interdisciplinary Musicology
    (CIM08) Thessaloniki, Greece, 3-6 July 2008, http://web.auth.gr/cim08/
    '''
    from music21 import corpus
    for fn in corpus.getBachChorales():
        print (fn)
        c = corpus.parse(fn)
        displayMe = False  # flips True when a parallel P5/P8 is found in this chorale
        # pair each part with every later part; only SATB-named parts are compared
        for i in range(len(c.parts) - 1):
            iName = c.parts[i].id
            if iName.lower() not in ['soprano', 'alto', 'tenor', 'bass']:
                continue
            ifn = c.parts[i].flat.notesAndRests
            omi = ifn.offsetMap  # records with 'element', 'offset', 'endTime' per note
            for j in range(i+1, len(c.parts)):
                jName = c.parts[j].id
                if jName.lower() not in ['soprano', 'alto', 'tenor', 'bass']:
                    continue
                jfn = c.parts[j].flat.notesAndRests
                # for each consecutive note pair in part i, fetch the notes
                # sounding at the same moments in part j
                for k in range(len(omi) - 1):
                    n1pi = omi[k]['element']
                    n2pi = omi[k+1]['element']
                    # query a hair (-.001) before n1pi's end so a held note in
                    # part j is matched; mustBeginInSpan=False admits notes
                    # that began earlier
                    n1pj = jfn.getElementsByOffset(offsetStart = omi[k]['endTime'] - .001,
                                                   offsetEnd = omi[k]['endTime'] - .001,
                                                   mustBeginInSpan = False)[0]
                    n2pj = jfn.getElementsByOffset(offsetStart = omi[k+1]['offset'],
                                                   offsetEnd = omi[k+1]['offset'],
                                                   mustBeginInSpan = False)[0]
                    if n1pj is n2pj:
                        continue # no oblique motion
                    # skip anything that is not plain note-against-note motion
                    if n1pi.isRest or n2pi.isRest or n1pj.isRest or n2pj.isRest:
                        continue
                    if n1pi.isChord or n2pi.isChord or n1pj.isChord or n2pj.isChord:
                        continue
                    vlq = voiceLeading.VoiceLeadingQuartet(n1pi, n2pi, n1pj, n2pj)
                    if vlq.parallelMotion('P8') is False and vlq.parallelMotion('P5') is False:
                        continue
                    displayMe = True
                    # mark the offending notes in the score itself
                    n1pi.addLyric('par ' + str(vlq.vIntervals[0].name))
                    n2pi.addLyric(' w/ ' + jName)
                    # m1 = stream.Measure()
                    # m1.append(n1pi)
                    # m1.append(n2pi)
                    # r1 = note.Rest()
                    # r1.duration.quarterLength = 8 - m1.duration.quarterLength
                    # m1.append(r1)
                    # m2 = stream.Measure()
                    # m2.append(n1pj)
                    # m2.append(n2pj)
                    # r2 = note.Rest()
                    # r2.duration.quarterLength = 8 - m2.duration.quarterLength
                    # m2.append(r2)
                    #
                    # p1.append(m1)
                    # p2.append(m2)
                    # sc.append(p1)
                    # sc.append(p2)
                    # sc.show()
        # display only chorales in which at least one parallel was marked
        if displayMe:
            c.show()
def ks_mem(freq):
    """ Alternative memory for Karplus-Strong """
    # Sum of three sinusoid partials (at 1x, 3x and 9x the base frequency)
    # plus white noise and a Stream(-1, 1) term, divided by 5 to scale down.
    return (sum(lz.sinusoid(x * freq) for x in [1, 3, 9]) + lz.white_noise() + lz.Stream(-1, 1)) / 5

# Configuration
# NOTE(review): this script tail continues past the visible chunk; it is
# Python 2 code (see the `print u"..."` statement below).
rate = 44100  # sample rate, in samples per second
s, Hz = lz.sHz(rate)  # presumably AudioLazy-style second/Hertz unit constants -- confirm
ms = 1e-3 * s
beat = 90  # bpm
step = 60. / beat * s  # duration of one beat, expressed in the `s` unit

# Open the choral file
# NOTE(review): randint(0, 399) assumes the corpus holds at least 400 chorales
choral_file = corpus.getBachChorales()[random.randint(0, 399)]
choral = corpus.parse(choral_file)
print u"Playing", choral.metadata.title

# Creates the score from the music21 data
score = reduce(operator.concat,
               [[(pitch.frequency * Hz,  # Note
                  note.offset * step,  # Starting time
                  note.quarterLength * step,  # Duration
                  Fermata in note.expressions) for pitch in note.pitches]
                for note in choral.flat.notes]
               )

# Mix all notes into song
song = lz.Streamix()
last_start = 0  # running start offset; updated further down (not shown here)