def write(self, obj, fmt, fp=None, subformats=None, makeNotation=True,
          addRecipSpine=False, expandTremolos=True, **keywords):
    '''
    Write `obj` in Humdrum **kern format to `fp`, or to a new temporary
    file when `fp` is None, and return the path written.

    An explicit `fp` is cleaned to a pathlib.Path and given a '.krn'
    suffix if it has none.
    '''
    if fp is None:
        fp = self.getTemporaryFile()
    else:
        fp = common.cleanpath(fp, returnPathlib=True)
        if not fp.suffix:
            fp = fp.with_suffix('.krn')

    writer = HumdrumWriter(obj)
    writer.makeNotation = makeNotation
    writer.addRecipSpine = addRecipSpine
    writer.expandTremolos = expandTremolos

    with open(fp, 'w', encoding='utf8') as outFile:
        writer.write(outFile)
    return fp
def removePath(self, directoryPath):
    r'''
    Remove a directory path from a local corpus.

    If that path is included in the list of persisted paths for the
    given corpus, it will be removed permanently.

    >>> testCorpus = corpus.corpora.LocalCorpus(name='test')
    >>> testCorpus.addPath('~/Desktop')
    >>> len(testCorpus.directoryPaths)
    1
    >>> testCorpus.removePath('~/Desktop')
    >>> testCorpus.directoryPaths
    ()

    TODO: test for corpus persisted to disk without actually reindexing
        files on user's Desktop.
    '''
    temporaryPaths = LocalCorpus._temporaryLocalPaths.get(self.name, [])
    pathObj: pathlib.Path = common.cleanpath(directoryPath, returnPathlib=True)
    if pathObj in temporaryPaths:
        temporaryPaths.remove(pathObj)
    # LocalCorpusSettings is a list-like container of *strings* (see
    # comments in environment.py), so the settings lookup/removal must
    # use the string form of the path.
    pathStr = str(pathObj)
    if self.existsInSettings:
        settings = self._getSettings()
        if settings is not None and pathStr in settings:
            settings.remove(pathStr)
        self.save()
    self._removeNameFromCache(self.name)
def _findPaths(
    self,
    rootDirectoryPath: pathlib.Path,
    fileExtensions: List[str]
):
    '''
    Recursively search `rootDirectoryPath` for files whose suffix ends
    with any of the extensions in `fileExtensions` (a list of file
    extensions), returning the matches in sorted order.

    NB: we've tried optimizing with `fnmatch` but it does not save any
    time.

    Generally cached.
    '''
    rootPath = common.cleanpath(rootDirectoryPath, returnPathlib=True)
    matched = []
    for innerPath in sorted(rootPath.rglob('*')):
        # skip dunder and hidden entries (e.g. __pycache__, .DS_Store)
        if innerPath.name.startswith('__') or innerPath.name.startswith('.'):
            continue
        if any(innerPath.suffix.endswith(ext) for ext in fileExtensions):
            matched.append(innerPath)
    # this is actually twice as slow...
    # for extension in fileExtensions:
    #     for filename in rootPath.rglob('*' + extension):
    #         ... etc ...
    return matched
def addPath(self, directoryPath):
    r'''
    Add a directory path to a local corpus:

    >>> localCorpus = corpus.corpora.LocalCorpus('a new corpus')
    >>> localCorpus.addPath('~/Desktop')

    Paths added in this way will not be persisted from session to session
    unless explicitly saved by a call to ``LocalCorpus.save()``.
    '''
    from music21 import corpus
    # only string-ish paths are accepted in this revision
    if not isinstance(directoryPath, six.string_types):
        raise corpus.CorpusException(
            'an invalid file path has been provided: {0!r}'.format(directoryPath))
    directoryPath = common.cleanpath(directoryPath)
    # the path must name an existing directory
    if not (os.path.exists(directoryPath) and os.path.isdir(directoryPath)):
        raise corpus.CorpusException(
            'an invalid file path has been provided: {0!r}'.format(directoryPath))
    temporaryPaths = LocalCorpus._temporaryLocalPaths.setdefault(self.name, set())
    temporaryPaths.add(directoryPath)
    self._removeNameFromCache(self.name)
def addPath(self, directoryPath):
    r'''
    Add a directory path to a local corpus:

    >>> localCorpus = corpus.corpora.LocalCorpus('a new corpus')
    >>> localCorpus.addPath('~/Desktop')

    Paths added in this way will not be persisted from session to session
    unless explicitly saved by a call to ``LocalCorpus.save()``.
    '''
    from music21 import corpus
    if not isinstance(directoryPath, (str, pathlib.Path)):
        raise corpus.CorpusException(
            'an invalid file path has been provided: {0!r}'.format(directoryPath))
    cleaned = common.cleanpath(directoryPath, returnPathlib=True)
    # the cleaned path must name an existing directory
    if not (cleaned.exists() and cleaned.is_dir()):
        raise corpus.CorpusException(
            'an invalid file path has been provided: {0!r}'.format(cleaned))
    LocalCorpus._temporaryLocalPaths.setdefault(self.name, set()).add(cleaned)
    self._removeNameFromCache(self.name)
def manualCoreCorpusPath(self, expr):
    '''
    Set (or clear, when `expr` is None) the user's manualCoreCorpusPath
    preference and persist the environment settings to disk.

    Raises CorpusException when `expr` does not name an existing directory.
    '''
    userSettings = environment.UserSettings()
    if expr is None:
        userSettings['manualCoreCorpusPath'] = None
    else:
        path = common.cleanpath(expr)
        if not (os.path.isdir(path) and os.path.exists(path)):
            raise CorpusException("path needs to be a path to an existing directory")
        userSettings['manualCoreCorpusPath'] = path
    environment.Environment().write()
def manualCoreCorpusPath(self, expr):  # pragma: no cover
    '''
    Set (or clear, when `expr` is None) the user's manualCoreCorpusPath
    preference and persist the environment settings to disk.

    Raises CorpusException when `expr` does not name an existing directory.
    '''
    userSettings = environment.UserSettings()
    newValue = None
    if expr is not None:
        newValue = common.cleanpath(expr)
        if not os.path.isdir(newValue) or not os.path.exists(newValue):
            raise CorpusException('path needs to be a path to an existing directory')
    userSettings['manualCoreCorpusPath'] = newValue
    environment.Environment().write()
def manualCoreCorpusPath(self, expr):  # pragma: no cover
    '''
    Set (or clear, when `expr` is None) the user's manualCoreCorpusPath
    preference and persist the environment settings to disk.

    Raises CorpusException when `expr` does not name an existing directory.
    '''
    userSettings = environment.UserSettings()
    if expr is None:
        userSettings['manualCoreCorpusPath'] = None
    else:
        pathObj = common.cleanpath(expr, returnPathlib=True)
        if not (pathObj.exists() and pathObj.is_dir()):
            raise CorpusException('path needs to be a path to an existing directory')
        userSettings['manualCoreCorpusPath'] = pathObj
    environment.Environment().write()
def __setitem__(self, key, value):
    '''
    Validate, normalize, and store a preference `value` under `key`.

    Path-like values are cleaned, '' becomes None, a few known keys are
    checked against their valid-value lists, strings are XML-escaped,
    and 'localCorpusPath' is appended (if unique) to the
    'localCorpusSettings' list instead of being stored directly.

    Raises EnvironmentException for an unknown key or an invalid value.
    '''
    #saxutils.escape # used for escaping strings going to xml
    # with unicode encoding
    # http://www.xml.com/pub/a/2002/11/13/py-xml.html?page=2
    # saxutils.escape(msg).encode('UTF-8')
    # add local corpus path as a key
    #if six.PY3 and isinstance(value, bytes):
    #    value = value.decode(errors='replace')
    # any key containing 'path' gets its value normalized to a clean path
    if 'path' in key.lower() and value is not None:
        value = common.cleanpath(value)
    # 'localCorpusPath' is the only accepted key that is not in _ref
    if key not in self._ref:
        if key != 'localCorpusPath':
            raise EnvironmentException('no preference: %s' % key)
    if value == '':
        value = None  # always replace '' with None
    valid = False
    # NOTE(review): a None value reaching the .lower() calls below would
    # raise AttributeError -- presumably these keys are never cleared to
    # None; confirm with callers.
    if key == 'showFormat':
        value = value.lower()
        if value in common.VALID_SHOW_FORMATS:
            valid = True
    elif key == 'writeFormat':
        value = value.lower()
        if value in common.VALID_WRITE_FORMATS:
            valid = True
    elif key == 'autoDownload':
        value = value.lower()
        if value in common.VALID_AUTO_DOWNLOAD:
            valid = True
    elif key == 'localCorpusSettings':
        # needs to be a list of strings for now
        if common.isListLike(value):
            valid = True
    else:
        # temporarily not validating other preferences
        valid = True
    if not valid:
        raise EnvironmentException(
            '{} is not an acceptable value for preference: {}'.format(
                value, key))
    # need to escape problematic characters for xml storage
    if isinstance(value, six.string_types):
        value = saxutils.escape(value)  #.encode('UTF-8')
    # set value
    if key == 'localCorpusPath':
        # only add if unique
        #value = xmlnode.fixBytes(value)
        if value not in self._ref['localCorpusSettings']:
            # check for malicious values here
            self._ref['localCorpusSettings'].append(value)
    else:
        self._ref[key] = value
def writeDataStream(self, fp, dataBytes):
    '''
    Write `dataBytes` in binary mode to `fp`, or to a new temporary file
    when `fp` is None, and return the path written.
    '''
    if fp is None:
        fp = self.getTemporaryFile()
    else:
        fp = common.cleanpath(fp)
    with open(fp, 'wb') as outFile:
        outFile.write(dataBytes)
    return fp
def cacheFilePath(self, value):
    '''
    Set the path to the file path that stores the .json file.

    The corpus must already have been saved; the new path is cleaned and
    written back into the environment settings.
    '''
    if not self.existsInSettings:
        raise CorpusException('Save this corpus before changing the cacheFilePath')
    settings = self._getSettings()
    settings.cacheFilePath = common.cleanpath(value)
    env = environment.Environment()
    # the default 'local' corpus lives at the top level; named corpora
    # live inside 'localCorporaSettings'
    if self.name == 'local':
        env['localCorpusSettings'] = settings
    else:
        env['localCorporaSettings'][self.name] = settings
    env.write()
def cacheFilePath(self, value):
    '''
    Set the path to the file path that stores the .json file.

    The corpus must already have been saved; the new path is cleaned to
    a pathlib.Path and written back into the environment settings.
    '''
    if not self.existsInSettings:
        raise CorpusException('Save this corpus before changing the cacheFilePath')
    corpusSettings = self._getSettings()
    corpusSettings.cacheFilePath = common.cleanpath(value, returnPathlib=True)
    env = environment.Environment()
    # the default 'local' corpus lives at the top level; named corpora
    # live inside 'localCorporaSettings'
    if self.name == 'local':
        env['localCorpusSettings'] = corpusSettings
    else:
        env['localCorporaSettings'][self.name] = corpusSettings
    env.write()
def removePath(self, directoryPath):
    r'''
    Remove a directory path from a local corpus.  If that path is
    included in the list of persisted paths for the given corpus, it
    will be removed permanently.
    '''
    cleanedPath = common.cleanpath(directoryPath)
    temporaryPaths = LocalCorpus._temporaryLocalPaths.get(self.name, [])
    if cleanedPath in temporaryPaths:
        temporaryPaths.remove(cleanedPath)
    if self.existsInSettings:
        settings = self._getSettings()
        if settings is not None and cleanedPath in settings:
            settings.remove(cleanedPath)
        # persist the removal for future sessions
        self.save()
    self._removeNameFromCache(self.name)
def removePath(self, directoryPath):
    r'''
    Remove a directory path from a local corpus.  If that path is
    included in the list of persisted paths for the given corpus, it
    will be removed permanently.
    '''
    pathToRemove = common.cleanpath(directoryPath)
    sessionPaths = LocalCorpus._temporaryLocalPaths.get(self.name, [])
    if pathToRemove in sessionPaths:
        sessionPaths.remove(pathToRemove)
    if self.existsInSettings:
        persisted = self._getSettings()
        if persisted is not None and pathToRemove in persisted:
            persisted.remove(pathToRemove)
        # persist the removal for future sessions
        self.save()
    self._removeNameFromCache(self.name)
def writeDataStream(self, fp, dataStr):  # pragma: no cover
    '''
    Writes the data stream to `fp` or to a temporary file and returns the
    filename written.

    `fp` may be a path or an object with a .write() method.  Binary
    converters (`self.readBinary` True) write with 'wb'; otherwise 'w'.
    When `self.codecWrite` is False the platform default encoding is
    tried first, falling back to `self.stringEncoding` on a
    UnicodeEncodeError; when True, `self.stringEncoding` is used directly.

    Bug fixes vs. the previous revision: the file handle is now closed on
    the success path of the default-encoding branch (it was leaked), and
    it is closed before SubConverterException is raised on a TypeError.
    '''
    if fp is None:
        fp = self.getTemporaryFile()

    if self.readBinary is False:
        writeFlags = 'w'
    else:
        writeFlags = 'wb'

    if self.codecWrite is False:
        if hasattr(fp, 'write'):  # is a filelike object
            f = fp
        else:
            fp = common.cleanpath(fp)
            f = open(fp, writeFlags)
        try:
            if isinstance(dataStr, bytes):
                f.write(dataStr.decode('utf-8'))
            else:
                f.write(dataStr)
            f.close()  # previously leaked on the success path
        except UnicodeEncodeError:
            # retry with the converter's declared encoding.
            # NOTE(review): this reopen assumes fp is a path, not a
            # filelike object -- confirm callers never pass a stream
            # that can hit this branch.
            f.close()
            f = io.open(fp, mode=writeFlags, encoding=self.stringEncoding)
            f.write(dataStr)
            f.close()
        except TypeError as te:
            f.close()  # do not leak the handle when conversion fails
            raise SubConverterException("Could not convert %r : %r" % (dataStr, te))
    else:
        if hasattr(fp, 'write'):  # is a filelike object
            f = fp
        else:
            f = io.open(fp, mode=writeFlags, encoding=self.stringEncoding)
        f.write(dataStr)
        f.close()
    return fp
def writeDataStream(self, fp, dataStr):
    '''
    Writes the data stream to `fp` or to a temporary file and returns the
    filename written.

    `fp` may be a path or an object with a .write() method.  Binary
    converters (`self.readBinary` True) write with 'wb'; otherwise 'w'.
    When `self.codecWrite` is False the platform default encoding is
    tried first, falling back to `self.stringEncoding` on a
    UnicodeEncodeError; when True, `self.stringEncoding` is used directly.

    Bug fixes vs. the previous revision: the file handle is now closed on
    the success path of the default-encoding branch (it was leaked), and
    it is closed before SubConverterException is raised on a TypeError.
    '''
    if fp is None:
        fp = self.getTemporaryFile()

    if self.readBinary is False:
        writeFlags = 'w'
    else:
        writeFlags = 'wb'

    if self.codecWrite is False:
        if hasattr(fp, 'write'):  # is a filelike object
            f = fp
        else:
            fp = common.cleanpath(fp)
            f = open(fp, writeFlags)
        try:
            # on Python 2, bytes is str, so only decode under Python 3
            if six.PY3 and isinstance(dataStr, bytes):
                f.write(dataStr.decode('utf-8'))
            else:
                f.write(dataStr)
            f.close()  # previously leaked on the success path
        except UnicodeEncodeError:
            # retry with the converter's declared encoding.
            # NOTE(review): this reopen assumes fp is a path, not a
            # filelike object -- confirm callers never pass a stream
            # that can hit this branch.
            f.close()
            f = io.open(fp, mode=writeFlags, encoding=self.stringEncoding)
            f.write(dataStr)
            f.close()
        except TypeError as te:
            f.close()  # do not leak the handle when conversion fails
            raise SubConverterException("Could not convert %r : %r" % (dataStr, te))
    else:
        if hasattr(fp, 'write'):  # is a filelike object
            f = fp
        else:
            f = io.open(fp, mode=writeFlags, encoding=self.stringEncoding)
        f.write(dataStr)
        f.close()
    return fp
def validate(self):
    r'''
    Validate each metadata entry in a metadata bundle.

    If the entry represents a non-virtual corpus asset, test that its
    source path is locatable on disk. If not, remove the metadata entry
    from the metadata bundle.  Returns the number of entries removed.
    '''
    timer = common.Timer()
    timer.start()
    environLocal.printDebug(['MetadataBundle: validating...'])
    badKeys = []
    checkedPaths = set()
    for key, entry in self._metadataEntries.items():
        # MetadataEntries for core corpus items use a relative path as
        # their source path, always starting with 'music21/corpus'.
        sourcePath = entry.sourcePath
        if sourcePath in checkedPaths:
            continue
        if sourcePath.startswith('http:'):
            # remote entries cannot be checked on disk
            checkedPaths.add(entry.sourcePath)
            continue
        if not os.path.isabs(sourcePath):
            sourcePath = common.cleanpath(
                os.path.join(common.getCorpusFilePath(), sourcePath))
        if not os.path.exists(sourcePath):
            badKeys.append(key)
        checkedPaths.add(entry.sourcePath)
    for key in badKeys:
        del self._metadataEntries[key]
    message = 'MetadataBundle: finished validating in {0} seconds.'.format(timer)
    environLocal.printDebug(message)
    return len(badKeys)
def validate(self):
    r'''
    Validate each metadata entry in a metadata bundle.

    If the entry represents a non-virtual corpus asset, test that its
    source path is locatable on disk. If not, remove the metadata entry
    from the metadata bundle.  Returns the number of entries removed.
    '''
    timer = common.Timer()
    timer.start()
    environLocal.printDebug(['MetadataBundle: validating...'])
    staleKeys = []
    seenPaths = set()
    for key, entry in self._metadataEntries.items():
        # MetadataEntries for core corpus items use a relative path as
        # their source path, always starting with 'music21/corpus'.
        sourcePath = entry.sourcePath
        if sourcePath in seenPaths:
            continue
        if sourcePath.startswith('http:'):
            # remote entries cannot be checked on disk
            seenPaths.add(entry.sourcePath)
            continue
        if not os.path.isabs(sourcePath):
            sourcePath = common.cleanpath(os.path.join(
                common.getCorpusFilePath(),
                sourcePath,
            ))
        if not os.path.exists(sourcePath):
            staleKeys.append(key)
        seenPaths.add(entry.sourcePath)
    for key in staleKeys:
        del self._metadataEntries[key]
    message = 'MetadataBundle: finished validating in {0} seconds.'.format(timer)
    environLocal.printDebug(message)
    return len(staleKeys)
def _findPaths(self, rootDirectoryPath, fileExtensions):
    '''
    Given a root directory path, recursively search all contained paths
    for files matching any of the file extensions in `fileExtensions`
    (a list of file extensions), returning the matches in sorted order.

    Fix vs. the previous revision: results are sorted, because
    `Path.rglob` yields entries in an OS-dependent order, which made
    the returned list nondeterministic (the sibling revisions of this
    method already sort).

    NB: we've tried optimizing with `fnmatch` but it does not save any
    time.
    '''
    rdp = common.cleanpath(rootDirectoryPath, returnPathlib=True)
    matched = []
    # sorted() makes the result deterministic across platforms
    for filename in sorted(rdp.rglob('*')):
        # skip dunder and hidden entries (e.g. __pycache__, .DS_Store)
        if filename.name.startswith('__'):
            continue
        if filename.name.startswith('.'):
            continue
        for extension in fileExtensions:
            if filename.suffix.endswith(extension):
                matched.append(filename)
                break
    return matched
def _findPaths(self, rootDirectoryPath, fileExtensions):
    '''
    Given a root directory path, recursively search all contained paths
    for files matching any of the file extensions in `fileExtensions`
    (a list of file extensions), returning the matches in sorted order.

    NB: we've tried optimizing with `fnmatch` but it does not save any
    time.
    '''
    searchRoot = common.cleanpath(rootDirectoryPath, returnPathlib=True)
    found = []
    for candidate in sorted(searchRoot.rglob('*')):
        # skip dunder and hidden entries (e.g. __pycache__, .DS_Store)
        if candidate.name.startswith('__') or candidate.name.startswith('.'):
            continue
        if any(candidate.suffix.endswith(ext) for ext in fileExtensions):
            found.append(candidate)
    return found
def _loadDefaults(self, forcePlatform=None):
    """
    Load defaults. All keys are derived from these defaults.
    """
    # every supported preference key with its default value
    defaults = {
        'directoryScratch': None,   # directory for temporary files
        'lilypondPath': None,       # path to lilypond
        'lilypondVersion': None,    # version of lilypond
        'lilypondFormat': 'pdf',
        'lilypondBackend': 'ps',
        'musicxmlPath': None,       # MusicXML reader; default finds "Finale Notepad"
        'midiPath': None,           # midi reader
        'graphicsPath': None,       # graphics viewer
        'vectorPath': None,         # vector graphics viewer
        'pdfPath': None,            # pdf viewer
        'braillePath': None,        # braille viewer
        # path to MuseScore (if not the musicxmlPath...)
        # for direct creation of PNG from MusicXML
        'musescoreDirectPNGPath': None,
        'showFormat': 'musicxml',
        'writeFormat': 'musicxml',
        'ipythonShowFormat': 'ipython.musicxml.png',
        'autoDownload': 'ask',
        'debug': 0,
        # printing of missing import warnings; default/non-zero is on
        'warnings': 1,
        'localCorpusSettings': [],  # store a list of strings
        'localCorporaSettings': {},
        'manualCoreCorpusPath': None,
    }
    self._ref.update(defaults)

    if forcePlatform is None:
        platform = common.getPlatform()
    else:
        platform = forcePlatform

    if platform == "win":
        platformDefaults = [
            ("lilypondPath", "lilypond"),
            ("musescoreDirectPNGPath",
             common.cleanpath(r"%PROGRAMFILES(x86)%\MuseScore 2\MuseScore.exe")),
        ]
    elif platform == "nix":
        platformDefaults = [("lilypondPath", "lilypond")]
    elif platform == "darwin":
        platformDefaults = [
            ("lilypondPath",
             "/Applications/Lilypond.app/Contents/Resources/bin/lilypond"),
            ("musicxmlPath", "/Applications/Finale Notepad 2014.app"),
            ("graphicsPath", "/Applications/Preview.app"),
            ("vectorPath", "/Applications/Preview.app"),
            ("pdfPath", "/Applications/Preview.app"),
            ("midiPath", "/Applications/QuickTime Player.app"),
            ("musescoreDirectPNGPath",
             "/Applications/MuseScore 2.app/Contents/MacOS/mscore"),
        ]
    else:
        platformDefaults = []
    for name, value in platformDefaults:
        self.__setitem__(name, value)  # use __setitem__ for key checking
def _loadDefaults(self, forcePlatform=None):
    '''
    Load defaults. All keys are derived from these defaults.
    '''
    # (key, default) pairs for every supported preference
    for key, default in (
        ('directoryScratch', None),   # directory for temporary files
        ('lilypondPath', None),       # path to lilypond
        ('lilypondVersion', None),    # version of lilypond
        ('lilypondFormat', 'pdf'),
        ('lilypondBackend', 'ps'),
        ('musicxmlPath', None),       # MusicXML reader; default finds "Finale Notepad"
        ('midiPath', None),           # midi reader
        ('graphicsPath', None),       # graphics viewer
        ('vectorPath', None),         # vector graphics viewer
        ('pdfPath', None),            # pdf viewer
        ('braillePath', None),        # braille viewer
        # path to MuseScore (if not the musicxmlPath...)
        # for direct creation of PNG from MusicXML
        ('musescoreDirectPNGPath', None),
        ('showFormat', 'musicxml'),
        ('writeFormat', 'musicxml'),
        ('ipythonShowFormat', 'ipython.musicxml.png'),
        ('autoDownload', 'ask'),
        ('debug', 0),
        # printing of missing import warnings; default/non-zero is on
        ('warnings', 1),
        ('manualCoreCorpusPath', None),
    ):
        self._ref[key] = default
    # mutable containers are created fresh rather than shared
    self._ref['localCorpusSettings'] = []   # store a list of strings
    self._ref['localCorporaSettings'] = {}

    platform = forcePlatform if forcePlatform is not None else common.getPlatform()

    if platform == 'win':
        pairs = [
            ('lilypondPath', 'lilypond'),
            ('musescoreDirectPNGPath',
             common.cleanpath(r'%PROGRAMFILES(x86)%\MuseScore 2\MuseScore.exe')),
        ]
    elif platform == 'nix':
        pairs = [('lilypondPath', 'lilypond')]
    elif platform == 'darwin':
        pairs = [
            ('lilypondPath',
             '/Applications/Lilypond.app/Contents/Resources/bin/lilypond'),
            ('musicxmlPath', '/Applications/Finale Notepad 2014.app'),
            ('graphicsPath', '/Applications/Preview.app'),
            ('vectorPath', '/Applications/Preview.app'),
            ('pdfPath', '/Applications/Preview.app'),
            ('midiPath', '/Applications/QuickTime Player.app'),
            ('musescoreDirectPNGPath',
             '/Applications/MuseScore 2.app/Contents/MacOS/mscore'),
        ]
    else:
        pairs = []
    for name, value in pairs:
        self.__setitem__(name, value)  # use __setitem__ for key checking
def _loadDefaults(self, forcePlatform=None):
    '''
    Load defaults. All keys are derived from these defaults.
    '''
    ref = self._ref
    # scalar preference defaults
    ref.update({
        'directoryScratch': None,   # directory for temporary files
        'lilypondPath': None,       # path to lilypond
        'lilypondVersion': None,    # version of lilypond
        'lilypondFormat': 'pdf',
        'lilypondBackend': 'ps',
        'musicxmlPath': None,       # MusicXML reader; default finds "Finale Notepad"
        'midiPath': None,           # midi reader
        'graphicsPath': None,       # graphics viewer
        'vectorPath': None,         # vector graphics viewer
        'pdfPath': None,            # pdf viewer
        'braillePath': None,        # braille viewer
        # path to MuseScore (if not the musicxmlPath...)
        # for direct creation of PNG from MusicXML
        'musescoreDirectPNGPath': None,
        'showFormat': 'musicxml',
        'writeFormat': 'musicxml',
        'ipythonShowFormat': 'ipython.musicxml.png',
        'autoDownload': 'ask',
        'debug': 0,
        # printing of missing import warnings; default/non-zero is on
        'warnings': 1,
        'manualCoreCorpusPath': None,
    })
    # fresh mutable containers
    ref['localCorpusSettings'] = []  # store a list of strings
    ref['localCorporaSettings'] = {}

    if forcePlatform is None:
        platform = common.getPlatform()
    else:
        platform = forcePlatform

    platformSettings = {
        'win': [
            ('lilypondPath', 'lilypond'),
            ('musescoreDirectPNGPath',
             common.cleanpath(r'%PROGRAMFILES(x86)%\MuseScore 2\MuseScore.exe')),
        ] if platform == 'win' else [],
        'nix': [('lilypondPath', 'lilypond')],
        'darwin': [
            ('lilypondPath',
             '/Applications/Lilypond.app/Contents/Resources/bin/lilypond'),
            ('musicxmlPath', '/Applications/Finale Notepad 2014.app'),
            ('graphicsPath', '/Applications/Preview.app'),
            ('vectorPath', '/Applications/Preview.app'),
            ('pdfPath', '/Applications/Preview.app'),
            ('midiPath', '/Applications/QuickTime Player.app'),
            ('musescoreDirectPNGPath',
             '/Applications/MuseScore 2.app/Contents/MacOS/mscore'),
        ],
    }
    for name, value in platformSettings.get(platform, []):
        self.__setitem__(name, value)  # use __setitem__ for key checking
def addFromPaths(self, paths, parseUsingCorpus=False, useMultiprocessing=True,
                 storeOnDisk=True, verbose=False):
    '''
    Parse and store metadata from numerous files.

    If any files cannot be loaded, their file paths will be collected in
    a list that is returned.

    Returns a list of file paths with errors and stores the extracted
    metadata in `self._metadataEntries`.

    >>> from music21 import corpus, metadata
    >>> metadataBundle = metadata.bundles.MetadataBundle()
    >>> p = corpus.corpora.CoreCorpus().getWorkList('bach/bwv66.6')
    >>> metadataBundle.addFromPaths(
    ...     p,
    ...     parseUsingCorpus=False,
    ...     useMultiprocessing=False,
    ...     storeOnDisk=False, #_DOCS_HIDE
    ...     )
    []
    >>> len(metadataBundle._metadataEntries)
    1

    Set Verbose to True to get updates even if debug is off.
    '''
    from music21 import metadata
    jobs = []
    accumulatedResults = []
    accumulatedErrors = []
    if self.filePath is not None and os.path.exists(self.filePath):
        metadataBundleModificationTime = os.path.getctime(self.filePath)
    else:
        metadataBundleModificationTime = time.time()
    message = 'MetadataBundle Modification Time: {0}'.format(
        metadataBundleModificationTime)
    if verbose is True:
        environLocal.warn(message)
    else:
        environLocal.printDebug(message)
    currentJobNumber = 0
    skippedJobsCount = 0
    for path in paths:
        if not path.startswith('http'):
            path = common.cleanpath(path)
        key = self.corpusPathToKey(path)
        if key in self._metadataEntries and not key.startswith('http'):
            pathModificationTime = os.path.getctime(path)
            # skip sources unchanged since the bundle was last built
            if pathModificationTime < metadataBundleModificationTime:
                skippedJobsCount += 1
                continue
        currentJobNumber += 1
        corpusName = self.name
        if corpusName is None:
            corpusName = 'core'
        # TODO: remove this after rebuilding
        if corpusName.startswith('local-'):
            corpusName = corpusName[6:]
        job = metadata.caching.MetadataCachingJob(
            path,
            jobNumber=currentJobNumber,
            parseUsingCorpus=parseUsingCorpus,
            corpusName=corpusName,
        )
        jobs.append(job)
    currentIteration = 0
    message = 'Skipped {0} sources already in cache.'.format(skippedJobsCount)
    if verbose is True:
        environLocal.warn(message)
    else:
        environLocal.printDebug(message)
    if useMultiprocessing:
        jobProcessor = metadata.caching.JobProcessor.process_parallel
    else:
        jobProcessor = metadata.caching.JobProcessor.process_serial
    for result in jobProcessor(jobs):
        message = metadata.caching.JobProcessor._report(
            len(jobs),
            result['remainingJobs'],
            result['filePath'],
            len(accumulatedErrors),
        )
        if verbose is True:
            environLocal.warn(message)
        else:
            environLocal.printDebug(message)
        currentIteration += 1
        accumulatedResults.extend(result['metadataEntries'])
        accumulatedErrors.extend(result['errors'])
        for metadataEntry in result['metadataEntries']:
            self._metadataEntries[metadataEntry.corpusPath] = metadataEntry
        # BUG FIX: was `(currentIteration % 50) and (storeOnDisk is True) == 0`,
        # which by precedence compared the boolean to 0 and thus checkpointed
        # only when storeOnDisk was False.  Intended behavior: write a
        # checkpoint every 50 iterations when storing on disk.
        if (currentIteration % 50) == 0 and (storeOnDisk is True):
            self.write()
    self.validate()
    if storeOnDisk is True:
        self.write()
    return accumulatedErrors
def addFromPaths(
    self,
    paths,
    parseUsingCorpus=False,
    useMultiprocessing=True,
    storeOnDisk=True,
    verbose=False
):
    '''
    Parse and store metadata from numerous files.

    If any files cannot be loaded, their file paths will be collected in
    a list that is returned.

    Returns a list of file paths with errors and stores the extracted
    metadata in `self._metadataEntries`.

    >>> from music21 import corpus, metadata
    >>> metadataBundle = metadata.bundles.MetadataBundle()
    >>> p = corpus.corpora.CoreCorpus().getWorkList('bach/bwv66.6')
    >>> metadataBundle.addFromPaths(
    ...     p,
    ...     parseUsingCorpus=False,
    ...     useMultiprocessing=False,
    ...     storeOnDisk=False, #_DOCS_HIDE
    ...     )
    []
    >>> len(metadataBundle._metadataEntries)
    1

    Set Verbose to True to get updates even if debug is off.
    '''
    from music21 import metadata
    jobs = []
    accumulatedResults = []
    accumulatedErrors = []
    if self.filePath is not None and os.path.exists(self.filePath):
        metadataBundleModificationTime = os.path.getctime(self.filePath)
    else:
        metadataBundleModificationTime = time.time()
    message = 'MetadataBundle Modification Time: {0}'.format(
        metadataBundleModificationTime)
    if verbose is True:
        environLocal.warn(message)
    else:
        environLocal.printDebug(message)
    currentJobNumber = 0
    skippedJobsCount = 0
    for path in paths:
        if not path.startswith('http'):
            path = common.cleanpath(path)
        key = self.corpusPathToKey(path)
        if key in self._metadataEntries and not key.startswith('http'):
            pathModificationTime = os.path.getctime(path)
            # skip sources unchanged since the bundle was last built
            if pathModificationTime < metadataBundleModificationTime:
                skippedJobsCount += 1
                continue
        currentJobNumber += 1
        corpusName = self.name
        if corpusName is None:
            corpusName = 'core'
        # TODO: remove this after rebuilding
        if corpusName.startswith('local-'):
            corpusName = corpusName[6:]
        job = metadata.caching.MetadataCachingJob(
            path,
            jobNumber=currentJobNumber,
            parseUsingCorpus=parseUsingCorpus,
            corpusName=corpusName,
        )
        jobs.append(job)
    currentIteration = 0
    message = 'Skipped {0} sources already in cache.'.format(skippedJobsCount)
    if verbose is True:
        environLocal.warn(message)
    else:
        environLocal.printDebug(message)
    if useMultiprocessing:
        jobProcessor = metadata.caching.JobProcessor.process_parallel
    else:
        jobProcessor = metadata.caching.JobProcessor.process_serial
    for result in jobProcessor(jobs):
        message = metadata.caching.JobProcessor._report(
            len(jobs),
            result['remainingJobs'],
            result['filePath'],
            len(accumulatedErrors),
        )
        if verbose is True:
            environLocal.warn(message)
        else:
            environLocal.printDebug(message)
        currentIteration += 1
        accumulatedResults.extend(result['metadataEntries'])
        accumulatedErrors.extend(result['errors'])
        for metadataEntry in result['metadataEntries']:
            self._metadataEntries[metadataEntry.corpusPath] = metadataEntry
        # BUG FIX: was `(currentIteration % 50) and (storeOnDisk is True) == 0`,
        # which by precedence compared the boolean to 0 and thus checkpointed
        # only when storeOnDisk was False.  Intended behavior: write a
        # checkpoint every 50 iterations when storing on disk.
        if (currentIteration % 50) == 0 and (storeOnDisk is True):
            self.write()
    self.validate()
    if storeOnDisk is True:
        self.write()
    return accumulatedErrors