def walk(self):
    '''
    Get all the modules in reverse order, storing them in self.modulePaths
    '''
    def manyCoreSortFunc(name):
        '''
        for many core systems, like the MacPro, running slowest modules
        first helps there be fewer idle cores later
        '''
        name = name[len(self.dirParent) + 1:]
        name = name.replace('.py', '')
        return (name in self.slowModules, name)

    # the results of this are stored in self.curFiles, self.dirList
    for dirPath, unused_dirNames, filenames in os.walk(self.dirParent):
        self._visitFunc(None, dirPath, filenames)

    if common.cpus() > 4:  # @UndefinedVariable
        self.modulePaths.sort(key=manyCoreSortFunc)
    else:
        self.modulePaths.sort()

    # for p in self.modulePaths:
    #     print(p)
    self.modulePaths.reverse()
def walk(self):
    '''
    Get all the modules in reverse order, storing them in self.modulePaths
    '''
    def manyCoreSortFunc(name):
        '''
        for many core systems, like the MacPro, running slowest modules
        first helps there be fewer idle cores later
        '''
        name = name[len(self.dirParent) + 1:]
        name = name.replace('.py', '')
        return (name in self.slowModules, name)

    # the results of this are stored in self.curFiles, self.dirList
    for dirpath, unused_dirnames, filenames in os.walk(self.dirParent):
        self._visitFunc(None, dirpath, filenames)

    if common.cpus() > 4:  # @UndefinedVariable
        self.modulePaths.sort(key=manyCoreSortFunc)
    else:
        self.modulePaths.sort()

    # for p in self.modulePaths:
    #     print(p)
    self.modulePaths.reverse()
def walk(self):
    '''
    Get all the modules in reverse order, storing them in self.modulePaths
    '''
    def manyCoreSortFunc(name):
        '''
        for many core systems, like the MacPro, running slowest modules
        first helps there be fewer idle cores later
        '''
        name = name[len(self.dirParent) + 1:]
        name = name.replace('.py', '')
        return (name in self.slowModules, name)

    # the results of this are stored in self.curFiles, self.dirList
    for dirPath, unused_dirNames, filenames in os.walk(self.dirParent):
        self._visitFunc(None, dirPath, filenames)

    if common.cpus() > 4:  # @UndefinedVariable
        self.modulePaths.sort(key=manyCoreSortFunc)
    else:
        self.modulePaths.sort()

    # I don't know why we do this, but it happened this way in an early m21
    # version, and it's just tradition now.  It'd wig me out if I ever
    # didn't see them in reverse alphabetical order.
    self.modulePaths.reverse()
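# The net effect of manyCoreSortFunc plus the final reverse() is that
# known-slow modules come first in self.modulePaths.  A minimal standalone
# illustration; dirParent, slowModules, and the paths here are invented
# for the demo, not taken from music21:
dirParent = '/music21'
slowModules = {'stream/tests'}

def manyCoreSortFunc(name):
    # strip the parent directory and the '.py' suffix, then sort
    # slow modules last (True > False in tuple comparison)...
    name = name[len(dirParent) + 1:]
    name = name.replace('.py', '')
    return (name in slowModules, name)

paths = ['/music21/note.py', '/music21/stream/tests.py', '/music21/chord.py']
paths.sort(key=manyCoreSortFunc)
paths.reverse()  # ...so that after reversing, the slow modules run first
print(paths)
# ['/music21/stream/tests.py', '/music21/note.py', '/music21/chord.py']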
def __init__(self, command='html'):
    self.useMultiprocessing = True
    self.cpus_to_use = common.cpus()
    if self.cpus_to_use == 1:
        self.useMultiprocessing = False
    self.useMultiprocessing = False  # too unstable still
    self.documentationDirectoryPath = None
    self.autogenDirectoryPath = None
    self.buildDirectoryPath = None
    self.doctreesDirectoryPath = None
    self.command = command
    self.getPaths()
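# Several of these snippets call common.cpus().  Judging from the
# process_parallel docstring below ('use one fewer process than the number
# of available cores'), it behaves roughly like this hypothetical sketch --
# an assumption for illustration, not music21's actual implementation:
import os

def cpus():
    # leave one core free so the machine stays responsive
    count = os.cpu_count() or 1
    return max(count - 1, 1)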
def process_parallel(jobs, processCount=None):
    '''
    Process jobs in parallel, with `processCount` processes.

    If `processCount` is None, use one fewer process than the number of
    available cores.

    jobs is a list of :class:`~music21.metadata.MetadataCachingJob` objects.
    '''
    processCount = processCount or common.cpus()  # @UndefinedVariable
    if processCount < 1:
        processCount = 1
    remainingJobs = len(jobs)
    if processCount > remainingJobs:
        # do not start more processes than jobs...
        processCount = remainingJobs
    environLocal.printDebug(
        'Processing {0} jobs in parallel, with {1} processes.'.format(
            remainingJobs, processCount))
    results = []
    job_queue = multiprocessing.JoinableQueue()  # @UndefinedVariable
    result_queue = multiprocessing.Queue()  # @UndefinedVariable
    workers = [WorkerProcess(job_queue, result_queue)
               for _ in range(processCount)]
    for worker in workers:
        worker.start()
    if jobs:
        for job in jobs:
            job_queue.put(pickle.dumps(job, protocol=pickle.HIGHEST_PROTOCOL))
        for unused_jobCounter in range(len(jobs)):
            job = pickle.loads(result_queue.get())
            results = job.getResults()
            errors = job.getErrors()
            remainingJobs -= 1
            yield {
                'metadataEntries': results,
                'errors': errors,
                'filePath': job.filePath,
                'remainingJobs': remainingJobs,
            }
    for worker in workers:
        job_queue.put(None)
    job_queue.join()
    result_queue.close()
    job_queue.close()
    for worker in workers:
        worker.join()
    return  # was `raise StopIteration`: inside a generator that raises
            # RuntimeError on Python 3.7+ (PEP 479); a bare return is correct
def process_parallel(jobs, processCount=None):
    '''
    Process jobs in parallel, with `processCount` processes.

    If `processCount` is None, use one fewer process than the number of
    available cores.

    jobs is a list of :class:`~music21.metadata.MetadataCachingJob` objects.
    '''
    processCount = processCount or common.cpus()
    processCount = max(processCount, 1)
    # do not start more processes than jobs...
    remainingJobs = len(jobs)
    processCount = min(processCount, remainingJobs)
    environLocal.printDebug(
        f'Processing {remainingJobs} jobs in parallel, with {processCount} processes.'
    )
    results = []
    job_queue = multiprocessing.JoinableQueue()
    result_queue = multiprocessing.Queue()
    workers = [WorkerProcess(job_queue, result_queue)
               for _ in range(processCount)]
    for worker in workers:
        worker.start()
    if jobs:
        for job in jobs:
            # do not use the highest protocol to generate.
            job_queue.put(pickle.dumps(job))
        for unused_jobCounter in range(len(jobs)):
            job = pickle.loads(result_queue.get())
            results = job.getResults()
            errors = job.getErrors()
            remainingJobs -= 1
            yield {
                'metadataEntries': results,
                'errors': errors,
                'filePath': job.filePath,
                'remainingJobs': remainingJobs,
            }
    for worker in workers:
        job_queue.put(None)
    job_queue.join()
    result_queue.close()
    job_queue.close()
    for worker in workers:
        worker.join()
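# process_parallel is a generator: callers drain it to drive the workers.
# A hypothetical consumption sketch, using only the keys yielded above
# (`jobs` would be a list of MetadataCachingJob objects, per the docstring):
for status in process_parallel(jobs):
    print(f"{status['filePath']}: {status['remainingJobs']} jobs remaining")
    if status['errors']:
        print('  errors:', status['errors'])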
def mainPoolRunner(testGroup=('test',), restoreEnvironmentDefaults=False, leaveOut=1):
    '''
    Run all tests. Group can be test and/or external
    '''
    normalStdError = sys.stderr
    timeStart = time.time()
    poolSize = common.cpus()
    print('Creating %d processes for multiprocessing (omitting %d processors)'
          % (poolSize, leaveOut))
    modGather = commonTest.ModuleGather(useExtended=True)
    maxTimeout = 200
    pathsToRun = modGather.modulePaths  # [30:60]

    pool = multiprocessing.Pool(processes=poolSize)  # @UndefinedVariable
    # pylint: disable=not-callable

    # imap returns the results as they are completed.  Since the number of
    # files is small, the overhead of returning is outweighed by the positive
    # aspect of getting results immediately.  unordered says that results can
    # RETURN in any order; not that they'd be pooled out in any order.
    res = pool.imap_unordered(runOneModuleWithoutImp,
                              ((modGather, fp) for fp in pathsToRun))

    continueIt = True
    timeouts = 0
    eventsProcessed = 0
    summaryOutput = []

    while continueIt is True:
        try:
            newResult = res.next(timeout=1)
            if timeouts >= 5:
                print("")
            if newResult is not None:
                if newResult.moduleName is not None:
                    mn = newResult.moduleName
                    mn = mn.replace('___init__', '')
                    mn = mn.replace('_', '.')
                else:
                    mn = ""
                rt = newResult.runTime
                if rt is not None:
                    rt = round(newResult.runTime * 10) / 10.0
                if not newResult.errors and not newResult.failures:
                    print("\t\t\t\t{0}: {1} tests in {2} secs".format(
                        mn, newResult.testsRun, rt))
                else:
                    print("\t\t\t\t{0}: {1} tests, {2} errors {3} failures in {4} secs".format(
                        mn, newResult.testsRun,
                        len(newResult.errors), len(newResult.failures), rt))
            timeouts = 0
            eventsProcessed += 1
            summaryOutput.append(newResult)
        except multiprocessing.TimeoutError:  # @UndefinedVariable
            timeouts += 1
            if timeouts == 5 and eventsProcessed > 0:
                print("Delay in processing, seconds: ", end="")
            elif timeouts == 5:
                print("Starting first modules, should take 5-10 seconds: ", end="")
            if timeouts % 5 == 0:
                print(str(timeouts) + " ", end="")
            if timeouts > maxTimeout and eventsProcessed > 0:
                print("\nToo many delays, giving up...")
                continueIt = False
                printSummary(summaryOutput, timeStart, pathsToRun)
                pool.close()
                exit()
        except StopIteration:
            continueIt = False
            pool.close()
            pool.join()
        except Exception as excp:  # pylint: disable=broad-except
            eventsProcessed += 1
            exceptionLog = ModuleResponse("UntrappedException", None, "%s" % excp)
            summaryOutput.append(exceptionLog)

    sys.stderr = normalStdError
    printSummary(summaryOutput, timeStart, pathsToRun)
def main(fnAccept=None):
    '''
    `fnAccept` is a list of one or more files to test.  Otherwise runs all.
    '''
    poolSize = common.cpus()
    if pylintRun is None:
        print("make sure that 'sudo pip install pylint' is there. exiting.")
        return
    mg = commonTest.ModuleGather()
    fnPathReject = [
        'demos/',
        'alpha/webapps/server',
        'test/',
        '/ext/',
        # 'bar.py',  # used to crash pylint...
        # 'repeat.py',  # used to hang pylint...
        # 'spanner.py',  # used to hang pylint...
    ]
    disable = [  # These also need to be changed in MUSIC21BASE/.pylintrc
        'cyclic-import',  # we use these inside functions when there's a deep problem.
        'unnecessary-pass',  # nice, but not really a problem...
        'locally-disabled',  # test for this later, but hopefully will know what
                             # they're doing
        'duplicate-code',  # needs to ignore strings -- keeps getting doctests...
        'arguments-differ',  # someday...
        'abstract-class-instantiated',  # this trips on the fractions.Fraction() class.
        'multiple-imports',  # import os, sys -- fine...
        'fixme',  # known...
        'superfluous-parens',  # nope -- if they make things clearer...
        'too-many-statements',  # someday
        'no-member',  # important, but too many false positives
        'too-many-arguments',  # definitely! but takes too long to get a fix now...
        'too-many-public-methods',  # maybe, look
        'too-many-branches',  # yes, someday
        'too-many-locals',  # no
        'too-many-lines',  # yes, someday.
        'bad-whitespace',  # maybe later, but "bad" isn't something I necessarily agree with
        'bad-continuation',  # never remove -- this is a good thing many times.
        'too-many-return-statements',  # we'll see
        'unpacking-non-sequence',  # gets it wrong too often.
        'too-many-instance-attributes',  # maybe later
        'too-many-boolean-expressions',  # AbstractDiatonicScale.__eq__ shows how this
                                         # can be fine...
        'misplaced-comparison-constant',  # sometimes 2 < x is what we want
        'unsubscriptable-object',  # unfortunately, thinks that Streams are unsubscriptable.
        'consider-iterating-dictionary',  # sometimes .keys() is a good test against
                                          # changing the dictionary size while iterating.
        'invalid-name',  # these are good music21 names; fix the regexp instead...
        'no-self-use',  # maybe later
        'too-few-public-methods',  # never remove or set to 1
        'trailing-whitespace',  # should ignore blank lines with tabs
        'trailing-newlines',  # just because something is easy to detect doesn't make it bad.
        'missing-docstring',  # gets too many well-documented properties
        'star-args',  # no problem with them...
        'protected-access',  # this is an important one, but for now we do a lot of
                             # x = copy.deepcopy(self); x._volume = ... which is not a problem...
        'unused-argument',
        'import-self',  # fix is either to get rid of it or move away many tests...
        'redefined-variable-type',  # this would be great! but too much.
        'simplifiable-if-statement',  # NO! NO! NO!
            # if (x or y and z and q): return True, else: return False,
            # is a GREAT paradigm -- over "return (x or y and z and q)" and
            # assuming that it returns a bool... it's no slower than
            # the simplification and it's so much clearer.
        'consider-using-enumerate',  # good when i is used only once, but
            # x[i] = y[i] is a nice paradigm, even if one can be simplified out.
    ]
    goodnameRx = {
        'argument-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'attr-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'class-rgx': r'[A-Z_][A-Za-z0-9_]{2,30}$',
        'function-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'method-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'module-rgx': r'(([a-z_][a-zA-Z0-9_]*)|([A-Z][a-zA-Z0-9]+))$',
        'variable-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
    }
    cmd = [
        '--output-format=parseable',
        r'--dummy-variables-rgx="_$|dummy|unused|i$|j$|junk|counter"',
        '--docstring-min-length=3',
        '--max-args=7',  # should be 5 later, but baby steps
        '--bad-names="foo,shit,f**k,stuff"',  # definitely allow "bar" for barlines
        '--reports=n',
        '--max-branches=20',
        '-j ' + str(poolSize),  # multiprocessing!
        r'--ignore-long-lines="converter\.parse"',  # some tiny notation...
        '--max-line-length=100',  # tada
    ]
    for gn, gnv in goodnameRx.items():
        cmd.append('--' + gn + '="' + gnv + '"')
    # print(cmd)
    for pyLintId in disable:
        cmd.append('--disable=%s' % pyLintId)

    # add entire package
    acceptable = []
    for fp in mg.modulePaths:
        rejectIt = False
        for rejectPath in fnPathReject:
            if rejectPath in fp:
                rejectIt = True
                break
        if rejectIt:
            continue
        if fnAccept:
            rejectIt = True
            for acceptableName in fnAccept:
                if acceptableName in fp:
                    rejectIt = False
                    break
        if rejectIt:
            continue
        acceptable.append(fp)

    cmdFile = cmd + acceptable
    # print(' '.join(cmdFile))
    # print(fp)
    pylintRun(cmdFile, exit=False)
def main(fnAccept=None, strict=False):
    '''
    `fnAccept` is a list of one or more files to test.  Otherwise runs all.
    '''
    poolSize = common.cpus()
    if pylintRun is None:
        print("make sure that 'sudo pip3 install pylint' is there. exiting.")
        return
    mg = commonTest.ModuleGather()
    fnPathReject = [
        # 'demos/',
        # 'test/timeGraphs.py',
        '/ext/',
        # 'bar.py',  # used to crash pylint...
        # 'repeat.py',  # used to hang pylint...
        # 'spanner.py',  # used to hang pylint...
    ]
    disable_unless_strict = [
        'too-many-statements',  # someday
        'too-many-arguments',  # definitely! but takes too long to get a fix now...
        'too-many-public-methods',  # maybe, look
        'too-many-branches',  # yes, someday
        'too-many-lines',  # yes, someday.
        'too-many-return-statements',  # we'll see
        'too-many-instance-attributes',  # maybe later
        'inconsistent-return-statements',  # would be nice
        'protected-access',  # this is an important one, but for now we do a lot of
                             # x = copy.deepcopy(self); x._volume = ... which is not a problem...
                             # also, test suites need to be exempt.
        'keyword-arg-before-vararg',  # a good thing to check for new code, but
                                      # requires rewriting function signatures in old code
    ]
    disable = [  # These also need to be changed in MUSIC21BASE/.pylintrc
        'arguments-differ',  # -- no -- should be able to add additional arguments so long
                             # as initial ones are the same.
        'multiple-imports',  # import os, sys -- fine...
        'redefined-variable-type',  # would be good, but currently
                                    # lines like: if x: y = note.Note(); else: y = note.Rest()
                                    # trigger this, even though y doesn't change.
        'no-else-return',  # these are unnecessary but can help show the flow of thinking.
        'cyclic-import',  # we use these inside functions when there's a deep problem.
        'unnecessary-pass',  # nice, but not really a problem...
        'locally-disabled',  # test for this later, but hopefully will know what
                             # they're doing
        'consider-using-get',  # if it can figure out that the default value is something
                               # simple, we will turn back on, but until then, no.
        'chained-comparison',  # sometimes simpler that way
        # 'duplicate-code',  # needs to ignore strings -- keeps getting doctests...
        'too-many-ancestors',  # -- 8 is okay.
        'abstract-class-instantiated',  # this trips on the fractions.Fraction() class.
        'fixme',  # known...
        'superfluous-parens',  # nope -- if they make things clearer...
        'no-member',  # important, but too many false positives
        'too-many-locals',  # no
        'bad-whitespace',  # maybe later, but "bad" isn't something I necessarily agree with
        'bad-continuation',  # never remove -- this is a good thing many times.
        'unpacking-non-sequence',  # gets it wrong too often.
        # AbstractDiatonicScale.__eq__ shows how this can be fine...
        'too-many-boolean-expressions',
        'misplaced-comparison-constant',  # sometimes 2 < x is what we want
        'unsubscriptable-object',  # unfortunately, thinks that Streams are unsubscriptable.
        # sometimes .keys() is a good test against
        # changing the dictionary size while iterating.
        'consider-iterating-dictionary',
        'invalid-name',  # these are good music21 names; fix the regexp instead...
        'no-self-use',  # maybe later
        'too-few-public-methods',  # never remove or set to 1
        'trailing-whitespace',  # should ignore blank lines with tabs
        # just because something is easy to detect doesn't make it bad.
        'trailing-newlines',
        'missing-docstring',  # gets too many well-documented properties
        'star-args',  # no problem with them...
        'unused-argument',
        'import-self',  # fix is either to get rid of it or move away many tests...
        'simplifiable-if-statement',  # NO! NO! NO!
            # if (x or y and z and q): return True, else: return False,
            # is a GREAT paradigm -- over "return (x or y and z and q)" and
            # assuming that it returns a bool... it's no slower than
            # the simplification and it's so much clearer.
        'consider-using-enumerate',  # good when i is used only once, but
            # x[i] = y[i] is a nice paradigm, even if one can be simplified out.
        'not-callable',  # false positives, for instance on x.next()
        'raise-missing-from',  # later.
    ]
    if not strict:
        disable = disable + disable_unless_strict
    goodNameRx = {
        'argument-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'attr-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'class-rgx': r'[A-Z_][A-Za-z0-9_]{2,30}$',
        'function-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'method-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'module-rgx': r'(([a-z_][a-zA-Z0-9_]*)|([A-Z][a-zA-Z0-9]+))$',
        'variable-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
    }
    maxArgs = 7 if not strict else 5
    maxBranches = 20 if not strict else 10
    cmd = [
        '--output-format=parseable',
        r'--dummy-variables-rgx="_$|dummy|unused|i$|j$|junk|counter"',
        '--docstring-min-length=3',
        '--ignore-docstrings=yes',
        '--min-similarity-lines=8',
        '--max-args=' + str(maxArgs),  # should be 5 later, but baby steps
        '--bad-names="foo,shit,f**k,stuff"',  # definitely allow "bar" for barlines
        '--reports=n',
        '--max-branches=' + str(maxBranches),
        '-j ' + str(poolSize),  # multiprocessing!
        r'--ignore-long-lines="converter\.parse"',  # some tiny notation...
        '--max-line-length=100',
    ]
    for gn, gnv in goodNameRx.items():
        cmd.append('--' + gn + '="' + gnv + '"')
    for pyLintId in disable:
        cmd.append(f'--disable={pyLintId}')

    # add entire package
    acceptable = []
    for fp in mg.modulePaths:
        rejectIt = False
        for rejectPath in fnPathReject:
            if rejectPath in fp:
                rejectIt = True
                break
        if rejectIt:
            continue
        if fnAccept:
            rejectIt = True
            for acceptableName in fnAccept:
                if acceptableName in fp:
                    rejectIt = False
                    break
        if rejectIt:
            continue
        acceptable.append(fp)

    cmdFile = cmd + acceptable
    if not acceptable:
        print('No matching files were found.')
        return
    # print(fnAccept)
    # print(' '.join(cmdFile))
    # print(fp)
    try:
        # noinspection PyArgumentList,PyCallingNonCallable
        pylintRun(cmdFile, exit=False)
    except TypeError:
        # noinspection PyCallingNonCallable
        pylintRun(cmdFile, do_exit=False)  # renamed in recent versions
def mainPoolRunner(testGroup=('test',), restoreEnvironmentDefaults=False, leaveOut=1):
    '''
    Run all tests. Group can be test and/or external
    '''
    commonTest.testImports()
    normalStdError = sys.stderr
    timeStart = time.time()
    poolSize = common.cpus()
    print(f'Creating {poolSize} processes for multiprocessing (omitting {leaveOut} processors)')
    modGather = commonTest.ModuleGather(useExtended=True)
    maxTimeout = 200
    pathsToRun = modGather.modulePaths  # [30:60]

    # pylint: disable=not-callable
    with multiprocessing.Pool(processes=poolSize) as pool:
        # imap returns the results as they are completed.
        # Since the number of files is small, the overhead of returning is
        # outweighed by the positive aspect of getting results immediately.
        # unordered says that results can RETURN in any order; not that
        # they'd be pooled out in any order.
        res = pool.imap_unordered(runOneModuleWithoutImp,
                                  ((modGather, fp) for fp in pathsToRun))

        continueIt = True
        timeouts = 0
        eventsProcessed = 0
        summaryOutput = []

        while continueIt is True:
            try:
                newResult = res.next(timeout=1)
                if timeouts >= 5:
                    print('')
                if newResult is not None:
                    if newResult.moduleName is not None:
                        mn = newResult.moduleName
                        mn = mn.replace('___init__', '')
                        mn = mn.replace('_', '.')
                    else:
                        mn = ''
                    rt = newResult.runTime
                    if rt is not None:
                        rt = round(newResult.runTime * 10) / 10.0
                    if not newResult.errors and not newResult.failures:
                        print(f'\t\t\t\t{mn}: {newResult.testsRun} tests in {rt} secs')
                    else:
                        numErr = len(newResult.errors)
                        numFail = len(newResult.failures)
                        print(f'\t\t\t\t{mn}: {newResult.testsRun} tests, '
                              f'{numErr} errors {numFail} failures in {rt} secs')
                timeouts = 0
                eventsProcessed += 1
                summaryOutput.append(newResult)
            except multiprocessing.TimeoutError:
                timeouts += 1
                if timeouts == 5 and eventsProcessed > 0:
                    print('Delay in processing, seconds: ', end='')
                elif timeouts == 5:
                    print('Starting first modules, should take 5-10 seconds: ', end='')
                if timeouts % 5 == 0:
                    print(str(timeouts) + ' ', end='', flush=True)
                if timeouts > maxTimeout and eventsProcessed > 0:
                    print('\nToo many delays, giving up...', flush=True)
                    continueIt = False
                    printSummary(summaryOutput, timeStart, pathsToRun)
                    pool.close()
                    sys.exit()
            except StopIteration:
                continueIt = False
                pool.close()
                pool.join()
            except Exception as excp:  # pylint: disable=broad-except
                eventsProcessed += 1
                exceptionLog = ModuleResponse(returnCode='UntrappedException',
                                              moduleName=str(excp))
                summaryOutput.append(exceptionLog)

    sys.stderr = normalStdError
    printSummary(summaryOutput, timeStart, pathsToRun)
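# The polling pattern above -- drive imap_unordered through res.next(timeout=...)
# and count consecutive TimeoutErrors -- is easy to get wrong.  A self-contained
# miniature of the same idea; slow_square and the timings are invented for
# the demo:
import multiprocessing
import time

def slow_square(n):
    time.sleep(n % 3)  # pretend some "modules" are slower than others
    return n * n

if __name__ == '__main__':
    with multiprocessing.Pool(processes=2) as pool:
        res = pool.imap_unordered(slow_square, range(6))
        finished = 0
        while finished < 6:
            try:
                # poll for one result; raises TimeoutError if nothing is ready yet
                value = res.next(timeout=1)
                finished += 1
                print('got', value)
            except multiprocessing.TimeoutError:
                print('still waiting...')  # a real runner counts these and eventually gives up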
def main(fnAccept=None):
    '''
    `fnAccept` is a list of one or more files to test.  Otherwise runs all.
    '''
    poolSize = common.cpus()
    if pylintRun is None:
        print("make sure that 'sudo pip install pylint' is there. exiting.")
        return
    mg = commonTest.ModuleGather()
    fnPathReject = [
        'demos/',
        'alpha/webapps/server',
        'test/',
        'mxObjects.py',
        'fromMxObjects.py',
        'toMxObjects.py',
        'xmlHandler.py',
        '/ext/',
        # 'bar.py',  # used to crash pylint...
        # 'repeat.py',  # used to hang pylint...
        # 'spanner.py',  # used to hang pylint...
    ]
    disable = [
        'cyclic-import',  # we use these inside functions when there's a deep problem.
        'unnecessary-pass',  # nice, but not really a problem...
        'locally-disabled',  # test for this later, but hopefully will know what
                             # they're doing
        'duplicate-code',  # needs to ignore strings -- keeps getting doctests...
        'arguments-differ',  # someday...
        'abstract-class-instantiated',  # this trips on the fractions.Fraction() class.
        'fixme',  # known...
        'superfluous-parens',  # nope -- if they make things clearer...
        'too-many-statements',  # someday
        'no-member',  # important, but too many false positives
        'too-many-arguments',  # definitely! but takes too long to get a fix now...
        'too-many-public-methods',  # maybe, look
        'too-many-branches',  # yes, someday
        'too-many-locals',  # no
        'too-many-lines',  # yes, someday.
        'bad-whitespace',  # maybe later, but "bad" isn't something I necessarily agree with
        'bad-continuation',  # never remove -- this is a good thing many times.
        'too-many-return-statements',  # we'll see
        'unpacking-non-sequence',  # gets it wrong too often.
        'too-many-instance-attributes',  # maybe later
        'invalid-name',  # these are good music21 names; fix the regexp instead...
        'no-self-use',  # maybe later
        'too-few-public-methods',  # never remove or set to 1
        'trailing-whitespace',  # should ignore blank lines with tabs
        'missing-docstring',  # gets too many well-documented properties
        'star-args',  # no problem with them...
        'protected-access',  # this is an important one, but for now we do a lot of
                             # x = copy.deepcopy(self); x._volume = ... which is not a problem...
        'unused-argument',
        'import-self',  # fix is either to get rid of it or move away many tests...
    ]
    goodnameRx = {
        'argument-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'attr-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'class-rgx': r'[A-Z_][A-Za-z0-9_]{2,30}$',
        'function-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'method-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'module-rgx': r'(([a-z_][a-zA-Z0-9_]*)|([A-Z][a-zA-Z0-9]+))$',
        'variable-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
    }
    cmd = [
        '--output-format=parseable',
        r'--dummy-variables-rgx="_$|dummy|unused|i$|j$|junk|counter"',
        '--docstring-min-length=3',
        '--max-args=7',  # should be 5 later, but baby steps
        '--bad-names="foo,shit,f**k,stuff"',  # definitely allow "bar" for barlines
        '--reports=n',
        '--max-branches=20',
        '-j ' + str(poolSize),  # multiprocessing!
        r'--ignore-long-lines="converter\.parse"',  # some tiny notation...
        '--max-line-length=100',  # tada
    ]
    for gn, gnv in goodnameRx.items():
        cmd.append('--' + gn + '="' + gnv + '"')
    # print(cmd)
    for pyLintId in disable:
        cmd.append('--disable=%s' % pyLintId)

    # add entire package
    acceptable = []
    for fp in mg.modulePaths:
        rejectIt = False
        for rejectPath in fnPathReject:
            if rejectPath in fp:
                rejectIt = True
                break
        if rejectIt:
            continue
        if fnAccept:
            rejectIt = True
            for acceptableName in fnAccept:
                if acceptableName in fp:
                    rejectIt = False
                    break
        if rejectIt:
            continue
        acceptable.append(fp)

    cmdFile = cmd + acceptable
    # print(' '.join(cmdFile))
    # print(fp)
    pylintRun(cmdFile, exit=False)
def main(fnAccept=None, strict=False):
    '''
    `fnAccept` is a list of one or more files to test.  Otherwise runs all.
    '''
    poolSize = common.cpus()
    if pylintRun is None:
        print("make sure that 'sudo pip3 install pylint' is there. exiting.")
        return
    mg = commonTest.ModuleGather()
    fnPathReject = [
        'demos/',
        'alpha/webapps/server',
        'test/timeGraphs.py',
        '/ext/',
        # 'bar.py',  # used to crash pylint...
        # 'repeat.py',  # used to hang pylint...
        # 'spanner.py',  # used to hang pylint...
    ]
    disable_unless_strict = [
        'too-many-statements',  # someday
        'too-many-arguments',  # definitely! but takes too long to get a fix now...
        'too-many-public-methods',  # maybe, look
        'too-many-branches',  # yes, someday
        'too-many-lines',  # yes, someday.
        'too-many-return-statements',  # we'll see
        'too-many-instance-attributes',  # maybe later
        'inconsistent-return-statements',  # would be nice
        'protected-access',  # this is an important one, but for now we do a lot of
                             # x = copy.deepcopy(self); x._volume = ... which is not a problem...
                             # also, test suites need to be exempt.
        'keyword-arg-before-vararg',  # a good thing to check for new code, but
                                      # requires rewriting function signatures in old code
    ]
    disable = [  # These also need to be changed in MUSIC21BASE/.pylintrc
        'arguments-differ',  # -- no -- should be able to add additional arguments so long
                             # as initial ones are the same.
        'multiple-imports',  # import os, sys -- fine...
        'redefined-variable-type',  # would be good, but currently
                                    # lines like: if x: y = note.Note(); else: y = note.Rest()
                                    # trigger this, even though y doesn't change.
        'no-else-return',  # these are unnecessary but can help show the flow of thinking.
        'cyclic-import',  # we use these inside functions when there's a deep problem.
        'unnecessary-pass',  # nice, but not really a problem...
        'locally-disabled',  # test for this later, but hopefully will know what
                             # they're doing
        'consider-using-get',  # if it can figure out that the default value is something
                               # simple, we will turn back on, but until then, no.
        'chained-comparison',  # sometimes simpler that way
        # 'duplicate-code',  # needs to ignore strings -- keeps getting doctests...
        'too-many-ancestors',  # -- 8 is okay.
        'abstract-class-instantiated',  # this trips on the fractions.Fraction() class.
        'fixme',  # known...
        'superfluous-parens',  # nope -- if they make things clearer...
        'no-member',  # important, but too many false positives
        'too-many-locals',  # no
        'bad-whitespace',  # maybe later, but "bad" isn't something I necessarily agree with
        'bad-continuation',  # never remove -- this is a good thing many times.
        'unpacking-non-sequence',  # gets it wrong too often.
        'too-many-boolean-expressions',  # AbstractDiatonicScale.__eq__ shows how this
                                         # can be fine...
        'misplaced-comparison-constant',  # sometimes 2 < x is what we want
        'unsubscriptable-object',  # unfortunately, thinks that Streams are unsubscriptable.
        'consider-iterating-dictionary',  # sometimes .keys() is a good test against
                                          # changing the dictionary size while iterating.
        'invalid-name',  # these are good music21 names; fix the regexp instead...
        'no-self-use',  # maybe later
        'too-few-public-methods',  # never remove or set to 1
        'trailing-whitespace',  # should ignore blank lines with tabs
        'trailing-newlines',  # just because something is easy to detect doesn't make it bad.
        'missing-docstring',  # gets too many well-documented properties
        'star-args',  # no problem with them...
        'unused-argument',
        'import-self',  # fix is either to get rid of it or move away many tests...
        'simplifiable-if-statement',  # NO! NO! NO!
            # if (x or y and z and q): return True, else: return False,
            # is a GREAT paradigm -- over "return (x or y and z and q)" and
            # assuming that it returns a bool... it's no slower than
            # the simplification and it's so much clearer.
        'consider-using-enumerate',  # good when i is used only once, but
            # x[i] = y[i] is a nice paradigm, even if one can be simplified out.
    ]
    if not strict:
        disable = disable + disable_unless_strict
    goodnameRx = {
        'argument-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'attr-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'class-rgx': r'[A-Z_][A-Za-z0-9_]{2,30}$',
        'function-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'method-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
        'module-rgx': r'(([a-z_][a-zA-Z0-9_]*)|([A-Z][a-zA-Z0-9]+))$',
        'variable-rgx': r'[a-z_][A-Za-z0-9_]{2,30}$',
    }
    maxArgs = 7 if not strict else 5
    maxBranches = 20 if not strict else 10
    cmd = [
        '--output-format=parseable',
        r'--dummy-variables-rgx="_$|dummy|unused|i$|j$|junk|counter"',
        '--docstring-min-length=3',
        '--ignore-docstrings=yes',
        '--min-similarity-lines=8',
        '--max-args=' + str(maxArgs),  # should be 5 later, but baby steps
        '--bad-names="foo,shit,f**k,stuff"',  # definitely allow "bar" for barlines
        '--reports=n',
        '--max-branches=' + str(maxBranches),
        '-j ' + str(poolSize),  # multiprocessing!
        r'--ignore-long-lines="converter\.parse"',  # some tiny notation...
        '--max-line-length=100',  # tada
    ]
    for gn, gnv in goodnameRx.items():
        cmd.append('--' + gn + '="' + gnv + '"')
    for pyLintId in disable:
        cmd.append('--disable=%s' % pyLintId)

    # add entire package
    acceptable = []
    for fp in mg.modulePaths:
        rejectIt = False
        for rejectPath in fnPathReject:
            if rejectPath in fp:
                rejectIt = True
                break
        if rejectIt:
            continue
        if fnAccept:
            rejectIt = True
            for acceptableName in fnAccept:
                if acceptableName in fp:
                    rejectIt = False
                    break
        if rejectIt:
            continue
        acceptable.append(fp)

    cmdFile = cmd + acceptable
    # print(' '.join(cmdFile))
    # print(fp)
    try:
        pylintRun(cmdFile, exit=False)
    except TypeError:
        pylintRun(cmdFile, do_exit=False)  # renamed in recent versions
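# The try/except TypeError above exists because pylint's Run renamed its
# keyword from `exit` to `do_exit` in some releases.  The same compatibility
# shim in isolation -- a sketch assuming only that pylint is installed;
# runPylintCompat is a hypothetical helper name:
try:
    from pylint.lint import Run as pylintRun
except ImportError:
    pylintRun = None

def runPylintCompat(args):
    # try the older keyword first, falling back to the renamed one;
    # which spelling works depends on the installed pylint version
    if pylintRun is None:
        raise RuntimeError('pylint is not installed')
    try:
        return pylintRun(args, exit=False)
    except TypeError:
        return pylintRun(args, do_exit=False)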