def _main(): args, config = _parse_args() logger.setup_stdout_logger(args.verbose) walker = SrcRootWalker.SrcRootWalker(config.getlist('Analyze', 'include_patterns'), config.getlist('Analyze', 'exclude_patterns', ())) # Strips any files that should be excluded files = walker.verify_if_files_exist_in_srcroot_paths(args.files) maintainerfile = Maintainers.Maintainers(config.get('General', 'maintainers')) for file_ in files: maintainers = maintainerfile.find_matching_maintainers(file_) print "---------------\n", file_, "\n---------------" if len(maintainers) == 0: print "No maintainer found!" else: for maintainer in maintainers: print "\tSubsystem:\t", maintainer['subsystem'] print "\tMaintainer:\t", print "\n\t \t".join(["{0} < {1} >".format(person, email) for person, email in maintainer['maintainer']]) print "\tMaillist:\t", maintainer['maillist'] print "\tStatus:\t\t", maintainer['status']
def _main(): args, config = _parse_args() logger.setup_stdout_logger(args.verbose) walker = SrcRootWalker.SrcRootWalker(config.getlist('Analyze', 'include_patterns'), config.getlist('Analyze', 'exclude_patterns', ())) # Strips any files that should be excluded files = walker.verify_if_files_exist_in_srcroot_paths(args.files) maintainerfile = Maintainers.Maintainers(config.get('General', 'maintainers')) for file_ in files: maintainers = maintainerfile.find_matching_maintainers(file_) print "---------------\n", file_, "\n---------------" if len(maintainers) == 0: print "No maintainer found!" else: for maintainer in maintainers: print "\tSubsystem:\t", maintainer['subsystem'] print "\tMaintainer:\t", for person, email in maintainer['maintainer']: print person, "<", email, ">" print "\tMaillist:\t", maintainer['maillist'] print "\tStatus:\t\t", maintainer['status']
def _main(): main_help = """Diffs 2 files, if no arguments but the files are supplied then it fallbacks to context aware diff and outputs to a pickled stream""" parser = argparse.ArgumentParser(description=main_help) parser.add_argument('files', type=str, nargs=2, help="You must supply 2 files to perform diff on") help_ = "Makes the printout pretty instead of a pickle stream" parser.add_argument('-p', dest='pretty', action='store_true', help=help_) parser.add_argument('--config', dest='config', default=default_config, help="config file") parser.add_argument('-v', dest='verbose', action='store_true', help="Enable verbose mode") args = parser.parse_args() logger.setup_stdout_logger(args.verbose) stats = PyDiffer(args.files[0], args.files[1], config=ProjectConfig(args.config)).get_changestat() if args.pretty: # Find longest function name length = 0 for func in stats.iterkeys(): if len(func) > length: length = len(func) # Then print with formating adapted for the longest function name for func, (added, changed, deleted, total) in stats.iteritems(): func_format = '{: <' + str(length + 3) + '}' func_format += func_format.format(func) func_format += '{: <10}'.format("total:" + str(total)) func_format += '{: <10}'.format("added:" + str(added)) func_format += '{: <10}'.format("changed:" + str(changed)) func_format += '{: <10}'.format("deleted:" + str(deleted)) print func_format else: pickle.dump(stats, sys.stdout, protocol=2)
def main(): # pylint: disable=C0111 parser = argparse.ArgumentParser(description="Runner for all unittests", add_help=True) parser.add_argument('-v', action="store_true", dest='verbose', default=False, help="enable logger, in DEBUG") helptext = "Executes minimum of stuff, suitable for commit-hooks and whatnot" parser.add_argument('-q', action="store_true", dest='quicktest', default=False, help=helptext) helptext = "Executes selenium teststowards a local django server using sqlite as supplied from projects.config" parser.add_argument('--selenium', action="store_true", dest='selenium', default=False, help=helptext) args = parser.parse_args() _initialize() if args.verbose: logger.setup_stdout_logger(verbose=True) else: logger.disable_logger() was_successful = True for path in paths_to_search_for_suites: was_successful = _run_tests(args, path=path, pattern="*_SUITE.py") & was_successful if args.selenium: for path in paths_to_search_for_suites: was_successful = _run_tests(args, path=path, pattern="*_LIVESUITE.py") & was_successful if was_successful: exit(0) else: print "Unittests failed, fix your crap and rerun", __file__ exit(-1)
def main(): # pylint: disable=C0111 parser = argparse.ArgumentParser(description="Runner for all unittests", add_help=True) parser.add_argument('-v', action="store_true", dest='verbose', default=False, help="enable logger, in DEBUG") helptext = "Executes minimum of stuff, suitable for commit-hooks and whatnot" parser.add_argument('-q', action="store_true", dest='quicktest', default=False, help=helptext) helptext = "Executes selenium teststowards a local django server using sqlite as supplied from projects.config" parser.add_argument('--selenium', action="store_true", dest='selenium', default=False, help=helptext) args = parser.parse_args() _initialize() if args.verbose: logger.setup_stdout_logger(verbose=True) else: logger.disable_logger() was_successful = True for path in paths_to_search_for_suites: was_successful = _run_tests(args, path=path, pattern="*_SUITE.py") & was_successful if args.selenium: for path in paths_to_search_for_suites: was_successful = _run_tests( args, path=path, pattern="*_LIVESUITE.py") & was_successful if was_successful: exit(0) else: print "Unittests failed, fix your crap and rerun", __file__ exit(-1)
# NOTE(review): whitespace-mangled fragment kept verbatim — it opens in the
# middle of a parser.add_argument() call whose start lies outside this chunk,
# so it cannot safely be reformatted from this view.  Visible behaviour:
# finishes the CLI setup (-s since-date, --config, positional files handled by
# _PathAction), builds a VCS wrapper from ProjectConfig plus an LdapWrapper,
# then for each file queries contributors either since the -s date (parsed
# with '%Y-%m-%d') or, by default, since 365 * 3 days ago.
# TODO confirm against the unmangled source before editing.
default=False, help=help_str) help_str = "Lookup contributions *since* date <YYYY-MM-DD> (default: -3 years)" parser.add_argument('-s', dest='since', default=None, help=help_str) parser.add_argument('--config', dest='config', default=default_config, help="config file") help_str = "File to check contributor on" parser.add_argument('files', nargs='+', help=help_str, action=_PathAction) args = parser.parse_args() logger.setup_stdout_logger() config = ProjectConfig(config=args.config) vcs = VcsWrapper.VcsFactory(config) ldapWrapper = LdapWrapper.LdapWrapper() for file_ in args.files: outputstring = file_ + "\n" if args.since is not None: since = datetime.strptime(args.since, '%Y-%m-%d') contributors, number_of_checkins = vcs.find_contributors( file_, since) else: since_date = date.today() - timedelta(days=365 * 3) contributors, number_of_checkins = vcs.find_contributors( file_, since_date)
# NOTE(review): whitespace-mangled script entry block kept verbatim — it is
# truncated at the trailing "for name, value in contributors[0:10]:" whose
# loop body lies outside this chunk, so restructuring would require guessing.
# Visible behaviour: builds the contributor-lookup CLI (-e "email lookup",
# -s since-date, --config, positional files), then per file fetches
# contributors since the -s date or, by default, since 365 * 3 days ago, and
# starts iterating the top-10 contributors.  ldapWrapper is created but not
# used in the visible part — presumably by the -e path; verify downstream.
if __name__ == '__main__': parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=help_text, add_help=True) help_str = "Try and lookup email WARN: unstable! (default: NO)" parser.add_argument('-e', action="store_true", dest='email', default=False, help=help_str) help_str = "Lookup contributions *since* date <YYYY-MM-DD> (default: -3 years)" parser.add_argument('-s', dest='since', default=None, help=help_str) parser.add_argument('--config', dest='config', default=default_config, help="config file") help_str = "File to check contributor on" parser.add_argument('files', nargs='+', help=help_str, action=_PathAction) args = parser.parse_args() logger.setup_stdout_logger() config = ProjectConfig(config=args.config) vcs = VcsWrapper.VcsFactory(config) ldapWrapper = LdapWrapper.LdapWrapper() for file_ in args.files: outputstring = file_ + "\n" if args.since is not None: since = datetime.strptime(args.since, '%Y-%m-%d') contributors, number_of_checkins = vcs.find_contributors(file_, since) else: since_date = date.today() - timedelta(days=365 * 3) contributors, number_of_checkins = vcs.find_contributors(file_, since_date) for name, value in contributors[0:10]:
# NOTE(review): mangled chunk kept verbatim.  It holds a complete argparse
# action __call__ (stores os.path.abspath of every value on the namespace —
# the _PathAction used below) plus a changerate script entry block that is
# cut off right after `buff = "\t"` inside the per-function printout loop, so
# it is not safe to restructure from this view.  Visible behaviour of the
# script part: build the -v/--config/files CLI, create a ChangeRate over the
# configured VCS, and per file compute change rates, size the name column to
# the longest function name, and print entries sorted by their datetime.
def __call__(self, _, namespace, values, option_string=None): setattr(namespace, self.dest, [os.path.abspath(val) for val in values]) if __name__ == '__main__': parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, description=help_text, add_help=True) help_str = "Enable verbose output" parser.add_argument('-v', action="store_true", dest='verbose', default=False, help=help_str) parser.add_argument('--config', dest='config', default=default_config, help="config file") help_str = "File(s) to check changerate on" parser.add_argument('files', nargs='+', help=help_str, action=_PathAction) args = parser.parse_args() logger.setup_stdout_logger(args.verbose) config = ProjectConfig(config=args.config) vcs = VcsWrapper.VcsFactory(config) cr = ChangeRate.ChangeRate(vcs) for file_ in args.files: changerates = cr.count_change_rate_all(file_) func_length = len(max([func for entry in changerates for func in entry['changerates'].iterkeys()], key=len)) func_format = '{: <' + str(func_length + 3) + '}' print file_ for entry in sorted(changerates, key=lambda entry: entry['datetime']): print str(entry['datetime'].date()) for func, (added, changed, deleted, total) in entry['changerates'].iteritems(): buff = "\t"
# NOTE(review): auto-formatted duplicate of the changerate script interior
# above, kept verbatim — it begins mid-way through the CLI setup (the parser
# construction lies outside this chunk) and is truncated right after the
# column-width format string is built, so there is no safe way to rewrite it
# from this view.  Visible behaviour: finish the -v/--config/files CLI,
# create a ChangeRate over the configured VCS, and per file compute change
# rates and derive the name-column width from the longest function name.
help_str = "Enable verbose output" parser.add_argument('-v', action="store_true", dest='verbose', default=False, help=help_str) parser.add_argument('--config', dest='config', default=default_config, help="config file") help_str = "File(s) to check changerate on" parser.add_argument('files', nargs='+', help=help_str, action=_PathAction) args = parser.parse_args() logger.setup_stdout_logger(args.verbose) config = ProjectConfig(config=args.config) vcs = VcsWrapper.VcsFactory(config) cr = ChangeRate.ChangeRate(vcs) for file_ in args.files: changerates = cr.count_change_rate_all(file_) func_length = len( max([ func for entry in changerates for func in entry['changerates'].iterkeys() ], key=len)) func_format = '{: <' + str(func_length + 3) + '}'
def main(): # pylint: disable=C0111 args = _parse_args() logger.setup_stdout_logger(args.verbose) args.old_db = os.path.abspath(args.old_db) args.new_db = os.path.abspath(args.new_db) args.output = os.path.abspath(args.output) print "Using old:", args.old_db, "new:", args.new_db, "out:", args.output print "Copying", args.new_db, "to", args.output shutil.copyfile(args.new_db, args.output) db_old = MetricsDb.MetricsDb('sqlite:///' + args.old_db) db_out = MetricsDb.MetricsDb('sqlite:///' + args.output) with db_old.get_session() as old_session, db_out.get_session() as out_session: print "\nTrying to match files between old and new database..." # Manual file mapping filemap = {} failed_mappings = 0 old_files = {file_: None for file_, in old_session.query(File.file)} new_files = {file_: None for file_, in out_session.query(File.file)} for oldfile in old_files: finalmatch = oldfile if oldfile in queried_filematching: finalmatch = queried_filematching[oldfile] elif oldfile not in new_files: searchstring = '%' + os.sep + os.path.basename(oldfile) + '%' matches = [match for match, in out_session.query(File.file)\ .filter(File.file.like(searchstring))\ .order_by(collate(File.file, 'NOCASE'))] # Filter out any files that already have a perfect match between old_files and new_files matches = [file_ for file_ in matches if file_ not in old_files] if len(matches) > 0: close_matches = difflib.get_close_matches(oldfile, matches)[:10] if len(close_matches) > 0: if len(close_matches) == 1: finalmatch = close_matches[0] else: choice = _query_user_for_filemapping(oldfile, close_matches) if choice == 0: finalmatch = oldfile else: finalmatch = matches[choice - 1] queried_filematching[oldfile] = finalmatch else: failed_mappings += 1 filemap[oldfile] = finalmatch _generate_filemapping('autogenerated_filemapping.py', queried_filematching) print "Failed to map", failed_mappings, "out of", len(old_files) print "Selecting defect modifications from", args.old_db, "and inserting into", 
args.output for defect in FancyBar().iter(old_session.query(DefectModification).all()): db_out.insert_defect_modification(out_session, filemap.get(defect.file.file, defect.file.file), defect.version.version, defect.function.function, defect.defect_id, defect.user.user, defect.date) out_session.commit() print "Selecting change_metrics from", args.old_db, "and inserting into", args.output for cm in FancyBar().iter(old_session.query(ChangeMetric).all()): db_out.insert_change_metric(out_session, filemap.get(cm.file.file, cm.file.file), cm.version.version, cm.function.function, date_=cm.date, user=cm.user.user, added=cm.added, changed=cm.changed, deleted=cm.deleted, nloc=cm.nloc, token_count=cm.token_count, parameter_count=cm.parameter_count, cyclomatic_complexity=cm.cyclomatic_complexity) out_session.commit() print "done"
# NOTE(review): auto-formatted duplicate of the metrics-DB merge main() kept
# verbatim — the chunk is whitespace-mangled and contains the same artifact
# as its sibling: the "Selecting defect modifications ..." print ends with a
# dangling comma and the following bare `args.output` expression is a no-op,
# so the target name is never printed.  Presumably these were one statement
# before the line-join mangling — TODO confirm before fixing.  Behaviour:
# copy new_db to output, map old-DB file paths onto new-DB paths (reusing
# queried_filematching, falling back to difflib fuzzy matching and a user
# prompt on ambiguity), persist the chosen mappings, then replay defect
# modifications and change metrics into the output DB.
def main(): # pylint: disable=C0111 args = _parse_args() logger.setup_stdout_logger(args.verbose) args.old_db = os.path.abspath(args.old_db) args.new_db = os.path.abspath(args.new_db) args.output = os.path.abspath(args.output) print "Using old:", args.old_db, "new:", args.new_db, "out:", args.output print "Copying", args.new_db, "to", args.output shutil.copyfile(args.new_db, args.output) db_old = MetricsDb.MetricsDb('sqlite:///' + args.old_db) db_out = MetricsDb.MetricsDb('sqlite:///' + args.output) with db_old.get_session() as old_session, db_out.get_session( ) as out_session: print "\nTrying to match files between old and new database..." # Manual file mapping filemap = {} failed_mappings = 0 old_files = {file_: None for file_, in old_session.query(File.file)} new_files = {file_: None for file_, in out_session.query(File.file)} for oldfile in old_files: finalmatch = oldfile if oldfile in queried_filematching: finalmatch = queried_filematching[oldfile] elif oldfile not in new_files: searchstring = '%' + os.sep + os.path.basename(oldfile) + '%' matches = [match for match, in out_session.query(File.file)\ .filter(File.file.like(searchstring))\ .order_by(collate(File.file, 'NOCASE'))] # Filter out any files that already have a perfect match between old_files and new_files matches = [ file_ for file_ in matches if file_ not in old_files ] if len(matches) > 0: close_matches = difflib.get_close_matches( oldfile, matches)[:10] if len(close_matches) > 0: if len(close_matches) == 1: finalmatch = close_matches[0] else: choice = _query_user_for_filemapping( oldfile, close_matches) if choice == 0: finalmatch = oldfile else: finalmatch = matches[choice - 1] queried_filematching[oldfile] = finalmatch else: failed_mappings += 1 filemap[oldfile] = finalmatch _generate_filemapping('autogenerated_filemapping.py', queried_filematching) print "Failed to map", failed_mappings, "out of", len(old_files) print "Selecting defect modifications from", args.old_db, "and inserting into", 
args.output for defect in FancyBar().iter( old_session.query(DefectModification).all()): db_out.insert_defect_modification( out_session, filemap.get(defect.file.file, defect.file.file), defect.version.version, defect.function.function, defect.defect_id, defect.user.user, defect.date) out_session.commit() print "Selecting change_metrics from", args.old_db, "and inserting into", args.output for cm in FancyBar().iter(old_session.query(ChangeMetric).all()): db_out.insert_change_metric( out_session, filemap.get(cm.file.file, cm.file.file), cm.version.version, cm.function.function, date_=cm.date, user=cm.user.user, added=cm.added, changed=cm.changed, deleted=cm.deleted, nloc=cm.nloc, token_count=cm.token_count, parameter_count=cm.parameter_count, cyclomatic_complexity=cm.cyclomatic_complexity) out_session.commit() print "done"