def main():
    """Build per-project git-log caches, then a combined "openstack" cache.

    Side effects only: writes one cache file per project via
    gitlog.generate_cache_file(), then merges every project's commits
    into a single list sorted by committer date and saves it as the
    global "openstack" cache.
    """
    # Fetch the project list once and reuse it for both passes
    # (the original queried projects.get_project_list() twice).
    project_list = projects.get_project_list()

    # Pass 1: generate each project's individual cache file.
    for project in project_list:
        print("Processing %s" % (project))
        gitlog.generate_cache_file(project)

    # Pass 2: merge all projects' commits, chronologically ordered.
    openstack = []
    for project in project_list:
        openstack += gitlog.get_commits(project)
    openstack = sorted(openstack, key=lambda c: c['committer_date'])

    print("Generating the global OpenStack project cache")
    gitlog.save_cache_file("openstack", openstack)
def main():
    """Ensure the working directories exist, then check out repositories.

    Checks out a single repository when ``ns.repo`` is set on the parsed
    command-line namespace; otherwise checks out every repository returned
    by projects.get_project_list(). Side effects only.
    """
    # exist_ok=True avoids the check-then-create race of the original
    # os.path.exists() / os.makedirs() pair (and is a no-op if the
    # directory already exists).
    os.makedirs(conf.REPOS_PATH, exist_ok=True)
    os.makedirs(conf.CACHE_PATH, exist_ok=True)

    # Clone/update either the one requested repo or all known projects.
    if ns.repo:
        checkout_repo(ns.repo)
    else:
        for repo in projects.get_project_list():
            checkout_repo(repo)
def get_all_releases_dicts():
    """Return the release dicts plus a synthetic "Global" release, each
    annotated with the projects active during its period.

    A project is attributed to a release when it has more than one commit
    whose author_date falls within the release's period (inclusive).

    Returns:
        list of dicts with at least "name", "period" (start, end) and
        "projects" keys.
    """
    # Copy each release dict AND its 'projects' list. The original used
    # releases[:] (a shallow copy), so r['projects'].append(...) mutated
    # the module-level `releases` dicts and repeated calls accumulated
    # duplicate project entries.
    rel = [dict(r, projects=list(r['projects'])) for r in releases]

    # Synthetic release spanning the start of the first release through
    # the end of the last, attributed to the combined "openstack" project.
    rel.append({
        "name": "Global",
        "period": (rel[0]['period'][0], rel[-1]['period'][1]),
        "projects": ["openstack"],
    })

    # Tag each release with the projects that were active during it.
    for project in projects.get_project_list():
        commits = gitlog.get_commits(project)
        for r in rel:
            start, end = r['period']
            commits_in_release = [
                c for c in commits if start <= c['author_date'] <= end
            ]
            # NOTE(review): threshold kept at "> 1" as in the original —
            # a project with exactly one commit in a release is skipped;
            # confirm whether ">= 1" was intended.
            if len(commits_in_release) > 1:
                r['projects'].append(project)
    return rel