def build(source, build_dir, root, initiator):
    """Builds the register in build_dir/source.

    source is either settings.register_path or settings.staging_path.
    """
    logging.info("Sync script started by %s...", initiator)
    # candidate for removal as this is the only place it is used
    standards_id, standards = load_repos(repos_path)
    # TODO: move to utils
    clusters_id = {}
    with open(cluster_path) as f:
        clusters = load(f)
    for cluster in clusters:
        clusters_id[cluster["id"]] = cluster
    # check if initiator is present in repos.json
    if initiator in standards_id:
        cleanup(build_path, source, build_dir, initiator)
        logging.info("Fetching repo %s...", initiator)
        fetch_repo(root, source, initiator, standards_id[initiator]["url"],
                   build_path)
        logging.info("Building folders...")
        build_folders(source, build_dir, standards_id[initiator], root,
                      standards_id[initiator]["cluster"], build_path)
        create_infomodel_homepage(
            root,
            source,
            assets_path,
            build_path,
            build_dir,
            standards_id[initiator]["cluster"],
            standards_id[initiator],
        )
        logging.info("Creating homepage...")
        webpages.create_register_homepage(clusters, source, build_dir)
        if standards_id[initiator]["cluster"] != "":
            webpages.create_cluster_overview(
                standards, source, build_dir,
                standards_id[initiator]["cluster"], root, assets_path
            )
    else:
        print "%s is not listed in repos.json... aborting." % initiator
        logging.error("%s is not listed in repos.json... aborting", initiator)
        exit()
    # TODO: check if repo needs to be removed from repos/
    print "Done!"
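# A minimal sketch of the "TODO: move to utils" block above as a standalone
# helper next to load_repos. Assumptions: cluster_path points at a JSON file
# and the module-level load() is json.load; the name load_clusters is
# hypothetical and not part of the existing codebase.
def load_clusters(cluster_path):
    """Return (clusters, clusters_id) read from the cluster JSON file."""
    from json import load  # assumption: load() used above is json.load
    with open(cluster_path) as f:
        clusters = load(f)
    # index clusters by their "id" field for quick lookup
    clusters_id = dict((cluster["id"], cluster) for cluster in clusters)
    return clusters, clusters_id
# build() and build_register() could then start with:
#     clusters, clusters_id = load_clusters(cluster_path)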
def build_register(initiator):
    """Builds the register in build_dir/sources_path."""
    root = OSFS(root_path)
    logging.info("Sync script started by %s...", initiator)
    # candidate for removal as this is the only place it is used
    standards_id, standards = load_repos(repos_path)
    # TODO: move to utils
    clusters_id = {}
    with open(cluster_path) as f:
        clusters = load(f)
    for cluster in clusters:
        clusters_id[cluster['id']] = cluster
    # TODO: move to run.py
    if initiator in standards_id:
        cleanup(initiator)
        logging.info("Fetching repo %s...", initiator)
        fetch_repo(root, initiator, standards_id[initiator]['url'])
        # create_zipfile(initiator, root)
        logging.info("Building folders...")
        build_folders(standards_id[initiator], root,
                      standards_id[initiator]['cluster'])
        create_infomodel_homepage(root, standards_id[initiator]['cluster'],
                                  standards_id[initiator])
        logging.info("Creating homepage...")
        webpages.create_register_homepage(clusters)
        if standards_id[initiator]['cluster'] != "":
            webpages.create_cluster_overview(
                standards, standards_id[initiator]['cluster'], root)
    else:
        print "%s is not listed in repos.json... aborting." % initiator
        logging.error("%s is not listed in repos.json... aborting", initiator)
        exit()
    # TODO: check if repo needs to be removed from repos/
    print "Done!"
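# Per the "TODO: move to run.py" note above, the repos.json membership check
# could be done by the caller instead of inside build_register(). A hedged
# sketch of that caller-side check, assuming run.py imports sys, logging,
# load_repos, and settings as s:
#
#     standards_id, standards = load_repos(s.repos_path)
#     if initiator not in standards_id:
#         logging.error("%s is not listed in repos.json... aborting", initiator)
#         sys.exit(1)
#     build_register(initiator)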
from fs.osfs import OSFS
from fs.errors import ResourceNotFoundError

import settings as s
from backend import fetch_repo, deploy_register, build_register
from utils import load_repos

root_fs = OSFS(s.root_path)
root_fs.makedir(s.build_path, recursive=True, allow_recreate=True)
build_fs = OSFS(s.build_path)
build_fs.makedir(s.sources_path, allow_recreate=True)
build_fs.makedir(s.register_path, allow_recreate=True)

# create production directory if needed
try:
    production_fs = OSFS(s.production_path)
except ResourceNotFoundError:
    # grab production dir's parent dir (assumes production_path has a trailing slash)
    path = s.production_path.split('/')[-2]
    print path
    production_fs = OSFS(
        s.production_path[:len(s.production_path) - (len(path) + 1)]
    ).makeopendir(path)
    print production_fs

if not production_fs.exists(s.backups_path):
    production_fs.makedir(s.backups_path)

# fetch repos from GitHub, then build and deploy the register
for repo in load_repos(s.repos_path)[0].values():
    print 'Fetching %s for the first time' % repo['id']
    fetch_repo(root_fs, repo['id'], repo['url'])
    build_register(repo['id'])

deploy_register()
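# The parent-directory arithmetic in the ResourceNotFoundError fallback above
# relies on production_path ending with a slash. A sketch of the same fallback
# using posixpath, which works with or without the trailing slash (this is an
# alternative, not what the script currently does):
#
#     import posixpath
#     parent, name = posixpath.split(s.production_path.rstrip('/'))
#     production_fs = OSFS(parent).makeopendir(name)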