def fetch_repos(args):
    """Clone repos and set them to the state described by the LAYERS.json file.

    args: argparse namespace providing top_dir, src_dir, json_in and update.
    Exits with status 1 on path-sanity, JSON-parse, or fetch errors.
    """
    update = args.update
    try:
        paths = PathSanity(args.top_dir)
        paths["src_dir"] = args.src_dir
        paths.setitem_strict("json_in", args.json_in)
    except ValueError as e:
        print(e)
        sys.exit(1)
    # Parse the JSON file with repo data. The previous while/try/break dance
    # relied on a second read() returning "" to raise ValueError, and left
    # `fetcher` undefined (a later NameError) if the first decode failed.
    # Decode exactly once and fail loudly on malformed input instead.
    with open(paths["json_in"], 'r') as repos_fd:
        try:
            repos = JSONDecoder(object_hook=Repo.repo_decode).decode(
                repos_fd.read())
        except ValueError as e:
            print(e)
            sys.exit(1)
    fetcher = RepoFetcher(paths["src_dir"], repos=repos)
    if not os.path.exists(paths["src_dir"]):
        os.mkdir(paths["src_dir"])
    try:
        # --update refreshes existing clones; default is a fresh clone.
        if not update:
            fetcher.clone()
        else:
            fetcher.update()
    except EnvironmentError as e:
        print(e)
        sys.exit(1)
def fetch_repos(args):
    """Clone repos and set them to the state described by the LAYERS.json file.

    NOTE(review): this is a duplicate definition of fetch_repos; Python keeps
    only the last one defined, so the two copies should be reconciled.

    args: argparse namespace providing top_dir, src_dir, json_in and update.
    Exits with status 1 on path-sanity, JSON-parse, or fetch errors.
    """
    update = args.update
    try:
        paths = PathSanity(args.top_dir)
        paths["src_dir"] = args.src_dir
        paths.setitem_strict("json_in", args.json_in)
    except ValueError as e:
        print(e)
        sys.exit(1)
    # Decode the repo manifest once. The old loop (`while True ... except
    # ValueError: break;`) could exit with `fetcher` unbound when the first
    # decode raised, producing a NameError below instead of a clear message.
    with open(paths["json_in"], 'r') as repos_fd:
        try:
            repos = JSONDecoder(object_hook=Repo.repo_decode).decode(
                repos_fd.read())
        except ValueError as e:
            print(e)
            sys.exit(1)
    fetcher = RepoFetcher(paths["src_dir"], repos=repos)
    if not os.path.exists(paths["src_dir"]):
        os.mkdir(paths["src_dir"])
    try:
        # --update refreshes existing clones; default is a fresh clone.
        if not update:
            fetcher.clone()
        else:
            fetcher.update()
    except EnvironmentError as e:
        print(e)
        sys.exit(1)
def layers_gen(args):
    """Collect data from repos in src_dir to generate the LAYERS file.

    args: argparse namespace providing top_dir, src_dir, bblayers_file and
    layers_file.
    """
    paths = PathSanity(args.top_dir)
    paths["src_dir"] = args.src_dir
    paths["bblayers_file"] = args.bblayers_file
    paths["layers_file"] = args.layers_file
    # Build Repo objects reflecting the current on-disk repo state.
    state_repos = Repo.repos_from_state(paths["bblayers_file"],
                                        top_dir=paths._top_dir,
                                        src_dir=paths["src_dir"])
    # Serialize them out as the LAYERS file.
    serializer = LayerSerializer(state_repos)
    with open(paths["layers_file"], 'w') as out_fd:
        serializer.write(fd=out_fd)
def json_gen(args):
    """Parse bblayers.conf and collect data from repos in src_dir to generate
    a json file representing their state.

    args: argparse namespace providing top_dir, src_dir and json_out.
    """
    paths = PathSanity(args.top_dir)
    paths["conf_dir"] = "conf"
    paths["bblayers_file"] = os.path.join(paths["conf_dir"], "bblayers.conf")
    paths["src_dir"] = args.src_dir
    paths["json_out"] = args.json_out
    # Build Repo objects from the current build state and wrap them in a
    # fetcher so FetcherEncoder can serialize the whole set.
    state_repos = Repo.repos_from_state(paths["bblayers_file"],
                                        top_dir=paths._top_dir,
                                        src_dir=paths["src_dir"])
    repo_fetcher = RepoFetcher(paths["src_dir"], repos=state_repos)
    # Serialize the Repo objects to the JSON manifest.
    with open(paths["json_out"], 'w') as out_fd:
        json.dump(repo_fetcher, out_fd, indent=4, cls=FetcherEncoder)
def setup(args):
    """Setup build structure.

    Copies build scripts, local.conf, environment.sh and the LAYERS.json
    manifest for the selected build type into the build tree, and generates
    conf/bblayers.conf from the manifest.

    args: argparse namespace providing top_dir, src_dir, build_type and
    build_op_data. Exits with status 1 on path-sanity or JSON-parse errors.
    """
    # Setup paths to source and destination files. Test for existence.
    try:
        paths = PathSanity(args.top_dir)
        paths["src_dir"] = args.src_dir
        paths["conf_dir"] = "conf"
        build_type = args.build_type
        paths.setitem_strict("build_op_data", args.build_op_data)
        paths.setitem_strict(
            "build_src",
            os.path.join(paths["build_op_data"],
                         "build_" + build_type + ".sh"))
        paths.setitem_strict("build_dst", "build.sh", exist=False)
        paths.setitem_strict("json_dst", "LAYERS.json", exist=False)
        paths.setitem_strict(
            "json_src",
            os.path.join(paths["build_op_data"],
                         "LAYERS_" + build_type + ".json"))
        paths.setitem_strict(
            "local_conf_src",
            os.path.join(paths["build_op_data"],
                         "local_" + build_type + ".conf"))
        paths.setitem_strict("local_conf_dst",
                             os.path.join(paths["conf_dir"], "local.conf"),
                             exist=False)
        paths.setitem_strict(
            "env_src",
            os.path.join(paths["build_op_data"], "environment.sh.template"))
        paths.setitem_strict("env_dst", "environment.sh", exist=False)
        paths.setitem_strict("bblayers_dst",
                             os.path.join(paths["conf_dir"], "bblayers.conf"),
                             exist=False)
    except ValueError as e:
        print(e)
        sys.exit(1)
    # Parse the JSON file with repo data. Decode exactly once: the previous
    # while/try/break loop left `fetcher` unbound (a later NameError) if the
    # first decode raised ValueError.
    with open(paths["json_src"], 'r') as repos_fd:
        try:
            repos = JSONDecoder(object_hook=Repo.repo_decode).decode(
                repos_fd.read())
        except ValueError as e:
            print(e)
            sys.exit(1)
    fetcher = RepoFetcher(paths["src_dir"], repos=repos)
    # Create bblayers.conf file.
    if not os.path.isdir(paths["conf_dir"]):
        os.mkdir(paths["conf_dir"])
    bblayers = BBLayerSerializer(paths.getitem_rel("src_dir"),
                                 repos=fetcher._repos)
    with open(paths["bblayers_dst"], 'w') as bblayers_fd:
        bblayers.write(fd=bblayers_fd)
    # Create LAYERS.json in root of build to make it obvious which layers
    # are currently in use.
    shutil.copy(paths["json_src"], paths["json_dst"])
    # Copy local_<type>.conf -> conf/local.conf.
    shutil.copy(paths["local_conf_src"], paths["local_conf_dst"])
    # Generate environment.sh from the template, substituting the relative
    # sources path for the @sources@ placeholder.
    shutil.copy(paths["env_src"], paths["env_dst"])
    # NOTE(review): mode includes S_IWOTH (world-writable) — confirm this is
    # intentional; preserved from the original.
    os.chmod(paths["env_dst"],
             stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IWOTH)
    for line in fileinput.input(paths["env_dst"], inplace=1):
        line = re.sub("@sources@", paths.getitem_rel("src_dir"),
                      line.rstrip())
        print(line)
    # Copy build script and make it executable.
    shutil.copy(paths["build_src"], paths["build_dst"])
    os.chmod(paths["build_dst"],
             stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IWOTH)
    return
def manifest(args):
    """Create manifest describing current state of repos in src_dir.

    Stages the build configuration files (conf/bblayers.conf, conf/local.conf,
    environment.sh, build.sh, build_op.py, LAYERS.json) into a temporary
    directory and archives it as <archive>.tar.bz2.

    args: argparse namespace providing top_dir and archive (prefix for the
    archive file name).
    """
    # Need sanity tests for paths / names.
    paths = PathSanity(args.top_dir)
    paths.setitem_strict("conf_dir", "conf", exist=True)
    paths.setitem_strict("bblayers_file",
                         os.path.join(paths["conf_dir"], "bblayers.conf"),
                         exist=True)
    paths.setitem_strict("localconf_file",
                         os.path.join(paths["conf_dir"], "local.conf"),
                         exist=True)
    paths.setitem_strict("env_file", "environment.sh", exist=True)
    paths.setitem_strict("build_file", "build.sh", exist=True)
    paths.setitem_strict("build_op_file", "build_op.py", exist=True)
    paths.setitem_strict("layers_file", "LAYERS.json", exist=True)
    archive_prefix = args.archive
    paths["archive_file"] = archive_prefix + ".tar.bz2"
    # Stage the config files in a temporary directory, then tar it all up.
    # The original never removed the mkdtemp() directory (leaked one temp
    # tree per invocation); clean it up in a finally block.
    staging_root = tempfile.mkdtemp()
    try:
        tmp_paths = PathSanity(staging_root)
        tmp_paths.setitem_strict("conf_dir", "conf", exist=False)
        os.mkdir(tmp_paths["conf_dir"])
        # Copy bblayers.conf and local.conf to tmp/conf/.
        shutil.copy(paths["bblayers_file"], tmp_paths["conf_dir"])
        shutil.copy(paths["localconf_file"], tmp_paths["conf_dir"])
        # Copy the top-level build files to tmp/.
        shutil.copy(paths["env_file"], tmp_paths._top_dir)
        shutil.copy(paths["build_file"], tmp_paths._top_dir)
        shutil.copy(paths["build_op_file"], tmp_paths._top_dir)
        shutil.copy(paths["layers_file"], tmp_paths._top_dir)
        # Tar it all up under the archive prefix.
        with tarfile.open(paths["archive_file"], "w:bz2") as tar:
            tar.add(tmp_paths._top_dir, arcname=archive_prefix,
                    recursive=True)
    finally:
        shutil.rmtree(staging_root, ignore_errors=True)
    return
def setup(args):
    """Setup build structure.

    NOTE(review): this is a duplicate definition of setup; Python keeps only
    the last one defined, so the two copies should be reconciled.

    Copies build scripts, local.conf, environment.sh and the LAYERS.json
    manifest for the selected build type into the build tree, and generates
    conf/bblayers.conf from the manifest.

    args: argparse namespace providing top_dir, src_dir, build_type and
    build_op_data. Exits with status 1 on path-sanity or JSON-parse errors.
    """
    # Setup paths to source and destination files. Test for existence.
    try:
        paths = PathSanity(args.top_dir)
        paths["src_dir"] = args.src_dir
        paths["conf_dir"] = "conf"
        build_type = args.build_type
        paths.setitem_strict("build_op_data", args.build_op_data)
        paths.setitem_strict(
            "build_src",
            os.path.join(paths["build_op_data"],
                         "build_" + build_type + ".sh"))
        paths.setitem_strict("build_dst", "build.sh", exist=False)
        paths.setitem_strict("json_dst", "LAYERS.json", exist=False)
        paths.setitem_strict(
            "json_src",
            os.path.join(paths["build_op_data"],
                         "LAYERS_" + build_type + ".json"))
        paths.setitem_strict(
            "local_conf_src",
            os.path.join(paths["build_op_data"],
                         "local_" + build_type + ".conf"))
        paths.setitem_strict("local_conf_dst",
                             os.path.join(paths["conf_dir"], "local.conf"),
                             exist=False)
        paths.setitem_strict(
            "env_src",
            os.path.join(paths["build_op_data"], "environment.sh.template"))
        paths.setitem_strict("env_dst", "environment.sh", exist=False)
        paths.setitem_strict("bblayers_dst",
                             os.path.join(paths["conf_dir"], "bblayers.conf"),
                             exist=False)
    except ValueError as e:
        print(e)
        sys.exit(1)
    # Decode the repo manifest once. The old loop (`while True ... except
    # ValueError: break;`) could exit with `fetcher` unbound when the first
    # decode raised, producing a NameError below instead of a clear message.
    with open(paths["json_src"], 'r') as repos_fd:
        try:
            repos = JSONDecoder(object_hook=Repo.repo_decode).decode(
                repos_fd.read())
        except ValueError as e:
            print(e)
            sys.exit(1)
    fetcher = RepoFetcher(paths["src_dir"], repos=repos)
    # Create bblayers.conf file.
    if not os.path.isdir(paths["conf_dir"]):
        os.mkdir(paths["conf_dir"])
    bblayers = BBLayerSerializer(paths.getitem_rel("src_dir"),
                                 repos=fetcher._repos)
    with open(paths["bblayers_dst"], 'w') as bblayers_fd:
        bblayers.write(fd=bblayers_fd)
    # Create LAYERS.json in root of build to make it obvious which layers
    # are currently in use.
    shutil.copy(paths["json_src"], paths["json_dst"])
    # Copy local_<type>.conf -> conf/local.conf.
    shutil.copy(paths["local_conf_src"], paths["local_conf_dst"])
    # Generate environment.sh from the template, substituting the relative
    # sources path for the @sources@ placeholder.
    shutil.copy(paths["env_src"], paths["env_dst"])
    # NOTE(review): mode includes S_IWOTH (world-writable) — confirm this is
    # intentional; preserved from the original.
    os.chmod(paths["env_dst"],
             stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IWOTH)
    for line in fileinput.input(paths["env_dst"], inplace=1):
        line = re.sub("@sources@", paths.getitem_rel("src_dir"),
                      line.rstrip())
        print(line)
    # Copy build script and make it executable.
    shutil.copy(paths["build_src"], paths["build_dst"])
    os.chmod(paths["build_dst"],
             stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IWOTH)
    return