async def add_index(
    self,
    index_data: Union[str, Mapping[str, Any], IndexConfig, BringIndexTing],
    allow_existing: bool = False,
) -> BringIndexTing:
    """Register an index with this object.

    Accepts either an already-created ``BringIndexTing``, or any data that
    ``create_index`` can turn into one. Re-registering the same index object
    is only allowed when 'allow_existing' is set; registering a *different*
    index under an already-used id always fails.

    Returns:
        the registered index
    Raises:
        FrklException: if the id is already taken (see above)
    """
    if is_instance_or_subclass(index_data, BringIndexTing):
        _index: BringIndexTing = index_data  # type: ignore
    else:
        _index = await self.create_index(
            index_data=index_data, allow_existing=allow_existing  # type: ignore
        )

    already_registered = _index.id in self.index_ids

    if already_registered and not allow_existing:
        raise FrklException(
            f"Can't add index '{_index.id}'.",
            reason="Index with this id already added.",
        )

    existing = self._indexes.get(_index.id, None)
    if already_registered and existing is not None and _index != existing:
        raise FrklException(
            f"Can't add index '{_index.id}'.",
            reason="Different index with same id already exists.",
        )

    self._indexes[_index.id] = _index
    return _index
async def init(self, config: IndexConfig) -> None:
    """Initialize this folder index from its type-specific config.

    Exactly one of 'path' or 'git_url' must be present in the config. A git
    url gets cloned into the local cache first; afterwards the index content
    is synced from the resulting local folder and the metadata timestamp is
    recorded.
    """
    config_dict = config.index_type_config

    git_url = config_dict.get("git_url", None)
    path = config_dict.get("path", None)

    # exactly one of 'path'/'git_url' is required
    if path is None and git_url is None:
        raise FrklException(
            f"Can't create folder index with config: {config_dict}",
            reason="Neither 'path' nor 'git_url' value provided.",
        )
    if path and git_url:
        raise FrklException(
            f"Can't create folder index with config: {config_dict}",
            reason="Both 'path' and 'git_url' value provided.",
            solution="Only provide one of those keys.",
        )

    if git_url:
        self._uri = git_url
        if is_git_repo_url(git_url):
            # remote repo: use (or create) the local clone
            local_folder = await ensure_repo_cloned(url=git_url, update=False)
        else:
            # presumably already a local checkout — use it directly
            local_folder = git_url
    else:
        self._uri = path
        local_folder = path

    maker = await self.get_maker(local_folder)
    await maker.sync()

    self._metadata_timestamp = str(arrow.Arrow.now())
async def mogrify(self, *value_names: str, **requirements) -> Mapping[str, Any]:
    """Extract an archive (or gunzip a plain '.gz' file) into a temp folder.

    Requirements:
        file_path: path to the archive/compressed file
        remove_root: whether to strip a single top-level directory from the
            extracted content; auto-detected when not provided

    Returns:
        dict with 'folder_path' pointing at the extracted content
    """
    artefact_path = requirements["file_path"]
    remove_root = requirements.get("remove_root", None)

    base_target = self.create_temp_dir("extract_")
    target_folder = os.path.join(base_target, "extracted")
    extract_folder = os.path.join(base_target, "extract")

    plain_gzip = artefact_path.endswith(".gz") and not artefact_path.endswith(".tar.gz")
    if plain_gzip:
        # a single gzipped file (not a tarball): decompress it manually
        ensure_folder(extract_folder)
        new_path = os.path.join(extract_folder, os.path.basename(artefact_path)[0:-3])
        with gzip.open(artefact_path, "rb") as f_in:
            with open(new_path, "wb") as f_out:
                shutil.copyfileobj(f_in, f_out)
    else:
        shutil.unpack_archive(artefact_path, extract_folder)

    if remove_root is None:
        # auto-detect: exactly one directory child means the archive wraps
        # its content in a root folder
        children = os.listdir(extract_folder)
        remove_root = len(children) == 1 and os.path.isdir(
            os.path.join(extract_folder, children[0])
        )

    if not remove_root:
        shutil.move(extract_folder, target_folder)
        return {"folder_path": target_folder}

    children = os.listdir(extract_folder)
    if len(children) == 0:
        raise FrklException(
            msg="Can't remove archive subfolder.",
            reason=f"No root file/folder for extracted archive: {artefact_path}",
        )
    elif len(children) > 1:
        raise FrklException(
            msg="Can't remove archive subfolder.",
            reason=f"More than one root files/folders: {', '.join(children)}",
        )

    root = os.path.join(extract_folder, children[0])
    if not os.path.isdir(root):
        raise FrklException(
            msg="Can't remove archive root.",
            reason=f"Not a folder: {children[0]}",
        )

    shutil.move(root, target_folder)
    shutil.rmtree(extract_folder)
    return {"folder_path": target_folder}
async def get_pkg(
    self, name: str, index: Optional[str] = None, raise_exception: bool = False
) -> Optional[PkgTing]:
    """Retrieve a package by name, optionally from a specific index.

    The package can be addressed either via the explicit 'index' argument or
    via a namespaced name ('<index>.<pkg>') -- but not both. Without an index,
    the default index is used; if none is configured, all registered indexes
    are searched for a package with that name.

    Args:
        name: plain or namespaced package name
        index: optional index name
        raise_exception: raise instead of returning None when not found

    Raises:
        ValueError: if both 'index' and a namespaced name were given
        FrklException: if no index can be determined / pkg not found (when
            'raise_exception' is set)
    """
    if index and "." in name:
        raise ValueError(
            f"Can't get pkg '{name}' for index '{index}': either specify index name, or use namespaced pkg name, not both."
        )
    elif "." in name:
        tokens = name.rsplit(".", maxsplit=1)
        _index_name: Optional[str] = tokens[0]
        _pkg_name = tokens[1]
    else:
        _pkg_name = name
        _index_name = index

        if index is None:
            _index_name = await self.get_default_index()

        if _index_name is None:
            # no default index configured: search all registered indexes
            for id_n in self.index_ids:
                idx = await self.get_index(id_n)
                pkg_names = await idx.pkg_names
                if _pkg_name in pkg_names:
                    _index_name = idx.id
                    break

        if _index_name is None:
            if raise_exception:
                raise FrklException(
                    f"No index provided, and none of the registered ones contains a pkg named '{_pkg_name}'."
                )
            else:
                return None

    # BUGFIX: original was 'isinstance(not _index_name, str)', which tested
    # the boolean 'not _index_name' and therefore never triggered
    if not isinstance(_index_name, str):
        raise NotImplementedError()

    result_index: BringIndexTing = await self.get_index(_index_name)
    pkg = await result_index.get_pkg(_pkg_name, raise_exception=raise_exception)

    if pkg is None and raise_exception:
        raise FrklException(msg=f"Can't retrieve pkg '{name}': no such package")

    return pkg
async def get_parent_pkg(
    self, source_details: Mapping[str, Any], bring_index: BringIndexTing
) -> PkgTing:
    """Resolve the parent package a child pkg is based on.

    The index is taken from 'source_details["index"]' when present (must be a
    plain, non-namespaced index name), otherwise 'bring_index' is used.

    Raises:
        FrklException: if the index or the package can't be found, or the
            resolved ting is not a PkgTing
    """
    pkg_name = source_details["name"]
    pkg_index = source_details.get("index", None)

    if pkg_index is None:
        pkg_index = bring_index
    elif "." not in pkg_index:
        ctx = await self._bring.get_index(pkg_index)
        if ctx is None:
            ctx_names = self._bring.index_ids
            raise FrklException(
                msg=f"Can't retrieve child pkg '{pkg_name}'.",
                reason=f"Requested index '{pkg_index}' not among available indexes: {', '.join(ctx_names)}",
            )
        pkg_index = ctx
    else:
        raise NotImplementedError()

    ting = await pkg_index.get_pkg(pkg_name)

    # BUGFIX: the original read 'ting.full_name' *before* the None check,
    # which raised AttributeError instead of the intended FrklException
    if ting is None:
        ting_name = f"{pkg_index.full_name}.pkgs.{pkg_name}"
        pkg_list = []
        for tn in self._bring._tingistry_obj.ting_names:
            # if '.pkgs.' in tn:
            pkg_list.append(tn)
        pkg_list_string = "\n - ".join(pkg_list)
        raise FrklException(
            msg="Can't resolve bring pkg.",
            reason=f"Requested child pkg '{ting_name}' not among available pkgs:\n\n{pkg_list_string}",
        )

    ting_name = ting.full_name
    if not is_instance_or_subclass(ting, PkgTing):
        raise FrklException(
            msg="Can't resolve bring pkg.",
            reason=f"Parent pkg '{ting_name}' does not sub-class the PkgTing class.",
        )

    return ting  # type: ignore
async def get_uri(self) -> str:
    """Return the uri this index was created from.

    Raises:
        FrklException: if the index was not initialized yet.
    """
    uri = self._uri
    if uri is None:
        raise FrklException(
            "Can't retrieve uri for index.", reason="Index not initialized yet."
        )
    return uri
def create_mogrifier_ting(
    self,
    mogrify_plugin: str,
    pipeline_id: str,
    index: str,
    input_vals: Mapping[str, Any],
    vars: Mapping[str, Any],
) -> Mogrifier:
    """Instantiate one mogrifier ting for a pipeline step.

    Looks up the plugin, creates the ting under a pipeline-scoped name, feeds
    it its input values and attaches a task descriptor for progress reporting.

    Raises:
        FrklException: if no plugin with that name is registered
    """
    plugin: Mogrifier = self.plugin_manager.get_plugin(mogrify_plugin)
    if not plugin:
        raise FrklException(
            msg="Can't create transmogrifier.",
            reason=f"No mogrify plugin '{mogrify_plugin}' available.",
        )

    full_ting_name = f"bring.mogrify.pipelines.{pipeline_id}.{mogrify_plugin}_{index}"
    ting: Mogrifier = self._tingistry_obj.create_ting(  # type: ignore
        prototing=f"bring.mogrify.plugins.{mogrify_plugin}",
        ting_name=full_ting_name,
    )
    ting.set_input(**input_vals)

    # attach a task descriptor so progress can be reported per step
    ting.task_desc = BringTaskDesc(
        name=mogrify_plugin,
        msg=ting.get_msg(),
        subtopic=f"{pipeline_id}.{ting.name}",
    )
    return ting
async def create_transmogrificator(
    self,
    vars: Optional[Mapping[str, Any]] = None,
    extra_mogrifiers: Iterable[Union[str, Mapping[str, Any]]] = None,
    parent_task_desc: TaskDesc = None,
) -> Transmogrificator:
    """Build the mogrifier pipeline ('Transmogrificator') for this package.

    Resolves the package metadata, finds the version matching 'vars', and
    assembles the version's mogrify list (plus any 'extra_mogrifiers') into a
    pipeline.

    Raises:
        FrklException: if no version matches the given variables
    """
    vals: Mapping[str, Any] = await self.get_values(  # type: ignore
        "metadata", resolve=True
    )
    metadata = vals["metadata"]

    if vars is None:
        vars = {}
    _vars = await self.calculate_full_vars(_pkg_metadata=metadata, **vars)

    version = await self.find_version_data(vars=_vars, metadata=metadata)
    if not version:
        # craft an error message appropriate to the amount of input given
        if not vars:
            reason = "No version match for no/empty variable input."
        elif len(vars) == 1:
            reason = f"Can't find version match for var: {vars}"
        else:
            vars_string = to_value_string(_vars, reindent=2)
            reason = (
                f"Can't find version match for vars combination:\n\n{vars_string}"
            )
        raise FrklException(msg=f"Can't process pkg '{self.name}'.", reason=reason)

    mogrify_list: List[Union[str, Mapping[str, Any]]] = list(version["_mogrify"])
    if extra_mogrifiers:
        mogrify_list.extend(extra_mogrifiers)

    pipeline_id = generate_valid_identifier(prefix="pipe_", length_without_prefix=6)
    task_desc = BringTaskDesc(
        name=f"prepare package '{self.name}'",
        msg=f"preparing file(s) for package '{self.name}'",
        subtopic=pipeline_id,
    )

    mogrify_vars = metadata["pkg_vars"]["mogrify_vars"]

    tm = self._transmogritory.create_transmogrificator(
        mogrify_list,
        vars=vars,
        args=mogrify_vars,
        name=self.name,
        task_desc=task_desc,
        pipeline_id=pipeline_id,
    )
    if parent_task_desc is not None:
        tm.task_desc.parent = parent_task_desc

    return tm
async def retrieve(self, *value_names: str, **requirements) -> Dict[str, Any]:
    """Resolve the requested value names for this index ting.

    Initializes the index from its config on first call, then answers only
    the requested keys (config, id, uri, defaults, pkgs, ...).

    Raises:
        FrklException: if the 'config' requirement is not an IndexConfig
    """
    config: IndexConfig = requirements["config"]
    if not is_instance_or_subclass(config, IndexConfig):
        raise FrklException(
            f"Can't process index {self.name}",
            reason=f"Invalid index config type: {type(config)}",
        )

    self._id = config.id

    # lazy one-time initialization from the config
    if not self._initialized:
        await self.init(config)
        self._initialized = True

    result: Dict[str, Any] = {}
    wanted = set(value_names)

    if "config" in wanted:
        result["config"] = config.to_dict()
    if "id" in wanted:
        result["id"] = self._id
    if "index_type" in wanted:
        result["index_type"] = config.index_type
    if "index_file" in wanted:
        result["index_file"] = config.index_file
    if "uri" in wanted:
        result["uri"] = await self.get_uri()
    if "index_type_config" in wanted:
        result["index_type_config"] = config.index_type_config
    if "defaults" in wanted:
        result["defaults"] = calculate_defaults(
            typistry=self._tingistry_obj.typistry, data=config.defaults
        )
    if "info" in wanted:
        result["info"] = config.info
    if "labels" in wanted:
        result["labels"] = config.labels
    if "tags" in wanted:
        result["tags"] = config.tags
    if "metadata_timestamp" in wanted:
        result["metadata_timestamp"] = await self.get_metadata_timestamp()
    if "pkgs" in wanted:
        # await self._ensure_pkgs(config)
        result["pkgs"] = await self._get_pkgs()

    return result
def provides(self) -> Mapping[str, Union[str, Mapping[str, Any]]]:
    """Return the value schema this mogrifier provides (class attr '_provides').

    Raises:
        FrklException: if the subclass forgot to declare '_provides'.
    """
    cls = self.__class__
    if not hasattr(cls, "_provides"):
        raise FrklException(
            f"Error processing mogrifier '{self.name}'.",
            reason=f"No class attribute '_provides' availble for {self.__class__.__name__}. This is a bug.",
        )
    return cls._provides  # type: ignore
async def get_tempting(self, name: str) -> TemplaTing:
    """Look up a template by name.

    Raises:
        FrklException: if no template with that name exists (the message
            lists the available ones).
    """
    all_temptings = await self.get_temptings()
    found = all_temptings.get(name, None)
    if found is None:
        raise FrklException(
            f"Can't process template '{name}'.",
            reason=f"Template does not exist. Available: {', '.join(sorted(all_temptings.keys()))}",
        )
    return found
async def mogrify(self, *value_names: str, **requirements) -> Mapping[str, Any]:
    """Apply patch sets to files inside a folder.

    Requirements:
        folder_path: root folder containing the files to patch
        patch_map: mapping of relative file path -> patch set

    Raises:
        FrklException: if a target file is missing or not a regular file
    """
    folder: str = requirements["folder_path"]
    patch_map: Mapping = requirements["patch_map"]

    for rel_path, patch_set in patch_map.items():
        target_file = os.path.join(folder, rel_path)
        if not os.path.exists(target_file):
            raise FrklException(
                msg=f"Can't patch file '{rel_path}'.",
                reason="File does not exists.",
            )
        if not os.path.isfile(target_file):
            raise FrklException(
                msg=f"Can't patch file '{rel_path}'.",
                reason="Not a file.",
            )
        await self.patch(target_file, patch_set)

    return {"folder_path": folder}
async def add_example(_example: str):
    """Resolve a pkg and record its 'source'/'info' values in 'examples_md'.

    NOTE: closure — relies on 'bring' and 'examples_md' from the enclosing
    scope.
    """
    pkg = await bring.get_pkg(_example)
    if pkg is None:
        raise FrklException(
            msg=f"Can't add example for '{_example}'.",
            reason="No such package available.",
        )

    vals: Mapping[str, Any] = await pkg.get_values(  # type: ignore
        "source", "info", resolve=True
    )
    examples_md[_example] = vals
async def get_pkg(
    self,
    pkg_name: str,
    bring_index: BringIndexTing,
    pkg_index: Optional[str] = None,
) -> PkgTing:
    """Resolve a (parent) pkg ting by name relative to an index.

    When 'pkg_index' is not given, 'bring_index' is used; a plain index name
    is resolved via the bring instance first.

    Raises:
        FrklException: if the index or the pkg can't be found, or the
            resolved ting is not a PkgTing
    """
    if pkg_index is None:
        pkg_index = bring_index.full_name
    elif "." not in pkg_index:
        resolved = await self._bring.get_index(pkg_index)
        if resolved is None:
            available = self._bring.index_ids
            raise FrklException(
                msg=f"Can't retrieve child pkg '{pkg_name}'.",
                reason=f"Requested index '{pkg_index}' not among available indexes: {', '.join(available)}",
            )
        pkg_index = resolved.full_name

    ting_name = f"{pkg_index}.pkgs.{pkg_name}"
    ting = self._bring._tingistry_obj.get_ting(ting_name)

    if ting is None:
        all_tings = list(self._bring._tingistry_obj.ting_names)
        listing = "\n - ".join(all_tings)
        raise FrklException(
            msg="Can't resolve bring pkg.",
            reason=f"Requested child pkg '{ting_name}' not among available pkgs:\n\n{listing}",
        )

    if not is_instance_or_subclass(ting, PkgTing):
        raise FrklException(
            msg="Can't resolve bring pkg.",
            reason=f"Parent pkg '{ting_name}' does not sub-class the PkgTing class.",
        )

    return ting  # type: ignore
async def get_pkg_map(self, *indexes) -> Mapping[str, Mapping[str, PkgTing]]:
    """Get all pkgs, per available (or requested) indexes.

    Returns:
        mapping of index id -> (pkg name -> pkg ting)
    Raises:
        FrklException: if a requested index does not exist, or two indexes
            share the same id
    """
    if not indexes:
        idxs: Iterable[str] = self.index_ids
    else:
        idxs = list(indexes)

    ctxs = []
    for c in idxs:
        ctx = await self.get_index(c)
        if ctx is None:
            # BUGFIX: message was missing the closing quote around the id
            raise FrklException(
                msg=f"Can't get packages for index '{c}'.",
                reason="No such index found.",
            )
        ctxs.append(ctx)

    pkg_map: Dict[str, Dict[str, PkgTing]] = {}

    async def get_pkgs(_index: BringIndexTing):
        pkgs = await _index.get_pkgs()
        for pkg in pkgs.values():
            pkg_map[_index.id][pkg.name] = pkg

    for index in ctxs:
        # BUGFIX: duplicate detection used 'index.name', but the map is keyed
        # by 'index.id', so duplicates were never caught unless name == id
        if index.id in pkg_map.keys():
            raise FrklException(
                msg=f"Can't assemble packages for index '{index.id}'",
                reason="Duplicate index id.",
            )
        pkg_map[index.id] = {}

    async with create_task_group() as tg:
        for index in ctxs:
            await tg.spawn(get_pkgs, index)

    return pkg_map
async def get_templates_pkg(self) -> PkgTing:
    """Return the configured templates package, resolving and caching it lazily.

    Raises:
        FrklException: if the configured pkg name can't be resolved.
    """
    if self._templates_pkg is not None:
        return self._templates_pkg

    pkg = await self._bring.get_pkg(self._templates_pkg_name, raise_exception=False)
    if pkg is None:
        raise FrklException(
            msg="Can't process template.",
            reason=f"Specified templates pkg '{self._templates_pkg_name}' does not exist.",
        )

    self._templates_pkg = pkg
    return self._templates_pkg
async def _merge_item(
    self,
    item_id: str,
    item: Any,
    item_metadata: Mapping[str, Any],
    merge_config: Mapping[str, Any],
) -> Optional[MutableMapping[str, Any]]:
    """Copy or move one package item into the target folder.

    Skips items the pkg spec doesn't know about, honours 'flatten' and
    'single_file' settings, applies an optional octal 'mode', and records the
    result in '_merged_items'.

    Returns:
        a small result dict, or None if the item was ignored
    """
    item_details = self.pkg_spec.get_item_details(item_id)
    if not item_details:
        log.debug(f"Ignoring file item: {item_id}")
        return None

    target_id = item_details[PATH_KEY]
    if self.pkg_spec.flatten:
        # drop any subfolder structure, keep only the file name
        target_path = os.path.join(self.path, os.path.basename(target_id))
    else:
        target_path = os.path.join(self.path, target_id)

    if self.pkg_spec.single_file:
        existing_children = os.listdir(self.path)
        if existing_children:
            raise FrklException(
                msg=f"Can't merge item '{item_id}'.",
                reason=f"Package is marked as single file, and target path '{self.path}' already contains a child.",
            )

    ensure_folder(os.path.dirname(target_path))

    move_method = merge_config.get("move_method", "copy")
    if move_method == "move":
        shutil.move(item, target_path)
    elif move_method == "copy":
        shutil.copy2(item, target_path)
    else:
        raise ValueError(f"Invalid 'move_method' value: {move_method}")

    if "mode" in item_details.keys():
        mode_value = item_details["mode"]
        if not isinstance(mode_value, str):
            mode_value = str(mode_value)
        # mode is given in octal notation (e.g. "755")
        os.chmod(target_path, int(mode_value, base=8))

    self._merged_items[target_path] = MetadataFileItem(
        id=target_path, parent=self, metadata=item_metadata
    )

    return {"msg": "installed"}
async def validate_update(self, raise_exception: bool = True) -> Set[PkgTing]:
    """Check the index for missing/inconsistent packages.

    Args:
        raise_exception: raise instead of just returning the offenders

    Returns:
        the set of inconsistent packages
    """
    inconsistent_names = await self.get_inconsistent_package_names()
    if inconsistent_names and raise_exception:
        raise FrklException(
            msg="Can't update index.",
            reason=f"Missing/inconsistent packages: {inconsistent_names}",
        )
    return await self.get_inconsistent_packages()
def create_temp_dir(self, prefix=None):
    """Create a temporary directory inside this mogrifier's working dir.

    Args:
        prefix: optional name prefix; defaults to the mogrifier's name

    Raises:
        FrklException: if no working dir is set
    """
    effective_prefix = self._name if prefix is None else prefix

    if not self.working_dir:
        raise FrklException(
            msg=f"Can't create temporary directory for mogrifier {self.name}",
            reason="Working dir not set for mogrifier",
        )

    return tempfile.mkdtemp(prefix=f"{effective_prefix}_", dir=self.working_dir)
async def get_default_index(self) -> str:
    """Return the configured default index id.

    Falls back to the first configured index when no 'default_index' value is
    set.

    Raises:
        FrklException: if neither a default nor any index is configured.
    """
    index_name = await self.config.get_default_index()
    if index_name:
        return index_name

    indexes = await self._index_factory.get_indexes_in_config()
    if not indexes:
        raise FrklException(
            "Can't calculate default index.",
            reason="No 'default_index' value in config, and no indexes configured/registered (yet).",
        )
    return next(iter(indexes))
async def get_index_configs(self) -> Mapping[str, Mapping[str, Any]]:
    """Assemble (and cache) the index configs from the bring config.

    Each config is registered under its explicit id, and additionally under
    its lower-level 'auto_id' alias so either can be used for lookup.

    Raises:
        TypeError: on invalid config item types
        FrklException: on duplicate index ids
    """
    if self._config_indexes is not None:
        return self._config_indexes

    self._indexes_in_config = []
    if self.bring_config is None:
        self._config_indexes = {}
        return self._config_indexes

    indexes: Iterable[Union[str, Mapping[str, Any]]] = await self.bring_config.get_config_value_async("indexes")

    self._config_indexes = {}
    auto_ids = {}
    for item in indexes:
        if isinstance(item, str):
            index_data = await resolve_index_string(item)
        # BUGFIX: 'collections.Mapping' was removed in Python 3.10; use the
        # 'collections.abc' equivalent
        elif isinstance(item, collections.abc.Mapping):
            index_id = item["id"]
            index_data = await resolve_index_string(index_id)
            dict_merge(index_data, item, copy_dct=False)
        else:
            raise TypeError(f"Invalid type for index config: {type(item)}")

        index_id = index_data["id"]
        self._indexes_in_config.append(index_id)
        auto_id = index_data["auto_id"]
        auto_ids[auto_id] = index_id

        if index_id in self._config_indexes.keys():
            raise FrklException(
                msg=f"Can't add index config with id '{index_id}'",
                reason="Duplicate index id.",
            )
        self._config_indexes[index_id] = index_data

    # make sure we also use the config if the lower-level id is used
    for auto_id, mapped_id in auto_ids.items():
        if auto_id in self._config_indexes.keys():
            continue
        self._config_indexes[auto_id] = self._config_indexes[mapped_id]

    return self._config_indexes
def assemble_mogrifiers(
    mogrifier_list: Iterable[Union[Mapping, str]],
    vars: Mapping[str, Any],
    args: Mapping[str, Any],
    task_desc: Optional[Mapping[str, Any]] = None,
) -> Iterable[Union[Mapping, Iterable[Mapping]]]:
    """Normalize a mogrifier spec list into config dicts.

    Template strings are replaced with the subset of 'vars' declared in
    'args'; plain strings become ``{"type": ..., "_task_desc": ...}`` dicts,
    nested lists are assembled recursively.

    Raises:
        FrklException: on invalid item types
    """
    # TODO: validate vars
    if not vars and not args:
        _data: Iterable[Union[Mapping, str]] = mogrifier_list
    else:
        # only template-replace vars that are actually declared in 'args'
        relevant_vars = {k: v for k, v in vars.items() if k in args.keys()}
        _data = replace_strings_in_obj(
            source_obj=mogrifier_list, replacement_dict=relevant_vars
        )

    mog_data = []
    for _mog in _data:
        if isinstance(_mog, str):
            mog_data.append({"type": _mog, "_task_desc": task_desc})
        # BUGFIX: 'collections.Mapping'/'collections.Iterable' were removed in
        # Python 3.10; use the 'collections.abc' equivalents
        elif isinstance(_mog, collections.abc.Mapping):
            mog = dict(_mog)
            if "_task_desc" not in mog.keys():
                mog["_task_desc"] = task_desc
            mog_data.append(mog)
        elif isinstance(_mog, collections.abc.Iterable):
            mog_data.append(
                assemble_mogrifiers(
                    mogrifier_list=_mog, vars=vars, args=args, task_desc=task_desc
                )
            )
        else:
            raise FrklException(
                msg="Can't create transmogrifier.",
                reason=f"Invalid configuration type '{type(_mog)}': {_mog}",
            )

    return mog_data
async def calculate_config(
    self, _config_list: Iterable[Union[str, Mapping[str, Any]]]
) -> MutableMapping[str, Any]:
    """Merge configured profiles/overlays into a single config dict.

    Each item may be a profile name, a 'key=value' string, or a mapping; the
    core defaults ('__init_dict__' and 'default') are always applied first.

    Raises:
        FrklException: on unparsable config items
    """
    # TODO: this could be made more efficient by only loading the config dicts that are required
    config_dicts: Mapping[str, Mapping[str, Any]] = await self._config_contexts.get_config_dicts()

    result: List[Mapping[str, Any]] = []

    config_list: List[Union[str, Mapping[str, Any]]] = ["__init_dict__", "default"]
    config_list.extend(_config_list)

    for c in config_list:
        temp_dict: Optional[Mapping[str, Any]] = None
        if isinstance(c, str):
            if c == "__init_dict__":
                temp_dict = BRING_CORE_CONFIG
            elif "=" in c:
                k, v = c.split("=", maxsplit=1)
                temp_dict = {k: v}
            elif c in config_dicts.keys():
                temp_dict = config_dicts[c]
        # BUGFIX: 'collections.Mapping' was removed in Python 3.10; use the
        # 'collections.abc' equivalent
        elif isinstance(c, collections.abc.Mapping):
            temp_dict = c

        if temp_dict is None:
            raise FrklException(
                msg=f"Can't parse config item: {c}.",
                reason="Invalid type or config option, must be either name of a config profile, a key/value pair (separated with '=', or a dict-like object.",
            )
        result.append(temp_dict)

    return get_seeded_dict(*result, merge_strategy="merge")
async def get_config_dict(self) -> Mapping[str, Any]:
    """Calculate (and cache) the merged config dict.

    Also (re-)registers task watchers according to the config's 'task_log'
    value. Guarded by a lock so concurrent callers share one calculation.

    Raises:
        FrklException: if the 'defaults' config value is not a mapping
    """
    async with await self._get_config_dict_lock():

        if self._config_dict is not None:
            return self._config_dict

        profile_dict = await self.calculate_config(self.config_input)

        profile_dict.setdefault("default_index", None)
        if "defaults" not in profile_dict.keys():
            profile_dict["defaults"] = {}
        # BUGFIX: 'collections.Mapping' was removed in Python 3.10; use the
        # 'collections.abc' equivalent
        elif not isinstance(profile_dict["defaults"], collections.abc.Mapping):
            raise FrklException(
                f"Invalid config, 'defaults' value needs to be a mapping: {profile_dict['defaults']}"
            )

        self._config_dict = profile_dict

        # re-create task watchers according to the (possibly new) config
        for watcher_id in self._task_watcher_ids:
            self._task_watch_manager.remove_watcher(watcher_id)
        self._task_watcher_ids.clear()

        task_log_config: Union[str, Mapping, Iterable] = self._config_dict.get(
            "task_log", []
        )
        if isinstance(task_log_config, (str, collections.abc.Mapping)):
            task_log_config = [task_log_config]
        for tlc in task_log_config:
            if isinstance(tlc, str):
                tlc = {"type": tlc, "base_topics": [BRING_TASKS_BASE_TOPIC]}
            # renamed from 'id' to avoid shadowing the builtin
            watcher_id = self._task_watch_manager.add_watcher(tlc)
            self._task_watcher_ids.append(watcher_id)

        return self._config_dict
def calculate_defaults(typistry: Typistry, data: Mapping[str, Any]):
    """Compute a defaults dict, expanding '_<plugin>' keys via DefaultsProducer plugins.

    Keys starting with '_' refer to producer plugins (the value configures the
    plugin; False/"false" disables it); all other keys are plain values merged
    on top of the produced defaults.

    Raises:
        FrklException: if a producer value is neither bool nor mapping
    """
    pm = typistry.get_plugin_manager(DefaultsProducer)

    producers: List[Tuple] = []
    values: Dict[str, Any] = {}

    for key, val in data.items():
        if key.startswith("_"):
            plugin_name = key[1:]
            # NOTE(review): '_'-prefixed keys that don't match a plugin are
            # silently ignored — confirm this is intended
            if plugin_name in pm.plugin_names:
                producers.append((plugin_name, val))
        else:
            values[key] = val

    result = []
    for plugin_name, val in producers:
        plugin: DefaultsProducer = pm.get_plugin(plugin_name)

        # False (bool or the string "false") disables the producer
        if val is False or (isinstance(val, str) and val.lower() == "false"):
            continue

        if not isinstance(val, (bool, Mapping)):
            raise FrklException(
                msg=f"Can't calculate '{plugin_name}' defaults for: {val}",
                reason="Value must be a boolean, or a dictionary.",
            )

        if isinstance(val, Mapping):
            result.append(plugin.get_values(**val))
        else:
            result.append(plugin.get_values())

    result.append(values)

    return get_seeded_dict(*result, merge_strategy="merge")
async def retrieve(self, *value_names: str, **requirements) -> Mapping[str, Any]:
    """Resolve the requested value names for this pkg ting.

    'index_name' and 'args' are computed; every other name is passed through
    from the requirements.

    Raises:
        FrklException: if the pkg's index is not set yet
    """
    if not self._index:
        raise FrklException(
            msg=f"Can't retrieve values for PkgTing '{self.full_name}'.",
            reason="Index not set yet.",
        )

    result: Dict[str, Any] = {}
    for vn in value_names:
        if vn == "index_name":
            result[vn] = self.bring_index.name
        elif vn == "args":
            result[vn] = await self._calculate_args(requirements["metadata"])
        else:
            result[vn] = requirements[vn]

    return result
async def get_pkg(self, name: str, raise_exception: bool = True) -> Optional[PkgTing]:
    """Return the pkg with 'name' from this index.

    A stored exception (an index may cache pkg-load failures) is re-raised or
    swallowed to None, depending on 'raise_exception'.
    """
    pkgs = await self.get_pkgs()
    pkg = pkgs.get(name, None)

    if pkg is None:
        if raise_exception:
            pkg_names = await self.pkg_names
            raise FrklException(
                msg=f"Can't retrieve package '{name}' from index '{self.name}'.",
                reason="No package with that name available.",
                solution=f"Make sure the package name is correct, available packages: {', '.join(pkg_names)}.",
            )
        return None

    if is_instance_or_subclass(pkg, Exception):
        if raise_exception:
            raise pkg  # type: ignore
        return None

    return pkg
async def create_ting(self, index: BringIndexTing, pkg_name: str) -> PkgTing:
    """Create (or re-use) the pkg ting for 'pkg_name' in 'index'.

    Raises:
        FrklException: if no pkg data with that name is available
    """
    pkgs = await self.get_pkg_data()
    pkg_data = pkgs.get(pkg_name, None)
    if pkg_data is None:
        raise FrklException(
            msg=f"Can't create ting '{pkg_name}'.",
            reason="No package with that name available.",
        )

    full_ting_name = f"{index.full_name}.pkgs.{pkg_name}"
    ting: PkgTing = index.tingistry.get_ting(full_ting_name)  # type: ignore
    if ting is None:
        # not created yet: register a new static pkg ting for this index
        ting = index.tingistry.create_ting(  # type: ignore
            "bring.types.static_pkg",
            full_ting_name,  # type: ignore
        )
        ting.bring_index = index

    ting.set_input(**pkg_data)
    # ting._set_result(data)
    return ting
async def update(self, index_names: Optional[Iterable[str]] = None):
    """Update metadata for the given (or all registered) indexes in parallel.

    Raises:
        FrklException: if one of the names is not a registered index
    """
    names = self.index_ids if index_names is None else index_names

    td = BringTaskDesc(
        name="update metadata", msg="updating metadata for all indexes"
    )
    tasks = ParallelTasksAsync(task_desc=td)
    # tasks = SerialTasksAsync(task_desc=td)

    for index_name in names:
        index = await self.get_index(index_name)
        if index is None:
            raise FrklException(
                msg=f"Can't update index '{index_name}'.",
                reason="No index with that name registered.",
            )
        update_task = await index._create_update_tasks()
        if update_task:
            tasks.add_task(update_task)

    await self.run_async_tasks(tasks, subtopic="update_indexes")
async def explode_index_string(index_string: str) -> MutableMapping[str, Any]:
    """Parse an index string into an index config dict.

    Supported forms: 'gitlab.<user>.<repo>[.<version>]',
    'github.<user>.<repo>[.<version>]', a '*.br.idx' index file (url or local
    path), a local folder path, or a git repo url.

    Raises:
        FrklException: if the string can't be interpreted
        NotImplementedError: for unsupported forms (bitbucket, extra path
            tokens)
    """
    result: Dict[str, Any] = {}

    if index_string.startswith("gitlab") or index_string.startswith("github"):
        tokens = index_string.split(".")
        username = tokens[1]
        repo = tokens[2]
        version = "master"
        # BUGFIX: version is the 4th token (index 3); the original read
        # 'tokens[4]', which raised IndexError for '<host>.<user>.<repo>.<ver>'
        if len(tokens) > 3:
            version = tokens[3]
        if len(tokens) > 4:
            raise NotImplementedError()
            # path = tokens[4:]

        result["type"] = "git_repo"
        if index_string.startswith("gitlab"):
            result["index_file"] = f"https://gitlab.com/{username}/{repo}/-/raw/{version}/.bring/{DEFAULT_FOLDER_INDEX_NAME}"
            result["git_url"] = f"https://gitlab.com/{username}/{repo}.git"
        else:
            result["index_file"] = f"https://raw.githubusercontent.com/{username}/{repo}/{version}/.bring/{DEFAULT_FOLDER_INDEX_NAME}"
            result["git_url"] = f"https://github.com/{username}/{repo}.git"
        result["version"] = version
        result["id"] = index_string
        result["auto_id"] = index_string
    elif index_string.startswith("bitbucket"):
        raise NotImplementedError()
    elif index_string.endswith(".br.idx"):
        result["type"] = "index_file"
        if is_url_or_abbrev(index_string):
            result["index_file"] = index_string
            result["id"] = result["index_file"]
        elif os.path.isfile(index_string):
            result["index_file"] = os.path.abspath(index_string)
            result["id"] = f"file://{result['index_file']}"
        else:
            raise FrklException(
                msg=f"Can't determine type of index file: {index_string}"
            )
        result["auto_id"] = index_string
    elif os.path.isdir(os.path.realpath(index_string)):
        result["type"] = "folder"
        result["path"] = os.path.abspath(index_string)
        result["index_file"] = os.path.join(
            result["path"], ".bring", DEFAULT_FOLDER_INDEX_NAME
        )
        result["id"] = f"file://{result['path']}"
        result["auto_id"] = index_string
    elif is_git_repo_url(index_string):
        result["type"] = "git_repo"
        result["git_url"] = index_string
        result["id"] = result["git_url"]
        # TODO: calculate and insert index_file key for known hosts
        result["auto_id"] = index_string
    else:
        raise FrklException(msg=f"Can't parse index string: {index_string}")

    return result