def get_workspace(argv, shell_path, config_filename=None, varname=None):
    """Resolve which workspace directory a command should operate on.

    Resolution order:

    1. An explicit ``-t``/``--target-workspace`` option in ``argv`` wins.
    2. Otherwise two candidates are computed: the directory named by the
       environment variable ``varname`` (if set and an existing dir), and
       the first ancestor of ``shell_path`` (including itself) containing a
       file named ``config_filename``.  If both exist and differ, the
       situation is ambiguous and an error is raised.

    :param argv: command line arguments, scanned only for the -t option
    :param shell_path: where to look for relevant config_filename
    :param config_filename: optional, filename for files defining workspaces
    :param varname: optional, env var to be used as workspace folder
    :returns: path of the resolved workspace (absolute for the -t case)
    :raises MultiProjectException: if no workspace or an ambiguous pair found
    """
    parser = OptionParser()
    parser.add_option("-t", "--target-workspace",
                      dest="workspace", default=None,
                      help="which workspace to use",
                      action="store")
    # This parser is agnostic about all other options, so strip anything
    # that is not the -t option before parsing to avoid errors.
    relevant_args = [arg for arg in argv
                     if (not arg.startswith('-')
                         or arg.startswith('--target-workspace=')
                         or arg.startswith('-t')
                         or arg == '--target-workspace')]
    (options, _) = parser.parse_args(relevant_args)
    if options.workspace is not None:
        candidate = options.workspace
        if (config_filename is not None and
                not os.path.isfile(os.path.join(candidate, config_filename))):
            raise MultiProjectException(
                "%s has no workspace configuration file '%s'" %
                (os.path.abspath(candidate), config_filename))
        return os.path.abspath(candidate)

    # Candidate from the environment variable; the value may be a relative
    # path (possibly confusing, but that is the user's choice).
    env_workspace = None
    if varname is not None and varname in os.environ:
        env_workspace = os.environ[varname]
        if env_workspace.strip() == '' or not os.path.isdir(env_workspace):
            env_workspace = None

    # Candidate found by walking from shell_path up to the filesystem root.
    found_workspace = None
    if config_filename is not None:
        probe = shell_path
        while probe is not None and probe != os.path.dirname(probe):
            if os.path.exists(os.path.join(probe, config_filename)):
                found_workspace = probe
                break
            probe = os.path.dirname(probe)

    if (found_workspace is not None and
            env_workspace is not None and
            not samefile(found_workspace, env_workspace)):
        raise MultiProjectException(
            "Ambiguous workspace: %s=%s, %s" %
            (varname, env_workspace, os.path.abspath(config_filename)))
    if found_workspace is None and env_workspace is None:
        raise MultiProjectException("Command requires a target workspace.")
    if found_workspace is not None:
        return found_workspace
    return env_workspace
def install(self, checkout=True, backup=True, backup_path=None,
            inplace=False, timeout=None, verbose=False,
            shallow=False):
    """
    Runs the equivalent of SCM checkout for new local repos or update
    for existing.

    :param checkout: whether to use an update command or a
      checkout/clone command
    :param backup: if checkout is True and folder exists, if backup is
      false folder will be DELETED.
    :param backup_path: if checkout is true and backup is true, move
      folder to this location
    :param inplace: for symlinks, allows to delete contents at target
      location and checkout to there.
    :param timeout: passed through to the vcs client operation
    :param verbose: passed through to the vcs client operation
    :param shallow: passed through to the vcs checkout (shallow clone)
    :raises MultiProjectException: when the checkout or update fails
    """
    if checkout is True:
        print("[%s] Fetching %s (version %s) to %s" % (self.get_local_name(),
                                                       self.uri,
                                                       self.version,
                                                       self.get_path()))
        # Clear any pre-existing tree at the target path first; how we do
        # that depends on whether it is a symlink and on the backup flag.
        if self.path_exists():
            if os.path.islink(self.path):
                if inplace is False:
                    # remove same as unlink
                    os.remove(self.path)
                else:
                    # inplace: keep the symlink, wipe what it points at
                    shutil.rmtree(os.path.realpath(self.path))
            else:
                if backup is False:
                    shutil.rmtree(self.path)
                else:
                    self.backup(backup_path)
        if not self._get_vcsc().checkout(self.uri,
                                         self.version,
                                         timeout=timeout,
                                         verbose=verbose,
                                         shallow=shallow):
            raise MultiProjectException(
                "[%s] Checkout of %s version %s into %s failed." %
                (self.get_local_name(),
                 self.uri,
                 self.version,
                 self.get_path()))
    else:
        print("[%s] Updating %s" % (self.get_local_name(), self.get_path()))
        if not self._get_vcsc().update(
                self.version, verbose=verbose, timeout=timeout):
            raise MultiProjectException(
                "[%s] Update Failed of %s" %
                (self.get_local_name(), self.get_path()))
    print("[%s] Done." % self.get_local_name())
def aggregate_from_uris(config_uris,
                        config_filename=None,
                        allow_other_element=True):
    """Build a list of PathSpec from a list of location strings (uri, paths).

    If a location is a folder, get_path_specs_from_uri attempts to find
    config_filename in it and use "folder/config_filename" instead
    (rewriting element path and stripping scm nature), else the folder
    itself is added as PathSpec.  Anything else is parsed as yaml at the
    location, adding a PathSpec per element.

    :param config_uris: source of yaml
    :param config_filename: file to use when given a folder
    :param allow_other_element: if False, raise on elements without SCM
      information
    :returns: list of PathSpec (may contain duplicates)
    :raises MultiProjectException: on a forbidden non-SCM element
    """
    if config_uris is None:
        return []
    merged_specs = []
    for uri in config_uris:
        specs = get_path_specs_from_uri(uri, config_filename)
        # duplicates are allowed here, they get dealt with in the Config class
        if not allow_other_element:
            offenders = [s for s in specs if not s.get_scmtype()]
            if offenders:
                first = offenders[0]
                raise MultiProjectException(
                    "Forbidden non-SCM element: %s (%s)" %
                    (first.get_local_name(), first.get_legacy_type()))
        merged_specs.extend(specs)
    return merged_specs
def _get_vcsc(self):
    """Return the vcs client for this element, creating it on first use.

    :returns: a vcstools client object
    :raises MultiProjectException: when the client cannot be created
    """
    # lazy initializer: a single client instance is cached per element
    if self.vcsc is not None:
        return self.vcsc
    try:
        self.vcsc = get_vcs_client(self._scmtype, self.get_path())
    except VcsError as exc:
        raise MultiProjectException(
            "Unable to create vcs client of type %s for %s: %s" %
            (self._scmtype, self.get_path(), exc))
    return self.vcsc
def install(self, checkout=True, backup=False, backup_path=None,
            robust=False, verbose=False, inplace=False, timeout=None):
    """Unittest mock install: succeed or fail according to install_success.

    :raises MultiProjectException: when install_success is falsy
    """
    if self.install_success:
        return
    raise MultiProjectException("Unittest Mock says install failed")
def backup(self, backup_path):
    """Move this element's tree into a timestamped folder under backup_path.

    :param backup_path: destination folder; must be truthy
    :raises MultiProjectException: when backup_path is not given
    """
    if not backup_path:
        raise MultiProjectException(
            "[%s] Cannot install %s. backup disabled." %
            (self.get_local_name(), self.get_path()))
    # timestamp makes repeated backups of the same element unique
    stamp = datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
    destination = os.path.join(
        backup_path,
        "%s_%s" % (os.path.basename(self.path), stamp))
    print("[%s] Backing up %s to %s" % (self.get_local_name(),
                                        self.get_path(),
                                        destination))
    shutil.move(self.path, destination)
def cmd_regenerate(self, target_path, argv, config=None):
    """Regenerate the workspace's setup.sh, setup.bash and setup.zsh files.

    :param target_path: workspace folder
    :param argv: command line options after the subcommand
    :param config: optional pre-built Config; must match target_path
    :returns: 0 on success, -1 on usage error
    :raises MultiProjectException: if config and target_path disagree
    """
    parser = OptionParser(
        usage="usage: %s regenerate" % self.progname,
        formatter=IndentedHelpFormatterWithNL(),
        # NOTE(review): the help text is looked up under the "remove" key;
        # presumably this should be __MULTIPRO_CMD_DICT__["regenerate"] --
        # confirm against the command dictionary before changing.
        description=__MULTIPRO_CMD_DICT__["remove"] + """

this command without options generates files setup.sh, setup.bash and
setup.zsh. Note that doing this is unnecessary in general, as these
files do not change anymore, unless you change from one ROS distro to
another (which you should never do like this, create a separate new
workspace instead), or you deleted or modified any of those files
accidentally.
""",
        epilog="See: http://wiki.ros.org/rosinstall for details\n")
    parser.add_option("-c", "--catkin", dest="catkin", default=False,
                      help="Declare this is a catkin build.",
                      action="store_true")
    parser.add_option("--cmake-prefix-path", dest="catkinpp", default=None,
                      help="Where to set the CMAKE_PREFIX_PATH",
                      action="store")
    # -t option required here for help but used one layer above, see cli_common
    parser.add_option("-t", "--target-workspace", dest="workspace",
                      default=None,
                      help="which workspace to use",
                      action="store")
    (options, args) = parser.parse_args(argv)
    if len(args) > 0:
        print("Error: Too many arguments.")
        print(parser.usage)
        return -1
    if config is None:
        config = get_config(target_path,
                            additional_uris=[],
                            config_filename=self.config_filename)
    elif config.get_base_path() != target_path:
        raise MultiProjectException("Config path does not match %s %s " %
                                    (config.get_base_path(), target_path))
    rosinstall_cmd.cmd_generate_ros_files(config,
                                          target_path,
                                          nobuild=True,
                                          rosdep_yes=False,
                                          catkin=options.catkin,
                                          catkinpp=options.catkinpp,
                                          no_ros_allowed=True)
    return 0
def get_config(basepath, additional_uris=None, config_filename=None,
               merge_strategy='KillAppend'):
    """
    Create a Config element necessary for all other commands.

    The command will look at the uris in sequence, each can be a web
    resource, a filename or a folder. In case it is a folder, when a
    config_filename is provided, the folder will be searched for a file of
    that name, and that one will be used. Else the folder will be considered
    a target location for the config. All files will be parsed for config
    elements, thus conceptually the input to Config is an expanded list of
    config elements. Config takes this list and consolidates duplicate
    paths by keeping the last one in the list.

    :param basepath: where relative paths shall be resolved against
    :param additional_uris: the location of config specifications or folders
    :param config_filename: name of files which may be looked at for config
      information
    :param merge_strategy: One of 'KillAppend, 'MergeKeep', 'MergeReplace'
    :returns: a Config object
    :raises MultiProjectException: on plenty of errors
    """
    if basepath is None:
        raise MultiProjectException("Need to provide a basepath for Config.")

    # Seed from an existing config file in the basepath, if one is present.
    base_path_specs = []
    if config_filename is not None:
        candidate = os.path.join(basepath, config_filename)
        if os.path.isfile(candidate):
            base_path_specs = get_path_specs_from_uri(candidate, as_is=True)

    config = Config(base_path_specs,
                    basepath,
                    config_filename=config_filename,
                    merge_strategy=merge_strategy)

    add_uris(config=config,
             additional_uris=additional_uris,
             config_filename=config.get_config_filename(),
             merge_strategy=merge_strategy)
    return config
def __init__(self, path_specs, install_path, config_filename=None,
             extended_types=None, merge_strategy='KillAppend'):
    """
    Build a Config from a list of path specs.

    :param path_specs: A list (e.g. from yaml) describing the config,
      one entry per element.
    :param install_path: base folder of the workspace, must not be None
    :param config_filename: When given a folder, Config will look in
      folder for file of that name for more config source, str.
    :param extended_types: optional dict mapping scm type names to config
      element classes, merged over the default registry
    :param merge_strategy: how to deal with entries with equivalent path.
      See insert_element.
    :raises MultiProjectException: if path_specs is None
    """
    assert install_path is not None, "Install path is None"
    if path_specs is None:
        raise MultiProjectException("Passed empty source to create config")
    # All API operations must grant that elements in trees have unique
    # local_name and paths.
    # Also managed (VCS) entries must be disjunct (meaning one cannot be
    # in a child folder of another managed one).
    # The idea is that managed entries can safely be concurrently modified.
    self.trees = []
    self.base_path = os.path.abspath(install_path)
    self.config_filename = None
    if config_filename is not None:
        self.config_filename = os.path.basename(config_filename)
    # using a registry primarily for unit test design
    self.registry = {'svn': AVCSConfigElement,
                     'git': AVCSConfigElement,
                     'hg': AVCSConfigElement,
                     'bzr': AVCSConfigElement,
                     'tar': AVCSConfigElement}
    if extended_types is not None:
        # extended types override / extend the default registry
        self.registry = dict(
            list(self.registry.items()) + list(extended_types.items()))
    for path_spec in path_specs:
        action = self.add_path_spec(path_spec, merge_strategy)
        # Usual action in init should be 'Append', anything else is unusual
        if action == 'KillAppend':
            print("Replace existing entry %s by appending." %
                  path_spec.get_local_name())
        elif action == 'MergeReplace':
            print("Replace existing entry %s" % path_spec.get_local_name())
        elif action == 'MergeKeep':
            print("Keep existing entry %s, discard later one" %
                  path_spec.get_local_name())
def __init__(self, path, local_name, uri, version='', properties=None):
    """
    Creates a config element for a VCS repository.

    :param path: absolute or relative path, str
    :param local_name: display name for the element, str
    :param uri: VCS uri to checkout/pull from, str; must not be None
    :param version: optional revision spec (tagname, SHAID, ..., str)
    :param properties: optional list of additional properties
    :raises MultiProjectException: when uri is None
    """
    super(VCSConfigElement, self).__init__(path, local_name, properties)
    if uri is None:
        raise MultiProjectException(
            "Invalid scm entry having no uri attribute for path %s" % path)
    # strip trailing slashes if defined to not be too strict #3061
    self.uri = uri.rstrip('/')
    self.version = version
def _create_vcs_config_element(self, scmtype, path, local_name, uri,
                               version='', properties=None):
    """Instantiate the registered config element class for scmtype.

    :returns: a new config element built by the registered class
    :raises MultiProjectException: when no class is registered for scmtype
    """
    if scmtype not in self.registry:
        raise MultiProjectException(
            "No VCS client registered for vcs type %s" % scmtype)
    factory = self.registry[scmtype]
    return factory(scmtype=scmtype,
                   path=path,
                   local_name=local_name,
                   uri=uri,
                   version=version,
                   properties=properties)
def _insert_vcs_path_spec(self, path_spec, local_path,
                          merge_strategy='KillAppend'):
    """Create a VCS config element from path_spec and insert it into trees.

    :returns: the action performed by insert_element
    :raises MultiProjectException: when creation or insertion fails
    """
    # resolve the configured uri against the workspace base path
    source_uri = normalize_uri(path_spec.get_uri(), self.get_base_path())
    version = path_spec.get_version()
    try:
        # insert_element stays inside the try: it raises LookupError for
        # unknown merge strategies, which we surface uniformly
        element = self._create_vcs_config_element(
            path_spec.get_scmtype(),
            local_path,
            os.path.normpath(path_spec.get_local_name()),
            source_uri,
            version,
            properties=path_spec.get_tags())
        return self.insert_element(element, merge_strategy)
    except LookupError as ex:
        raise MultiProjectException(
            "Abstracted VCS Config failed. Exception: %s" % ex)
def get_yaml_from_uri(uri):
    """Read and parse yaml from a local file or remote uri.

    :param uri: path of a local file, or a URL fetched via urlopen_netrc
    :returns: parsed yaml data (expected to be a list or dict)
    :raises MultiProjectException: when the location cannot be opened or
      the contents are not valid multiproject yaml
    """
    stream = None
    try:
        try:
            if os.path.isfile(uri):
                try:
                    stream = open(uri, 'r')
                except IOError as ioe:
                    raise MultiProjectException("Unable open file [%s]: %s" %
                                                (uri, ioe))
            else:
                try:
                    stream = urlopen_netrc(uri)
                except IOError as ioe2:
                    raise MultiProjectException(
                        "Unable to download URL [%s]: %s" % (uri, ioe2))
        except ValueError as vae:
            # raised e.g. for strings that are neither a path nor a URL
            raise MultiProjectException(
                "Is not a local file, nor a valid URL [%s] : %s" % (uri, vae))
        if not stream:
            raise MultiProjectException("couldn't load config uri %s" % uri)
        try:
            # NOTE: yaml.load without an explicit Loader can construct
            # arbitrary python objects from untrusted input; consider
            # yaml.safe_load if the sources are not trusted.
            yamldata = yaml.load(stream)
        except yaml.YAMLError as yame:
            raise MultiProjectException(
                "Invalid multiproject yaml format in [%s]: %s" % (uri, yame))
        # we want a list or a dict, but pyyaml parses xml as string
        # (fixed: was `type(yamldata) == 'str'`, which compared a type
        # object to a string literal and therefore could never trigger)
        if isinstance(yamldata, str):
            raise MultiProjectException(
                "Invalid multiproject yaml format in [%s]: %s" %
                (uri, yamldata))
    finally:
        if stream is not None:
            stream.close()
    return yamldata
def __init__(self, path, local_name, properties=None):
    """Base initializer storing path, display name and extra properties.

    :param path: element path; must not be None
    :param local_name: display name for the element
    :param properties: optional list of additional properties
    :raises MultiProjectException: when path is None
    """
    self.path = path
    if self.path is None:
        raise MultiProjectException("Invalid empty path")
    self.local_name = local_name
    self.properties = properties
def cmd_info(self, target_path, argv, reverse=True, config=None):
    """Print status and configuration info about workspace elements.

    :param target_path: workspace folder
    :param argv: command line options after the subcommand
    :param reverse: whether the info table rows are printed reversed
    :param config: optional pre-built Config; must match target_path
    :returns: 0 on success
    :raises MultiProjectException: if config and target_path disagree
    """
    # similar to multiproject_cli except shows ros-pkg-path
    # options
    parser = OptionParser(
        usage="usage: %s info [localname]* [OPTIONS]" % self.progname,
        formatter=IndentedHelpFormatterWithNL(),
        description=__MULTIPRO_CMD_DICT__["info"] + """

The Status (S) column shows
 x  for missing
 L  for uncommited (local) changes
 V  for difference in version and/or remote URI
 C  for difference in local and remote versions

The 'Version-Spec' column shows what tag, branch or revision was given
in the .rosinstall file. The 'UID' column shows the unique ID of the
current (and specified) version. The 'URI' column shows the configured
URL of the repo.

If status is V, the difference between what was specified and what is
real is shown in the respective column. For SVN entries, the url is
split up according to standard layout (trunk/tags/branches).

The ROS_PACKAGE_PATH follows the order of the table, earlier entries
overlay later entries.

When given one localname, just show the data of one element in list
form. This also has the generic properties element which is usually
empty.

The --only option accepts keywords: %(opts)s

Examples:
$ %(prog)s info -t ~/ros/fuerte
$ %(prog)s info robot_model
$ %(prog)s info --yaml
$ %(prog)s info --only=path,cur_uri,cur_revision robot_model geometry
""" % {'prog': self.progname, 'opts': ONLY_OPTION_VALID_ATTRS},
        epilog="See: http://wiki.ros.org/rosinstall for details\n")
    parser.add_option("--root", dest="show_ws_root", default=False,
                      help="Show workspace root path",
                      action="store_true")
    parser.add_option("--data-only", dest="data_only", default=False,
                      help="Does not provide explanations",
                      action="store_true")
    parser.add_option("--no-pkg-path", dest="no_pkg_path", default=False,
                      help="Suppress ROS_PACKAGE_PATH.",
                      action="store_true")
    parser.add_option(
        "--pkg-path-only", dest="pkg_path_only", default=False,
        help="Shows only ROS_PACKAGE_PATH separated by ':'. Supercedes all other options.",
        action="store_true")
    parser.add_option(
        "--only", dest="only", default=False,
        help="Shows comma-separated lists of only given comma-separated attribute(s).",
        action="store")
    parser.add_option(
        "--yaml", dest="yaml", default=False,
        help="Shows only version of single entry. Intended for scripting.",
        action="store_true")
    parser.add_option(
        "--fetch", dest="fetch", default=False,
        help="When used, retrieves version information from remote (takes longer).",
        action="store_true")
    parser.add_option("-u", "--untracked", dest="untracked",
                      default=False,
                      help="Also show untracked files as modifications",
                      action="store_true")
    # -t option required here for help but used one layer above, see cli_common
    parser.add_option("-t", "--target-workspace", dest="workspace",
                      default=None,
                      help="which workspace to use",
                      action="store")
    parser.add_option("-m", "--managed-only", dest="unmanaged",
                      default=True,
                      help="only show managed elements",
                      action="store_false")
    (options, args) = parser.parse_args(argv)
    if config is None:
        config = get_config(target_path,
                            additional_uris=[],
                            config_filename=self.config_filename)
    elif config.get_base_path() != target_path:
        raise MultiProjectException("Config path does not match %s %s " %
                                    (config.get_base_path(), target_path))
    if options.show_ws_root:
        print(config.get_base_path())
        return 0
    if args == []:
        args = None
    if options.pkg_path_only:
        print(":".join(get_ros_package_path(config)))
        return 0
    if options.no_pkg_path:
        header = 'workspace: %s\nROS_ROOT: %s' % (
            target_path, get_ros_stack_path(config))
        print(header)
        return 0
    elif options.only:
        only_options = options.only.split(",")
        # fixed: was `only_options == ''` -- split() returns a list, which
        # never equals a string, so the guard could not fire; compare
        # against the list an empty --only value actually produces
        if only_options == ['']:
            parser.error('No valid options given')
        lines = get_info_table_raw_csv(config,
                                       parser,
                                       properties=only_options,
                                       localnames=args)
        print('\n'.join(lines))
        return 0
    elif options.yaml:
        source_aggregate = cmd_snapshot(config, localnames=args)
        print(yaml.safe_dump(source_aggregate), end='')
        return 0

    # this call takes long, as it invokes scms.
    outputs = cmd_info(config,
                       localnames=args,
                       untracked=options.untracked,
                       fetch=options.fetch)
    if args and len(args) == 1:
        # if only one element selected, print just one line
        print(get_info_list(config.get_base_path(),
                            outputs[0],
                            options.data_only))
        return 0
    header = 'workspace: %s\nROS_ROOT: %s' % (target_path,
                                              get_ros_stack_path(config))
    print(header)
    table = get_info_table(config.get_base_path(),
                           outputs,
                           options.data_only,
                           reverse=reverse)
    if table is not None and table != '':
        print("\n%s" % table)
    if options.unmanaged:
        outputs2 = cmd_find_unmanaged_repos(config)
        table2 = get_info_table(config.get_base_path(),
                                outputs2,
                                options.data_only,
                                reverse=reverse,
                                unmanaged=True)
        if table2 is not None and table2 != '':
            print("\nAlso detected these repositories in the workspace, add using '%s set':\n\n%s" %
                  (self.progname, table2))
    return 0
def prepare_install(self, backup_path=None, arg_mode='abort', robust=False):
    """Inspect the filesystem and decide what install() will have to do.

    Builds a PreparationReport stating whether a fresh checkout is needed,
    whether the existing tree should be backed up first, and whether the
    operation is to be skipped or aborted.

    :param backup_path: where to move an existing tree; when None and mode
      'backup' is chosen, the user is asked for a path
    :param arg_mode: one of 'abort', 'backup', 'delete', 'skip', 'prompt',
      'inplace'; 'prompt' asks the user interactively
    :param robust: when True, raise immediately instead of prompting so a
      higher-level loop can catch and continue with other elements
    :returns: PreparationReport
    :raises MultiProjectException: when robust and the tree cannot be updated
    """
    preparation_report = PreparationReport(self)
    present = self.detect_presence()
    if present or self.path_exists():
        is_link = os.path.islink(self.path)
        # Directory exists see what we need to do
        error_message = None
        if not present:
            # path exists but the expected vcs tree was not detected there
            error_message = "Failed to detect %s presence at %s." % (
                self.get_vcs_type_name(), self.path)
            if is_link:
                error_message += " Path is symlink, only symlink will be removed."
        else:
            cur_url = self._get_vcsc().get_url()
            if cur_url is not None:
                # strip trailing slashes for #3269
                cur_url = cur_url.rstrip('/')
            if not cur_url or cur_url != self.uri.rstrip('/'):
                # local repositories get absolute pathnames
                if not (os.path.isdir(self.uri) and
                        os.path.isdir(cur_url) and
                        samefile(cur_url, self.uri)):
                    if not self._get_vcsc().url_matches(cur_url, self.uri):
                        error_message = "Url %s does not match %s requested." % (
                            cur_url, self.uri)
        if error_message is None:
            # update should be possible
            preparation_report.checkout = False
        else:
            # If robust ala continue-on-error, just error now and
            # it will be continued at a higher level
            if robust:
                raise MultiProjectException("Update Failed of %s: %s" %
                                            (self.path, error_message))
            # prompt the user based on the error code
            if arg_mode == 'prompt':
                print("Prepare updating %s (version %s) to %s" %
                      (self.uri, self.version, self.path))
                mode = Ui.get_ui().prompt_del_abort_retry(
                    error_message,
                    allow_skip=True,
                    allow_inplace=is_link)
            else:
                mode = arg_mode
            if mode == 'backup':
                preparation_report.backup = True
                if backup_path is None:
                    print("Prepare updating %s (version %s) to %s" %
                          (self.uri, self.version, self.path))
                    preparation_report.backup_path = \
                        Ui.get_ui().get_backup_path()
                else:
                    preparation_report.backup_path = backup_path
            elif mode == 'abort':
                preparation_report.abort = True
                preparation_report.error = error_message
            elif mode == 'skip':
                preparation_report.skip = True
                preparation_report.error = error_message
            elif mode == 'delete':
                # no backup: install() will delete the existing tree
                preparation_report.backup = False
            elif mode == 'inplace':
                preparation_report.inplace = True
            else:
                raise RuntimeError('Bug: Unknown option "%s" selected' % mode)
    return preparation_report
def get_versioned_path_spec(self):
    """Always fails: non-source element types have no versioned output.

    :raises MultiProjectException: always
    """
    raise MultiProjectException(
        "Cannot generate versioned outputs with non source types")
def get_path_spec_from_yaml(yaml_dict):
    """
    Build a PathSpec from one yaml element dict, unifying different
    syntaxes.

    :param yaml_dict: dict with exactly one key (the element type) mapping
      to a dict of the element's attributes
    :returns: PathSpec
    :raises MultiProjectException: on malformed input
    """
    local_name = None
    uri = None
    version = None
    scmtype = None
    tags = []
    # accept dict subclasses too (was: type(...) != dict)
    if not isinstance(yaml_dict, dict):
        raise MultiProjectException(
            "Yaml for each element must be in YAML dict form: %s " % yaml_dict)
    # old syntax:
    # - hg: {local-name: common_rosdeps,
    #        version: common_rosdeps-1.0.2,
    #        uri: https://kforge.ros.org/common/rosdepcore}
    # - setup-file: {local-name: /opt/ros/fuerte/setup.sh}
    # - other: {local-name: /opt/ros/fuerte/share/ros}
    # - other: {local-name: /opt/ros/fuerte/share}
    # - other: {local-name: /opt/ros/fuerte/stacks}
    if yaml_dict is None or len(yaml_dict) == 0:
        raise MultiProjectException("no element in yaml dict.")
    if len(yaml_dict) > 1:
        raise MultiProjectException(
            "too many keys in element dict %s" % (list(yaml_dict.keys())))
    # hoist the single key instead of recomputing list(keys())[0] four times
    firstkey = list(yaml_dict.keys())[0]
    if firstkey not in __ALLTYPES__:
        raise MultiProjectException(
            "Unknown element type '%s'" % (firstkey,))
    if firstkey in __REPOTYPES__:
        scmtype = firstkey
    if firstkey == 'setup-file':
        tags.append('setup-file')
    values = yaml_dict[firstkey]
    if values is not None:
        for key, value in list(values.items()):
            if key == "local-name":
                local_name = value
            elif key == "meta":
                tags.append({key: value})
            elif key == "uri":
                uri = value
            elif key == "version":
                version = value
            else:
                raise MultiProjectException(
                    "Unknown key %s in %s" % (key, yaml_dict))
    # global validation
    if local_name is None:
        raise MultiProjectException(
            "Config element without a local-name: %s" % (yaml_dict))
    if scmtype is not None:
        if uri is None:
            raise MultiProjectException(
                "scm type without declared uri in %s" % (values))
    # local_name is fixed, path may be normalized, made absolute, etc.
    path = local_name
    return PathSpec(local_name=local_name,
                    path=path,
                    scmtype=scmtype,
                    uri=uri,
                    version=version,
                    tags=tags)
def do_work(self):
    """Gather status/version info for one config element.

    :returns: dict with scm/uri/version/status details for table display
    :raises MultiProjectException: when the element has no local-name
    """
    localname = ""
    scm = None
    uri = ""
    curr_uri = None
    exists = False
    version = ""  # what is given in config file
    curr_version_label = ""  # e.g. branchname
    remote_revision = ""  # UID on remote
    default_remote_label = None  # git default branch
    modified = ""
    currevision = ""  # revision number of version
    specversion = ""  # actual revision number
    localname = self.element.get_local_name()
    path = self.element.get_path() or localname
    if localname is None or localname == "":
        raise MultiProjectException(
            "Missing local-name in element: %s" % self.element)
    if os.path.exists(normabspath(path, self.path)):
        exists = True
    if self.element.is_vcs_element():
        if not exists:
            path_spec = self.element.get_path_spec()
            version = path_spec.get_version()
        else:
            # fixed: was `fetch=fetch` (unbound name); the flag is an
            # attribute of self, as used further below
            path_spec = self.element.get_versioned_path_spec(
                fetch=self.fetch)
            version = path_spec.get_version()
            remote_revision = path_spec.get_remote_revision()
            curr_version_label = path_spec.get_curr_version()
            # (removed an unused `display_version` computation here)
            curr_uri = path_spec.get_curr_uri()
            status = self.element.get_status(self.path, self.untracked)
            if (status is not None and status.strip() != ''):
                modified = True
            specversion = path_spec.get_revision()
            if (version is not None and
                    version.strip() != '' and
                    (specversion is None or specversion.strip() == '')):
                # no resolved revision: show the raw spec, quoted
                specversion = '"%s"' % version
            if (self.fetch and
                    specversion is None and
                    path_spec.get_scmtype() == 'git'):
                default_remote_label = \
                    self.element.get_default_remote_label()
            currevision = path_spec.get_current_revision()
        scm = path_spec.get_scmtype()
        uri = path_spec.get_uri()
    return {'scm': scm,
            'exists': exists,
            'localname': localname,
            'path': path,
            'uri': uri,
            'curr_uri': curr_uri,
            'version': version,
            'remote_revision': remote_revision,
            'default_remote_label': default_remote_label,
            'curr_version_label': curr_version_label,
            'specversion': specversion,
            'actualversion': currevision,
            'modified': modified,
            'properties': self.element.get_properties()}
def cmd_install_or_update(config, backup_path=None, mode='abort',
                          robust=False, localnames=None, num_threads=1,
                          timeout=None, verbose=False, shallow=False):
    """
    performs many things, generally attempting to make
    the local filesystem look like what the config specifies,
    pulling from remote sources the most recent changes.

    The command may have stdin user interaction (TODO abstract)

    :param backup_path: if and where to backup trees before deleting them
    :param mode: arg_mode passed to each element's prepare_install
    :param robust: proceed to next element even when one element fails
    :param localnames: optional subset of element names to operate on
    :param num_threads: how many elements to install in parallel
    :param timeout: passed through to the scm operations
    :param verbose: passed through to the scm operations
    :param shallow: passed through to scm checkout (shallow clone)
    :returns: True on Success
    :raises MultiProjectException: on plenty of errors
    """
    success = True
    if not os.path.exists(config.get_base_path()):
        os.mkdir(config.get_base_path())
    # Prepare install operation check filesystem and ask user
    preparation_reports = []
    elements = select_elements(config, localnames)
    for tree_el in elements:
        abs_backup_path = None
        if backup_path is not None:
            # backup_path is taken relative to the workspace base
            abs_backup_path = os.path.join(config.get_base_path(),
                                           backup_path)
        try:
            preparation_report = tree_el.prepare_install(
                backup_path=abs_backup_path,
                arg_mode=mode,
                robust=robust)
            if preparation_report is not None:
                if preparation_report.abort:
                    # NOTE: this raise is caught by the except below, so
                    # with robust=True the loop still continues
                    raise MultiProjectException(
                        "Aborting install because of %s" %
                        preparation_report.error)
                if not preparation_report.skip:
                    preparation_reports.append(preparation_report)
                else:
                    if preparation_report.error is not None:
                        print("Skipping install of %s because: %s" %
                              (preparation_report.config_element.get_local_name(),
                               preparation_report.error))
        except MultiProjectException as exc:
            fail_str = ("Failed to install tree '%s'\n %s" %
                        (tree_el.get_path(), exc))
            if robust:
                success = False
                print("Continuing despite %s" % fail_str)
            else:
                raise MultiProjectException(fail_str)

    class Installer():
        # worker object for DistributedWork: installs one prepared element
        def __init__(self, report):
            self.element = report.config_element
            self.report = report

        def do_work(self):
            self.element.install(checkout=self.report.checkout,
                                 backup=self.report.backup,
                                 backup_path=self.report.backup_path,
                                 inplace=self.report.inplace,
                                 timeout=self.report.timeout,
                                 verbose=self.report.verbose,
                                 shallow=self.report.shallow)
            return {}

    work = DistributedWork(capacity=len(preparation_reports),
                           num_threads=num_threads,
                           silent=False)
    for report in preparation_reports:
        # stash per-run options on the report so the worker can read them
        report.verbose = verbose
        report.timeout = timeout
        report.shallow = shallow
        thread = Installer(report)
        work.add_thread(thread)
    try:
        work.run()
    except MultiProjectException as exc:
        print("Exception caught during install: %s" % exc)
        success = False
        if not robust:
            raise
    return success
def add_uris(config,
             additional_uris,
             config_filename=None,
             merge_strategy="KillAppend",
             allow_other_element=True):
    """Merge additional config locations into the given config.

    config_filename is not redundant with config.get_config_filename()
    because in some cases a different config_filename is required.

    :param config: a Config object
    :param additional_uris: the location of config specifications or folders
    :param config_filename: name of files which may be looked at for config
      information
    :param merge_strategy: One of 'KillAppend, 'MergeKeep', 'MergeReplace'
    :param allow_other_element: if False, discards elements to be added
      with no SCM information
    :returns: a dict {<local-name>: (<action>, <path-spec>), ...} determined
      by the merge_strategy
    :raises MultiProjectException: on plenty of errors
    """
    if config is None:
        raise MultiProjectException("Need to provide a Config.")
    if not additional_uris:
        return {}
    if config_filename is None:
        added_uris = additional_uris
    else:
        # Reject any uri that points at the same file our config is based
        # on, i.e. a merge of the workspace with itself.
        added_uris = []
        for uri in additional_uris:
            other_workspace = None
            if (os.path.isfile(uri) and
                    os.path.basename(uri) == config_filename):
                # add from other workspace by file
                other_workspace = os.path.dirname(uri)
            if (os.path.isdir(uri) and
                    os.path.isfile(os.path.join(uri, config_filename))):
                # add from other workspace by dir
                other_workspace = uri
            if (other_workspace is not None and
                    realpath_relation(
                        os.path.abspath(other_workspace),
                        os.path.abspath(config.get_base_path())) == 'SAME_AS'):
                print('Warning: Discarding config basepath '
                      'as additional uri: %s' % uri)
                continue
            added_uris.append(uri)
    actions = {}
    if added_uris:
        path_specs = aggregate_from_uris(added_uris,
                                         config_filename,
                                         allow_other_element)
        for path_spec in path_specs:
            action = config.add_path_spec(path_spec, merge_strategy)
            actions[path_spec.get_local_name()] = (action, path_spec)
    return actions
def insert_element(self, new_config_elt, merge_strategy='KillAppend'):
    """
    Insert ConfigElement to self.trees, checking for duplicate local-name
    or path first. In case local_name matches, follow given strategy

    - KillAppend (default): remove old element, append new at the end
    - MergeReplace: remove first hit, insert new at that position.
    - MergeKeep: Discard new element

    In case local path matches but local name does not, raise Exception

    :returns: the action performed None, 'Append', 'KillAppend',
      'MergeReplace', 'MergeKeep'
    :raises MultiProjectException: on conflicting names/overlapping paths
    :raises LookupError: on an unknown merge strategy
    """
    removals = []
    replaced = False
    for index, loop_elt in enumerate(self.trees):
        # if paths are os.path.realpath, no symlink problems.
        relationship = realpath_relation(loop_elt.get_path(),
                                         new_config_elt.get_path())
        if relationship == 'SAME_AS':
            if os.path.normpath(
                    loop_elt.get_local_name()) != os.path.normpath(
                        new_config_elt.get_local_name()):
                raise MultiProjectException(
                    "Elements with different local_name target the same path: %s, %s" % (loop_elt, new_config_elt))
            else:
                if (loop_elt == new_config_elt):
                    return None
                if (merge_strategy == 'MergeReplace' or
                        (merge_strategy == 'KillAppend' and
                         index == len(self.trees) - 1)):
                    self.trees[index] = new_config_elt
                    # keep looping to check for overlap when replacing non-
                    # scm with scm entry
                    replaced = True
                    # fixed: was `loop_elt.is_vcs_element or ...` -- bound
                    # method objects are always truthy, so the early return
                    # fired unconditionally; call the methods (as done in
                    # the CHILD_OF/PARENT_OF branch below)
                    if (loop_elt.is_vcs_element() or
                            not new_config_elt.is_vcs_element()):
                        return 'MergeReplace'
                elif merge_strategy == 'KillAppend':
                    removals.append(loop_elt)
                elif merge_strategy == 'MergeKeep':
                    return 'MergeKeep'
                else:
                    raise LookupError(
                        "No such merge strategy: %s" % str(merge_strategy))
        elif ((relationship == 'CHILD_OF' and
               new_config_elt.is_vcs_element()) or
              (relationship == 'PARENT_OF' and
               loop_elt.is_vcs_element())):
            # we do not allow any elements to be children of scm elements
            # to allow for parallel updates and because wstool may
            # delete scm folders on update, and thus subfolders can be
            # deleted with their parents
            raise MultiProjectException(
                "Managed Element paths overlap: %s, %s" %
                (loop_elt, new_config_elt))
    if replaced:
        return 'MergeReplace'
    for loop_elt in removals:
        self.trees.remove(loop_elt)
    self.trees.append(new_config_elt)
    if len(removals) > 0:
        return 'KillAppend'
    return 'Append'