Example #1
def __0_25_1(conf: GitConfigParser, repo: Repo, console: Console):
    """
  Until v0.25.1 inclusive, bossman init changed the git config to add explicit push/fetch
  refspecs to force git pull/push to also fetch/push notes.

  This behaviour was changed in favour of an explicit pull and push as part of application
  logic when relevant (e.g. before bossman status/prerelease/release, before an after bossman apply).

  The change was largely motivated by the fact that adding the fetch/push refspecs to the config
  appears to break normal branch tracking, forcing an explicit `git push origin ref`, not great UX.
  Because notes are critical to bossman, it's also better not to rely on the user remembering to push
  after apply.
  """
    notes_refspec = "+refs/notes/*:refs/notes/*"
    for section in conf.sections():
        if section.startswith("remote"):
            push_refspecs = conf.get_values(section, "push", [])
            if notes_refspec in push_refspecs:
                conf.remove_option(section, "push")
                push_refspecs = list(refspec for refspec in push_refspecs
                                     if refspec != notes_refspec)
                for refspec in push_refspecs:
                    conf.add_value(section, "push", refspec)
                console.print(r"[red]-[/] \[{}] push: {}".format(
                    section, notes_refspec))
            fetch_refspecs = conf.get_values(section, "fetch", [])
            if notes_refspec in fetch_refspecs:
                conf.remove_option(section, "fetch")
                fetch_refspecs = list(refspec for refspec in fetch_refspecs
                                      if refspec != notes_refspec)
                for refspec in fetch_refspecs:
                    conf.add_value(section, "fetch", refspec)
                console.print(r"[red]-[/] \[{}] fetch: {}".format(
                    section, notes_refspec))
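
A minimal usage sketch for the migration above, assuming bossman hands the function a writable GitConfigParser obtained via GitPython's Repo.config_writer() and a rich Console; this call site is an illustration and not part of the original code:

from git import Repo
from rich.console import Console

repo = Repo(".")
console = Console()
# config_writer() returns a read-write GitConfigParser for .git/config and
# flushes changes back to disk when the context manager exits
with repo.config_writer() as conf:
    __0_25_1(conf, repo, console)  # strip the notes refspecs added by bossman <= 0.25.1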
Example #2
    def test_base(self):
        path_repo = fixture_path("git_config")
        path_global = fixture_path("git_config_global")
        r_config = GitConfigParser([path_repo, path_global], read_only=True)
        assert r_config.read_only
        num_sections = 0
        num_options = 0

        # test reader methods
        assert r_config._is_initialized is False
        for section in r_config.sections():
            num_sections += 1
            for option in r_config.options(section):
                num_options += 1
                val = r_config.get(section, option)
                val_typed = r_config.get_value(section, option)
                assert isinstance(val_typed, (
                    bool,
                    int,
                    float,
                ) + string_types)
                assert val
                assert "\n" not in option
                assert "\n" not in val

                # writing must fail
                with self.assertRaises(IOError):
                    r_config.set(section, option, None)
                with self.assertRaises(IOError):
                    r_config.remove_option(section, option)
            # END for each option
            with self.assertRaises(IOError):
                r_config.remove_section(section)
        # END for each section
        assert num_sections and num_options
        assert r_config._is_initialized is True

        # get a value which doesn't exist, with a default
        default = "my default value"
        assert r_config.get_value("doesnt", "exist", default) == default

        # it raises if there is no default though
        with self.assertRaises(cp.NoSectionError):
            r_config.get_value("doesnt", "exist")
Example #3
    def test_base(self):
        path_repo = fixture_path("git_config")
        path_global = fixture_path("git_config_global")
        r_config = GitConfigParser([path_repo, path_global], read_only=True)
        assert r_config.read_only
        num_sections = 0
        num_options = 0

        # test reader methods
        assert r_config._is_initialized is False
        for section in r_config.sections():
            num_sections += 1
            for option in r_config.options(section):
                num_options += 1
                val = r_config.get(section, option)
                val_typed = r_config.get_value(section, option)
                assert isinstance(val_typed, (bool, int, float, ) + string_types)
                assert val
                assert "\n" not in option
                assert "\n" not in val

                # writing must fail
                with self.assertRaises(IOError):
                    r_config.set(section, option, None)
                with self.assertRaises(IOError):
                    r_config.remove_option(section, option)
            # END for each option
            with self.assertRaises(IOError):
                r_config.remove_section(section)
        # END for each section
        assert num_sections and num_options
        assert r_config._is_initialized is True

        # get a value which doesn't exist, with a default
        default = "my default value"
        assert r_config.get_value("doesnt", "exist", default) == default

        # it raises if there is no default though
        with self.assertRaises(cp.NoSectionError):
            r_config.get_value("doesnt", "exist")
Example #4
def _get_submodules(dspath, fulfilled, recursive, recursion_limit, contains,
                    bottomup, set_property, delete_property, refds_path):
    if not GitRepo.is_valid_repo(dspath):
        return
    modinfo = _parse_gitmodules(dspath)
    # write access parser
    parser = None
    # TODO bring back in more global scope from below once segfaults are
    # figured out
    #if set_property or delete_property:
    #    gitmodule_path = opj(dspath, ".gitmodules")
    #    parser = GitConfigParser(
    #        gitmodule_path, read_only=False, merge_includes=False)
    #    parser.read()
    # put in giant for-loop to be able to yield results before completion
    for sm in _parse_git_submodules(dspath):
        if contains and not path_startswith(contains, sm['path']):
            # we are not looking for this subds, because it doesn't
            # match the target path
            continue
        sm.update(modinfo.get(sm['path'], {}))
        if set_property or delete_property:
            gitmodule_path = opj(dspath, ".gitmodules")
            parser = GitConfigParser(gitmodule_path,
                                     read_only=False,
                                     merge_includes=False)
            parser.read()
            # do modifications now before we read the info out for reporting
            # use 'submodule "NAME"' section ID style as this seems to be the default
            submodule_section = 'submodule "{}"'.format(sm['gitmodule_name'])
            # first deletions
            for dprop in assure_list(delete_property):
                parser.remove_option(submodule_section, dprop)
                # also kick from the info we just read above
                sm.pop('gitmodule_{}'.format(dprop), None)
            # and now setting values
            for sprop in assure_list(set_property):
                prop, val = sprop
                if val.startswith('<') and val.endswith('>') and '{' in val:
                    # expand template string
                    val = val[1:-1].format(
                        **dict(sm,
                               refds_relpath=relpath(sm['path'], refds_path),
                               refds_relname=relpath(
                                   sm['path'], refds_path).replace(os.sep, '-')))
                parser.set_value(submodule_section, prop, val)
                # also add to the info we just read above
                sm['gitmodule_{}'.format(prop)] = val
            Dataset(dspath).add(
                '.gitmodules',
                to_git=True,
                message='[DATALAD] modified subdataset properties')
            # let go of resources, locks, ...
            parser.release()

        #common = commonprefix((with_pathsep(subds), with_pathsep(path)))
        #if common.endswith(sep) and common == with_pathsep(subds):
        #    candidates.append(common)
        subdsres = get_status_dict('subdataset',
                                   status='ok',
                                   type='dataset',
                                   logger=lgr)
        subdsres.update(sm)
        subdsres['parentds'] = dspath
        if not bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres

        # expand list with child submodules. keep all paths relative to parent
        # and convert jointly at the end
        if recursive and \
                (recursion_limit in (None, 'existing') or
                 (isinstance(recursion_limit, int) and
                  recursion_limit > 1)):
            for r in _get_submodules(
                    sm['path'], fulfilled, recursive,
                    (recursion_limit - 1) if isinstance(recursion_limit, int)
                    else recursion_limit,
                    contains, bottomup, set_property, delete_property,
                    refds_path):
                yield r
        if bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres
    if parser is not None:
        # release parser lock manually, auto-cleanup is not reliable in PY3
        parser.release()
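
A hedged sketch of how this generator might be driven; the dataset path and argument values below are illustrative only and not taken from the surrounding datalad code:

# Report every known subdataset of /data/ds, installed or not, without a recursion limit.
for res in _get_submodules(
        '/data/ds',   # dspath
        None,         # fulfilled: None -> report regardless of installation state
        True,         # recursive
        None,         # recursion_limit: unlimited
        None,         # contains: no path filter
        False,        # bottomup: yield parents before children
        None,         # set_property
        None,         # delete_property
        '/data/ds'):  # refds_path
    print(res['path'], res['status'])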
Example #5
def _get_submodules(dspath, fulfilled, recursive, recursion_limit,
                    contains, bottomup, set_property, delete_property,
                    refds_path):
    if not GitRepo.is_valid_repo(dspath):
        return
    modinfo = _parse_gitmodules(dspath)
    # write access parser
    parser = None
    # TODO bring back in more global scope from below once segfaults are
    # figured out
    #if set_property or delete_property:
    #    gitmodule_path = opj(dspath, ".gitmodules")
    #    parser = GitConfigParser(
    #        gitmodule_path, read_only=False, merge_includes=False)
    #    parser.read()
    # put in giant for-loop to be able to yield results before completion
    for sm in _parse_git_submodules(dspath):
        if contains and not path_startswith(contains, sm['path']):
            # we are not looking for this subds, because it doesn't
            # match the target path
            continue
        sm.update(modinfo.get(sm['path'], {}))
        if set_property or delete_property:
            gitmodule_path = opj(dspath, ".gitmodules")
            parser = GitConfigParser(
                gitmodule_path, read_only=False, merge_includes=False)
            parser.read()
            # do modifications now before we read the info out for reporting
            # use 'submodule "NAME"' section ID style as this seems to be the default
            submodule_section = 'submodule "{}"'.format(sm['gitmodule_name'])
            # first deletions
            for dprop in assure_list(delete_property):
                parser.remove_option(submodule_section, dprop)
                # also kick from the info we just read above
                sm.pop('gitmodule_{}'.format(dprop), None)
            # and now setting values
            for sprop in assure_list(set_property):
                prop, val = sprop
                if val.startswith('<') and val.endswith('>') and '{' in val:
                    # expand template string
                    val = val[1:-1].format(
                        **dict(
                            sm,
                            refds_relpath=relpath(sm['path'], refds_path),
                            refds_relname=relpath(sm['path'], refds_path).replace(os.sep, '-')))
                parser.set_value(
                    submodule_section,
                    prop,
                    val)
                # also add to the info we just read above
                sm['gitmodule_{}'.format(prop)] = val
            Dataset(dspath).add(
                '.gitmodules', to_git=True,
                message='[DATALAD] modified subdataset properties')
            # let go of resources, locks, ...
            parser.release()

        #common = commonprefix((with_pathsep(subds), with_pathsep(path)))
        #if common.endswith(sep) and common == with_pathsep(subds):
        #    candidates.append(common)
        subdsres = get_status_dict(
            'subdataset',
            status='ok',
            type='dataset',
            logger=lgr)
        subdsres.update(sm)
        subdsres['parentds'] = dspath
        if not bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres

        # expand list with child submodules. keep all paths relative to parent
        # and convert jointly at the end
        if recursive and \
                (recursion_limit in (None, 'existing') or
                 (isinstance(recursion_limit, int) and
                  recursion_limit > 1)):
            for r in _get_submodules(
                    sm['path'],
                    fulfilled, recursive,
                    (recursion_limit - 1)
                    if isinstance(recursion_limit, int)
                    else recursion_limit,
                    contains,
                    bottomup,
                    set_property,
                    delete_property,
                    refds_path):
                yield r
        if bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres
    if parser is not None:
        # release parser lock manually, auto-cleanup is not reliable in PY3
        parser.release()
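
The '<...>' template expansion used for set_property values can be illustrated in isolation; the submodule record below is made up for the example:

import os
from os.path import relpath

sm = {'path': '/ds/sub/mod', 'gitmodule_name': 'mod'}
refds_path = '/ds'
val = '<{gitmodule_name}-{refds_relname}>'
if val.startswith('<') and val.endswith('>') and '{' in val:
    # strip the angle brackets and format against the submodule record plus
    # two derived relative-path fields
    val = val[1:-1].format(
        **dict(sm,
               refds_relpath=relpath(sm['path'], refds_path),
               refds_relname=relpath(sm['path'], refds_path).replace(os.sep, '-')))
print(val)  # 'mod-sub-mod' on POSIX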