Example #1
    def test_complex_aliases(self):
        file_obj = self._to_memcache(fixture_path('.gitconfig'))
        w_config = GitConfigParser(file_obj, read_only=False)
        self.assertEqual(w_config.get('alias', 'rbi'),
                         '"!g() { git rebase -i origin/${1:-master} ; } ; g"')
        w_config.release()
        self.assertEqual(
            file_obj.getvalue(),
            self._to_memcache(fixture_path('.gitconfig')).getvalue())
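
The `_to_memcache` helper is defined on the test class and is not part of the excerpt. A minimal sketch of what it presumably does, namely copying the fixture into an in-memory buffer so the writable parser never touches the fixture on disk (the BytesIO choice and the `name` attribute are assumptions):

from io import BytesIO

def _to_memcache(file_path):
    # Copy a fixture file into an in-memory buffer so a writable
    # GitConfigParser can read and write it without modifying the file
    # on disk (in the suite this is a method on the test class).
    with open(file_path, "rb") as fp:
        sio = BytesIO(fp.read())
    sio.name = file_path  # keep the original path on the buffer (assumption)
    return sio
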
Example #2
    def test_rename(self):
        file_obj = self._to_memcache(fixture_path('git_config'))
        cw = GitConfigParser(file_obj, read_only=False, merge_includes=False)

        self.failUnlessRaises(ValueError, cw.rename_section, "doesntexist", "foo")
        self.failUnlessRaises(ValueError, cw.rename_section, "core", "include")

        nn = "bee"
        assert cw.rename_section('core', nn) is cw
        assert not cw.has_section('core')
        assert len(cw.items(nn)) == 4
        cw.release()
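
For context, a small standalone sketch of the same `rename_section` API against a throwaway config file (the path and values are illustrative; the import assumes GitPython's `git.GitConfigParser`):

import os
import tempfile

from git import GitConfigParser  # assumption: GitPython is installed

path = os.path.join(tempfile.mkdtemp(), "config")

cw = GitConfigParser(path, read_only=False, merge_includes=False)
cw.set_value("core", "editor", "vim")            # creates the [core] section
assert cw.rename_section("core", "tools") is cw  # returns the parser itself
assert cw.has_section("tools") and not cw.has_section("core")
cw.release()                                     # flush changes, drop the write lock
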
Example #3
    def test_read_write(self):
        # writer must create the exact same file as the one read before
        for filename in ("git_config", "git_config_global"):
            file_obj = self._to_memcache(fixture_path(filename))
            w_config = GitConfigParser(file_obj, read_only=False)
            w_config.read()  # enforce reading
            assert w_config._sections
            w_config.write()  # enforce writing

            # the write-back must reproduce the fixture byte for byte
            assert file_obj.getvalue()
            self.assertEqual(
                file_obj.getvalue(),
                self._to_memcache(fixture_path(filename)).getvalue())

            # creating an additional config writer must fail due to exclusive access
            self.failUnlessRaises(IOError,
                                  GitConfigParser,
                                  file_obj,
                                  read_only=False)

            # should still have a lock and be able to make changes
            assert w_config._lock._has_lock()

            # changes should be written right away
            sname = "my_section"
            oname = "mykey"
            val = "myvalue"
            w_config.add_section(sname)
            assert w_config.has_section(sname)
            w_config.set(sname, oname, val)
            assert w_config.has_option(sname, oname)
            assert w_config.get(sname, oname) == val

            sname_new = "new_section"
            oname_new = "new_key"
            ival = 10
            w_config.set_value(sname_new, oname_new, ival)
            assert w_config.get_value(sname_new, oname_new) == ival

            file_obj.seek(0)
            r_config = GitConfigParser(file_obj, read_only=True)
            assert r_config.has_section(sname)
            assert r_config.has_option(sname, oname)
            assert r_config.get(sname, oname) == val
            w_config.release()
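
The IOError above comes from the parser's exclusive write lock. A standalone sketch of the same behaviour against a plain file path (illustrative path; it assumes, as the test suggests, that the lock is acquired when a writable parser is constructed):

import os
import tempfile

from git import GitConfigParser  # assumption: GitPython is installed

path = os.path.join(tempfile.mkdtemp(), "config")

w1 = GitConfigParser(path, read_only=False)    # acquires the write lock
try:
    GitConfigParser(path, read_only=False)     # second writer on the same file
except IOError as err:                         # IOError is OSError on Python 3
    print("write lock already held:", err)
finally:
    w1.release()                               # flush changes, release the lock
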
Example #4
    def test_read_write(self):
        # writer must create the exact same file as the one read before
        for filename in ("git_config", "git_config_global"):
            file_obj = self._to_memcache(fixture_path(filename))
            w_config = GitConfigParser(file_obj, read_only=False)
            w_config.read()                 # enforce reading
            assert w_config._sections
            w_config.write()                # enforce writing

            # the write-back must reproduce the fixture byte for byte
            assert file_obj.getvalue()
            self.assertEqual(file_obj.getvalue(), self._to_memcache(fixture_path(filename)).getvalue())

            # creating an additional config writer must fail due to exclusive access
            self.failUnlessRaises(IOError, GitConfigParser, file_obj, read_only=False)

            # should still have a lock and be able to make changes
            assert w_config._lock._has_lock()

            # changes should be written right away
            sname = "my_section"
            oname = "mykey"
            val = "myvalue"
            w_config.add_section(sname)
            assert w_config.has_section(sname)
            w_config.set(sname, oname, val)
            assert w_config.has_option(sname, oname)
            assert w_config.get(sname, oname) == val

            sname_new = "new_section"
            oname_new = "new_key"
            ival = 10
            w_config.set_value(sname_new, oname_new, ival)
            assert w_config.get_value(sname_new, oname_new) == ival

            file_obj.seek(0)
            r_config = GitConfigParser(file_obj, read_only=True)
            assert r_config.has_section(sname)
            assert r_config.has_option(sname, oname)
            assert r_config.get(sname, oname) == val
            w_config.release()
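
The same test also exercises GitPython's convenience accessors. As a standalone sketch with illustrative names: `set_value` creates the section on demand, and `get_value` converts simple types back when reading:

import os
import tempfile

from git import GitConfigParser  # assumption: GitPython is installed

path = os.path.join(tempfile.mkdtemp(), "config")

cw = GitConfigParser(path, read_only=False)
cw.set_value("new_section", "new_key", 10)     # no explicit add_section() needed
cw.release()                                   # value is flushed to disk here

cr = GitConfigParser(path, read_only=True)
assert cr.get_value("new_section", "new_key") == 10  # read back as an int
cr.release()
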
Example #5
def _parse_gitmodules(dspath):
    gitmodule_path = opj(dspath, ".gitmodules")
    parser = GitConfigParser(gitmodule_path)
    mods = {}
    for sec in parser.sections():
        try:
            modpath = parser.get(sec, 'path')
        except Exception:
            lgr.debug("Failed to get '%s.path', skipping section", sec)
            continue
        if not modpath or not sec.startswith('submodule '):
            continue
        modpath = normpath(opj(dspath, modpath))
        modprops = {'gitmodule_{}'.format(opt): parser.get_value(sec, opt)
                    for opt in parser.options(sec)
                    if not (opt.startswith('__') or opt == 'path')}
        modprops['gitmodule_name'] = sec[11:-1]
        mods[modpath] = modprops
    # make sure we let go of any resources held by the parser
    # we cannot rely on __del__
    parser.release()
    return mods
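
The `sec[11:-1]` slice relies on the section names the parser reports for submodules, i.e. `submodule "NAME"` including the quotes. A quick illustration with a made-up name:

# The prefix 'submodule "' is 11 characters long; the trailing quote is removed
# by the -1, leaving the submodule name exactly as recorded in .gitmodules.
sec = 'submodule "sub/module-name"'   # made-up section name
assert sec[11:-1] == 'sub/module-name'
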
Example #6
    def test_config_include(self, rw_dir):
        def write_test_value(cw, value):
            cw.set_value(value, 'value', value)

        # end

        def check_test_value(cr, value):
            assert cr.get_value(value, 'value') == value

        # end

        # PREPARE CONFIG FILE A
        fpa = os.path.join(rw_dir, 'a')
        cw = GitConfigParser(fpa, read_only=False)
        write_test_value(cw, 'a')

        fpb = os.path.join(rw_dir, 'b')
        fpc = os.path.join(rw_dir, 'c')
        cw.set_value('include', 'relative_path_b', 'b')
        cw.set_value('include', 'doesntexist', 'foobar')
        cw.set_value('include', 'relative_cycle_a_a', 'a')
        cw.set_value('include', 'absolute_cycle_a_a', fpa)
        cw.release()
        assert os.path.exists(fpa)

        # PREPARE CONFIG FILE B
        cw = GitConfigParser(fpb, read_only=False)
        write_test_value(cw, 'b')
        cw.set_value('include', 'relative_cycle_b_a', 'a')
        cw.set_value('include', 'absolute_cycle_b_a', fpa)
        cw.set_value('include', 'relative_path_c', 'c')
        cw.set_value('include', 'absolute_path_c', fpc)
        cw.release()

        # PREPARE CONFIG FILE C
        cw = GitConfigParser(fpc, read_only=False)
        write_test_value(cw, 'c')
        cw.release()

        cr = GitConfigParser(fpa, read_only=True)
        for tv in ('a', 'b', 'c'):
            check_test_value(cr, tv)
        # end for each test to verify
        assert len(cr.items(
            'include')) == 8, "Expected all include sections to be merged"
        cr.release()

        # test writable config writers - assure write-back doesn't involve includes
        cw = GitConfigParser(fpa, read_only=False, merge_includes=True)
        tv = 'x'
        write_test_value(cw, tv)
        cw.release()

        cr = GitConfigParser(fpa, read_only=True)
        self.failUnlessRaises(cp.NoSectionError, check_test_value, cr, tv)
        cr.release()

        # But we can make it skip includes altogether, and thus allow write-backs
        cw = GitConfigParser(fpa, read_only=False, merge_includes=False)
        write_test_value(cw, tv)
        cw.release()

        cr = GitConfigParser(fpa, read_only=True)
        check_test_value(cr, tv)
        cr.release()
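
A stripped-down, standalone sketch of the include machinery this test exercises (file and section names are illustrative; it assumes `merge_includes` defaults to True for read-only parsers, as the test implies):

import os
import tempfile

from git import GitConfigParser  # assumption: GitPython is installed

d = tempfile.mkdtemp()
fpa, fpb = os.path.join(d, "a"), os.path.join(d, "b")

cw = GitConfigParser(fpb, read_only=False)     # the file to be included
cw.set_value("b", "value", "b")
cw.release()

cw = GitConfigParser(fpa, read_only=False)     # the including file
cw.set_value("a", "value", "a")
cw.set_value("include", "path", fpb)           # git-style include.path entry
cw.release()

cr = GitConfigParser(fpa, read_only=True)      # merge_includes defaults to True
assert cr.get_value("b", "value") == "b"       # pulled in from the included file
cr.release()
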
Example #7
    def test_complex_aliases(self):
        file_obj = self._to_memcache(fixture_path('.gitconfig'))
        w_config = GitConfigParser(file_obj, read_only=False)
        self.assertEqual(w_config.get('alias', 'rbi'), '"!g() { git rebase -i origin/${1:-master} ; } ; g"')
        w_config.release()
        self.assertEqual(file_obj.getvalue(), self._to_memcache(fixture_path('.gitconfig')).getvalue())
Example #8
def _get_submodules(dspath, fulfilled, recursive, recursion_limit, contains,
                    bottomup, set_property, delete_property, refds_path):
    if not GitRepo.is_valid_repo(dspath):
        return
    modinfo = _parse_gitmodules(dspath)
    # write access parser
    parser = None
    # TODO bring back in more global scope from below once segfaults are
    # figured out
    #if set_property or delete_property:
    #    gitmodule_path = opj(dspath, ".gitmodules")
    #    parser = GitConfigParser(
    #        gitmodule_path, read_only=False, merge_includes=False)
    #    parser.read()
    # put in giant for-loop to be able to yield results before completion
    for sm in _parse_git_submodules(dspath):
        if contains and not path_startswith(contains, sm['path']):
            # we are not looking for this subds, because it doesn't
            # match the target path
            continue
        sm.update(modinfo.get(sm['path'], {}))
        if set_property or delete_property:
            gitmodule_path = opj(dspath, ".gitmodules")
            parser = GitConfigParser(gitmodule_path,
                                     read_only=False,
                                     merge_includes=False)
            parser.read()
            # do modifications now before we read the info out for reporting
            # use 'submodule "NAME"' section ID style as this seems to be the default
            submodule_section = 'submodule "{}"'.format(sm['gitmodule_name'])
            # first deletions
            for dprop in assure_list(delete_property):
                parser.remove_option(submodule_section, dprop)
                # also kick from the info we just read above
                sm.pop('gitmodule_{}'.format(dprop), None)
            # and now setting values
            for sprop in assure_list(set_property):
                prop, val = sprop
                if val.startswith('<') and val.endswith('>') and '{' in val:
                    # expand template string
                    val = val[1:-1].format(
                        **dict(sm,
                               refds_relpath=relpath(sm['path'], refds_path),
                               refds_relname=relpath(sm['path'], refds_path).
                               replace(os.sep, '-')))
                parser.set_value(submodule_section, prop, val)
                # also add to the info we just read above
                sm['gitmodule_{}'.format(prop)] = val
            Dataset(dspath).add(
                '.gitmodules',
                to_git=True,
                message='[DATALAD] modified subdataset properties')
            # let go of resources, locks, ...
            parser.release()

        #common = commonprefix((with_pathsep(subds), with_pathsep(path)))
        #if common.endswith(sep) and common == with_pathsep(subds):
        #    candidates.append(common)
        subdsres = get_status_dict('subdataset',
                                   status='ok',
                                   type='dataset',
                                   logger=lgr)
        subdsres.update(sm)
        subdsres['parentds'] = dspath
        if not bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres

        # expand list with child submodules. keep all paths relative to parent
        # and convert jointly at the end
        if recursive and \
                (recursion_limit in (None, 'existing') or
                 (isinstance(recursion_limit, int) and
                  recursion_limit > 1)):
            for r in _get_submodules(
                    sm['path'], fulfilled, recursive,
                    (recursion_limit - 1) if isinstance(recursion_limit, int)
                    else recursion_limit,
                    contains, bottomup, set_property, delete_property,
                    refds_path):
                yield r
        if bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres
    if parser is not None:
        # release parser lock manually, auto-cleanup is not reliable in PY3
        parser.release()
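
As a compact, standalone sketch of the write path used above (the submodule name and the property keys are purely illustrative), editing `.gitmodules` via GitConfigParser boils down to:

from git import GitConfigParser  # assumption: GitPython is installed

# merge_includes=False mirrors the code above: don't merge include files
# into what gets written back.
parser = GitConfigParser(".gitmodules", read_only=False, merge_includes=False)
parser.read()
section = 'submodule "my-subds"'                      # hypothetical submodule name
parser.set_value(section, "datalad-id", "some-id")    # add or replace a property
parser.remove_option(section, "obsolete-key")         # returns False if absent
parser.release()                                      # flush changes, release the lock
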
Example #9
def _get_submodules(dspath, fulfilled, recursive, recursion_limit,
                    contains, bottomup, set_property, delete_property,
                    refds_path):
    if not GitRepo.is_valid_repo(dspath):
        return
    modinfo = _parse_gitmodules(dspath)
    # write access parser
    parser = None
    # TODO bring back in more global scope from below once segfaults are
    # figured out
    #if set_property or delete_property:
    #    gitmodule_path = opj(dspath, ".gitmodules")
    #    parser = GitConfigParser(
    #        gitmodule_path, read_only=False, merge_includes=False)
    #    parser.read()
    # put in giant for-loop to be able to yield results before completion
    for sm in _parse_git_submodules(dspath):
        if contains and not path_startswith(contains, sm['path']):
            # we are not looking for this subds, because it doesn't
            # match the target path
            continue
        sm.update(modinfo.get(sm['path'], {}))
        if set_property or delete_property:
            gitmodule_path = opj(dspath, ".gitmodules")
            parser = GitConfigParser(
                gitmodule_path, read_only=False, merge_includes=False)
            parser.read()
            # do modifications now before we read the info out for reporting
            # use 'submodule "NAME"' section ID style as this seems to be the default
            submodule_section = 'submodule "{}"'.format(sm['gitmodule_name'])
            # first deletions
            for dprop in assure_list(delete_property):
                parser.remove_option(submodule_section, dprop)
                # also kick from the info we just read above
                sm.pop('gitmodule_{}'.format(dprop), None)
            # and now setting values
            for sprop in assure_list(set_property):
                prop, val = sprop
                if val.startswith('<') and val.endswith('>') and '{' in val:
                    # expand template string
                    val = val[1:-1].format(
                        **dict(
                            sm,
                            refds_relpath=relpath(sm['path'], refds_path),
                            refds_relname=relpath(sm['path'], refds_path).replace(os.sep, '-')))
                parser.set_value(
                    submodule_section,
                    prop,
                    val)
                # also add to the info we just read above
                sm['gitmodule_{}'.format(prop)] = val
            Dataset(dspath).add(
                '.gitmodules', to_git=True,
                message='[DATALAD] modified subdataset properties')
            # let go of resources, locks, ...
            parser.release()

        #common = commonprefix((with_pathsep(subds), with_pathsep(path)))
        #if common.endswith(sep) and common == with_pathsep(subds):
        #    candidates.append(common)
        subdsres = get_status_dict(
            'subdataset',
            status='ok',
            type='dataset',
            logger=lgr)
        subdsres.update(sm)
        subdsres['parentds'] = dspath
        if not bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres

        # expand list with child submodules. keep all paths relative to parent
        # and convert jointly at the end
        if recursive and \
                (recursion_limit in (None, 'existing') or
                 (isinstance(recursion_limit, int) and
                  recursion_limit > 1)):
            for r in _get_submodules(
                    sm['path'],
                    fulfilled, recursive,
                    (recursion_limit - 1)
                    if isinstance(recursion_limit, int)
                    else recursion_limit,
                    contains,
                    bottomup,
                    set_property,
                    delete_property,
                    refds_path):
                yield r
        if bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres
    if parser is not None:
        # release parser lock manually, auto-cleanup is not reliable in PY3
        parser.release()
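
A quick illustration of the `<...>` template expansion used above, with made-up values and POSIX-style paths; the derived `refds_relname` simply replaces path separators with dashes:

import os

# Made-up submodule record and template string.
sm = {'path': '/ds/sub/one', 'gitmodule_name': 'sub/one'}
refds_relpath = 'sub/one'                      # relpath(sm['path'], refds_path)
val = '<{gitmodule_name}-{refds_relname}>'     # hypothetical '<...>' template
expanded = val[1:-1].format(
    **dict(sm,
           refds_relpath=refds_relpath,
           refds_relname=refds_relpath.replace(os.sep, '-')))
print(expanded)                                # on POSIX: 'sub/one-sub-one'
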