def test_empty_config_value(self):
    """A key with a value must be readable; an empty-valued key raises NoOptionError."""
    reader = GitConfigParser(fixture_path('git_config_with_empty_value'), read_only=True)
    assert reader.get_value('core', 'filemode'), "Should read keys with values"
    with self.assertRaises(cp.NoOptionError):
        reader.get_value('color', 'ui')
def test_complex_aliases(self):
    """A shell-function alias must round-trip unchanged through read and release."""
    memcache = self._to_memcache(fixture_path('.gitconfig'))
    config = GitConfigParser(memcache, read_only=False)
    expected_alias = '"!g() { git rebase -i origin/${1:-master} ; } ; g"'
    self.assertEqual(config.get('alias', 'rbi'), expected_alias)
    config.release()
    # releasing must not have altered the underlying file contents
    pristine = self._to_memcache(fixture_path('.gitconfig'))
    self.assertEqual(memcache.getvalue(), pristine.getvalue())
def get_module_parser(repo):
    """Return a GitConfigParser over the repository's .gitmodules file, already read."""
    from git import GitConfigParser
    modules_file = opj(repo.path, ".gitmodules")
    # TODO: What does constructor of GitConfigParser, in case file doesn't exist?
    #if exists(modules_file):
    parser = GitConfigParser(modules_file)
    parser.read()
    return parser
def migrate(conf: GitConfigParser, repo: Repo, console: Console):
    """Apply every migration whose version is >= the version recorded in the config."""
    parse = packaging.version.parse
    # ensure the bossman section exists before reading/writing version info
    if "bossman" not in conf.sections():
        conf.add_section("bossman")
    console.print("Initializing git configuration [yellow]{}[/]...".format(
        conf._file_or_files))
    current = parse(conf.get_value("bossman", "version", "0.0.0"))
    for version, migration in MIGRATIONS.items():
        if current <= parse(version):
            migration(conf, repo, console)
def predict_local_user(repo):
    """Set local user.name/user.email from a matching 'multi-user' section
    of the global ~/.gitconfig.

    Each 'multi-user' section is expected to carry 'url' (a regex), 'name'
    and 'email'. The first section whose url pattern matches one of the
    repository's remote URLs wins.

    :param repo: a git.Repo whose local config should be updated
    :return: True if a matching section was found and applied, else False
    """
    config = GitConfigParser(
        [os.path.normpath(os.path.expanduser("~/.gitconfig"))], read_only=True
    )
    writer = repo.config_writer()
    is_set = False
    try:
        for sec in config.sections():
            if 'multi-user' not in sec:
                continue
            user = dict(config.items(sec))
            logging.debug("multi-user: %s", str(user))
            if 'url' in user and 'name' in user and 'email' in user:
                for remote in repo.remotes:
                    logging.debug("remote-url: %s", remote.url)
                    prog = re.compile(user['url'])
                    if prog.match(remote.url):
                        logging.info(
                            "%s found in remote url %s", user['url'], remote.url
                        )
                        logging.info(
                            "Setting local user.name to %s", user["name"]
                        )
                        writer.set_value('user', 'name', user['name'])
                        logging.info(
                            "Setting local user.email to %s", user["email"]
                        )
                        writer.set_value('user', 'email', user['email'])
                        is_set = True
                        break
            elif 'url' not in user:
                logging.warning('url not set for %s', sec)
            elif 'name' not in user:
                logging.warning('name not set for %s', sec)
            elif 'email' not in user:
                logging.warning('email not set for %s', sec)
            if is_set:
                break
    finally:
        # Always release the writer so the config lock file is removed.
        # The original only released it on a successful match, leaking the
        # lock (and blocking future writers) whenever nothing matched.
        writer.release()
    return is_set
def test_lock_reentry(self, rw_dir):
    """Re-entering a config writer re-locks the file; outside writers must fail."""
    fpl = os.path.join(rw_dir, 'l')
    gcp = GitConfigParser(fpl, read_only=False)
    with gcp as cw:
        cw.set_value('include', 'some_value', 'a')
    # entering again locks the file again...
    with gcp as cw:
        cw.set_value('include', 'some_other_value', 'b')
        # ...so creating an additional config writer must fail due to exclusive access
        # (assertRaises replaces the long-deprecated failUnlessRaises alias)
        with self.assertRaises(IOError):
            GitConfigParser(fpl, read_only=False)
    # but work when the lock is removed
    with GitConfigParser(fpl, read_only=False):
        assert os.path.exists(fpl)
    # reentering with an existing lock must fail due to exclusive access
    with self.assertRaises(IOError):
        gcp.__enter__()
def test_config_include(self, rw_dir):
    """Exercise [include] resolution: relative/absolute paths, cycles,
    missing targets, and the interaction of merge_includes with write-back."""
    def write_test_value(cw, value):
        # writes section <value> with option 'value' = <value>
        cw.set_value(value, 'value', value)
    # end

    def check_test_value(cr, value):
        assert cr.get_value(value, 'value') == value
    # end

    # PREPARE CONFIG FILE A — includes b (relative), a missing file, and itself
    # both relatively and absolutely (cycle detection must cope with all of these)
    fpa = osp.join(rw_dir, 'a')
    with GitConfigParser(fpa, read_only=False) as cw:
        write_test_value(cw, 'a')
        fpb = osp.join(rw_dir, 'b')
        fpc = osp.join(rw_dir, 'c')
        cw.set_value('include', 'relative_path_b', 'b')
        cw.set_value('include', 'doesntexist', 'foobar')
        cw.set_value('include', 'relative_cycle_a_a', 'a')
        cw.set_value('include', 'absolute_cycle_a_a', fpa)
    assert osp.exists(fpa)

    # PREPARE CONFIG FILE B — cycles back to a, and includes c two ways
    with GitConfigParser(fpb, read_only=False) as cw:
        write_test_value(cw, 'b')
        cw.set_value('include', 'relative_cycle_b_a', 'a')
        cw.set_value('include', 'absolute_cycle_b_a', fpa)
        cw.set_value('include', 'relative_path_c', 'c')
        cw.set_value('include', 'absolute_path_c', fpc)

    # PREPARE CONFIG FILE C — leaf of the include chain
    with GitConfigParser(fpc, read_only=False) as cw:
        write_test_value(cw, 'c')

    with GitConfigParser(fpa, read_only=True) as cr:
        for tv in ('a', 'b', 'c'):
            check_test_value(cr, tv)
        # end for each test to verify
        assert len(cr.items('include')) == 8, "Expected all include sections to be merged"

    # test writable config writers - assure write-back doesn't involve includes
    with GitConfigParser(fpa, read_only=False, merge_includes=True) as cw:
        tv = 'x'
        write_test_value(cw, tv)
    with GitConfigParser(fpa, read_only=True) as cr:
        with self.assertRaises(cp.NoSectionError):
            check_test_value(cr, tv)

    # But can make it skip includes alltogether, and thus allow write-backs
    with GitConfigParser(fpa, read_only=False, merge_includes=False) as cw:
        write_test_value(cw, tv)
    with GitConfigParser(fpa, read_only=True) as cr:
        check_test_value(cr, tv)
def test_lock_reentry(self, rw_dir):
    """Entering the same parser twice re-locks; concurrent writers must be rejected."""
    lock_path = osp.join(rw_dir, 'l')
    parser = GitConfigParser(lock_path, read_only=False)
    with parser as writer:
        writer.set_value('include', 'some_value', 'a')
    # entering again locks the file again...
    with parser as writer:
        writer.set_value('include', 'some_other_value', 'b')
        # ...so creating an additional config writer must fail due to exclusive access
        with self.assertRaises(IOError):
            GitConfigParser(lock_path, read_only=False)
    # but work when the lock is removed
    with GitConfigParser(lock_path, read_only=False):
        assert osp.exists(lock_path)
    # reentering with an existing lock must fail due to exclusive access
    with self.assertRaises(IOError):
        parser.__enter__()
def test_multi_line_config(self):
    """Multi-line values (a ruby merge driver script) must be read back verbatim,
    and comment-laden files must still parse into the expected section count."""
    file_obj = self._to_memcache(fixture_path("git_config_with_comments"))
    config = GitConfigParser(file_obj, read_only=False)
    # build the exact expected multi-line value, line by line
    ev = "ruby -e '\n"
    ev += " system %(git), %(merge-file), %(--marker-size=%L), %(%A), %(%O), %(%B)\n"
    ev += " b = File.read(%(%A))\n"
    ev += " b.sub!(/^<+ .*\\nActiveRecord::Schema\\.define.:version => (\\d+). do\\n=+\\nActiveRecord::Schema\\."
    ev += "define.:version => (\\d+). do\\n>+ .*/) do\n"
    ev += " %(ActiveRecord::Schema.define(:version => #{[$1, $2].max}) do)\n"
    ev += " end\n"
    ev += " File.open(%(%A), %(w)) {|f| f.write(b)}\n"
    ev += " exit 1 if b.include?(%(<)*%L)'"
    assert_equal(config.get('merge "railsschema"', 'driver'), ev)
    assert_equal(config.get('alias', 'lg'),
                 "log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr)%Creset'"
                 " --abbrev-commit --date=relative")
    # the fixture is known to contain exactly 23 sections
    assert len(config.sections()) == 23
def get_author() -> Actor:
    """
    Get the name and email information in the user's .gitconfig.

    :return: the git author information as an :py:class:`~git.util.Actor`
    """
    gitconfig = GitConfigParser(os.path.expanduser('~/.gitconfig'))
    sections = gitconfig.sections()
    assert 'user' in sections, 'No "user" field in .gitconfig'
    # collect all key/value pairs of the [user] section into a plain dict
    info = dict(gitconfig.items('user'))
    assert 'name' in info and 'email' in info, 'Incomplete information (name and/or email) in .gitconfig'
    return Actor(name=info['name'], email=info['email'])
def test_read_write(self):
    """Round-trip fixtures through read/write, then verify exclusive locking
    and immediate write-through of set/set_value while the lock is held."""
    # writer must create the exact same file as the one read before
    for filename in ("git_config", "git_config_global"):
        file_obj = self._to_memcache(fixture_path(filename))
        with GitConfigParser(file_obj, read_only=False) as w_config:
            w_config.read()  # enforce reading
            assert w_config._sections
            w_config.write()  # enforce writing

            # we stripped lines when reading, so the results differ
            assert file_obj.getvalue()
            self.assertEqual(file_obj.getvalue(),
                             self._to_memcache(fixture_path(filename)).getvalue())

            # creating an additional config writer must fail due to exclusive access
            with self.assertRaises(IOError):
                GitConfigParser(file_obj, read_only=False)

            # should still have a lock and be able to make changes
            assert w_config._lock._has_lock()

            # changes should be written right away
            sname = "my_section"
            oname = "mykey"
            val = "myvalue"
            w_config.add_section(sname)
            assert w_config.has_section(sname)
            w_config.set(sname, oname, val)
            assert w_config.has_option(sname, oname)
            assert w_config.get(sname, oname) == val

            # set_value on a fresh section both creates it and types the value
            sname_new = "new_section"
            oname_new = "new_key"
            ival = 10
            w_config.set_value(sname_new, oname_new, ival)
            assert w_config.get_value(sname_new, oname_new) == ival
            file_obj.seek(0)
            r_config = GitConfigParser(file_obj, read_only=True)
            assert r_config.has_section(sname)
            assert r_config.has_option(sname, oname)
            assert r_config.get(sname, oname) == val
def test_rename(self):
    """rename_section rejects missing/reserved names and moves options intact."""
    file_obj = self._to_memcache(fixture_path('git_config'))
    with GitConfigParser(file_obj, read_only=False, merge_includes=False) as writer:
        # renaming a missing section, or onto the reserved 'include' name, must fail
        for bad_source, bad_target in (("doesntexist", "foo"), ("core", "include")):
            with self.assertRaises(ValueError):
                writer.rename_section(bad_source, bad_target)
        new_name = "bee"
        assert writer.rename_section('core', new_name) is writer
        assert not writer.has_section('core')
        assert len(writer.items(new_name)) == 4
def test_rename(self):
    """rename_section rejects missing/reserved names and moves options intact."""
    file_obj = self._to_memcache(fixture_path('git_config'))
    cw = GitConfigParser(file_obj, read_only=False, merge_includes=False)
    # assertRaises replaces the long-deprecated failUnlessRaises alias
    with self.assertRaises(ValueError):
        cw.rename_section("doesntexist", "foo")
    with self.assertRaises(ValueError):
        cw.rename_section("core", "include")
    nn = "bee"
    assert cw.rename_section('core', nn) is cw
    assert not cw.has_section('core')
    assert len(cw.items(nn)) == 4
    cw.release()
def test_includes_order(self):
    """Included files must take precedence as soon as the include is seen."""
    fixtures = [fixture_path(name) for name in ("git_config", "git_config_global")]
    with GitConfigParser(fixtures) as r_config:
        r_config.read()  # enforce reading
        # Simple inclusions, again checking them taking precedence
        assert r_config.get_value('sec', 'var0') == "value0_included"
        # This one should take the git_config_global value since included
        # values must be considered as soon as they get them
        assert r_config.get_value('diff', 'tool') == "meld"
        try:
            assert r_config.get_value('sec', 'var1') == "value1_main"
        except AssertionError:
            raise SkipTest(
                'Known failure -- included values are not in effect right away'
            )
def test_conditional_includes_from_branch_name(self, rw_dir):
    """includeIf "onbranch:..." must match the active branch, including globs."""
    # Initiate mocked branch
    branch = mock.Mock()
    type(branch).name = mock.PropertyMock(return_value="/foo/branch")

    # Initiate mocked repository
    repo = mock.Mock(active_branch=branch)

    # Initiate config files.
    path1 = osp.join(rw_dir, "config1")
    path2 = osp.join(rw_dir, "config2")
    template = "[includeIf \"onbranch:{}\"]\n path={}\n"

    # Ensure that config is included if branch is correct.
    with open(path1, "w") as stream:
        stream.write(template.format("/foo/branch", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]

    # Ensure that config is not included if branch is incorrect.
    with open(path1, "w") as stream:
        stream.write(template.format("incorrect", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert not config._has_includes()
        assert config._included_paths() == []

    # Ensure that config is included with branch using glob pattern.
    with open(path1, "w") as stream:
        stream.write(template.format("/foo/**", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]
def test_conditional_includes_from_branch_name_error(self, rw_dir):
    """onbranch includes are silently skipped when HEAD is detached."""
    # Mock a repository whose active_branch access raises (detached HEAD)
    repo = mock.Mock()
    type(repo).active_branch = mock.PropertyMock(side_effect=TypeError)

    # Write a config carrying an onbranch conditional include
    config_path = osp.join(rw_dir, "config1")
    with open(config_path, "w") as stream:
        stream.write("[includeIf \"onbranch:foo\"]\n path=/path\n")

    # The include must be ignored rather than propagate the error
    with GitConfigParser(config_path, repo=repo) as config:
        assert not config._has_includes()
        assert config._included_paths() == []
def get_user_email(cfg: git.GitConfigParser) -> str:
    '''
    Returns the user email specified in the .gitconfig file

    Parameters:
        cfg (git.GitConfigParser): A config parser object pointing at the
            user's .gitconfig file

    Returns:
        email (str): The user's email address
    '''
    try:
        return cfg.get_value('user', 'email')
    # BUG FIX: "except A or B" evaluates "A or B" to just A, so
    # NoOptionError was never caught; a tuple catches both.
    except (configparser.NoSectionError, configparser.NoOptionError):
        print('WARNING: Git user email not set.')
        return pyip.inputEmail(prompt='Please enter your email: ')
def get_user_name(cfg: git.GitConfigParser) -> str:
    '''
    Returns the user name specified in the .gitconfig file

    Parameters:
        cfg (git.GitConfigParser): A config parser object pointing at the
            user's .gitconfig file

    Returns:
        name (str): The user's name
    '''
    try:
        return cfg.get_value('user', 'name')
    # BUG FIX: "except A or B" evaluates "A or B" to just A, so
    # NoOptionError was never caught; a tuple catches both.
    except (configparser.NoSectionError, configparser.NoOptionError):
        print('WARNING: Git user name not set.')
        return pyip.inputStr(prompt='Please enter your name: ')
def test_single_to_multiple(self):
    """add_value on a previously single-valued option must turn it multi-valued,
    with get_value returning the last value and get_values all of them."""
    file_obj = self._to_memcache(fixture_path('git_config_multiple'))
    with GitConfigParser(file_obj, read_only=False) as cw:
        cw.add_value('section1', 'other_option1', 'other_value1a')
        cw.write()

    file_obj.seek(0)
    cr = GitConfigParser(file_obj, read_only=True)
    # single reads return the last value written
    self.assertEqual(cr.get_value('section1', 'option1'), 'value1b')
    self.assertEqual(cr.get_values('section1', 'option1'), ['value1a', 'value1b'])
    self.assertEqual(cr.get_value('section1', 'other_option1'), 'other_value1a')
    self.assertEqual(cr.get_values('section1', 'other_option1'),
                     ['other_value1', 'other_value1a'])
    # items() flattens to last values; items_all() exposes every value
    self.assertEqual(cr.items('section1'),
                     [('option1', 'value1b'), ('other_option1', 'other_value1a')])
    self.assertEqual(
        cr.items_all('section1'),
        [('option1', ['value1a', 'value1b']),
         ('other_option1', ['other_value1', 'other_value1a'])])
def _parse_gitmodules(dspath):
    """Parse .gitmodules of the dataset at `dspath` into a dict mapping each
    submodule's absolute path to its properties (keys prefixed 'gitmodule_')."""
    gitmodule_path = opj(dspath, ".gitmodules")
    parser = GitConfigParser(gitmodule_path)
    mods = {}
    for sec in parser.sections():
        try:
            modpath = parser.get(sec, 'path')
        except Exception:
            # a section without a 'path' cannot describe a usable submodule
            lgr.debug("Failed to get '%s.path', skipping section", sec)
            continue
        if not modpath or not sec.startswith('submodule '):
            continue
        modpath = normpath(opj(dspath, modpath))
        # collect all remaining options, excluding internals and the path itself
        modprops = {'gitmodule_{}'.format(opt): parser.get_value(sec, opt)
                    for opt in parser.options(sec)
                    if not (opt.startswith('__') or opt == 'path')}
        # section is 'submodule "NAME"' -> strip the fixed prefix and quotes
        modprops['gitmodule_name'] = sec[11:-1]
        mods[modpath] = modprops
    # make sure we let go of any resources held be the parser
    # we cannot rely on __del__
    parser.release()
    return mods
def _parse_gitmodules(dspath):
    """Parse .gitmodules of the dataset at `dspath` into a dict mapping each
    submodule's absolute path to its properties (keys prefixed 'gitmodule_')."""
    gitmodule_path = opj(dspath, ".gitmodules")
    parser = GitConfigParser(gitmodule_path)
    mods = {}
    for sec in parser.sections():
        # default=0 yields a falsy sentinel when the section has no 'path'
        modpath = parser.get_value(sec, 'path', default=0)
        if not modpath or not sec.startswith('submodule '):
            continue
        modpath = normpath(opj(dspath, modpath))
        # collect all remaining options, excluding internals and the path itself
        modprops = {'gitmodule_{}'.format(opt): parser.get_value(sec, opt)
                    for opt in parser.options(sec)
                    if not (opt.startswith('__') or opt == 'path')}
        # section is 'submodule "NAME"' -> strip the fixed prefix and quotes
        modprops['gitmodule_name'] = sec[11:-1]
        mods[modpath] = modprops
    # FIX: release parser resources explicitly -- the original never did,
    # and relying on __del__ for cleanup is not reliable
    parser.release()
    return mods
def test_base(self):
    """Read every option from two fixture files; verify typing, read-only
    enforcement, and default handling of get_value."""
    path_repo = fixture_path("git_config")
    path_global = fixture_path("git_config_global")
    r_config = GitConfigParser([path_repo, path_global], read_only=True)
    assert r_config.read_only
    num_sections = 0
    num_options = 0

    # test reader methods
    assert r_config._is_initialized is False
    for section in r_config.sections():
        num_sections += 1
        for option in r_config.options(section):
            num_options += 1
            val = r_config.get(section, option)
            val_typed = r_config.get_value(section, option)
            assert isinstance(val_typed, (bool, int, float, ) + string_types)
            assert val
            assert "\n" not in option
            assert "\n" not in val

            # writing must fail (assertRaises replaces deprecated failUnlessRaises)
            with self.assertRaises(IOError):
                r_config.set(section, option, None)
            with self.assertRaises(IOError):
                r_config.remove_option(section, option)
        # END for each option
        with self.assertRaises(IOError):
            r_config.remove_section(section)
    # END for each section
    assert num_sections and num_options
    assert r_config._is_initialized is True

    # get value which doesnt exist, with default
    default = "my default value"
    assert r_config.get_value("doesnt", "exist", default) == default

    # it raises if there is no default though
    with self.assertRaises(cp.NoSectionError):
        r_config.get_value("doesnt", "exist")
def test_read_write(self):
    """Round-trip fixtures through read/write, then verify exclusive locking
    and immediate write-through of set/set_value while the lock is held."""
    # writer must create the exact same file as the one read before
    for filename in ("git_config", "git_config_global"):
        file_obj = self._to_memcache(fixture_path(filename))
        with GitConfigParser(file_obj, read_only=False) as w_config:
            w_config.read()  # enforce reading
            assert w_config._sections
            w_config.write()  # enforce writing

            # we stripped lines when reading, so the results differ
            assert file_obj.getvalue()
            self.assertEqual(
                file_obj.getvalue(),
                self._to_memcache(fixture_path(filename)).getvalue())

            # creating an additional config writer must fail due to exclusive access
            with self.assertRaises(IOError):
                GitConfigParser(file_obj, read_only=False)

            # should still have a lock and be able to make changes
            assert w_config._lock._has_lock()

            # changes should be written right away
            sname = "my_section"
            oname = "mykey"
            val = "myvalue"
            w_config.add_section(sname)
            assert w_config.has_section(sname)
            w_config.set(sname, oname, val)
            assert w_config.has_option(sname, oname)
            assert w_config.get(sname, oname) == val

            # set_value on a fresh section both creates it and types the value
            sname_new = "new_section"
            oname_new = "new_key"
            ival = 10
            w_config.set_value(sname_new, oname_new, ival)
            assert w_config.get_value(sname_new, oname_new) == ival
            file_obj.seek(0)
            r_config = GitConfigParser(file_obj, read_only=True)
            assert r_config.has_section(sname)
            assert r_config.has_option(sname, oname)
            assert r_config.get(sname, oname) == val
def test_multiple_values(self):
    """Verify multi-valued option access: get vs get_values vs items/items_all,
    plus default handling for missing keys."""
    file_obj = self._to_memcache(fixture_path('git_config_multiple'))
    with GitConfigParser(file_obj, read_only=False) as cw:
        self.assertEqual(cw.get('section0', 'option0'), 'value0')
        self.assertEqual(cw.get_values('section0', 'option0'), ['value0'])
        self.assertEqual(cw.items('section0'), [('option0', 'value0')])

        # Where there are multiple values, "get" returns the last.
        self.assertEqual(cw.get('section1', 'option1'), 'value1b')
        self.assertEqual(cw.get_values('section1', 'option1'),
                         ['value1a', 'value1b'])
        self.assertEqual(cw.items('section1'),
                         [('option1', 'value1b'),
                          ('other_option1', 'other_value1')])
        # items_all exposes every value per option, in file order
        self.assertEqual(cw.items_all('section1'),
                         [('option1', ['value1a', 'value1b']),
                          ('other_option1', ['other_value1'])])
        # a missing option raises without a default...
        with self.assertRaises(KeyError):
            cw.get_values('section1', 'missing')
        # ...and wraps the default (of any type) in a list otherwise
        self.assertEqual(cw.get_values('section1', 'missing', 1), [1])
        self.assertEqual(cw.get_values('section1', 'missing', 's'), ['s'])
def from_git(self, config_level, repo=None):
    """Read config values from git configuration

    :param config_level: One of the following values
        system = system wide configuration
        global = user level configuration
        repository = configuration file for a repository (`repo` must be provided)
    :type config_level: str
    :param repo: Repo from which to retrieve config when `config_level`
        is set to 'repository'
    :type repo: Repo
    """
    assert config_level != 'repository' or isinstance(repo, Repo), \
        "When config_level is set to \'repository\', a valid Repo must be provided as well"

    # record which levels were consulted, for later inspection
    self.attempted_git_config_level.append(config_level)
    config = GitConfigParser(Repo._get_config_path(repo, config_level), read_only=True)
    # config_options entries: (attr_name, ..., git_option); only entries with a
    # third element map to a git config key
    for opt in self.config_options:
        if len(opt) > 2:
            self._set_attr_from_git(opt[0], config, opt[2])
def test_base(self):
    """Read every option from two fixture files; verify typing, read-only
    enforcement, and default handling of get_value."""
    path_repo = fixture_path("git_config")
    path_global = fixture_path("git_config_global")
    r_config = GitConfigParser([path_repo, path_global], read_only=True)
    assert r_config.read_only
    num_sections = 0
    num_options = 0

    # test reader methods
    assert r_config._is_initialized is False
    for section in r_config.sections():
        num_sections += 1
        for option in r_config.options(section):
            num_options += 1
            val = r_config.get(section, option)
            val_typed = r_config.get_value(section, option)
            assert isinstance(val_typed, (bool, int, float, ) + string_types)
            assert val
            assert "\n" not in option
            assert "\n" not in val

            # writing must fail (assertRaises replaces deprecated failUnlessRaises)
            with self.assertRaises(IOError):
                r_config.set(section, option, None)
            with self.assertRaises(IOError):
                r_config.remove_option(section, option)
        # END for each option
        with self.assertRaises(IOError):
            r_config.remove_section(section)
    # END for each section
    assert num_sections and num_options
    assert r_config._is_initialized is True

    # get value which doesnt exist, with default
    default = "my default value"
    assert r_config.get_value("doesnt", "exist", default) == default

    # it raises if there is no default though
    with self.assertRaises(cp.NoSectionError):
        r_config.get_value("doesnt", "exist")
def _get_submodules(dspath, fulfilled, recursive, recursion_limit, contains,
                    bottomup, set_property, delete_property, refds_path):
    """Yield status dicts for submodules of `dspath`, optionally recursing,
    filtering by containment, and mutating .gitmodules properties in-flight.

    Results are yielded top-down unless `bottomup` is set; `fulfilled`
    filters on whether the submodule checkout actually exists.
    """
    if not GitRepo.is_valid_repo(dspath):
        return
    modinfo = _parse_gitmodules(dspath)
    # write access parser
    parser = None
    # TODO bring back in more global scope from below once segfaults are
    # figured out
    #if set_property or delete_property:
    #    gitmodule_path = opj(dspath, ".gitmodules")
    #    parser = GitConfigParser(
    #        gitmodule_path, read_only=False, merge_includes=False)
    #    parser.read()
    # put in giant for-loop to be able to yield results before completion
    for sm in _parse_git_submodules(dspath):
        if contains and not path_startswith(contains, sm['path']):
            # we are not looking for this subds, because it doesn't
            # match the target path
            continue
        # enrich the plain submodule record with .gitmodules properties
        sm.update(modinfo.get(sm['path'], {}))
        if set_property or delete_property:
            gitmodule_path = opj(dspath, ".gitmodules")
            parser = GitConfigParser(gitmodule_path, read_only=False, merge_includes=False)
            parser.read()
            # do modifications now before we read the info out for reporting
            # use 'submodule "NAME"' section ID style as this seems to be the default
            submodule_section = 'submodule "{}"'.format(sm['gitmodule_name'])
            # first deletions
            for dprop in assure_list(delete_property):
                parser.remove_option(submodule_section, dprop)
                # also kick from the info we just read above
                sm.pop('gitmodule_{}'.format(dprop), None)
            # and now setting values
            for sprop in assure_list(set_property):
                prop, val = sprop
                if val.startswith('<') and val.endswith('>') and '{' in val:
                    # expand template string
                    val = val[1:-1].format(
                        **dict(sm, refds_relpath=relpath(sm['path'], refds_path),
                               refds_relname=relpath(sm['path'], refds_path).
                               replace(os.sep, '-')))
                parser.set_value(submodule_section, prop, val)
                # also add to the info we just read above
                sm['gitmodule_{}'.format(prop)] = val
            # commit the .gitmodules modification to the dataset
            Dataset(dspath).add(
                '.gitmodules', to_git=True,
                message='[DATALAD] modified subdataset properties')
            # let go of resources, locks, ...
            parser.release()
        #common = commonprefix((with_pathsep(subds), with_pathsep(path)))
        #if common.endswith(sep) and common == with_pathsep(subds):
        #    candidates.append(common)
        subdsres = get_status_dict('subdataset', status='ok', type='dataset', logger=lgr)
        subdsres.update(sm)
        subdsres['parentds'] = dspath
        if not bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres

        # expand list with child submodules. keep all paths relative to parent
        # and convert jointly at the end
        if recursive and \
                (recursion_limit in (None, 'existing') or
                 (isinstance(recursion_limit, int) and recursion_limit > 1)):
            for r in _get_submodules(
                    sm['path'], fulfilled, recursive,
                    (recursion_limit - 1)
                    if isinstance(recursion_limit, int)
                    else recursion_limit,
                    contains, bottomup, set_property, delete_property,
                    refds_path):
                yield r
        if bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres
    if parser is not None:
        # release parser lock manually, auto-cleanup is not reliable in PY3
        parser.release()
def test_config_include(self, rw_dir):
    """Exercise [include] resolution: relative/absolute paths, cycles,
    missing targets, and the interaction of merge_includes with write-back."""
    def write_test_value(cw, value):
        # writes section <value> with option 'value' = <value>
        cw.set_value(value, 'value', value)
    # end

    def check_test_value(cr, value):
        assert cr.get_value(value, 'value') == value
    # end

    # PREPARE CONFIG FILE A — includes b (relative), a missing file, and itself
    # both relatively and absolutely (cycle detection must cope with all of these)
    fpa = os.path.join(rw_dir, 'a')
    cw = GitConfigParser(fpa, read_only=False)
    write_test_value(cw, 'a')

    fpb = os.path.join(rw_dir, 'b')
    fpc = os.path.join(rw_dir, 'c')
    cw.set_value('include', 'relative_path_b', 'b')
    cw.set_value('include', 'doesntexist', 'foobar')
    cw.set_value('include', 'relative_cycle_a_a', 'a')
    cw.set_value('include', 'absolute_cycle_a_a', fpa)
    cw.release()
    assert os.path.exists(fpa)

    # PREPARE CONFIG FILE B — cycles back to a, and includes c two ways
    cw = GitConfigParser(fpb, read_only=False)
    write_test_value(cw, 'b')
    cw.set_value('include', 'relative_cycle_b_a', 'a')
    cw.set_value('include', 'absolute_cycle_b_a', fpa)
    cw.set_value('include', 'relative_path_c', 'c')
    cw.set_value('include', 'absolute_path_c', fpc)
    cw.release()

    # PREPARE CONFIG FILE C — leaf of the include chain
    cw = GitConfigParser(fpc, read_only=False)
    write_test_value(cw, 'c')
    cw.release()

    cr = GitConfigParser(fpa, read_only=True)
    for tv in ('a', 'b', 'c'):
        check_test_value(cr, tv)
    # end for each test to verify
    assert len(cr.items('include')) == 8, "Expected all include sections to be merged"
    cr.release()

    # test writable config writers - assure write-back doesn't involve includes
    cw = GitConfigParser(fpa, read_only=False, merge_includes=True)
    tv = 'x'
    write_test_value(cw, tv)
    cw.release()

    cr = GitConfigParser(fpa, read_only=True)
    self.failUnlessRaises(cp.NoSectionError, check_test_value, cr, tv)
    cr.release()

    # But can make it skip includes alltogether, and thus allow write-backs
    cw = GitConfigParser(fpa, read_only=False, merge_includes=False)
    write_test_value(cw, tv)
    cw.release()

    cr = GitConfigParser(fpa, read_only=True)
    check_test_value(cr, tv)
    cr.release()
def get_grader_name():
    """Return the ``user.name`` entry from the grader's global git config."""
    config_path = path.normpath(path.expanduser("~/.gitconfig"))
    parser = GitConfigParser([config_path], read_only=True)
    return parser.get_value("user", "name")
def required(conf: GitConfigParser):
    """Return True when the recorded config version lags behind any known migration."""
    current = packaging.version.parse(conf.get_value("bossman", "version", "0.0.0"))
    for version in MIGRATIONS:
        if current <= packaging.version.parse(version):
            return True
    return False
def test_complex_aliases(self):
    """A shell-function alias must round-trip unchanged through read and release."""
    buffer_obj = self._to_memcache(fixture_path('.gitconfig'))
    parser = GitConfigParser(buffer_obj, read_only=False)
    self.assertEqual(parser.get('alias', 'rbi'),
                     '"!g() { git rebase -i origin/${1:-master} ; } ; g"')
    parser.release()
    # releasing must not have altered the underlying file contents
    self.assertEqual(buffer_obj.getvalue(),
                     self._to_memcache(fixture_path('.gitconfig')).getvalue())
def __0_25_1(conf: GitConfigParser, repo: Repo, console: Console):
    """
    Until v0.25.1 inclusive, bossman init changed the git config to add explicit
    push/fetch refspecs to force git pull/push to also fetch/push notes.
    This behaviour was changed in favour of an explicit pull and push as part of
    application logic when relevant (e.g. before bossman status/prerelease/release,
    before an after bossman apply).
    The change was largely motivated by the fact that adding the fetch/push refspecs
    to the config appears to break normal branch tracking, forcing an explicit
    `git push origin ref`, not great UX.
    Because notes are critical to bossman, it's also better not to rely on the user
    remembering to push after apply.
    """
    notes_refspec = "+refs/notes/*:refs/notes/*"
    for section in conf.sections():
        if section.startswith("remote"):
            # strip the notes refspec from 'push' entries, keeping all others
            push_refspecs = conf.get_values(section, "push", [])
            if notes_refspec in push_refspecs:
                # remove all values, then re-add everything except the notes refspec
                conf.remove_option(section, "push")
                push_refspecs = list(refspec
                                     for refspec in push_refspecs
                                     if refspec != notes_refspec)
                for refspec in push_refspecs:
                    conf.add_value(section, "push", refspec)
                console.print(r"[red]-[/] \[{}] push: {}".format(
                    section, notes_refspec))
            # same treatment for 'fetch' entries
            fetch_refspecs = conf.get_values(section, "fetch", [])
            if notes_refspec in fetch_refspecs:
                conf.remove_option(section, "fetch")
                fetch_refspecs = list(refspec
                                      for refspec in fetch_refspecs
                                      if refspec != notes_refspec)
                for refspec in fetch_refspecs:
                    conf.add_value(section, "fetch", refspec)
                console.print(r"[red]-[/] \[{}] fetch: {}".format(
                    section, notes_refspec))
def test_conditional_includes_from_git_dir(self, rw_dir):
    """includeIf "gitdir:..."/"gitdir/i:..." must match against the repo's
    git_dir, honoring case sensitivity, globs, and directory-hierarchy matches."""
    # Initiate repository path
    git_dir = osp.join(rw_dir, "target1", "repo1")
    os.makedirs(git_dir)

    # Initiate mocked repository
    repo = mock.Mock(git_dir=git_dir)

    # Initiate config files.
    path1 = osp.join(rw_dir, "config1")
    path2 = osp.join(rw_dir, "config2")
    template = "[includeIf \"{}:{}\"]\n path={}\n"

    with open(path1, "w") as stream:
        stream.write(template.format("gitdir", git_dir, path2))

    # Ensure that config is ignored if no repo is set.
    with GitConfigParser(path1) as config:
        assert not config._has_includes()
        assert config._included_paths() == []

    # Ensure that config is included if path is matching git_dir.
    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]

    # Ensure that config is ignored if case is incorrect.
    with open(path1, "w") as stream:
        stream.write(template.format("gitdir", git_dir.upper(), path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert not config._has_includes()
        assert config._included_paths() == []

    # Ensure that config is included if case is ignored ("gitdir/i").
    with open(path1, "w") as stream:
        stream.write(template.format("gitdir/i", git_dir.upper(), path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]

    # Ensure that config is included with path using glob pattern.
    with open(path1, "w") as stream:
        stream.write(template.format("gitdir", "**/repo1", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]

    # Ensure that config is ignored if path is not matching git_dir.
    with open(path1, "w") as stream:
        stream.write(template.format("gitdir", "incorrect", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert not config._has_includes()
        assert config._included_paths() == []

    # Ensure that config is included if path in hierarchy.
    with open(path1, "w") as stream:
        stream.write(template.format("gitdir", "target1/", path2))

    with GitConfigParser(path1, repo=repo) as config:
        assert config._has_includes()
        assert config._included_paths() == [("path", path2)]
def _get_submodules(dspath, fulfilled, recursive, recursion_limit, contains,
                    bottomup, set_property, delete_property, refds_path):
    """Yield status dicts for submodules of `dspath`, optionally recursing,
    filtering by containment, and mutating .gitmodules properties in-flight.

    Results are yielded top-down unless `bottomup` is set; `fulfilled`
    filters on whether the submodule checkout actually exists.
    """
    if not GitRepo.is_valid_repo(dspath):
        return
    modinfo = _parse_gitmodules(dspath)
    # write access parser
    parser = None
    # TODO bring back in more global scope from below once segfaults are
    # figured out
    #if set_property or delete_property:
    #    gitmodule_path = opj(dspath, ".gitmodules")
    #    parser = GitConfigParser(
    #        gitmodule_path, read_only=False, merge_includes=False)
    #    parser.read()
    # put in giant for-loop to be able to yield results before completion
    for sm in _parse_git_submodules(dspath):
        if contains and not path_startswith(contains, sm['path']):
            # we are not looking for this subds, because it doesn't
            # match the target path
            continue
        # enrich the plain submodule record with .gitmodules properties
        sm.update(modinfo.get(sm['path'], {}))
        if set_property or delete_property:
            gitmodule_path = opj(dspath, ".gitmodules")
            parser = GitConfigParser(
                gitmodule_path, read_only=False, merge_includes=False)
            parser.read()
            # do modifications now before we read the info out for reporting
            # use 'submodule "NAME"' section ID style as this seems to be the default
            submodule_section = 'submodule "{}"'.format(sm['gitmodule_name'])
            # first deletions
            for dprop in assure_list(delete_property):
                parser.remove_option(submodule_section, dprop)
                # also kick from the info we just read above
                sm.pop('gitmodule_{}'.format(dprop), None)
            # and now setting values
            for sprop in assure_list(set_property):
                prop, val = sprop
                if val.startswith('<') and val.endswith('>') and '{' in val:
                    # expand template string
                    val = val[1:-1].format(
                        **dict(
                            sm,
                            refds_relpath=relpath(sm['path'], refds_path),
                            refds_relname=relpath(sm['path'], refds_path).replace(os.sep, '-')))
                parser.set_value(
                    submodule_section,
                    prop,
                    val)
                # also add to the info we just read above
                sm['gitmodule_{}'.format(prop)] = val
            # commit the .gitmodules modification to the dataset
            Dataset(dspath).add(
                '.gitmodules', to_git=True,
                message='[DATALAD] modified subdataset properties')
            # let go of resources, locks, ...
            parser.release()
        #common = commonprefix((with_pathsep(subds), with_pathsep(path)))
        #if common.endswith(sep) and common == with_pathsep(subds):
        #    candidates.append(common)
        subdsres = get_status_dict(
            'subdataset',
            status='ok',
            type='dataset',
            logger=lgr)
        subdsres.update(sm)
        subdsres['parentds'] = dspath
        if not bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres

        # expand list with child submodules. keep all paths relative to parent
        # and convert jointly at the end
        if recursive and \
                (recursion_limit in (None, 'existing') or
                 (isinstance(recursion_limit, int) and recursion_limit > 1)):
            for r in _get_submodules(
                    sm['path'],
                    fulfilled, recursive,
                    (recursion_limit - 1)
                    if isinstance(recursion_limit, int)
                    else recursion_limit,
                    contains,
                    bottomup,
                    set_property,
                    delete_property,
                    refds_path):
                yield r
        if bottomup and \
                (fulfilled is None or
                 GitRepo.is_valid_repo(sm['path']) == fulfilled):
            yield subdsres
    if parser is not None:
        # release parser lock manually, auto-cleanup is not reliable in PY3
        parser.release()
def config(self, item):
    """
    Get a variable from the git config file under the meta section

    :param item: option name to read from the [meta] section
    :return: the typed value as returned by GitConfigParser.get_value
    """
    # expanduser is portable; reading environ['HOME'] directly raises
    # KeyError on platforms (e.g. Windows) where HOME is not set
    from os.path import expanduser, join
    config = GitConfigParser(join(expanduser("~"), ".gitconfig"))
    return config.get_value('meta', item)
DEVELOPERS = { "kreczko": 'luke', "benkrikler": 'ben', } # LZ has no public repositories at the moment, hence the two are the same GITHUB_BASE = '[email protected]:' GITHUB_SSH = '[email protected]:' PROJECTS = { 'BACCARAT': GITHUB_BASE + "sim/BACCARAT.git", 'DER': GITHUB_BASE + "sim/ElectronicsSimulation.git", 'TDRAnalysis': GITHUB_BASE + "sim/TDRAnalysis.git", 'PhotonDetection': GITHUB_BASE + "physics/PhotonDetection.git", #'TDRScience': GITHUB_BASE + "TDRScience.git", # this one is 700 MB large } globalconfig = GitConfigParser( [os.path.normpath(os.path.expanduser("~/.gitconfig"))], read_only=True) USER = globalconfig.get('user', 'name') DEV_PATH = os.environ.get('DEV_PATH') for alias, git_url in six.iteritems(PROJECTS): repo_dir = os.path.join(DEV_PATH, alias) if os.path.exists(repo_dir): print(">> Repo {0} already exists".format(repo_dir)) continue print('>> Cloning {0}'.format(git_url)) Repo.clone_from(git_url, repo_dir) repo = Repo(repo_dir) # rename origin repo.remotes.origin.rename('upstream') group = re.findall(':(.*?)/', git_url, re.DOTALL)[0]
def cli(ctx, action, config_file):
    # Load the user-supplied config file both as a git-style config (read-only)
    # and as a raw parsed list of its entries.
    globalconfig = GitConfigParser([os.path.expanduser(config_file)], read_only=True)
    ini_file_list = parse_file(os.path.expanduser(config_file)).asList()
    # NOTE(review): globalconfig and ini_file_list are not used below in this
    # snippet — confirm whether later code consumes them or they can be removed
    ctx.log(action)
def _get_gitmodule_parser(dspath):
    """Get a parser instance for write access"""
    # writable parser over the dataset's .gitmodules; includes are irrelevant here
    parser = GitConfigParser(
        opj(dspath, ".gitmodules"), read_only=False, merge_includes=False)
    parser.read()
    return parser