def get_tags(repo):
    """Return a mapping of commit -> tag name for every valid tag ref.

    Only refs under 'refs/tags/' whose short name matches VALID_TAG are
    included; the tag name is the basename of the ref.
    """
    prefix = 'refs/tags/'
    tags = {}
    for tag_ref, commit in repo.get_refs().items():
        if not tag_ref.startswith(prefix):
            continue
        if VALID_TAG.match(tag_ref[len(prefix):]):
            tags[commit] = os.path.basename(tag_ref)
    return tags
def get_tags(repo):
    """Return {commit: tag-basename} for every valid tag ref in *repo*.

    Ref names come back from dulwich as bytes, so each one is decoded as
    UTF-8 before matching against VALID_TAG.
    """
    decoded = (
        (name.decode('utf-8'), sha)
        for name, sha in repo.get_refs().items()
    )
    return {
        sha: os.path.basename(name)
        for name, sha in decoded
        if name.startswith('refs/tags/')
        and VALID_TAG.match(name[len('refs/tags/'):])
    }
def get_tags(repo):
    """Return {commit-sha: tag-basename} for valid tags in *repo*.

    Both the ref name and the commit id are normalized through _to_str
    before use.
    """
    prefix = 'refs/tags/'
    return {
        _to_str(sha): os.path.basename(name)
        for name, sha in (
            (_to_str(raw_name), raw_sha)
            for raw_name, raw_sha in repo.get_refs().items()
        )
        if name.startswith(prefix) and VALID_TAG.match(name[len(prefix):])
    }
def _has_stable_branches():
    """Return True if the enclosing git checkout has any remote
    'origin/stable*' branches, False otherwise (including when not
    inside a git repository at all)."""
    try:
        repo = dulwich.repo.Repo.discover()
    except dulwich.repo.NotGitRepository:
        # Not running from within a git checkout.
        return False
    return any(
        name.decode('utf-8').startswith('refs/remotes/origin/stable')
        for name in repo.get_refs()
    )
def build_relatives(repo):
    """Build parent/child adjacency maps over the commit graph.

    Walks every commit reachable from the local branch heads
    ('refs/heads/*') and returns a tuple ``(children, parents)`` where
    ``children[sha]`` is the set of commits having *sha* as a parent and
    ``parents[sha]`` is the set of *sha*'s parents. Root commits do not
    appear as keys of ``parents``; leaf commits do not appear as keys of
    ``children``.
    """
    children = {}
    parents = {}
    branch_tips = [
        sha for name, sha in repo.get_refs().items()
        if name.startswith(b'refs/heads/')
    ]
    for entry in repo.get_walker(include=branch_tips):
        commit = entry.commit
        for parent in commit.parents:
            parents.setdefault(commit.id, set()).add(parent)
            children.setdefault(parent, set()).add(commit.id)
    return children, parents
def _envs_dulwich(repo, base_branch):
    '''
    Check the refs and return a list of the ones which can be used as salt
    environments.
    '''
    environments = set()
    for ref in _dulwich_env_refs(repo.get_refs()):
        # Strip the leading 'refs/' and split once, giving pairs such as
        # ('heads', 'master') or ('tags', 'v1.0')
        kind, name = ref[5:].split('/', 1)
        if kind == 'heads':
            # The configured base branch is always exposed as 'base'
            environments.add('base' if name == base_branch else name)
        elif kind == 'tags':
            environments.add(name)
    return environments
def _envs_dulwich(repo, base_branch):
    """
    Check the refs and return a list of the ones which can be used as salt
    environments.
    """
    environments = set()
    for ref in _dulwich_env_refs(repo.get_refs()):
        # ref looks like 'refs/heads/master'; drop 'refs/' and split once
        kind, name = ref[5:].split("/", 1)
        # Slashes inside branch/tag names become underscores in env names
        name = name.replace("/", "_")
        if kind == "heads" and name == base_branch:
            # The configured base branch is always exposed as 'base'
            name = "base"
        if kind in ("heads", "tags") and _env_is_exposed(name):
            environments.add(name)
    return environments
def _get_other_versions(app):
    """Collect the names of other published series for the version
    dropdown, most recent first.

    Series are discovered from the git refs: any 'origin/stable*' remote
    branch, plus any tag ending in '-eol' (end-of-life series). Returns
    an empty list when the feature is disabled or no git repo is found.
    """
    options = app.config.html_theme_options
    if not options.get('show_other_versions', False):
        return []
    try:
        repo = dulwich.repo.Repo.discover()
    except dulwich.repo.NotGitRepository:
        return []
    series_names = []
    for raw_ref in repo.get_refs().keys():
        name = raw_ref.decode('utf-8')
        if name.startswith('refs/remotes/origin/stable'):
            series_names.append(name.rpartition('/')[-1])
        elif name.startswith('refs/tags/') and name.endswith('-eol'):
            # Strip the trailing '-eol' suffix from the tag name.
            series_names.append(name.rpartition('/')[-1][:-4])
    series_names.sort()
    # NOTE(dhellmann): Given when this feature was implemented, we
    # assume that the earliest version we can link to is for
    # mitaka. Projects that have older docs online can set the option
    # to indicate another start point. Projects that come later should
    # automatically include everything they actually have available
    # because the start point is not present in the list.
    earliest = options.get('earliest_published_series', 'mitaka')
    if earliest and earliest in series_names:
        interesting = series_names[series_names.index(earliest):]
    else:
        interesting = series_names
    # Most recent at the top of the dropdown; the "latest" release is
    # prepended by the theme itself, so it is not added here.
    return list(reversed(interesting))
def get_root(repo):
    """
    Retrieve the hash of the repo root to uniquely identify the git
    repository
    """
    # An empty repository has no refs and therefore no root commit.
    if not repo.get_refs():
        return None
    last_entry = None
    # get_walker() needs at least HEAD to exist; exhaust the lazy walk so
    # last_entry ends up holding the final (root) commit entry.
    for last_entry in repo.get_walker():
        pass
    assert last_entry is not None
    # SHA should always be valid utf-8
    return to_utf8(last_entry.commit.id)
def get_refs(repo):
    """Map each commit sha to the set of ref names pointing at it.

    The commit sha itself is always a member of its own set, so every
    value contains the sha plus zero or more ref names.
    """
    by_commit = defaultdict(set)
    for name, sha in repo.get_refs().items():
        by_commit[sha].update((sha, name))
    return by_commit
def update():
    '''
    Execute a git fetch on all of the repos
    '''
    # data for the fileserver event
    data = {'changed': False, 'backend': 'gitfs'}
    provider = _get_provider()
    pid = os.getpid()
    data['changed'] = purge_cache()
    for repo_conf in init():
        repo = repo_conf['repo']
        # Each provider exposes its remote and working directory through a
        # different attribute.
        if provider == 'gitpython':
            origin = repo.remotes[0]
            working_dir = repo.working_dir
        elif provider == 'pygit2':
            origin = repo.remotes[0]
            working_dir = repo.workdir
        elif provider == 'dulwich':
            # origin is just a uri here, there is no origin object
            origin = repo_conf['uri']
            working_dir = repo.path
        # Advertise that this PID is updating the checkout via a lock file.
        lk_fn = os.path.join(working_dir, 'update.lk')
        with salt.utils.fopen(lk_fn, 'w+') as fp_:
            fp_.write(str(pid))
        try:
            if provider == 'gitpython':
                # A fetch entry with old_commit set means the ref moved.
                for fetch in origin.fetch():
                    if fetch.old_commit is not None:
                        data['changed'] = True
            elif provider == 'pygit2':
                fetch = origin.fetch()
                if fetch.get('received_objects', 0):
                    data['changed'] = True
            elif provider == 'dulwich':
                client, path = \
                    dulwich.client.get_transport_and_path_from_url(
                        origin, thin_packs=True
                    )
                # Snapshot refs before the fetch so changes can be detected.
                refs_pre = repo.get_refs()
                try:
                    refs_post = client.fetch(path, repo)
                except KeyError:
                    # NOTE(review): a KeyError from dulwich here is treated
                    # as cache corruption; the local checkout is removed so
                    # the next update can re-clone it.
                    log.critical(
                        'Local repository cachedir {0!r} (corresponding '
                        'remote: {1}) has been corrupted. Salt will now '
                        'attempt to remove the local checkout to allow it to '
                        'be re-initialized in the next fileserver cache '
                        'update.'
                        .format(repo_conf['cachedir'], repo_conf['uri'])
                    )
                    try:
                        salt.utils.rm_rf(repo_conf['cachedir'])
                    except OSError as exc:
                        log.critical(
                            'Unable to remove {0!r}: {1}'
                            .format(repo_conf['cachedir'], exc)
                        )
                    continue
                if refs_post is None:
                    # Empty repository
                    log.warning(
                        'gitfs remote {0!r} is an empty repository and will '
                        'be skipped.'.format(origin)
                    )
                    continue
                if refs_pre != refs_post:
                    data['changed'] = True
                    # Update local refs
                    for ref in _dulwich_env_refs(refs_post):
                        repo[ref] = refs_post[ref]
                    # Prune stale refs
                    for ref in repo.get_refs():
                        if ref not in refs_post:
                            del repo[ref]
        except Exception as exc:
            # Best-effort: a failed fetch for one remote must not abort the
            # update of the remaining remotes.
            log.warning(
                'Exception caught while fetching: {0}'.format(exc)
            )
        try:
            os.remove(lk_fn)
        except (IOError, OSError):
            pass
    # Rebuild the serialized environment cache when anything changed or the
    # cache file is missing.
    env_cache = os.path.join(__opts__['cachedir'], 'gitfs/envs.p')
    if data.get('changed', False) is True or not os.path.isfile(env_cache):
        new_envs = envs(ignore_cache=True)
        serial = salt.payload.Serial(__opts__)
        with salt.utils.fopen(env_cache, 'w+') as fp_:
            fp_.write(serial.dumps(new_envs))
            log.trace('Wrote env cache data to {0}'.format(env_cache))
    # if there is a change, fire an event
    if __opts__.get('fileserver_events', False):
        event = salt.utils.event.MasterEvent(__opts__['sock_dir'])
        event.fire_event(data, tagify(['gitfs', 'update'], prefix='fileserver'))
    try:
        salt.fileserver.reap_fileserver_cache_dir(
            os.path.join(__opts__['cachedir'], 'gitfs/hash'),
            find_file
        )
    except (IOError, OSError):
        # Hash file won't exist if no files have yet been served up
        pass
def _get_tree_dulwich(repo, short):
    '''
    Return a dulwich.objects.Tree object if the branch/tag/SHA is found,
    otherwise None
    '''
    refs = repo.get_refs()
    # Sorting ensures we check heads (branches) before tags
    for ref in sorted(_dulwich_env_refs(refs)):
        # ref will be something like 'refs/heads/master'. Split with
        # maxsplit=1 so branch/tag names that themselves contain a slash do
        # not raise ValueError (and to stay consistent with _envs_dulwich).
        rtype, rspec = ref[5:].split('/', 1)
        if rspec == short:
            if rtype == 'heads':
                commit = repo.get_object(refs[ref])
            elif rtype == 'tags':
                tag = repo.get_object(refs[ref])
                if isinstance(tag, dulwich.objects.Tag):
                    # Tag.get_object() returns a 2-tuple, the 2nd element of
                    # which is the commit SHA to which the tag refers
                    commit = repo.get_object(tag.object[1])
                elif isinstance(tag, dulwich.objects.Commit):
                    commit = tag
                else:
                    log.error(
                        'Unhandled object type {0!r} in _get_tree_dulwich. '
                        'This is a bug, please report it.'
                        .format(tag.type_name)
                    )
                    # 'commit' was never assigned in this branch; bail out
                    # instead of raising a NameError below.
                    return None
            return repo.get_object(commit.tree)

    # Branch or tag not matched, check if 'short' is a commit. This is more
    # difficult with Dulwich because of its inability to deal with shortened
    # SHA-1 hashes.
    try:
        int(short, 16)
    except ValueError:
        # Not hexidecimal, likely just a non-matching environment
        return None

    try:
        if len(short) == 40:
            # Full SHA: direct object-store lookup.
            sha_obj = repo.get_object(short)
            if isinstance(sha_obj, dulwich.objects.Commit):
                sha_commit = sha_obj
        else:
            # Shortened SHA: scan the whole object store for commits whose
            # id starts with the given prefix.
            matches = [
                x for x in (
                    repo.get_object(x)
                    for x in repo.object_store
                    if x.startswith(short)
                )
                if isinstance(x, dulwich.objects.Commit)
            ]
            if len(matches) > 1:
                log.warning('Ambiguous commit ID {0!r}'.format(short))
                return None
            try:
                sha_commit = matches[0]
            except IndexError:
                pass
    except TypeError as exc:
        log.warning('Invalid environment {0}: {1}'.format(short, exc))
    except KeyError:
        # No matching SHA
        return None

    try:
        return repo.get_object(sha_commit.tree)
    except NameError:
        # No matching sha_commit object was created. Unable to find SHA.
        pass
    return None
def _get_tree_dulwich(repo, short):
    """
    Return a dulwich.objects.Tree object if the branch/tag/SHA is found,
    otherwise None
    """
    # Only look at refs when 'short' is a known environment name; otherwise
    # fall straight through to the SHA lookup below.
    if short == __opts__["gitfs_base"] or short in envs():
        refs = repo.get_refs()
        # Sorting ensures we check heads (branches) before tags
        for ref in sorted(_dulwich_env_refs(refs)):
            # ref will be something like 'refs/heads/master'
            rtype, rspec = ref[5:].split("/", 1)
            # Slashes in ref names are exposed as underscores in env names.
            rspec = rspec.replace("/", "_")
            if rspec == short and _env_is_exposed(rspec):
                if rtype == "heads":
                    commit = repo.get_object(refs[ref])
                elif rtype == "tags":
                    tag = repo.get_object(refs[ref])
                    if isinstance(tag, dulwich.objects.Tag):
                        # Tag.get_object() returns a 2-tuple, the 2nd element
                        # of which is the commit SHA to which the tag refers
                        commit = repo.get_object(tag.object[1])
                    elif isinstance(tag, dulwich.objects.Commit):
                        commit = tag
                    else:
                        # NOTE(review): if this branch is taken, 'commit' is
                        # never assigned and the return below raises
                        # NameError — verify whether that is intended.
                        log.error(
                            "Unhandled object type {0!r} in "
                            "_get_tree_dulwich. This is a bug, please report "
                            "it.".format(tag.type_name)
                        )
                return repo.get_object(commit.tree)

    # Branch or tag not matched, check if 'short' is a commit. This is more
    # difficult with Dulwich because of its inability to deal with shortened
    # SHA-1 hashes.
    if not _env_is_exposed(short):
        return None
    try:
        int(short, 16)
    except ValueError:
        # Not hexidecimal, likely just a non-matching environment
        return None

    try:
        if len(short) == 40:
            # Full SHA: direct lookup in the object store.
            sha_obj = repo.get_object(short)
            if isinstance(sha_obj, dulwich.objects.Commit):
                sha_commit = sha_obj
        else:
            # Shortened SHA: scan the object store for commits whose id
            # starts with the given prefix.
            matches = set(
                [
                    x
                    for x in (repo.get_object(x) for x in repo.object_store
                              if x.startswith(short))
                    if isinstance(x, dulwich.objects.Commit)
                ]
            )
            if len(matches) > 1:
                log.warning("Ambiguous commit ID {0!r}".format(short))
                return None
            try:
                # NOTE(review): set.pop() raises KeyError when empty, not the
                # IndexError guarded here; an empty 'matches' is instead
                # caught by the outer 'except KeyError' below — confirm this
                # is the intended flow.
                sha_commit = matches.pop()
            except IndexError:
                pass
    except TypeError as exc:
        log.warning("Invalid environment {0}: {1}".format(short, exc))
    except KeyError:
        # No matching SHA
        return None

    try:
        return repo.get_object(sha_commit.tree)
    except NameError:
        # No matching sha_commit object was created. Unable to find SHA.
        pass
    return None
def get_refs(repo):
    """Return a defaultdict mapping each key of ``repo.get_refs()`` to a
    two-element set containing that key and its value.

    NOTE(review): ``repo.get_refs()`` conventionally maps ref name to
    sha, so the loop variable named ``commit`` here actually receives
    the ref name — confirm the intended key against the sibling
    ``get_tags`` which unpacks in the opposite order.
    """
    refs = defaultdict(set)
    for commit, ref in iteritems(repo.get_refs()):
        refs[commit] = {commit, ref}
    return refs
def get_tags(repo):
    """Return {commit: tag basename} for every ref under 'refs/tags/'
    whose short name matches VALID_TAG."""
    tags = {}
    for tag_ref, commit in iteritems(repo.get_refs()):
        if not tag_ref.startswith('refs/tags/'):
            continue
        if VALID_TAG.match(tag_ref[len('refs/tags/'):]):
            tags[commit] = os.path.basename(tag_ref)
    return tags
def update():
    """
    Execute a git fetch on all of the repos
    """
    # data for the fileserver event
    data = {"changed": False, "backend": "gitfs"}
    provider = _get_provider()
    pid = os.getpid()
    data["changed"] = purge_cache()
    for repo_conf in init():
        repo = repo_conf["repo"]
        # Each provider exposes its remote and working directory through a
        # different attribute.
        if provider == "gitpython":
            origin = repo.remotes[0]
            working_dir = repo.working_dir
        elif provider == "pygit2":
            origin = repo.remotes[0]
            working_dir = repo.workdir
        elif provider == "dulwich":
            # origin is just a uri here, there is no origin object
            origin = repo_conf["uri"]
            working_dir = repo.path
        # Advertise that this PID is updating the checkout via a lock file.
        lk_fn = os.path.join(working_dir, "update.lk")
        with salt.utils.fopen(lk_fn, "w+") as fp_:
            fp_.write(str(pid))
        try:
            # BUGFIX: for the dulwich provider 'origin' is a plain uri string
            # with no .url attribute; the old 'origin.url' raised
            # AttributeError here, which the broad except below swallowed and
            # thereby skipped the fetch entirely. Log the configured uri,
            # which is valid for every provider.
            log.debug("Fetching from {0}".format(repo_conf["uri"]))
            if provider == "gitpython":
                _f = []
                try:
                    _f = origin.fetch()
                except AssertionError:
                    # GitPython can raise a spurious AssertionError on the
                    # first attempt; retry once.
                    _f = origin.fetch()
                for fetch in _f:
                    # A fetch entry with old_commit set means the ref moved.
                    if fetch.old_commit is not None:
                        data["changed"] = True
            elif provider == "pygit2":
                fetch = origin.fetch()
                if fetch.get("received_objects", 0):
                    data["changed"] = True
            elif provider == "dulwich":
                client, path = dulwich.client.get_transport_and_path_from_url(
                    origin, thin_packs=True
                )
                # Snapshot refs before the fetch so changes can be detected.
                refs_pre = repo.get_refs()
                try:
                    refs_post = client.fetch(path, repo)
                except KeyError:
                    # Treat a KeyError from dulwich as cache corruption and
                    # remove the checkout so the next update re-clones it.
                    log.critical(
                        "Local repository cachedir {0!r} (corresponding "
                        "remote: {1}) has been corrupted. Salt will now "
                        "attempt to remove the local checkout to allow it to "
                        "be re-initialized in the next fileserver cache "
                        "update.".format(repo_conf["cachedir"], repo_conf["uri"])
                    )
                    try:
                        salt.utils.rm_rf(repo_conf["cachedir"])
                    except OSError as exc:
                        log.critical(
                            "Unable to remove {0!r}: {1}".format(
                                repo_conf["cachedir"], exc
                            )
                        )
                    continue
                if refs_post is None:
                    # Empty repository
                    log.warning(
                        "gitfs remote {0!r} is an empty repository and will "
                        "be skipped.".format(origin)
                    )
                    continue
                if refs_pre != refs_post:
                    data["changed"] = True
                    # Update local refs
                    for ref in _dulwich_env_refs(refs_post):
                        repo[ref] = refs_post[ref]
                    # Prune stale refs
                    for ref in repo.get_refs():
                        if ref not in refs_post:
                            del repo[ref]
        except Exception as exc:
            # Best-effort: a failed fetch for one remote must not abort the
            # update of the remaining remotes.
            log.error(
                "Exception {0} caught while fetching gitfs remote {1}".format(
                    exc, repo_conf["uri"]
                ),
                exc_info=log.isEnabledFor(logging.DEBUG),
            )
        try:
            os.remove(lk_fn)
        except (IOError, OSError):
            pass
    # Rebuild the serialized environment cache when anything changed or the
    # cache file is missing.
    env_cache = os.path.join(__opts__["cachedir"], "gitfs/envs.p")
    if data.get("changed", False) is True or not os.path.isfile(env_cache):
        new_envs = envs(ignore_cache=True)
        serial = salt.payload.Serial(__opts__)
        with salt.utils.fopen(env_cache, "w+") as fp_:
            fp_.write(serial.dumps(new_envs))
            log.trace("Wrote env cache data to {0}".format(env_cache))
    # if there is a change, fire an event
    if __opts__.get("fileserver_events", False):
        event = salt.utils.event.get_event(
            "master", __opts__["sock_dir"], __opts__["transport"], listen=False
        )
        event.fire_event(data, tagify(["gitfs", "update"], prefix="fileserver"))
    try:
        salt.fileserver.reap_fileserver_cache_dir(
            os.path.join(__opts__["cachedir"], "gitfs/hash"), find_file
        )
    except (IOError, OSError):
        # Hash file won't exist if no files have yet been served up
        pass