def pull(self, progress_func, username=None, password=None):
    """
    Pull updates from the remote repository configured on this object.

    This code has been taken from dulwich.porcelain.

    :param progress_func: callable receiving progress byte-strings from
        dulwich during the fetch.
    :param username: currently unused — NOTE(review): accepted but never
        read; confirm whether it should be forwarded to the client.
    :param password: if given, injected into the SSH vendor's kwargs
        (assumes an SSH transport — TODO confirm for http(s) remotes).
    """
    # Nothing to do when no remote is configured.
    if not self.remote_url:
        return
    selected_refs = []

    def determine_wants(remote_refs):
        # Record which (local, remote, force) ref tuples we are fetching,
        # then tell dulwich which remote SHAs we want.
        selected_refs.extend(
            parse_reftuples(remote_refs, self.git_repository.refs,
                            self.remote_fetch_refs))
        return [remote_refs[lh] for (lh, rh, force) in selected_refs]

    logger.info("Pulling from '%s' ..." % (self.remote_url))
    client, path = get_transport_and_path_from_url(self.remote_url)
    if password:
        client.ssh_vendor.ssh_kwargs["password"] = password
    try:
        remote_refs = client.fetch(path.encode('utf-8'),
                                   self.git_repository,
                                   progress=progress_func,
                                   determine_wants=determine_wants)
    except FileExistsError:
        # A pack file with the same name already exists on disk; abort
        # the pull rather than corrupt the object store.
        logger.exception("Pack already exists. Possibly a bug in dulwich.")
        return
    # Point the local refs at the SHAs we just fetched, then rebuild the
    # working-tree index to match.
    for (lh, rh, force) in selected_refs:
        self.git_repository.refs[rh] = remote_refs[lh]
    self.git_repository.reset_index()
def clone(url, path=None): client, target = dulwich.client.get_transport_and_path( url) #c , 'user/repo' if not path: if target.endswith('.git'): target = target[:-4] path = target path = F.mkdir(path) ls = F.ls(path) if len(ls) > 1: #skip .git return py.No('Not empty dir:' + path, ls) if len(ls) == 1: r = dulwich.repo.Repo(path) else: r = dulwich.repo.Repo.init(path) print(U.stime(), 'fetching url...') remote_refs = client.fetch(url, r) r[b"HEAD"] = remote_refs[b"HEAD"] print(U.stime(), 'build_index_from_tree ...') index.build_index_from_tree(r.path, r.index_path(), r.object_store, r[b'HEAD'].tree) r.close() return path, r
def download_git_repos():
    '''
    Perform the initial checkout of the winrepo repositories.

    Returns the per-remote success dict when ``winrepo.genrepo``
    succeeds, otherwise ``False``.

    Raises ``CommandExecutionError`` if dulwich is missing or a
    requested remote ref cannot be found.
    '''
    try:
        import dulwich.client
        import dulwich.repo
        import dulwich.index
    except ImportError:
        raise CommandExecutionError('Command require dulwich python module')

    winrepo_dir = __salt__['config.get']('winrepo_dir')
    winrepo_remotes = __salt__['config.get']('winrepo_remotes')
    winrepo_remotes_ng = __salt__['config.get']('winrepo_remotes_ng')
    winrepo_dir_ng = __salt__['config.get']('winrepo_dir_ng')
    winrepo_cfg = [(winrepo_remotes, winrepo_dir),
                   (winrepo_remotes_ng, winrepo_dir_ng)]

    ret = {}
    for remotes, base_dir in winrepo_cfg:
        for remote_info in remotes:
            try:
                # Checkout dir name comes from the last URL segment.
                if '/' in remote_info:
                    targetname = remote_info.split('/')[-1]
                else:
                    targetname = remote_info
                rev = 'HEAD'
                # A remote may be configured as '<rev> <url>'; a bare URL
                # means "fetch HEAD".
                try:
                    rev, remote_url = remote_info.strip().split()
                except ValueError:
                    remote_url = remote_info
                gittarget = os.path.join(base_dir,
                                         targetname).replace('.', '_')
                client, path = dulwich.client.get_transport_and_path(
                    remote_url)
                if os.path.exists(gittarget):
                    local_repo = dulwich.repo.Repo(gittarget)
                else:
                    os.makedirs(gittarget)
                    local_repo = dulwich.repo.Repo.init(gittarget)
                remote_refs = client.fetch(remote_url, local_repo)
                if six.b(rev) not in remote_refs.refs:
                    raise CommandExecutionError(
                        'Failed to find remote ref: {0}'.format(rev))
                local_repo[six.b(rev)] = remote_refs[six.b(rev)]
                # Bug fix: build the index from the ref we just fetched
                # and assigned (`rev`), not the literal lowercase 'head',
                # which is never set and would raise KeyError.
                dulwich.index.build_index_from_tree(
                    local_repo.path, local_repo.index_path(),
                    local_repo.object_store,
                    local_repo[six.b(rev)].tree)
                ret.update({remote_info: True})
            except Exception as exc:
                # Log with traceback, then re-raise so callers see the
                # failure.
                log.exception('Failed to process remote_info: %s',
                              remote_info, exc_info=True)
                raise

    if __salt__['winrepo.genrepo']():
        return ret
    return False
def testUpdate(self, event):
    """
    Event handler: fetch the test repository into
    ``<destination>\\test`` and rebuild its working-tree index.

    :param event: GUI/framework event object (unused by the body).
    """
    src = "https://github.com/aaroncox/test.git"
    client, path = dulwich.client.get_transport_and_path(src)
    target = self.config['destination'] + "\\test"
    r = Repo(target)
    remote_refs = client.fetch(src, r)
    r["HEAD"] = remote_refs["HEAD"]
    # Bug fix: repo keys are case-sensitive; the original looked up
    # r['head'], which was never set and raises KeyError. Use the
    # 'HEAD' ref assigned on the previous line.
    index.build_index_from_tree(r.path, r.index_path(),
                                r.object_store, r['HEAD'].tree)
    # Close pack files so Windows can release the file handles.
    for pack in r.object_store.packs:
        pack.close()
def clone_repository(clone_url, path, private_key_file):
    """
    Clone *clone_url* over SSH into *path*, authenticating with
    *private_key_file*, and build the working tree.

    NOTE(review): this assigns the ParamikoSSHVendor *class* (not an
    instance) to ``dulwich.client.get_ssh_vendor`` — a module-level
    monkey-patch that affects all subsequent dulwich clients; confirm
    this is intentional.
    """
    dulwich.client.get_ssh_vendor = dulwich.client.ParamikoSSHVendor
    client, host_path = dulwich.client.get_transport_and_path(clone_url)
    # Forwarded to paramiko as connect kwargs; presumably paramiko
    # accepts a key-file path here — TODO confirm (paramiko's 'pkey'
    # normally expects a PKey object, 'key_filename' a path).
    client.ssh_kwargs = {"pkey": private_key_file}
    r = dulwich.repo.Repo.init(path, mkdir=True)
    remote_refs = client.fetch(
        host_path, r, determine_wants=r.object_store.determine_wants_all)
    r["HEAD"] = remote_refs["HEAD"]
    # NOTE(review): private dulwich API — may break across dulwich
    # versions; the public equivalent is index.build_index_from_tree.
    r._build_tree()
def download_git_repos():
    '''
    Perform the initial checkout of the winrepo repositories.

    Returns the per-remote success dict when ``winrepo.genrepo``
    succeeds, otherwise ``False``.

    Raises ``CommandExecutionError`` if dulwich is missing or a
    requested remote ref cannot be found.
    '''
    try:
        import dulwich.client
        import dulwich.repo
        import dulwich.index
    except ImportError:
        raise CommandExecutionError(
            'Command require dulwich python module'
        )

    winrepo_dir = __salt__['config.get']('winrepo_dir')
    winrepo_remotes = __salt__['config.get']('winrepo_remotes')
    winrepo_remotes_ng = __salt__['config.get']('winrepo_remotes_ng')
    winrepo_dir_ng = __salt__['config.get']('winrepo_dir_ng')
    winrepo_cfg = [(winrepo_remotes, winrepo_dir),
                   (winrepo_remotes_ng, winrepo_dir_ng)]

    ret = {}
    for remotes, base_dir in winrepo_cfg:
        for remote_info in remotes:
            # Checkout dir name comes from the last URL segment.
            if '/' in remote_info:
                targetname = remote_info.split('/')[-1]
            else:
                targetname = remote_info
            rev = 'HEAD'
            # A remote may be configured as '<rev> <url>'; a bare URL
            # means "fetch HEAD".
            try:
                rev, remote_url = remote_info.strip().split()
            except ValueError:
                remote_url = remote_info
            gittarget = os.path.join(base_dir, targetname).replace('.', '_')
            client, path = dulwich.client.get_transport_and_path(remote_url)
            if os.path.exists(gittarget):
                local_repo = dulwich.repo.Repo(gittarget)
            else:
                os.makedirs(gittarget)
                local_repo = dulwich.repo.Repo.init(gittarget)
            remote_refs = client.fetch(remote_url, local_repo)
            if rev not in remote_refs:
                raise CommandExecutionError(
                    'Failed to find remote ref: {0}'.format(rev)
                )
            local_repo[rev] = remote_refs[rev]
            # Bug fix: build the index from the ref we just fetched and
            # assigned (`rev`), not the literal lowercase 'head', which
            # is never set and would raise KeyError.
            dulwich.index.build_index_from_tree(
                local_repo.path,
                local_repo.index_path(),
                local_repo.object_store,
                local_repo[rev].tree
            )
            ret.update({remote_info: True})

    if __salt__['winrepo.genrepo']():
        return ret
    return False
def clone(local, remote):
    """Clone *remote* into the directory *local*, writing fetch
    progress to stdout and pointing HEAD at the remote's HEAD."""
    client, host_path = dulwich.client.get_transport_and_path(remote)
    if not os.path.exists(local):
        os.makedirs(local)
    repo = dulwich.repo.Repo.init(local)
    wants = repo.object_store.determine_wants_all
    remote_refs = client.fetch(host_path, repo,
                               determine_wants=wants,
                               progress=sys.stdout.write)
    repo["HEAD"] = remote_refs["HEAD"]
def git_clone( remote_repo, local_dir=None, new=True ):
    """
    Clone *remote_repo* into *local_dir* (defaults to the last path
    segment of the remote) and build the working tree from
    refs/heads/master.

    :param new: when True, create the directory and init a fresh repo;
        when False, reuse an existing repo at *local_dir*.
    """
    # urllib was imported here but never used; dropped.
    import dulwich.index, dulwich.client, dulwich.repo
    try:
        client, host_path = dulwich.client.get_transport_and_path(remote_repo)
        if local_dir is None:
            local_dir = host_path.split("/")[-1]
        if new:
            os.mkdir(local_dir)
            r = dulwich.repo.Repo.init(local_dir)
        else:
            r = dulwich.repo.Repo(local_dir)
        remote_refs = client.fetch(host_path, r)
        r['HEAD'] = remote_refs['refs/heads/master']
        dulwich.index.build_index_from_tree(r.path, r.index_path(),
                                            r.object_store, r['HEAD'].tree)
    except Exception:
        # Bug fix: these messages lived in a ``finally`` block, so the
        # "fatal error" text was printed even when cloning succeeded.
        # Print them only on failure (and narrow the bare except).
        print("---Sorry! There was a fatal error in cloning!")
        print("--- Please try cloning manually "+remote_repo+" into "+str(local_dir))
def git_clone(remote_repo, local_dir=None, new=True):
    """
    Clone *remote_repo* into *local_dir* (defaults to the last path
    segment of the remote) and build the working tree from
    refs/heads/master.

    :param new: when True, create the directory and init a fresh repo;
        when False, reuse an existing repo at *local_dir*.
    """
    # urllib was imported here but never used; dropped.
    import dulwich.index, dulwich.client, dulwich.repo
    try:
        client, host_path = dulwich.client.get_transport_and_path(remote_repo)
        if local_dir is None:
            local_dir = host_path.split("/")[-1]
        if new:
            os.mkdir(local_dir)
            r = dulwich.repo.Repo.init(local_dir)
        else:
            r = dulwich.repo.Repo(local_dir)
        remote_refs = client.fetch(host_path, r)
        r['HEAD'] = remote_refs['refs/heads/master']
        dulwich.index.build_index_from_tree(r.path, r.index_path(),
                                            r.object_store, r['HEAD'].tree)
    except Exception:
        # Bug fix: these messages lived in a ``finally`` block, so the
        # "fatal error" text was printed even when cloning succeeded.
        # Print them only on failure (and narrow the bare except).
        print("---Sorry! There was a fatal error in cloning!")
        print("--- Please try cloning manually " + remote_repo +
              " into " + str(local_dir))
def __clone_repo(self, repo_url, destination):
    """
    This is to replicate the functionality of cloning a repo, without
    using the porcelain interface.

    Fetches *repo_url* into *destination*, checks out
    ``refs/heads/<self.branch>`` (falling back to ``master`` with a
    user-visible notice when the branch does not exist), and builds
    the working-tree index.
    """
    try:
        local = Repo.init(destination, mkdir=True)
    except FileExistsError:
        # Destination already exists: reuse it as a repo.
        local = Repo(destination)

    client, path = dulwich.client.get_transport_and_path(repo_url)
    remote_refs = client.fetch(repo_url, local)
    ref = f'refs/heads/{self.branch}'

    try:
        local[ref.encode()] = remote_refs[ref.encode()]
    except KeyError:
        # Requested branch not on the remote: warn and fall back to
        # master. NOTE(review): a repo with neither branch will raise
        # KeyError again here — presumably acceptable upstream.
        ref = 'refs/heads/master'
        msgs = [
            f'\nBranch {self.branch} does not exist at {repo_url}!',
            'Using "master" branch for plugin, this may not work '
            'with your RELEASE'
        ]
        for msg in msgs:
            iocage_lib.ioc_common.logit(
                {
                    'level': 'INFO',
                    'message': msg
                },
                _callback=self.callback)
        local[ref.encode()] = remote_refs[ref.encode()]

    # Materialize the working tree from the chosen ref's tree.
    index_file = local.index_path()
    tree = local[ref.encode()].tree
    index.build_index_from_tree(local.path, index_file,
                                local.object_store, tree)
def clone(source, target, bare=False):
    """
    Clone *source* into *target* (defaults to the last path segment of
    the remote URL) and return the new dulwich Repo.

    :param bare: init a bare repository instead of a working one.
    """
    client, host_path = dulwich.client.get_transport_and_path(source)
    if target is None:
        target = host_path.split("/")[-1]
    if not os.path.exists(target):
        os.mkdir(target)
    if bare:
        r = dulwich.repo.Repo.init_bare(target)
    else:
        r = dulwich.repo.Repo.init(target)
    remote_refs = client.fetch(
        host_path, r,
        determine_wants=r.object_store.determine_wants_all)
    r["HEAD"] = remote_refs["HEAD"]
    # NOTE(review): ``iteritems`` is Python 2 only — on py3 this raises
    # AttributeError; confirm this module still targets py2.
    for key, val in remote_refs.iteritems():
        # Skip peeled tag entries ('<ref>^{}'); record only refs we do
        # not already have.
        if not key.endswith('^{}'):
            r.refs.add_if_new(key, val)
    return r
def clone(source, target, bare=False):
    """Clone *source* into *target* (defaults to the remote's last path
    segment), copy all non-peeled refs locally, and return the Repo."""
    client, host_path = dulwich.client.get_transport_and_path(source)
    if target is None:
        target = host_path.split("/")[-1]
    if not os.path.exists(target):
        os.mkdir(target)
    # Pick the repo constructor once instead of branching twice.
    init_repo = dulwich.repo.Repo.init_bare if bare else dulwich.repo.Repo.init
    r = init_repo(target)
    remote_refs = client.fetch(
        host_path, r, determine_wants=r.object_store.determine_wants_all)
    r["HEAD"] = remote_refs["HEAD"]
    for ref_name, sha in remote_refs.iteritems():
        # '^{}' entries are peeled tags; skip them.
        if not ref_name.endswith('^{}'):
            r.refs.add_if_new(ref_name, sha)
    return r
def fetch_refspecs(
    self,
    url: str,
    refspecs: Iterable[str],
    force: Optional[bool] = False,
    on_diverged: Optional[Callable[[str, str], bool]] = None,
    **kwargs,
):
    """
    Fetch the given *refspecs* from the remote *url* and update local
    refs, with divergence checking.

    :param force: overwrite diverged refs unconditionally.
    :param on_diverged: optional callback ``(local_ref, remote_sha) ->
        bool``; when it returns True the diverged ref is overwritten,
        otherwise it is skipped.
    :raises SCMError: when *url* cannot be resolved to a git remote.
    """
    from dulwich.client import get_transport_and_path
    from dulwich.objectspec import parse_reftuples
    from dulwich.porcelain import (
        DivergedBranches,
        check_diverged,
        get_remote_repo,
    )

    fetch_refs = []

    def determine_wants(remote_refs):
        # Resolve refspecs against the remote's refs, remembering the
        # (local, remote, force) tuples, and request only SHAs we do
        # not already have in the object store.
        fetch_refs.extend(
            parse_reftuples(
                remote_refs,
                self.repo.refs,
                [os.fsencode(refspec) for refspec in refspecs],
                force=force,
            ))
        return [
            remote_refs[lh] for (lh, _, _) in fetch_refs
            if remote_refs[lh] not in self.repo.object_store
        ]

    try:
        _remote, location = get_remote_repo(self.repo, url)
        client, path = get_transport_and_path(location, **kwargs)
    except Exception as exc:
        raise SCMError(
            f"'{url}' is not a valid Git remote or URL") from exc

    with Tqdm(desc="Fetching git refs",
              bar_format=self.BAR_FMT_NOTOTAL) as pbar:

        def progress(msg_b):
            # dulwich reports progress as bytes; surface it on the bar
            # and the trace log.
            msg = msg_b.decode("ascii").strip()
            pbar.update_msg(msg)
            pbar.refresh()
            logger.trace(msg)

        fetch_result = client.fetch(
            path,
            self.repo,
            progress=progress,
            determine_wants=determine_wants,
        )
    for (lh, rh, _) in fetch_refs:
        try:
            # Only existing local refs can have diverged.
            if rh in self.repo.refs:
                check_diverged(self.repo, self.repo.refs[rh],
                               fetch_result.refs[lh])
        except DivergedBranches:
            if not force:
                overwrite = False
                if on_diverged:
                    overwrite = on_diverged(
                        os.fsdecode(rh),
                        os.fsdecode(fetch_result.refs[lh]))
                if not overwrite:
                    continue
        self.repo.refs[rh] = fetch_result.refs[lh]
def clone(self, client):
    """Fetch everything from *client* into this repo and record any
    refs we do not already have."""
    fetched_refs = client.fetch('', self)
    for ref_name, sha in fetched_refs.iteritems():
        self.refs.add_if_new(ref_name, sha)
def fetch_refspecs(
    self,
    url: str,
    refspecs: Iterable[str],
    force: Optional[bool] = False,
    on_diverged: Optional[Callable[[str, str], bool]] = None,
    progress: Callable[["GitProgressEvent"], None] = None,
    **kwargs,
):
    """
    Fetch the given *refspecs* from the remote *url* and update local
    refs, with divergence checking.

    :param force: overwrite diverged refs unconditionally.
    :param on_diverged: optional callback ``(local_ref, remote_sha) ->
        bool``; when it returns True the diverged ref is overwritten,
        otherwise it is skipped.
    :param progress: optional callback receiving GitProgressEvent
        updates during the fetch.
    :raises SCMError: when *url* cannot be resolved to a git remote.
    """
    from dulwich.client import get_transport_and_path
    from dulwich.objectspec import parse_reftuples
    from dulwich.porcelain import (
        DivergedBranches,
        check_diverged,
        get_remote_repo,
    )

    fetch_refs = []

    def determine_wants(remote_refs):
        # Resolve refspecs against the remote's refs, remembering the
        # (local, remote, force) tuples, and request only SHAs we do
        # not already have in the object store.
        fetch_refs.extend(
            parse_reftuples(
                remote_refs,
                self.repo.refs,
                [os.fsencode(refspec) for refspec in refspecs],
                force=force,
            ))
        return [
            remote_refs[lh] for (lh, _, _) in fetch_refs
            if remote_refs[lh] not in self.repo.object_store
        ]

    try:
        _remote, location = get_remote_repo(self.repo, url)
        client, path = get_transport_and_path(location, **kwargs)
    except Exception as exc:
        raise SCMError(
            f"'{url}' is not a valid Git remote or URL") from exc

    from dvc.scm.progress import GitProgressReporter

    fetch_result = client.fetch(
        path,
        self.repo,
        progress=GitProgressReporter(progress) if progress else None,
        determine_wants=determine_wants,
    )
    for (lh, rh, _) in fetch_refs:
        try:
            # Only existing local refs can have diverged.
            if rh in self.repo.refs:
                check_diverged(self.repo, self.repo.refs[rh],
                               fetch_result.refs[lh])
        except DivergedBranches:
            if not force:
                overwrite = False
                if on_diverged:
                    overwrite = on_diverged(
                        os.fsdecode(rh),
                        os.fsdecode(fetch_result.refs[lh]))
                if not overwrite:
                    continue
        self.repo.refs[rh] = fetch_result.refs[lh]
def update():
    """
    Execute a git fetch on all of the repos

    Fetches every configured gitfs remote via the active provider
    (gitpython / pygit2 / dulwich), updates the env cache when anything
    changed, and optionally fires a fileserver event.
    """
    # data for the fileserver event
    data = {"changed": False, "backend": "gitfs"}
    provider = _get_provider()
    pid = os.getpid()
    data["changed"] = purge_cache()
    for repo_conf in init():
        repo = repo_conf["repo"]
        if provider == "gitpython":
            origin = repo.remotes[0]
            working_dir = repo.working_dir
        elif provider == "pygit2":
            origin = repo.remotes[0]
            working_dir = repo.workdir
        elif provider == "dulwich":
            # origin is just a uri here, there is no origin object
            origin = repo_conf["uri"]
            working_dir = repo.path
        # Per-repo lock file holding our pid while we fetch.
        lk_fn = os.path.join(working_dir, "update.lk")
        with salt.utils.fopen(lk_fn, "w+") as fp_:
            fp_.write(str(pid))
        try:
            log.debug("Fetching from {0}".format(origin.url))
            if provider == "gitpython":
                _f = []
                # NOTE(review): the fetch is retried once on
                # AssertionError — presumably a known gitpython quirk;
                # confirm against the provider's issue history.
                try:
                    _f = origin.fetch()
                except AssertionError:
                    _f = origin.fetch()
                for fetch in _f:
                    if fetch.old_commit is not None:
                        data["changed"] = True
            elif provider == "pygit2":
                fetch = origin.fetch()
                if fetch.get("received_objects", 0):
                    data["changed"] = True
            elif provider == "dulwich":
                client, path = dulwich.client.get_transport_and_path_from_url(
                    origin, thin_packs=True)
                refs_pre = repo.get_refs()
                try:
                    refs_post = client.fetch(path, repo)
                except KeyError:
                    # Corrupted local checkout: remove it so the next
                    # cache update can re-clone from scratch.
                    log.critical(
                        "Local repository cachedir {0!r} (corresponding "
                        "remote: {1}) has been corrupted. Salt will now "
                        "attempt to remove the local checkout to allow it to "
                        "be re-initialized in the next fileserver cache "
                        "update.".format(repo_conf["cachedir"],
                                         repo_conf["uri"])
                    )
                    try:
                        salt.utils.rm_rf(repo_conf["cachedir"])
                    except OSError as exc:
                        log.critical("Unable to remove {0!r}: {1}".format(
                            repo_conf["cachedir"], exc))
                    continue
                if refs_post is None:
                    # Empty repository
                    log.warning(
                        "gitfs remote {0!r} is an empty repository and will "
                        "be skipped.".format(origin))
                    continue
                if refs_pre != refs_post:
                    data["changed"] = True
                # Update local refs
                for ref in _dulwich_env_refs(refs_post):
                    repo[ref] = refs_post[ref]
                # Prune stale refs
                for ref in repo.get_refs():
                    if ref not in refs_post:
                        del repo[ref]
        except Exception as exc:
            log.error(
                "Exception {0} caught while fetching gitfs remote {1}".format(
                    exc, repo_conf["uri"]),
                exc_info=log.isEnabledFor(logging.DEBUG),
            )
        try:
            os.remove(lk_fn)
        except (IOError, OSError):
            pass
    # Rebuild the env cache when anything changed or the cache file is
    # missing.
    env_cache = os.path.join(__opts__["cachedir"], "gitfs/envs.p")
    if data.get("changed", False) is True or not os.path.isfile(env_cache):
        new_envs = envs(ignore_cache=True)
        serial = salt.payload.Serial(__opts__)
        with salt.utils.fopen(env_cache, "w+") as fp_:
            fp_.write(serial.dumps(new_envs))
        log.trace("Wrote env cache data to {0}".format(env_cache))
    # if there is a change, fire an event
    if __opts__.get("fileserver_events", False):
        event = salt.utils.event.get_event("master", __opts__["sock_dir"],
                                           __opts__["transport"],
                                           listen=False)
        event.fire_event(data, tagify(["gitfs", "update"],
                                      prefix="fileserver"))
    try:
        salt.fileserver.reap_fileserver_cache_dir(
            os.path.join(__opts__["cachedir"], "gitfs/hash"), find_file)
    except (IOError, OSError):
        # Hash file won't exist if no files have yet been served up
        pass
def fetch_one():
    # Perform a single fetch using names from the enclosing scope:
    # ``client``, ``host_path``, ``r`` and ``determine_wants`` are not
    # parameters — NOTE(review): presumably closure/module globals;
    # confirm where they are bound. Progress output goes to stdout.
    remote_refs = client.fetch(host_path, r,
                               determine_wants=determine_wants,
                               progress=sys.stdout.write)
def update():
    '''
    Execute a git fetch on all of the repos

    Fetches every configured gitfs remote via the active provider
    (gitpython / pygit2 / dulwich), updates the env cache when anything
    changed, and optionally fires a fileserver event.
    '''
    # data for the fileserver event
    data = {'changed': False, 'backend': 'gitfs'}
    provider = _get_provider()
    pid = os.getpid()
    data['changed'] = purge_cache()
    for repo_conf in init():
        repo = repo_conf['repo']
        if provider == 'gitpython':
            origin = repo.remotes[0]
            working_dir = repo.working_dir
        elif provider == 'pygit2':
            origin = repo.remotes[0]
            working_dir = repo.workdir
        elif provider == 'dulwich':
            # origin is just a uri here, there is no origin object
            origin = repo_conf['uri']
            working_dir = repo.path
        # Per-repo lock file holding our pid while we fetch.
        lk_fn = os.path.join(working_dir, 'update.lk')
        with salt.utils.fopen(lk_fn, 'w+') as fp_:
            fp_.write(str(pid))
        try:
            if provider == 'gitpython':
                for fetch in origin.fetch():
                    if fetch.old_commit is not None:
                        data['changed'] = True
            elif provider == 'pygit2':
                fetch = origin.fetch()
                if fetch.get('received_objects', 0):
                    data['changed'] = True
            elif provider == 'dulwich':
                client, path = \
                    dulwich.client.get_transport_and_path_from_url(
                        origin, thin_packs=True
                    )
                refs_pre = repo.get_refs()
                try:
                    refs_post = client.fetch(path, repo)
                except KeyError:
                    # Corrupted local checkout: remove it so the next
                    # cache update can re-clone from scratch.
                    log.critical(
                        'Local repository cachedir {0!r} (corresponding '
                        'remote: {1}) has been corrupted. Salt will now '
                        'attempt to remove the local checkout to allow it to '
                        'be re-initialized in the next fileserver cache '
                        'update.'
                        .format(repo_conf['cachedir'], repo_conf['uri'])
                    )
                    try:
                        salt.utils.rm_rf(repo_conf['cachedir'])
                    except OSError as exc:
                        log.critical(
                            'Unable to remove {0!r}: {1}'
                            .format(repo_conf['cachedir'], exc)
                        )
                    continue
                if refs_post is None:
                    # Empty repository
                    log.warning(
                        'gitfs remote {0!r} is an empty repository and will '
                        'be skipped.'.format(origin)
                    )
                    continue
                if refs_pre != refs_post:
                    data['changed'] = True
                # Update local refs
                for ref in _dulwich_env_refs(refs_post):
                    repo[ref] = refs_post[ref]
                # Prune stale refs
                for ref in repo.get_refs():
                    if ref not in refs_post:
                        del repo[ref]
        except Exception as exc:
            log.warning(
                'Exception caught while fetching: {0}'.format(exc)
            )
        try:
            os.remove(lk_fn)
        except (IOError, OSError):
            pass
    # Rebuild the env cache when anything changed or the cache file is
    # missing.
    env_cache = os.path.join(__opts__['cachedir'], 'gitfs/envs.p')
    if data.get('changed', False) is True or not os.path.isfile(env_cache):
        new_envs = envs(ignore_cache=True)
        serial = salt.payload.Serial(__opts__)
        with salt.utils.fopen(env_cache, 'w+') as fp_:
            fp_.write(serial.dumps(new_envs))
        log.trace('Wrote env cache data to {0}'.format(env_cache))
    # if there is a change, fire an event
    if __opts__.get('fileserver_events', False):
        event = salt.utils.event.MasterEvent(__opts__['sock_dir'])
        event.fire_event(data,
                         tagify(['gitfs', 'update'], prefix='fileserver'))
    try:
        salt.fileserver.reap_fileserver_cache_dir(
            os.path.join(__opts__['cachedir'], 'gitfs/hash'), find_file
        )
    except (IOError, OSError):
        # Hash file won't exist if no files have yet been served up
        pass
# Exploratory Python 2 script (print statements): inspect two existing
# repos, then clone a GitHub repo into a fresh directory with dulwich.
client, path = dulwich.client.get_transport_and_path(
    'git://github.com/smcdon/MolySym-Twitter-Project.git')
print 'client is: ', client, 'path is: ', path
print
# Inspect an existing local repo's object store and refs.
local_repo = Repo("/home/smcdon/Programming/MolySym Projects/twitter_project")
print "local_repo"
print local_repo.object_store
print local_repo.refs
print
print "new_repo"
new_repo = Repo("/home/smcdon/Programming/MolySym Projects/newrepo")
print new_repo.object_store
print new_repo.refs
src = "git://github.com/smcdon/MolySym-Twitter-Project.git"
target = "/home/smcdon/Programming/MolySym Projects/newrepo2"
#client = TCPGitClient('git://github.com/smcdon/MolySym-Twitter-Project.git')
# Clone into a brand-new directory; fails if `target` already exists.
os.mkdir(target)
r = Repo.init(target)
##graph_walker = r.get_graph_walker()
##determine_wants = r.object_store.determine_wants_all
remote_refs = client.fetch(src, r)
r["HEAD"] = remote_refs["HEAD"]