def main():
    """Entry point: dispatch CLI args, offer a git self-update, then run the REPL."""
    # CLI mode: any extra argv delegates to options() and exits with its status.
    if len(sys.argv) > 1:
        sys.exit(options())
    # Self-update: `git remote show origin` reports "local out of date" when the
    # remote has new commits. NOTE(review): assumes the script runs from one
    # directory below the clone root ('..') — confirm working directory.
    if 'local out of date' in cmd.Git('..').execute(
            ['git', 'remote', 'show', 'origin']).lower():
        x = input('Update available. Do you want to update? Y/N ')
        if x.lower() == 'y':
            Repo('..').remote(name='origin').pull()
            if 'up to date' in cmd.Git('..').execute(
                    ['git', 'remote', 'show', 'origin']).lower():
                # print() returns None, so sys.exit(None) exits with status 0.
                sys.exit(print('Version up to date.'))
            else:
                sys.exit(print('Failed to update.'))
    # Tab completion is optional: readline is unavailable on Windows.
    try:
        import readline
        completions = Completer([
            "courses", "regions", "options", "help", "quit", "exit", "clear",
            "flat", "jumps", "date"
        ])
        readline.set_completer(completions.complete)
        readline.parse_and_bind('tab: complete')
    except ModuleNotFoundError:  # windows
        pass
    # Interactive REPL: read a command line, normalise, hand off to parse_args.
    while True:
        args = input('[rpscrape]> ').lower().strip()
        parse_args([arg.strip() for arg in args.split()])
def _check_remote_repository(self):
    """Return True when ``self.repository`` answers ``git ls-remote``, else False."""
    try:
        cmd.Git().ls_remote(self.repository)
    except GitCommandError:
        # Remote unreachable, nonexistent, or access denied.
        return False
    else:
        return True
def git_ls_remote(url):
    """Return a mapping of remote ref name -> commit sha for *url*.

    Runs ``git ls-remote`` via GitPython and parses its tab-separated
    "<sha> TAB <refname>" output lines.

    Args:
        url: URL of the remote repository.

    Returns:
        dict mapping each ref name (e.g. ``refs/heads/master``) to its sha.
    """
    remote_refs = {}
    g = cmd.Git()
    for ref in g.ls_remote(url).split('\n'):
        # Robustness fix: skip blank or malformed lines (e.g. a trailing
        # empty line) instead of raising IndexError on the missing ref field.
        hash_ref_list = ref.split('\t')
        if len(hash_ref_list) >= 2:
            remote_refs[hash_ref_list[1]] = hash_ref_list[0]
    return remote_refs
def git_sync(self, sourcepath, remoteurl, branch):
    """Synchronise *sourcepath* with *branch* of *remoteurl*.

    Clones if the path is missing, otherwise pulls; switches to *branch* if
    the checkout is on another branch, else hard-resets to the remote head.
    On any git failure the working copy is wiped and re-cloned.
    """
    from git import Repo, cmd
    if not os.path.exists(sourcepath):
        self.download_source(sourcepath, remoteurl, branch)
    try:
        repo = Repo(sourcepath)
        remote = repo.remote('origin')
        pulled = remote.pull()
        self.log.info([f.commit.message for f in pulled])
        # BUGFIX: original used the Python 2 '<>' operator, which is a
        # SyntaxError on Python 3; '!=' is the correct comparison.
        if repo.active_branch.name != branch:
            self.log.info("switch %s to %s ... " % (repo.active_branch.name, branch))
            repo.git.checkout('HEAD', b=branch)
        else:
            # Discard local changes so the tree exactly matches the remote branch.
            repo.git.reset("--hard", "origin/" + branch)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate. Any git failure: log, wipe, clone from scratch.
        self.log.error("git error...", exc_info=True)
        if os.path.exists(sourcepath):
            self.log.error("remove path [%s] and clone again...", sourcepath)
            import shutil
            shutil.rmtree(sourcepath)
        self.download_source(sourcepath, remoteurl, branch)
def check_repo_commit(repo_path):
    """Report HEAD commit metadata for the repository at *repo_path*.

    Returns ``("<name>_error_msg", message)`` when there is uncommitted work,
    otherwise ``(name, dict)`` with sha1, branch, commit_date, committer
    name/email and the origin URL.
    """
    repo = Repo(path=repo_path)
    origin_url = get_origin_url(cmd.Git(repo_path))
    # Repo name = last path segment of the origin URL, minus any ".git" suffix.
    repo_name = origin_url.split("/")[-1].split(".")[0]

    err_msg = need_to_commit(repo, repo_name=repo_name)
    if err_msg:
        return "{}_error_msg".format(repo_name), err_msg

    head_commit = repo.head.commit
    sha1, branch = head_commit.name_rev.split()
    commit_date = datetime.fromtimestamp(head_commit.authored_date).strftime(
        "%A %d. %B %Y %H:%M:%S"
    )
    return (
        repo_name,
        {
            "sha1": sha1,
            "branch": branch,
            "commit_date": commit_date,
            "committer_name": head_commit.committer.name,
            "committer_email": head_commit.committer.email,
            "origin_url": origin_url,
        },
    )
def update_repositories(repo=None):
    """Pull the latest changes for *repo*, or for every known repository
    when *repo* is None."""
    targets = [repo] if repo else Repositories.query.all()
    for target in targets:
        # Each repository record carries its filesystem path in `.fp`.
        git_cmd.Git(target.fp).pull()
def find_latest_remote_ref(url, reference, guess=True):
    """
    Discovers, from a git remote, the latest "appropriate" tag/sha based on
    a reference: If reference is a branch, returns the sha for the head of
    the branch. If reference is a tag, find the latest patch release of the
    same tag line.
    """
    # Use GitPython git.cmd to avoid fetching repos,
    # as listing remotes is not implemented outside Repo use.
    gcli = gitcmd.Git()

    # Matches "<sha>\t<ref>" lines; a tag matches only if it ends with a
    # digit or "-eol" (so v11.1 and 1.11.1rc1 still match). Raw strings
    # prevent accidental escape-sequence interpretation in the pattern.
    regex = re.compile(r'(?P<sha>[0-9a-f]{40})\t(?P<fullref>'
                       r'refs/heads/(?P<branch>.*)'
                       r'|refs/tags/(?P<tag>.*(\d|-eol)))')

    # search_ver becomes ["master"], ["eol-mitaka"],
    # ["stable/pike"], or ["16", "1", "9"].
    search_ver = reference.split('.')

    # For EOL tag matching: "stable/pike" -> "pike-eol".
    # BUGFIX: str.strip('stable/') strips a *character set*, not the prefix,
    # and would also eat a trailing 'e' ("stable/pike" -> "pik"), so the EOL
    # tag could never match. Remove the literal prefix instead.
    if 'stable/' in reference:
        eol_tag = reference.replace('stable/', '', 1) + '-eol'
    else:
        eol_tag = None

    # ls-remote output is not guaranteed to be sorted, so collect candidate
    # patch releases and pick the highest ourselves.
    patch_releases = []
    for remote in gcli.ls_remote('--refs', url).splitlines():
        m = regex.match(remote)
        if not m:
            continue
        # First, try to match the remote result with a branch name.
        if m.group('branch') and m.group('branch') == reference:
            return m.group('sha')
        # Then try to match the EOL tag.
        elif eol_tag and m.group('tag') == eol_tag:
            return eol_tag
        # Finally, look for close tags if guess work is allowed.
        elif m.group('tag') and guess:
            ref_of_tag = m.group('tag').split('.')
            # Keep a tag if only its last component differs from the target.
            if ref_of_tag[0:-1] and ref_of_tag[0:-1] == search_ver[0:-1]:
                # Store only the last component — space efficient!
                patch_releases.append(ref_of_tag[-1])

    # Return the highest candidate if something was found.
    if patch_releases:
        search_ver[-1] = max(patch_releases)
        return ".".join(search_ver)

    # Nothing else found: return the original reference.
    return reference
def __init__(self, gitSettings, cleanDirty=False):
    """Open (or clone) the local repo described by *gitSettings*, fix its
    origin URL if needed, set up a master tracking branch, and pull.

    gitSettings keys used: "local", "protocol", "user", "remote".
    cleanDirty is forwarded to self.pull().
    """
    self.localRepoDir = gitSettings["local"]
    try:
        # Getting the "fetching" URL
        #print("local repository:", self.localRepoDir)
        gitPath = os.path.abspath(self.localRepoDir)
        if not os.path.exists(gitPath):
            os.makedirs(gitPath)
        g = cmd.Git(gitPath)
        # The second line of `git remote show origin` contains the fetch URL;
        # rejoin on ':' so URLs containing colons stay intact.
        urlInUse = ":".join(
            g.execute(["git", "remote", "show",
                       "origin"]).split("\n")[1].split(":")[1:]).strip()
        urlToUse = gitSettings["protocol"] + "://" + gitSettings[
            "user"] + "@" + gitSettings["remote"]
        # Repoint origin when the configured URL differs, then re-read it.
        if urlInUse != urlToUse:
            #print("Changing URL in use...")
            g.execute(["git", "remote", "set-url", "origin", urlToUse])
            urlInUse = ":".join(
                g.execute(["git", "remote", "show", "origin"
                           ]).split("\n")[1].split(":")[1:]).strip()
    except exc.GitCommandError:
        # Generally, get here when the repo has not been created yet. It is
        # ok, it will be created below.
        pass
    except:
        raise
    #if not os.path.isdir(self.localRepoDir):
    #    os.makedirs(self.localRepoDir)
    self.offline = False
    try:
        self.repo = Repo(self.localRepoDir)
        assert not self.repo.bare
    except (exc.InvalidGitRepositoryError, exc.NoSuchPathError):
        # No usable repository on disk yet: clone it fresh from the remote.
        self.repo = Repo.clone_from(
            gitSettings["protocol"] + "://" + gitSettings["user"] + "@" +
            gitSettings["remote"], self.localRepoDir)
    self.tryToFetch()
    try:
        # Setup a local tracking branch of a remote branch.
        # NOTE(review): bare except presumably tolerates "branch already
        # exists", but it also hides any other failure — confirm intent.
        self.repo.create_head(
            'master', self.origin.refs.master).set_tracking_branch(
                self.origin.refs.master)
    except:
        pass
    self.pull(cleanDirty)
def ls_remote(url):
    """ Get remote tags/heads from git repo """
    refs = []
    listing = cmd.Git().ls_remote(url)
    for line in listing.split('\n'):
        fields = line.split('\t')
        commit_sha, full_ref = fields[0], fields[1]
        # Short name is the final path segment, e.g. refs/tags/v1.0 -> v1.0.
        short_name = full_ref.split('/')[-1]
        if 'tags' in full_ref:
            refs.append(dict(ref_type='tag', ref=short_name, commit=commit_sha))
        if 'heads' in full_ref:
            refs.append(dict(ref_type='branch', ref=short_name, commit=commit_sha))
    return refs
def download_generic_alerts(repo_url, save_location, deploy_key):
    """Clone *repo_url* into *save_location* (or pull if already present),
    authenticating over ssh with *deploy_key*."""
    git_ssh_cmd = 'ssh -i %s' % deploy_key
    git_obj = cmd.Git(save_location)
    git_obj.update_environment(GIT_SSH_COMMAND=git_ssh_cmd)
    if os.path.isdir(save_location):
        # Existing checkout: just update it.
        logger.debug("Updating " + str(save_location))
        git_obj.pull()
    else:
        logger.debug("Cloning " + str(repo_url) + " into " + str(save_location))
        Repo.clone_from(repo_url, save_location,
                        env={'GIT_SSH_COMMAND': git_ssh_cmd})
def check_for_update():
    """Offer to pull the latest version of this tool from its git origin.

    Exits the process after a successful or failed update, and also when
    gitpython is not installed; returns normally when already up to date
    or when the user declines.
    """
    try:
        from git import Repo, cmd
    except ModuleNotFoundError:
        sys.exit(
            print(
                'gitpython module not found.\n\nInstall with "pip3 install gitpython" or disable auto update in settings.'
            ))
    status = cmd.Git('..').execute(['git', 'remote', 'show', 'origin']).lower()
    if 'local out of date' not in status:
        return
    answer = input('Update available. Do you want to update? Y/N ')
    if answer.lower() != 'y':
        return
    Repo('..').remote(name='origin').pull()
    # Re-query the remote to confirm the pull actually brought us current.
    if 'up to date' in cmd.Git('..').execute(
            ['git', 'remote', 'show', 'origin']).lower():
        sys.exit(print('Updated successfully.'))
    else:
        sys.exit(print('Failed to update.'))
def UploadProject(request, Url):
    """Clone the repository at *Url* under the configured source path (or
    pull it if already checked out), then redirect to the project list."""
    codepath = Url.split('/')[-1].split('.')[0]
    base = GitSetting.objects.get(id=1).sourcepath.rstrip('/')
    if not os.path.exists(base):
        os.mkdir(base)
    local_copy = base + '/' + codepath
    if os.path.exists(local_copy):
        # Existing checkout: update it in place (Git() gets a relative path,
        # so chdir to the source root first).
        os.chdir(base)
        cmd.Git(codepath).pull()
    else:
        Repo.clone_from(Url, local_copy)
    return HttpResponseRedirect(reverse('listallprojectsurl'))
def worker(self, _id):
    """Consume (rev, word) targets from queue *_id*, grep each revision for
    the word, and record matching lines in the shared hit set."""
    repository: cmd.Git = cmd.Git(self.workdir)
    while True:
        target = self.global_queue[_id].get()
        if target is None:
            # Sentinel: no more work for this search.
            break
        rev, word = target
        try:
            grep_output: str = repository.grep("-E", word, rev)
        except Exception:
            # git grep exits non-zero when nothing matches; skip this rev.
            continue
        for line in grep_output.split("\n"):
            # Strip the leading rev field; keep "path:matched text".
            stripped = ":".join(line.split(":")[1:])
            self.global_hits[_id].add(stripped)
def pull_remote_data(reponame, branch):
    """ Pulls data from remote repository.
        TODO: add optional path to pulling

    Args:
        reponame: remote name (or URL) to pull from.
        branch: branch name to pull.

    Returns:
        None in all cases; prints diagnostics on failure.
    """
    try:
        git_local = cmd.Git('.')
        print("[+] Pulling files")
        git_local.pull(reponame, branch)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate instead of being swallowed.
        print("Unexpected error in %s:" % inspect.stack()[0][3],
              sys.exc_info()[0])
        return None
def get_commit(config):
    """Resolve the tip commit of config['branch'] at config['git_url'].

    Returns (sha, None) on success, or (False, error) when HTTP auth fails,
    git errors out, or exactly one matching branch head is not found.
    """
    http_check, err = check_http_auth(config['git_url'])
    if err:
        return False, err
    url = config['git_url']
    branch = config['branch']
    client = cmd.Git()
    try:
        with client.custom_environment(GIT_SSH_COMMAND=ssh_cmd):
            listing = client.ls_remote(url).split('\n')
            # Keep only the sha of refs whose final path segment equals the branch.
            matches = [
                line.split('\t')[0] for line in listing
                if line.split('\t')[1].split('/')[-1] == branch
            ]
    except GitCommandError as err:
        return False, err
    if len(matches) != 1:
        return False, f'no commits found in branch {config["branch"]}'
    return matches[0], None
def run(self, word: str) -> int:
    """Start an asynchronous search for *word* across every revision.

    Spawns ``self.worker_num`` daemon worker threads fed from a per-search
    queue, enqueues every rev from ``git rev-list --all`` followed by one
    ``None`` sentinel per worker, and returns the search id.
    """
    _id = int(self.id_counter)
    self.id_counter += 1
    workers = []
    self.global_queue[_id] = queue.Queue()
    self.global_hits[_id] = set()
    for _ in range(self.worker_num):
        th = threading.Thread(target=self.worker, args=(_id,))
        # FIX: Thread.setDaemon() is deprecated (and removed in newer
        # Pythons); assign the `daemon` attribute instead.
        th.daemon = True
        th.start()
        workers.append(th)
    self.global_workers[_id] = workers
    repository: cmd.Git = cmd.Git(self.workdir)
    revs: str = repository.rev_list("--all")
    for rev in revs.split("\n"):
        self.global_queue[_id].put((rev, word))
    # One sentinel per worker so every thread terminates cleanly.
    for _ in range(self.worker_num):
        self.global_queue[_id].put(None)
    return _id
def check_remote_repository_initialized(self, repository: Repository):
    """Return True when the remote for *repository* has at least one head;
    False when it is empty or access fails (errors are logged and counted)."""
    repository_url = self.get_repository_url(repository)
    git = cmd.Git()
    try:
        with git.custom_environment(**self.git_ssh_cmd):
            # `-h` restricts ls-remote to branch heads only.
            heads = git.ls_remote('-h', repository_url).split()
    except exc.GitCommandError as error:
        self.config.log.error("%s access restricted: %s" %
                              (repository.full_name, error.status))
        self.config.error_count += 1
        return False
    if heads:
        return True
    self.config.log.info("%s is empty", repository.full_name)
    return False
def _is_easy_backport(repo, commit):
    """Return True if *commit* cherry-picks cleanly onto every
    origin/stable/* branch of *repo*, False otherwise.

    WARNING: destructive — removes untracked files and hard-resets the
    checkout while probing each stable branch.
    """
    g_cmd = cmd.Git(working_dir=repo.working_tree_dir)
    for ref in repo.refs:
        # consider a patch easy to backport if only it cleanly applies to all
        # stable branches; otherwise it will potentially require more work to
        # resolve git conflicts
        if ref.name.startswith('origin/stable/'):
            # before applying any patches, make sure the tree is clean and
            # fully reflects remote head
            g_cmd.clean(force=True, d=True, x=True)
            g_cmd.reset(hard=True)
            g_cmd.checkout(ref.name)
            try:
                g_cmd.cherry_pick(commit.hexsha)
            except cmd.GitCommandError:
                # cherry-pick does not have a 'dry run' mode, so we need to
                # actually clean up after a failure
                g_cmd.cherry_pick(abort=True)
                return False
    return True
def git_announce():
    """Check watched repositories for new tags and send a notification for
    every tag whose sha differs from the one recorded on disk."""
    # initialize GitPython
    git = git_cmd.Git()
    # list of urls to announce
    url = ['git://git.zx2c4.com/wireguard-linux-compat']
    for remote_url in url:
        # repository name is the last path segment of the url
        repo = remote_url.split('/')[-1]
        # ls-remote --tags yields pairs: tag ref line, then tagged-commit line
        tags = git.ls_remote('--tags', remote_url).split('\n')
        repo_path = join(path + '/' + repo)
        # create repo directory if it does not exist yet
        if not exists(repo_path):
            makedirs(repo_path)
        # parse every 2 entries; the next one is the tagged commit
        for idx in range(0, len(tags), 2):
            entry = tags[idx].replace('/', '\t').split('\t')
            tag_name = entry[3]
            # short SHA-1 format: first 12 characters
            tag_sha = entry[0][:12]
            tag_file = join(repo_path + '/' + tag_name)
            # tag re-releases are rare, but announce whenever the sha changed
            if get_content(tag_file) != tag_sha:
                msg = ('*New Git release detected!*\n'
                       '\n'
                       'Repository: [' + repo + '](' +
                       remote_url.replace('git:', 'https:') + ')' + '\n'
                       'Tag: `' + tag_name + '` (`' + tag_sha + '`)\n'
                       'Commit: `' + tags[idx + 1][:12] + '`')
                notify(msg)
                # record the announced sha
                write_to(tag_file, tag_sha)
def setup_ssh(self):
    """Configure the ssh command git should use.

    Sets GIT_SSH_COMMAND for git >= 2.3; for older gits, generates an
    executable wrapper script and points GIT_SSH at it instead.
    """
    ssh_wrapper = os.path.join(self.config.get_backup_path(), self.wrapper)
    # Remove any stale wrapper left from a previous run (best-effort).
    if os.path.exists(ssh_wrapper):
        try:
            os.remove(ssh_wrapper)
        except OSError:
            pass
    ssh_str_path = str(self.config.get_ssh_key())
    # NOTE(review): "." appears to be the sentinel for "no key configured"
    # (str() of an empty Path) — confirm against get_ssh_key().
    if ssh_str_path != ".":
        # @todo Adjust best options depending on OpenSSH version.
        self.ssh_cmd = 'ssh -i {} -F /dev/null -o StrictHostKeyChecking=no'.format(
            ssh_str_path)
        self.git_ssh_cmd = {"GIT_SSH_COMMAND": self.ssh_cmd}
        # GIT_SSH_COMMAND only exists from git 2.3 on; older versions need a
        # wrapper script passed via GIT_SSH instead.
        git_version = cmd.Git().version_info
        self.use_git_ssh_wrapper = LooseVersion("{}.{}".format(
            str(git_version[0]), str(
                git_version[1]))) < LooseVersion("2.3")
        if self.use_git_ssh_wrapper:
            # Emit a tiny shell shim that forwards all arguments to ssh.
            with open(ssh_wrapper, "w") as file:
                file.write("#!/bin/bash\n")
                file.write(self.ssh_cmd + ' "$@"\n')
                file.close()
            # Owner read/write/execute only.
            os.chmod(ssh_wrapper, S_IXUSR | S_IRUSR | S_IWUSR)
            try:
                del self.git_ssh_cmd['GIT_SSH_COMMAND']
            except KeyError:
                pass
            self.git_ssh_cmd['GIT_SSH'] = os.path.abspath(ssh_wrapper)
def __init__(self):
    """Open the repository five directories above this file and cache its
    dirty state plus the latest commit's author details."""
    # Windows-style relative walk up five levels from this module.
    repo_root = os.path.join(__file__, "..\\" * 5)
    self.repo = Repo(repo_root)
    self.cmd = cmd.Git(self.repo.working_dir)
    self.dirty = self.repo.is_dirty()
    head = self.repo.commit()
    # NOTE(review): attribute names say "committer" (one with a doubled 'C')
    # but the values come from commit.author — names kept for compatibility.
    self.lastCCommitterName = head.author.name
    self.lastCommitterEmail = head.author.email
OR you can use `IPv6` domain as a key: -``` sql +```sql CREATE TABLE hits (url String, from IPv6) ENGINE = MergeTree() ORDER BY from; ... MORE OPTIONS: -h, --help show this help message and exit --no-pager use stdout as difference result output '''
# NOTE(review): the text above ends with ''' and reads like the tail of a
# module-level usage/docstring whose opening quotes lie outside this chunk;
# it also contains an unrelated markdown diff fragment — confirm upstream.
SCRIPT_PATH = os.path.abspath(__file__)
# Repository root is two directories above this script.
CLICKHOUSE_REPO_HOME = os.path.join(os.path.dirname(SCRIPT_PATH), '..', '..')
# Git command runner bound to the repository root.
SCRIPT_COMMAND_EXECUTOR = cmd.Git(CLICKHOUSE_REPO_HOME)
# add_help=False because -h is redefined below as a plain store_true flag.
SCRIPT_COMMAND_PARSER = argparse.ArgumentParser(add_help=False)
SCRIPT_COMMAND_PARSER.add_argument('path', type=bytes, nargs='?', default=None)
SCRIPT_COMMAND_PARSER.add_argument('--no-pager', action='store_true', default=False)
SCRIPT_COMMAND_PARSER.add_argument('-h', '--help', action='store_true', default=False)


def execute(commands):
    # Run *commands* through git in the repository root and return the output.
    return SCRIPT_COMMAND_EXECUTOR.execute(commands)
def main():
    """Entry point: offer a git self-update, then scrape either from CLI
    flags (one-shot mode) or from the interactive [rpscrape]> prompt."""
    # Self-update: "local out of date" in `git remote show origin` means the
    # remote has newer commits. NOTE(review): assumes cwd is one level below
    # the clone root ('..') — confirm.
    if 'local out of date' in cmd.Git('..').execute(
            ['git', 'remote', 'show', 'origin']).lower():
        x = input('Update available. Do you want to update? Y/N ')
        if x.lower() == 'y':
            Repo('..').remote(name='origin').pull()
            if 'up to date' in cmd.Git('..').execute(
                    ['git', 'remote', 'show', 'origin']).lower():
                # print() returns None, so sys.exit(None) exits with status 0.
                sys.exit(print('Version up to date.'))
            else:
                sys.exit(print('Failed to update.'))
    # One-shot CLI mode.
    if len(sys.argv) > 1:
        parser = argparse.ArgumentParser()
        parser.add_argument(
            '-d', '--date', type=str, metavar='',
            help='Date or date range in format YYYY/MM/DD e.g 2020/01/19-2020/05/01')
        parser.add_argument('-c', '--course', type=str, metavar='',
                            help='Numeric course code e.g 20')
        parser.add_argument('-r', '--region', type=str, metavar='',
                            help='Region code e.g ire')
        parser.add_argument(
            '-y', '--year', type=str, metavar='',
            help='Year or year range in format YYYY e.g 2018-2020')
        parser.add_argument('-t', '--type', type=str, metavar='',
                            help='Race type [flat/jumps]')
        args = parser.parse_args()
        # -d is mutually exclusive with -c/-y/-t.
        if args.date and any([args.course, args.year, args.type]):
            print(
                'Arguments not compatible with -d flag.\n\nFormat:\n\t\t-d YYYY/MM/DD -r [REGION CODE]\n\nExamples:\n\t\t-d 2020/01/19 -r gb\n'
            )
            print(
                'When scraping by date, if no region code is specified, all available races will be scraped by default.'
            )
            sys.exit()
        # Date mode: scrape every meeting in the given date range.
        if args.date:
            if not check_date(args.date):
                sys.exit(
                    print(
                        'Invalid date.\n\nFormat:\n\t\tYYYY/MM/DD\n\t\tYYYY/MM/DD-YYYY/MM/DD\n\nExamples:\n\t\t2015/03/27\n\t\t2020/01/19-2020/05/01'
                    ))
            if args.region:
                if not valid_region(args.region):
                    sys.exit(
                        print(
                            'Invalid region code.\n\nExamples:\n\t\t-r gb\n\t\t-r ire'
                        ))
                region = args.region
            else:
                # No region flag: default to every available region.
                region = 'all'
            races = []
            dates = get_dates(args.date)
            for d in dates:
                for link in get_race_links(d, region):
                    races.append(link)
            scrape_races(races, region, args.date.replace('/', '_'), '')
            sys.exit()
        # Course/region mode: validate each flag before scraping.
        if args.course:
            if not valid_course(args.course):
                sys.exit(
                    print(
                        'Invalid course code.\n\nExamples:\n\t\t-c 20\n\t\t-c 1083'
                    ))
        if args.region:
            if not valid_region(args.region):
                sys.exit(
                    print(
                        'Invalid region code.\n\nExamples:\n\t\t-r gb\n\t\t-r ire'
                    ))
        years = parse_years(args.year) if args.year else []
        if not years or not valid_years(years):
            sys.exit(
                print(
                    'Invalid year.\n\nFormat:\n\t\tYYYY\n\nExamples:\n\t\t-y 2015\n\t\t-y 2012-2017'
                ))
        if not args.type or args.type not in ['flat', 'jumps']:
            sys.exit(
                print(
                    'Invalid race type.\n\nMust be either flat or jumps.\n\nExamples:\n\t\t-t flat\n\t\t-t jumps'
                ))
        if not args.course and not args.region:
            sys.exit(print('Must supply a course or region code.'))
        # A region expands to all of its courses; otherwise use the one course.
        tracks = [course[0] for course in courses(args.region)
                  ] if args.region else [args.course]
        names = [course_name(track) for track in tracks]
        target = args.region if args.region else course_name(args.course)
        races = get_races(tracks, names, years, args.type, x_y())
        scrape_races(races, target, args.year, args.type)
        sys.exit()
    # Interactive mode: tab completion is optional (readline is POSIX-only).
    try:
        import readline
        completions = Completer([
            "courses", "regions", "options", "help", "quit", "exit", "clear",
            "flat", "jumps", "date"
        ])
        readline.set_completer(completions.complete)
        readline.parse_and_bind('tab: complete')
    except ModuleNotFoundError:  # windows
        pass
    # REPL: read a command line, normalise, and hand off to parse_args.
    while True:
        args = input('[rpscrape]> ').lower().strip()
        parse_args([arg.strip() for arg in args.split()])
from git import cmd
from github3 import login

# Encrypted account data.
# NOTE(review): keys.json carries a password and cipher key in a JSON file —
# consider a proper secrets store.
with open('config/keys.json') as json_data:
    account_data = json.load(json_data)[0]
key = account_data['cipher_key']      # cipher key used elsewhere in the module
u_name = account_data['username']     # GitHub username
u_pass = account_data['password']     # GitHub password
u_repo = account_data['repository']   # target repository name

# Git handle bound to the current working directory.
gl = cmd.Git('.')

trojan_id = "abc"
trojan_config = "%s.json" % trojan_id
data_path = "data/%s/" % trojan_id
trojan_modules = []
configured = False
# NOTE(review): `Queue.Queue()` is the Python 2 module name; on Python 3 this
# requires `import queue` — confirm the target interpreter.
task_queue = Queue.Queue()


def connect_to_github():
    # Authenticate against GitHub and fetch the configured repository's
    # master branch.
    # NOTE(review): this definition may be truncated at the end of this chunk;
    # `branch` is assigned but nothing visible is returned — confirm upstream.
    gh = login(username=u_name, password=u_pass)
    repo = gh.repository(u_name, u_repo)
    branch = repo.branch('master')