def commit(self, objects, message):
    """Stage *objects*, commit them with *message*, and push to origin.

    NOTE(review): uses `basestring`, so this is Python 2 code.

    Args:
        objects: iterable of paths to stage (each added with ``git add -A``).
        message: non-empty commit message string.

    Raises:
        ValueError: if *message* is empty or not a string.
    """
    # validate commit message
    if not message or not isinstance(message, basestring):
        raise ValueError("Commit message should not be empty or not string")
    # Point git at this instance's working tree / .git directory explicitly,
    # so the commands behave the same regardless of the process's cwd.
    env = os.environ.copy()
    env.update({
        'GIT_WORK_TREE': self.repo,
        'GIT_DIR': '%s/.git' % self.repo,
    })
    git.gc("--prune", _env=env)
    git.checkout("HEAD", _env=env)
    # pull and push from and to the remote
    git.pull("origin", "master", _env=env)
    for obj in objects:
        git.add("-A", obj, _env=env)
    try:
        git.commit("-m", message, _env=env)
    except Exception:
        # Best-effort: a commit with nothing staged exits non-zero; the
        # push below still runs in that case.
        pass
    git.push(_env=env)
def release(): """Release/publish the code. """ # Rebase and push the master with tags to origin. print("Here are the remaining TODO items:") print(bash('TODO.sh')) print() if not util.yes( "Do you still want to rebase and push the master with tags " "to origin (y/n)?"): util.delayed_exit() git.rebase('-i', 'origin/master') git.push('--tags', 'origin', 'master') # Upload to PyPI. if not util.yes("Do you want to upload to PyPI (this is permanent!) " "(y/n)?"): util.delayed_exit() setup.sdist.upload() # Reset the version number. # In CHANGES.txt: newheading = ('TBD (in `GitHub <https://github.com/kdavies4/natu>`_ ' 'only) -- Updates:') newlink = ('.. _vx.x.x: ' 'https://github.com/kdavies4/natu/archive/vx.x.x.zip') rpls = [(r'(<http://semver.org>`_\.)', r'\1\n\n' + newheading), (r'(Initial release\n\n\n)', r'\1%s\n' % newlink)] util.replace('CHANGES.txt', rpls)
def commit(self, objects, message):
    """Validate *message*, stage *objects*, commit, and push to origin."""
    # A commit message must be a non-empty string.
    if not message or not isinstance(message, basestring):
        raise ValueError(
            "Commit message should not be empty or not string")
    # Every git invocation targets this repository explicitly via the
    # environment, independent of the current working directory.
    git_env = dict(os.environ)
    git_env['GIT_WORK_TREE'] = self.repo
    git_env['GIT_DIR'] = '%s/.git' % self.repo
    git.gc("--prune", _env=git_env)
    git.checkout("HEAD", _env=git_env)
    # Bring the working tree up to date before staging anything.
    git.pull("origin", "master", _env=git_env)
    for path in objects:
        git.add("-A", path, _env=git_env)
    try:
        git.commit("-m", message, _env=git_env)
    except Exception:
        pass  # an empty commit is tolerated; push regardless
    git.push(_env=git_env)
def release(): """Release/publish the code. """ # Rebase and push the master with tags to origin. print("Here are the remaining TODO items:") print(bash('TODO.sh')) print() if not util.yes("Do you still want to rebase and push the master with tags " "to origin (y/n)?"): util.delayed_exit() git.rebase('-i', 'origin/master') git.push('--tags', 'origin', 'master') # Upload to PyPI. if not util.yes("Do you want to upload to PyPI (this is permanent!) " "(y/n)?"): util.delayed_exit() setup.sdist.upload() # Reset the version number. # In natu/__init__.py: set_version('None') # In CHANGES.txt: newheading = ('TBD (in `GitHub <https://github.com/kdavies4/natu>`_ ' 'only) -- Updates:') newlink = ('.. _vx.x.x: ' 'https://github.com/kdavies4/natu/archive/vx.x.x.zip') rpls = [(r'(<http://semver.org>`_\.)', r'\1\n\n' + newheading), (r'(Initial release\n\n\n)', r'\1%s\n' % newlink)] util.replace('CHANGES.txt', rpls)
def main():
    """
    Requires youtube uploader script from
    https://github.com/tokland/youtube-upload

    Uploads every *.webm in the cwd, collects the returned video ids,
    writes a README.md of thumbnail links, and commits/pushes it.
    """
    from subprocess import Popen, PIPE
    import glob
    from sh import git
    yt_ids = []
    for fname in glob.glob('*.webm'):
        title = fname.replace('.webm', '').replace('_', ' ')
        # FIX: pass arguments as a list with shell=False so titles and
        # filenames containing spaces/quotes cannot break the command line
        # (the old string-concatenated shell=True form was injectable).
        p = Popen(['youtube-upload', '--title=' + title, fname], stdout=PIPE)
        out, _ = p.communicate()
        # FIX: decode the uploader's stdout properly instead of mangling
        # the bytes repr with str(...).replace("b'", ...).
        yt_ids.append(out.decode().strip())
    readme_content = '# White dwarf nova\n'
    for idd in yt_ids:
        readme_content += '[![IMAGE ALT TEXT HERE](http://img.youtube.com/vi/'+idd+'/0.jpg)](http://www.youtube.com/watch?v='+idd+')\n'
    with open('README.md','w') as f:
        f.write(readme_content)
    git.add('README.md')
    git.commit(m='update videos')
    git.push()
def main():
    """
    Requires youtube uploader script from
    https://github.com/tokland/youtube-upload
    """
    from subprocess import Popen, PIPE
    import glob
    from sh import git
    yt_ids = []
    # Upload each .webm in the current directory; the filename (minus
    # extension, underscores as spaces) becomes the video title.
    for fname in glob.glob("*.webm"):
        title = fname.replace(".webm", "").replace("_", " ")
        command = 'youtube-upload --title="' + title + '" ' + fname
        p = Popen(command, stdout=PIPE, shell=True)
        out = p.communicate()
        # NOTE(review): this strips the bytes repr ("b'...'") textually
        # instead of decoding — presumably the uploader prints just the
        # video id; a .decode() would be cleaner. Confirm before changing.
        yt_ids.append(str(out[0].rstrip()).replace("b'", "").replace("'", ""))
    # Build a README of clickable thumbnails, one per uploaded video.
    readme_content = "# White dwarf nova\n"
    for idd in yt_ids:
        readme_content += (
            "[![IMAGE ALT TEXT HERE](http://img.youtube.com/vi/"
            + idd
            + "/0.jpg)](http://www.youtube.com/watch?v="
            + idd
            + ")\n"
        )
    with open("README.md", "w") as f:
        f.write(readme_content)
    git.add("README.md")
    git.commit(m="update videos")
    git.push()
def add_to_blacklist(self, items_to_blacklist, username, code_permissions):
    """Append *items_to_blacklist* to blacklisted_websites.txt and commit.

    With code permissions the change is merged to master and pushed;
    otherwise a branch is pushed and a GitHub pull request is opened.

    Returns:
        (bool, str): success flag and a human-readable status message.
    """
    # Check if we're on master
    if git("rev-parse", "--abbrev-ref", "HEAD").strip() != "master":
        return (False, "Not currently on master.")
    # Check that we're up-to-date with origin (GitHub)
    git.remote.update()
    if git("rev-parse", "refs/remotes/origin/master").strip() != git("rev-parse", "master").strip():
        return (False, "HEAD isn't at tip of origin's master branch")
    # Check that blacklisted_websites.txt isn't modified locally. That could get ugly fast
    if "blacklisted_websites.txt" in git.status():  # Also ugly
        return (False, "blacklisted_websites.txt modified locally. This is probably bad.")
    # Store current commit hash
    current_commit = git("rev-parse", "HEAD").strip()
    # Add items to file
    with open("blacklisted_websites.txt", "a+") as blacklisted_websites:
        # NOTE(review): in "a+" mode the file position starts at EOF, so
        # this read() returns "" and a newline is always written — confirm
        # whether the trailing-newline check was ever effective.
        last_character = blacklisted_websites.read()[-1:]
        if last_character != "\n":
            blacklisted_websites.write("\n")
        blacklisted_websites.write("\n".join(items_to_blacklist) + "\n")
    # Checkout a new branch (mostly unnecessary, but may help if we create PRs in the future
    branch = "auto-blacklist-{0}".format(str(time.time()))
    git.checkout("-b", branch)
    # Clear HEAD just in case
    git.reset("HEAD")
    git.add("blacklisted_websites.txt")
    git.commit("-m", "Auto blacklist of {0} by {1} --autopull".format(", ".join(items_to_blacklist), username))
    if code_permissions:
        git.checkout("master")
        git.merge(branch)
        git.push()
    else:
        git.push("origin", branch)
        git.checkout("master")
        if GlobalVars.github_username is None or GlobalVars.github_password is None:
            return (False, "tell someone to set a GH password")
        payload = {"title": "{0}: Blacklist {1}".format(username, ", ".join(items_to_blacklist)),
                   "body": "{0} requests blacklist of domains: \n\n - {1}".format(username, "\n - ".join(items_to_blacklist)),
                   "head": branch,
                   "base": "master"}
        response = requests.post("https://api.github.com/repos/Charcoal-SE/SmokeDetector/pulls",
                                 auth=HTTPBasicAuth(GlobalVars.github_username,
                                                    GlobalVars.github_password),
                                 data=json.dumps(payload))
        print(response.json())
        return (True, "You don't have code privileges, but I've [created a pull request for you]({0}).".format(response.json()["html_url"]))
    # Return to old commit to await CI. This will make Smokey think it's in reverted mode if it restarts
    git.checkout(current_commit)
    # NOTE(review): the no-permissions path returned above, so this branch
    # appears unreachable as reconstructed — verify against the original layout.
    if not code_permissions:
        return (False, "Unable to perform action due to lack of code-level permissions. [Branch pushed](https://github.com/Charcoal-SE/SmokeDetector/tree/{0}), PR at your leisure.".format(branch))
    return (True, "Blacklisted {0} - the entry will be applied via autopull if CI succeeds.".format(", ".join(items_to_blacklist)))
def generate_csv(output_directory='./'):
    """Clone the cfb-data repo, dump the scraped data as CSV/JSON plus a
    metadata file, and commit/push the result.

    Args:
        output_directory: unused placeholder kept for interface
            compatibility.
    """
    # Set up root data dir
    root_filename = ROOT + '/cfb-data/automated/wiki/' + ROOT + '/'
    # Clone data repository for updates - continue if already exists
    from sh import git
    # FIX: was a bare `except: pass` around rmtree (which also swallows
    # SystemExit/KeyboardInterrupt); ignore_errors covers the intended
    # "directory may not exist" case explicitly.
    shutil.rmtree(ROOT + '/cfb-data/', ignore_errors=True)
    git.clone(
        'https://' + os.getenv('MACHINE_AUTH') + '@github.com/coffenbacher/cfb-data.git',
        ROOT + '/cfb-data/')
    # Create our root if required
    if not os.path.exists(root_filename):
        os.makedirs(root_filename)
    # Set up logging
    logging.basicConfig(
        level='WARNING',
        #format='%(asctime)s %(levelname)-8s %(message)s',
        #datefmt='%a, %d %b %Y %H:%M:%S',
        filename=root_filename + ROOT + '.log',
        filemode='w')
    # Extract all current names from Wiki
    data = FN()
    # Write everything to csv (values transliterated to ASCII first;
    # `unicode` means this module targets Python 2)
    with open(root_filename + ROOT + '.csv', 'w') as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=FIELDNAMES,
                                extrasaction='ignore')
        for d in data:
            asci = dict([(unidecode(k), unidecode(unicode(v)))
                         for k, v in d.items()])
            writer.writerow(asci)
    # Write everything to json
    with open(root_filename + ROOT + '.json', 'w') as jsonfile:
        relevant = [{f: d.get(f, None) for f in FIELDNAMES} for d in data]
        jsonfile.write(json.dumps(relevant))
    # Write a small metadata sidecar describing this export.
    with open(root_filename + ROOT + '_meta.json', 'w') as metafile:
        d = {
            'created': datetime.now().strftime('%x %X'),
            'rows': len(data),
            'headers': ','.join(FIELDNAMES),
        }
        metafile.write(json.dumps(d))
    # Rebind git to operate on the cloned data repository.
    git = git.bake(**{
        'git-dir': ROOT + '/cfb-data/.git/',
        'work-tree': ROOT + '/cfb-data'
    })
    git.commit(m='Auto updating %s data' % ROOT, a=True)
    git.push('origin', 'master')
def publish_site(self):
    """Commit the built site ('dist') on the target branch, push it, and
    return to the previously checked-out branch.
    """
    # FIX: removed unused locals `tmp_branch` and `detached_branch`
    # (assigned but never read).
    git.checkout(self.target_branch)
    self._add_and_commit_site('dist')
    git.push('origin', self.target_branch)
    # '@{-1}' is git's name for the branch checked out before this one.
    git.checkout('@{-1}')
def sync(event, context):
    """Mirror-clone the repository named by *event* and mirror-push it to
    its source repo. *context* is the (unused) Lambda context."""
    evt = Event(event)
    target = SourceRepo(evt)
    # A throwaway directory holds the bare mirror for the duration of
    # the clone + push round-trip.
    with tempfile.TemporaryDirectory() as workdir:
        git.clone('--mirror', evt.clone_url, _cwd=workdir)
        git.push('--mirror', target.url, _cwd=workdir)
def push():
    """Best-effort `git push`; on failure, retry with an explicit
    upstream (`-u gh <current-branch>`).

    All failures are reported on stdout rather than raised.
    """
    print("git push")
    try:
        git.push()
    # FIX: bare `except:` also caught SystemExit/KeyboardInterrupt;
    # narrowed to Exception while keeping the best-effort behavior.
    except Exception:
        try:
            branch_name = branchName()
            print("git push -u gh " + branch_name)
            try:
                git.push("-u", "gh", branch_name)
            except Exception:
                print("Nothing to push")
        except Exception:
            # branchName() itself failed — no branch to push.
            print("Nothing has been created")
def main():
    """Create branch task_1, commit file1.txt on it, push it upstream,
    and switch back to the previous branch."""
    git.checkout("-b", "task_1")
    # FIX: use a context manager so the file is flushed/closed before git
    # stages it.
    with open("file1.txt", "w") as f:
        f.write("here is a file")
    git.commit("-am", "commited new text file1")
    # FIX: upstream was "task1" (missing underscore) while the local
    # branch is "task_1", so the push created a differently-named remote
    # branch. Use the same name on both sides.
    git.push("--set-upstream", "origin", "task_1")
    git.checkout("-")
def update(conf, args):
    '''Apply updates from the upstream repository.

    Fetches the canonical remote, lists incoming commits, then (if there
    are any) stashes local changes, squash-merges upstream, commits, pops
    the stash, and pushes. Raises ValueError on uncommitted changes.
    '''
    print('Checking for updates...')
    # fetch changes from the canonical repo
    git.fetch(constants.GIT_REMOTE, no_tags=True, quiet=True)
    # get a list of the commit messages for the incoming changes
    updates = git('--no-pager', 'log', '..FETCH_HEAD', oneline=True)
    # each entry becomes (short_hash, subject)
    updates = [tuple(m.split(None, 1)) for m in updates.splitlines()]
    # print out a list of the incoming updates
    if len(updates) > 0:
        print('Available updates:')
        max_updates = 10
        for commit, msg in updates[:max_updates]:
            print(color.yellow('*'), msg)
        # print a special message if too many updates are available
        if len(updates) > max_updates:
            print('...and', color.green(len(updates) - max_updates), 'more!')
            print('Run `git log ..FETCH_HEAD` to see the full list')
        # bail if we have uncommitted changes (git exits non-0 in this case)
        if git.diff(exit_code=True, quiet=True, _ok_code=(0, 1)).exit_code != 0:
            raise ValueError('The repository has uncommitted changes. Handle them, '
                             'then try updating again.')
        print('Applying the update...')
        # stash _all_ changes to the repo
        git.stash(include_untracked=True, all=True, quiet=True)
        # squash all the fetched commits together and merge them into master
        git.merge('@{u}', squash=True, quiet=True)
        # add a nice update commit that includes the latest upstream commit hash
        commit_message = 'Update dotparty to %s' % updates[0][0]
        git.commit(m=commit_message, quiet=True)
        # TODO: if squash merge failed, roll back to the pre-update state and
        # complain with instructions for the user to do their own update.
        # un-stash all our old changes
        git.stash('pop', quiet=True)
        # push our changes back up to the remote
        git.push(quiet=True)
        print('Update successful!')
    else:
        print('Already up-to-date!')
def _release(language, message, channel):
    # Python 2 code (statement-form print with trailing comma).
    # Regenerates .travis.yml for *language*/*channel*, commits it (empty
    # commits allowed, e.g. to retrigger CI), rebases, and pushes.
    print message, "...",
    if _is_dirty():
        sys.exit("Repo must be in clean state before deploying. Please commit changes.")
    _generate_yaml(language, channel)
    if _is_dirty():
        git.add('.travis.yml')
        # NOTE(review): reconstructed nesting — allow_empty suggests the
        # commit may run even when nothing changed; confirm which of the
        # following lines sit inside this `if` in the original layout.
        git.commit(m=message, allow_empty=True)
        git.pull(rebase=True)
        git.push()
    print "done."
def update_roles(self):
    """Fast-forward the roles submodule to origin/master, commit the
    submodule bump in the parent repo, and push (push failures are
    tolerated)."""
    os.chdir('%s/%s' % (self.repo_path, self.ROLES))
    git.pull('-f', '-u', 'origin', 'master')
    os.chdir(self.repo_path)
    commit_msg = '%s %s %s %s' % (
        self.UPDATE, self.ROLES, 'to', self.get_submodule_hash(self.ROLES))
    git.commit('-m', commit_msg, '.gitmodules', self.ROLES)
    sys.stdout.write('Committed %s [%s]\n' % (self.ROLES, commit_msg))
    try:
        git.push('origin', 'master')
    except sh.ErrorReturnCode_1:
        pass  # a failed push is deliberately non-fatal here
def migrate_for_path(packagepath, use_https):
    # Python 2 code. Migrates the SVN package at *packagepath* to a git
    # repo under destinationRoot, or rebases it if already migrated.
    _, name, _ = get_package_info(packagepath)
    destination = os.path.join(destinationRoot, name)
    if not os.path.exists(destination):  # Only do conversion is path does not yet exist
        print "Migrating {0} to {1} ...".format(packagepath, destination)
        #import ipdb; ipdb.set_trace()
        migrate_repo(packagepath, destination, use_https)
        print "-" * 5, name, "done", "-"*5
    else:
        print "{0} already migrated at {1}. git svn rebase-ing instead to bring up to date".format(packagepath, destination)
        # NOTE(review): these git calls appear to run in the process cwd,
        # not in *destination* — confirm a chdir happens elsewhere.
        git.pull("origin", "master")
        git.svn.rebase()
        git.push("origin", "master")
def initialize_git_repository() -> None:
    """Prepare the git repository."""
    print(" * Initializing the project git repository")
    try:
        git.init()
        git.remote(
            "add",
            "origin",
            "[email protected]:{{ cookiecutter.github_user }}/"
            "{{ cookiecutter.project_slug }}.git",
        )
        git.add(".")
        git.commit("-m", "feat: create initial project structure")
        git.checkout("-b", "gh-pages")
        git.checkout("-b", "feat/initial_iteration")
        print(" * Pushing initial changes to master")
        # Force-push each branch with its upstream set, in creation order.
        for branch_name in ("master", "gh-pages", "feat/initial_iteration"):
            git.push("--force", "--set-upstream", "origin", branch_name)
        git.push("--force")
    except sh.ErrorReturnCode as error:
        print("There was an error creating the Git repository.")
        print(str(error.stderr, "utf8"))
        sys.exit(1)
def _release(language, message, channel):
    # Python 2 code. Regenerates .travis.yml for *language*/*channel*,
    # commits it (empty commits allowed), rebases, and pushes.
    print message, "...",
    if _is_dirty():
        sys.exit(
            "Repo must be in clean state before deploying. Please commit changes."
        )
    _generate_yaml(language, channel)
    if _is_dirty():
        git.add('.travis.yml')
        # NOTE(review): reconstructed nesting — allow_empty suggests the
        # commit may run even when nothing changed; confirm which of the
        # following lines sit inside this `if` in the original layout.
        git.commit(m=message, allow_empty=True)
        git.pull(rebase=True)
        git.push()
    print "done."
def install_app(self, appid, pdpobject):
    # Python 2 code. Creates *appid* on Heroku, stages the PDP archive as
    # a fresh git repo via per-artifact handlers, and pushes it to the
    # Heroku git remote. Returns (appid, git_url, web_url) on success,
    # None when app creation fails.
    print "Creating app : %s on heroku" % appid
    r = self._create_app(appid)
    if r.ok:
        print "Successfully created"
    else:
        print "Error creating application"
        print r.status_code, r.text
        return
    resp = r.json()
    git_url = resp["git_url"]
    web_url = resp["web_url"]
    print "Staging PDP archive.."
    print "PDP archive is at : %s" % pdpobject.pdpdir
    print "Configuring git.."
    # NOTE(review): cwd is saved but never restored in this block —
    # confirm a caller is expected to chdir back.
    cwd = os.getcwd()
    os.chdir(pdpobject.pdpdir)
    from sh import git
    git.init()
    print "Invoking the right artifact handler"
    plan = pdpobject.plan
    for a in plan.artifacts:
        h = self.handler_map.get(a.type)
        if h is None:
            raise NotImplementedError("No handler for artifact type : %s" % a.type)
        ho = h(pdpobject, a)
        ho.handle_artifact()
    print "Configuring git remote.."
    git.remote.add("heroku", git_url)

    # Echo each line of the push's output as it arrives.
    def process_output(line):
        print(line)

    print "Adding files to repo"
    git.add(".")
    print "Committing to local repo"
    git.commit("-m", "Initial commit")
    print "Pushing to heroku"
    git.push("-u", "heroku", "master", _out=process_output, _tty_out=True)
    print "Uploaded app successfully.."
    print "App is available at : %s" % web_url
    return appid, git_url, web_url
def reply_comment_email(from_email, subject, message):
    """Process a moderation reply for a pending blog comment.

    The subject carries "[<branch>-<article>]". A reply starting with
    "NO" drops the comment; anything else publishes it (git merge/push),
    runs the post-commit hook, and notifies admin, reader and
    subscribers. All failures are logged, never raised.
    """
    try:
        # FIX: raw string for the regex (avoids invalid-escape warnings).
        m = re.search(r'\[(\d+)\-(\w+)\]', subject)
        branch_name = m.group(1)
        article = m.group(2)
        message = decode_best_effort(message)
        # safe logic: no answer or unknown answer is a go for publishing
        if message[:2].upper() == 'NO':
            logger.info('discard comment: %s' % branch_name)
            email_body = get_template('drop_comment').render(original=message)
            mail(pecosys.get_config('post', 'from_email'),
                 pecosys.get_config('post', 'to_email'),
                 'Re: ' + subject, email_body)
        else:
            if pecosys.get_config("git", "disabled"):
                logger.debug("GIT usage disabled (debug mode)")
            else:
                git.merge(branch_name)
                if pecosys.get_config("git", "remote"):
                    git.push()
                logger.info('commit comment: %s' % branch_name)
            # execute post-commit command asynchronously
            postcommand = pecosys.get_config('post', 'postcommand')
            if postcommand:
                executor.execute(postcommand)
            # send approval confirmation email to admin
            email_body = get_template('approve_comment').render(original=message)
            mail(pecosys.get_config('post', 'from_email'),
                 pecosys.get_config('post', 'to_email'),
                 'Re: ' + subject, email_body)
            # notify reader once comment is published
            reader_email, article_url = get_email_metadata(message)
            if reader_email:
                notify_reader(reader_email, article_url)
            # notify subscribers every time a new comment is published
            notify_subscribers(article)
            if pecosys.get_config("git", "disabled"):
                logger.debug("GIT usage disabled (debug mode)")
            else:
                git.branch("-D", branch_name)
    # FIX: bare `except:` narrowed to Exception so SystemExit and
    # KeyboardInterrupt are no longer swallowed; still logged, not raised.
    except Exception:
        logger.exception("new email failure")
def migrate_repo(packagepath, destination_path, use_https, authors="~/ros/fuerte/tue/authors.txt"):
    """#The process to migrate a project is:
    # Create a new repository.
    $ git svn clone <svn_url> <destination_path>
    $ cd ~/ros/fuerte/tue/git/challenge_cleanup
    $ git remote add origin <repo https url>
    $ git pull origin master
    $ git push origin master"""
    # Python 2 code (print statements, raw_input).
    authors = os.path.expanduser(authors)
    svn_url = svnurl_for_path(packagepath)
    language, name, description = get_package_info(packagepath)
    try:
        repo = create_repo(name, description, language=language)
    except github.GithubException as e:
        # 422 Unprocessable Entity usually means the repo already exists;
        # let the operator decide whether to reuse it.
        if e.status == 422:
            cont = raw_input(
                "The repo has an invalid field, it could already exist. Please verify and press 'c' to continue without first creating the repo: "
            )
            if 'c' in cont:
                repo = tue.get_repo(name)
            else:
                sys.stderr.write("Could not migrate {0}".format(name))
                return
    print "git svn clone {0} {1} A={2}".format(svn_url, destination_path, authors)
    git.svn.clone(svn_url, destination_path, A=authors)
    cd(destination_path)
    # HTTPS vs SSH remote, per caller preference.
    if use_https:
        git_url = repo.clone_url
    else:
        git_url = repo.ssh_url
    print "git remote add origin {0}".format(git_url)
    git.remote.add("origin", git_url)
    print "git pull origin master"
    git.pull("origin", "master")
    print "git push origin master"
    git.push("origin", "master")
def migrate_for_path(packagepath, use_https):
    # Python 2 code. Migrates the SVN package at *packagepath* to a git
    # repo under destinationRoot, or rebases it if already migrated.
    _, name, _ = get_package_info(packagepath)
    destination = os.path.join(destinationRoot, name)
    if not os.path.exists(
            destination):  # Only do conversion is path does not yet exist
        print "Migrating {0} to {1} ...".format(packagepath, destination)
        #import ipdb; ipdb.set_trace()
        migrate_repo(packagepath, destination, use_https)
        print "-" * 5, name, "done", "-" * 5
    else:
        print "{0} already migrated at {1}. git svn rebase-ing instead to bring up to date".format(
            packagepath, destination)
        # NOTE(review): these git calls appear to run in the process cwd,
        # not in *destination* — confirm a chdir happens elsewhere.
        git.pull("origin", "master")
        git.svn.rebase()
        git.push("origin", "master")
def run_completed(cls):
    """End-of-run hook: push every device group's config-backup repo,
    holding all repository locks for the duration."""
    if settings.TASK_CONFIG_BACKUP_DISABLE_GIT:
        # NOTE(review): locks are released here without being acquired in
        # this function — presumably they were taken earlier in the task
        # run; confirm against the acquiring code path.
        for repository in Repository.objects.all():
            repository.lock.release()
        return
    for repository in Repository.objects.all():
        repository.lock.acquire()
    for device_group in DeviceGroup.objects.all():
        os.chdir(device_group.repository.path)
        git.push()
    for repository in Repository.objects.all():
        repository.lock.release()
def _git_commit(commit_message):
    """Commit already-staged changes in the current directory.

    Calls the git binary directly through :mod:`sh`, so any non-zero
    exit status surfaces as an exception rather than failing silently.
    Under the :option:`DEBUG` setting a push is attempted after the
    commit (in production, pushing is left to the task handler).

    Args:
        commit_message (str): Single-line commit message

    Returns:
        *True* if a commit has been made, *False* if nothing was staged.

    Raises:
        :exc:`sh.ErrorReturnCode` exception in case of failure.
    """
    # Guard clause: with no staged changes, `git commit` would error out,
    # so report "no changes" and stop here.
    if not len(git("diff-index", "--name-only", "HEAD", "--")):
        return False
    git.commit(message=commit_message)
    # Pushing here would make the whole task fail on push errors; one
    # push per run is enough, so only push eagerly in debug mode.
    if settings.DEBUG:
        git.push()
    return True
def create_branch(self, name, head):
    """Create branch *name* pointing at commit *head* on this remote.

    Returns the newly created branch; raises GlError if it exists or
    the push fails.
    """
    if self.lookup_branch(name):
        raise GlError(
            'Branch {0} already exists in remote repository {1}'.format(
                name, self.name))
    # Push won't let us push the creation of a new branch from a SHA. So we
    # create a temporary local ref, make it point to the commit, and do the
    # push
    tmp_b = self.gl_repo.create_branch('gl_tmp_ref', head)
    try:
        git.push(self.name, '{0}:{1}'.format(tmp_b, name))
        return self.lookup_branch(name)
    except ErrorReturnCode as e:
        raise GlError(stderr(e))
    finally:
        # The temporary ref is always cleaned up, success or not.
        tmp_b.delete()
def push(self, username="******", password="******"):
    """Push to remote."""
    # Python 2 code. Feeds *username*/*password* to git's interactive
    # credential prompts by watching the push's TTY output line-by-line.
    def line_proc(line, stdin):
        if "Username for" in line:
            stdin.put(username + "\n")
        elif "Password for" in line:
            stdin.put(password + "\n")
        else:
            print "git-push: ", line.strip()

    rcv = self.char_to_line(line_proc)
    try:
        # This is a super hack. See http://amoffat.github.io/sh/tutorials/2-interacting_with_processes.html
        # for some explaination.
        p = git.push('-u', 'origin', 'master', _out=rcv, _out_bufsize=0,
                     _tty_in=True)
        # NOTE(review): presumably touching exit_code forces sh to wait
        # for the push to finish — confirm against sh's docs.
        p.exit_code
    except ErrorReturnCode_128:
        raise Exception(
            """Push to repository repository failed. You will need to store or cache credentials. You can do this by using ssh, .netrc, or a credential maanger. See: https://www.kernel.org/pub/software/scm/git/docs/gitcredentials.html"""
        )
    return True
def create_tag(self, name, commit):
    """Create tag *name* at *commit* on this remote and return it.

    Raises GlError when the tag already exists or the push fails.
    """
    if self.lookup_tag(name):
        raise GlError(
            'Tag {0} already exists in remote repository {1}'.format(
                name, self.name))
    # A tag can't be created directly on a remote: materialize it as a
    # throwaway local ref first, push that ref under the requested name,
    # and always delete the local one afterwards.
    scratch_tag = self.gl_repo.create_tag('gl_tmp_ref', commit)
    try:
        refspec = 'refs/tags/{0}:refs/tags/{1}'.format(scratch_tag, name)
        git.push(self.name, refspec)
        return self.lookup_tag(name)
    except ErrorReturnCode as e:
        raise GlError(stderr(e))
    finally:
        scratch_tag.delete()
def push(self, username="******", password="******"):
    '''Push to remote'''
    # Python 2 code. Answers git's interactive credential prompts with
    # *username*/*password* while streaming the push output.
    def line_proc(line,stdin):
        if "Username for" in line:
            stdin.put(username+ "\n")
        elif "Password for" in line:
            stdin.put(password+ "\n")
        else:
            print "git-push: ", line.strip()

    rcv = self.char_to_line(line_proc)
    try:
        # This is a super hack. See http://amoffat.github.io/sh/tutorials/2-interacting_with_processes.html
        # for some explaination.
        p = git.push('-u','origin','master', _out=rcv, _out_bufsize=0,
                     _tty_in=True)
        # NOTE(review): presumably touching exit_code forces sh to wait
        # for completion — confirm against sh's docs.
        p.exit_code
    except ErrorReturnCode_128:
        raise Exception("""Push to repository repository failed. You will need to store or cache credentials. You can do this by using ssh, .netrc, or a credential maanger. See: https://www.kernel.org/pub/software/scm/git/docs/gitcredentials.html""")
    return True
def main():
    """Script body.

    Interactive release flow: verify a -SNAPSHOT version, run the tests,
    prompt for release and next versions, tag the release commit, bump
    to the next snapshot, and optionally push commits and tags.
    """
    app_dir = join(dirname(abspath(__file__)), '..')
    sys.path.append(app_dir)
    from carto_renderer import version  # pylint: disable=import-error
    cur_version = version.SEMANTIC
    if not cur_version.endswith('-SNAPSHOT'):
        raise ValueError('Not a SNAPSHOT version!')
    default_release = cur_version.replace('-SNAPSHOT', '')
    pytest.main(app_dir)
    release_version = prompt(
        'Release version [{ver}]: '.format(ver=default_release),
        r'^\d+[.]\d+[.]\d+$',
        'Release version should be Major.Minor.Patch!',
        default_release)
    # Next development version defaults to a patch bump plus -SNAPSHOT.
    split_version = [int(i) for i in release_version.split('.')]
    split_version[2] += 1
    default_next = '.'.join([str(s) for s in split_version]) + '-SNAPSHOT'
    next_version = prompt(
        'Next version [' + default_next + ']: ',
        r'^\d+[.]\d+[.]\d+-SNAPSHOT$',
        'Not a valid SNAPSHOT version!',
        default_next)
    ver_file = join(app_dir, 'carto_renderer', 'version.py')
    set_version(ver_file, cur_version, release_version)
    git.add(ver_file)
    git.commit('-m', 'Setting version to ' + release_version)
    git.tag('v' + release_version)
    set_version(ver_file, release_version, next_version)
    git.add(ver_file)
    # FIX: was git.commit('-m' 'Setting version to ' + next_version) —
    # a missing comma made implicit string concatenation produce a single
    # "-mSetting version to ..." argument; now matches the commit above.
    git.commit('-m', 'Setting version to ' + next_version)
    do_push = prompt('Push changes to the remote repository (y/n)? [y]: ',
                     '.*', None, 'y')
    if do_push.lower().startswith('y'):
        print(git.push())
        print(git.push('--tags'))
def log(msg):
    """Write *msg* to today's log file, rotating at the date boundary:
    the finished file is committed and pushed before a new one opens."""
    global f
    print('[logbot]', 'log:', msg)
    today = strftime('%Y-%m-%d.log')
    if f.name != today:
        # Date rolled over: close the finished log, then best-effort
        # commit/push it.
        f.close()
        try:
            git.add(f.name)
            git.commit('-m', 'add ' + f.name)
            git.push()
        except ErrorReturnCode:
            print('[logbot]', 'err:', 'fail to commit', f.name)
        # NOTE(review): 'commit:' is printed even when the commit failed
        # above — confirm whether that is intended.
        print('[logbot]', 'commit:', f.name)
        print('[logbot]', 'new:', today)
        f = open(today, 'w+')
    f.write(msg)
def migrate_repo(packagepath, destination_path, use_https, authors="~/ros/fuerte/tue/authors.txt"): """#The process to migrate a project is: # Create a new repository. $ git svn clone <svn_url> <destination_path> $ cd ~/ros/fuerte/tue/git/challenge_cleanup $ git remote add origin <repo https url> $ git pull origin master $ git push origin master""" authors = os.path.expanduser(authors) svn_url = svnurl_for_path(packagepath) language, name, description = get_package_info(packagepath) try: repo = create_repo(name, description, language=language) print "URLs: ".format(repo.clone_url, repo.ssh_url) except github.GithubException as e: if e.status == 422: cont = raw_input( "The repo has an invalid field, it could already exist. Please verify and press 'c' to continue without first creating the repo: ") if 'c' in cont: repo = tue.get_repo(name) else: sys.stderr.write("Could not migrate {0}".format(name)) return print "git svn clone {0} {1} A={2}".format(svn_url, destination_path, authors) git.svn.clone(svn_url, destination_path, A=authors) cd(destination_path) if use_https: git_url = repo.clone_url else: git_url = repo.ssh_url print "git remote add origin {0}".format(git_url) git.remote.add("origin", git_url) print "git pull origin master" git.pull("origin", "master") print "git push origin master" git.push("origin", "master")
def push(self):
    '''Push to remote'''
    # A remote must have been configured at construction time.
    if not self.remote_url:
        raise RepositoryException("Can't push without setting remote_url in constructor")
    git.push(self.remote_url)
    return True
def deploy_code(repo):
    """Deploy one repository described by *repo* (a dict with 'dir' and
    'sync' keys): pull latest code, prune stale branches, and optionally
    mirror back to the 'github' remote. Errors are logged, not raised."""
    logging.info("Deploying repo: %s, back2github: %s", repo["dir"], repo["sync"])
    try:
        logging.info("Change working directory to %s", repo["dir"])
        sh.cd(repo["dir"])
        logging.info("Update code, git pull origin")
        git.pull("origin", _out=logging.info, _err=logging.error)
        logging.info("done")
        logging.info("Delete old branches, git fetch -p")
        git.fetch("origin", "-p", _out=logging.info, _err=logging.error)
        logging.info("done")
        if repo["sync"]:
            # Push runs in the background; its output is still logged.
            logging.info("Back2github, git push github")
            git.push("github", _out=logging.info, _err=logging.error, _bg=True)
    except Exception as e:
        # FIX: failures were logged at INFO level (logging.info), which
        # hides them from error-level monitoring; use logging.error.
        logging.error("Deploy error: %s", e)
def main():
    """Script body.

    Interactive release flow: verify a -SNAPSHOT version, run the tests,
    prompt for release and next versions, tag the release commit, bump
    to the next snapshot, and optionally push commits and tags.
    """
    app_dir = join(dirname(abspath(__file__)), '..')
    sys.path.append(app_dir)
    from carto_renderer import version  # pylint: disable=import-error
    cur_version = version.SEMANTIC
    if not cur_version.endswith('-SNAPSHOT'):
        raise ValueError('Not a SNAPSHOT version!')
    default_release = cur_version.replace('-SNAPSHOT', '')
    pytest.main(app_dir)
    release_version = prompt(
        'Release version [{ver}]: '.format(ver=default_release),
        r'^\d+[.]\d+[.]\d+$',
        'Release version should be Major.Minor.Patch!',
        default_release)
    # Next development version defaults to a patch bump plus -SNAPSHOT.
    split_version = [int(i) for i in release_version.split('.')]
    split_version[2] += 1
    default_next = '.'.join([str(s) for s in split_version]) + '-SNAPSHOT'
    next_version = prompt('Next version [' + default_next + ']: ',
                          r'^\d+[.]\d+[.]\d+-SNAPSHOT$',
                          'Not a valid SNAPSHOT version!',
                          default_next)
    ver_file = join(app_dir, 'carto_renderer', 'version.py')
    set_version(ver_file, cur_version, release_version)
    git.add(ver_file)
    git.commit('-m', 'Setting version to ' + release_version)
    git.tag('v' + release_version)
    set_version(ver_file, release_version, next_version)
    git.add(ver_file)
    # FIX: was git.commit('-m' 'Setting version to ' + next_version) —
    # a missing comma made implicit string concatenation produce a single
    # "-mSetting version to ..." argument; now matches the commit above.
    git.commit('-m', 'Setting version to ' + next_version)
    do_push = prompt('Push changes to the remote repository (y/n)? [y]: ',
                     '.*', None, 'y')
    if do_push.lower().startswith('y'):
        print(git.push())
        print(git.push('--tags'))
def backup():
    """Export the requested spreadsheet as flat ODS into the repo and
    commit/push it — but only when the working tree actually changed."""
    access_token = request.form["access_token"]
    spreadsheet_id = request.form["spreadsheet_id"]
    with cd("repo/"):
        git.pull()
        ods_bytes = export_as_ods(access_token, spreadsheet_id)
        ods_file = write_bytes_to_file("clubs.ods", ods_bytes)
        fods_file = convert_ods_to_fods(ods_file)
        # The intermediate binary ODS is not kept in the repo.
        os.remove(ods_file)
        # Only commit and push if any files have changed.
        if git("ls-files", "-m"):
            git.add(fods_file)
            git.commit("-m", "Update spreadsheet.")
            git.push()
    return "Consider it done!"
def process_cookbook(self):
    """Update the main cookbook submodule and each dependency submodule;
    push to origin/master when any of them produced a commit. Exits the
    process when no cookbook name is configured."""
    if not self.cookbook_name:
        sys.stderr.write('Could not find cookbook name, exiting\n')
        sys.exit(1)
    # The main cookbook first, then its dependencies, collecting each
    # submodule-update's "did commit" result.
    commits = [self.process_cookbook_submodule(
        self.cookbook_name, self.submodule_path,
        self.cookbook_git_url, self.COOKBOOK)]
    commits.extend(
        self.process_cookbook_submodule(
            dep_name, 'cookbooks/%s' % dep_name,
            self.dependency_cookbooks[dep_name]['url'], self.DEPENDENCY)
        for dep_name in self.dependency_cookbooks)
    # Push any changes
    if any(commits):
        git.push('origin', 'master')
def needs_push(self):
    """Return True when origin/master is behind local master (i.e. a
    push would transfer commits); False when up to date or on error."""
    import os
    try:
        # Dry-run push (-n) with porcelain output; an "[up to date]" line
        # means there is nothing to send.
        report = git.push('origin', 'master', n=True, porcelain=True)
        return not any('[up to date]' in line for line in report)
    except ErrorReturnCode_128:
        global_logger.error("Needs_push failed in {}".format(os.getcwd()))
        return False
def needs_push(self):
    """Return True when local master has commits origin lacks; False
    when up to date or when the dry-run push itself fails."""
    import os
    try:
        # Dry-run push (-n) with porcelain output; "[up to date]" means
        # there is nothing to push.
        for line in git.push('origin','master',n=True, porcelain=True):
            if '[up to date]' in line:
                return False
        return True
    except ErrorReturnCode_128:
        logger.error("Needs_push failed in {}".format(os.getcwd()))
        return False
def __del__(self):
    """On destruction: copy the tracked file into the repo under its
    base name, best-effort commit/push it, and close the file handle."""
    cd(self.repo)
    # "foo-something" -> "foo": the repo keeps only the base name.
    newname = self.fname.split('-')[0]
    logging.info('Copying file %s to %s' % (self.fname, newname))
    copyfile(self.directory + '/' + self.fname, self.repo + '/' + newname)
    try:
        logging.info('Adding the file to git')
        git.add(newname)
        logging.info('Committing the file')
        git.commit('-m', 'changes to ' + newname)
        logging.info('Pushing ...')
        git.push()
    except Exception:
        # FIX: was a bare `except: pass`, which silently swallowed every
        # git failure (and SystemExit/KeyboardInterrupt). Still
        # best-effort, but the failure is now visible in the log.
        logging.exception('git add/commit/push failed for %s', newname)
    if self._f and not self._f.closed:
        self._f.close()
def handle(self, *args, **options):
    """Management command: move config backups of devices that no longer
    exist into each group's _Archive directory, committing and pushing
    the moves per repository (under the repository lock)."""
    for group in DeviceGroup.objects.all():
        if group.repository is None:
            continue
        repo_path, group_path = os.path.split(group.config_backup_path)
        archive_path = os.path.join(repo_path, "_Archive", group_path)
        if not os.path.exists(group.config_backup_path):
            self.stderr.write("Invalid path: %s" % group.config_backup_path)
            continue
        group.repository.lock.acquire()
        if not os.path.exists(archive_path):
            os.makedirs(archive_path)
        os.chdir(group.config_backup_path)
        for filename in os.listdir("."):
            # Filenames are "<device label>.<ext>"; a file whose label has
            # no matching Device row belongs to a removed device.
            label = os.path.splitext(filename)[0]
            try:
                Device.objects.get(label=label)
            except Device.DoesNotExist:
                self.stdout.write("Archiving: %s" % label)
                os.rename(filename, os.path.join(archive_path, filename))
        os.chdir(group.repository.path)
        git.add("-A", ".")
        # Only push when the commit helper reports staged changes.
        if NetworkDeviceConfigBackupHandler._git_commit(
                "Moving device configs to archive"):
            git.push()
        group.repository.lock.release()
def push(self, remote, env=None, branch=None):
    """Push a branch to a given remote.

    Given a remote, env and branch, push branch to remote and add the
    environment variables in the env dict to the environment of the
    "git push" command. If no branch is given, the current branch will
    be used.

    The ability to specify env is so that PKEY and GIT_SSH can be
    specified so Git can use different SSH credentials than the current
    user (i.e. deploy keys for Github). If PKEY is not defined, the
    environment will not be over-ridden.

    Args:
        remote: name of the git remote to push to.
        env: optional dict of extra environment variables; the push
            environment is only overridden when it contains "PKEY".
        branch: branch to push; defaults to the current branch.
    """
    # FIX: mutable default argument ({}) replaced by None sentinel.
    env = env or {}
    os.chdir(self.repo)
    # We are explicit about what we are pushing, since the default
    # behavior is different in different versions of Git and/or by
    # configuration.
    branch_to_push = branch if branch else self.current_branch()
    # FIX: env["PKEY"] raised KeyError when PKEY was absent, contradicting
    # the documented "If PKEY is not defined..." behavior; .get() makes
    # the missing key fall through to the plain push.
    if env.get("PKEY"):
        new_env = os.environ.copy()
        new_env.update(env)
        git.push(remote, branch_to_push, _env=new_env)
    else:
        git.push(remote, branch_to_push)
def push_for_teams(teams, repo):
    """Push master of `repo` to each team's dedicated GitHub repository.

    For every team a temporary remote named after the team repo is added,
    master is pushed to it, and the remote is removed again (even on
    failure). Push failures are reported but do not abort the loop.
    """
    for team in teams:
        print("\t Pushing changes for team {}".format(team))
        team_repo_name = "{repo}-{batch}-{group}-{team}".format(
            repo=repo, batch=BATCH, group=GROUP, team=team)
        team_repo_git_url = "[email protected]:{org}/{repo}.git".format(
            org=TEAM_ORGANIZATION, repo=team_repo_name)
        with sh.pushd(repo):
            git.remote.add(team_repo_name, team_repo_git_url)
            try:
                out = git.push(team_repo_name, 'master')
                print(out.stdout or out.stderr)
            except ErrorReturnCode as e:
                # Previously swallowed silently; surface the failure so a
                # broken team repo is visible, but keep going for the rest.
                print("\t Push failed for team {}: {}".format(team, e))
            finally:
                # Always drop the temporary remote, pushed or not.
                git.remote.remove(team_repo_name)
def needs_push(self):
    """Return True when the local branch has commits the remote lacks.

    Performs a dry-run push (``-n --porcelain``) against ``remote_url``
    and inspects its line-by-line output: if every line is something
    other than "[up to date]" the branch needs pushing. On a git error
    (exit code 128) the failure is logged and False is returned.
    """
    import os
    if not self.remote_url:
        raise RepositoryException("Can't push without setting remote_url in constructor")
    try:
        # Lazily consume the dry-run output; stop at the first
        # "[up to date]" marker.
        dry_run = git.push(self.remote_url, n=True, porcelain=True)
        return all('[up to date]' not in line for line in dry_run)
    except ErrorReturnCode_128:
        global_logger.error("Needs_push failed in {}".format(os.getcwd()))
        return False
def publish(self, branch):
    """Publish (push) this branch onto the given remote branch.

    Args:
        branch: destination branch; must currently be a RemoteBranch.

    Raises:
        GlError: if `branch` is a local branch, there is nothing to push,
            the push was rejected (needs fuse/merge first), or git failed
            for any other reason.
    """
    self._check_op_not_in_progress()
    if not isinstance(branch, RemoteBranch):  # TODO: allow this
        raise GlError(
            'Can\'t publish to a local branch (yet---this will be implemented in '
            'the future)')
    try:
        # Sanity checks: we have a non-empty branch name and the
        # destination branch actually exists on the remote.
        assert self.branch_name.strip()
        assert branch.branch_name in self.gl_repo.remotes[
            branch.remote_name].listall_branches()
        cmd = git.push(
            branch.remote_name,
            '{0}:{1}'.format(self.branch_name, branch.branch_name))
        # git reports "Everything up-to-date" on stderr, not via exit code.
        if 'Everything up-to-date' in stderr(cmd):
            raise GlError('No commits to publish')
    except ErrorReturnCode as e:
        err_msg = stderr(e)
        # A rejected push means the remote has commits we don't have yet.
        if 'Updates were rejected' in err_msg:
            raise GlError('There are changes you need to fuse/merge')
        raise GlError(err_msg)
def delete(self):
    """Delete this branch on its remote by pushing an empty refspec."""
    refspec = ':{0}'.format(self.branch_name)
    try:
        git.push(self.remote_name, refspec)
    except ErrorReturnCode as e:
        # Re-raise as the project's own error type, carrying git's stderr.
        raise GlError(stderr(e))
def git_push(remote, branch, file_path):
    '''Push changes to remote'''
    # Run the push from the directory that contains file_path; disable
    # tty emulation so sh captures output instead of streaming it.
    proc = git.push(remote, branch, _cwd=dir_path(file_path), _tty_out=False)
    proc.wait()
    show_msg('Push Done', 'Info')
# Interactive release helper: confirm, commit and push local changes, then
# build and (after confirmation) upload to PyPI.
# NOTE(review): this snippet appears truncated in the source -- the final
# `if upld == 'n':` has no body here.
commit = input(
    'Changes were detected, would you like to commit all current changes? (y/n) '
)
if commit != 'y' and commit != 'n':
    print('Expected "y" or "n", instead found {}. Exiting.'.format(commit))
    exit(1)
elif commit == 'n':
    # User declined; show the pending status and bail out.
    print(
        'Not committing changes; please commit all changes and try again. Exiting.'
    )
    print(status)
    exit(2)
commit_msg = input('Committing, please provide a commit message: ')
git.add('.')
git.commit('-am', commit_msg)
push_result = git.push()
print(push_result)
# `status` was captured earlier (out of view); push again if commits were
# already ahead of the remote before this run.
if 'Your branch is ahead of' in status:
    print('Found unpushed commits. Pushing...')
    push_result = git.push()
    print(push_result)
clean()
make_dist()
upld = input(
    'About to upload to PyPi, are you sure you want to do this? (y/n) ')
if upld != 'y' and upld != 'n':
    print('Expected "y" or "n", instead found {}. Exiting.'.format(upld))
    exit(1)
if upld == 'n':
def install_app(self, appid, pdpobject):
    """Create an OpenShift application for a PDP archive and deploy it.

    Maps each plan artifact to a handler/cartridge, creates the app,
    applies the handler-provided environment via `rhc`, then clones the
    cartridge's git repo, replaces its contents with the PDP archive and
    force-pushes it back to OpenShift.

    Returns (appid, git_url, web_url) on success, or None if app
    creation failed. (Python 2 code.)
    """
    print "Analyzing artifacts.."
    plan = pdpobject.plan
    for a in plan.artifacts:
        # handler_map: artifact type -> (handler class, cartridge name).
        h, c = self.handler_map.get(a.type)
        if h is None:
            raise NotImplementedError("No handler for artifact type : %s" % a.type)
        print "Detected app type : %s, Matching cartridge : %s" % (a.type, c)
        ho = h(pdpobject, a)
        renv = ho.handle_artifact()
    # NOTE(review): `h` and `renv` below come from the LAST artifact only --
    # presumably plans carry a single artifact; verify against callers.
    print "Creating application.."
    r = self._create_app(appid, h.cartridge)
    if r.ok:
        print "Application %s created" % appid
    else:
        print "Error creating application %s" % appid
        print r.status_code, r.text
        return
    # Set env variables on the app using rhc for now
    from sh import rhc
    for k, v in renv.items():
        rhc.env.set("%s=%s" % (k, v), "-a", appid)
        print "Set environment %s=%s on the app" % (k, v)
    resp = r.json()
    print "Received response : %s" % resp
    data = resp["data"]
    # Start staging it.
    git_url = data["git_url"]
    web_url = data["app_url"]
    print "Staging PDP archive.."
    print "PDP archive is at : %s" % pdpobject.pdpdir
    print "Configuring git.."
    # Empty out any samples given by the cartridge
    from sh import git
    import sh
    import tempfile
    import shutil
    import subprocess
    tmpdir = tempfile.mkdtemp()
    subprocess.check_call(["git", "clone", git_url, tmpdir])
    print "Cloned cartridge sample to %s" % tmpdir
    cwd = os.getcwd()
    os.chdir(tmpdir)
    git.rm("-r", "*")
    print "Removed all contents from cartridge repo"
    # Copy pdpdir contents (shell=True needed for the glob expansion).
    subprocess.check_call("cp -rp %s/* ." % pdpobject.pdpdir, shell=True)
    print "Copied app contents to %s" % tmpdir
    sh.ls("-la", tmpdir)
    print "Adding and committing app code"
    git.add("*")
    git.commit("-m", "Adding app contents")
    print "Configuring git remote.."
    git.remote.add("rhc", git_url)
    print "Pushing to openshift"
    # Force-push: the clone's history is discarded deliberately.
    git.push("-u", "-f", "rhc", "master")
    print "Uploaded app successfully.."
    print "App is available at : %s" % web_url
    return appid, git_url, web_url
def run_completed(cls):
    """
    Many things happen once run is complete. At this point, all of the
    device configs are already committed.

    * Each device is symlinked in the `ByHostname` directory and symlinks
      for deleted device are removed.
    * All repositories are pushed to the default remote.
    * All device type regex filters are committed to the repository with
      the ID 1. This is done in order to track changes to the filters.
    """
    super(NetworkDeviceConfigBackupHandler, cls).run_completed()
    # Acquire every repository lock up front; released at the very end
    # (or early, if git interaction is disabled).
    # NOTE(review): nesting reconstructed from collapsed source -- confirm
    # the device loop is not inside the lock-acquisition loop.
    for repository in Repository.objects.all():
        repository.lock.acquire()
    # Generate "ByHostname" symlinks
    for device in Device.objects.all():
        if device.group.repository:
            symlink_dir = os.path.join(device.group.repository.path,
                                       "_ByHostname")
            if not os.path.exists(symlink_dir):
                os.mkdir(symlink_dir)
            symlink = os.path.join(symlink_dir,
                                   '{}.txt'.format(device.hostname))
            # Relative symlink so the repository stays relocatable.
            if not os.path.islink(symlink) and os.path.exists(
                    device.config_backup_filename):
                os.symlink(
                    os.path.relpath(device.config_backup_filename,
                                    symlink_dir),
                    symlink)
    # Git push and config filter commits
    if settings.TASK_CONFIG_BACKUP_DISABLE_GIT:
        # Git disabled: just release the locks and stop.
        for repository in Repository.objects.all():
            repository.lock.release()
        return
    for repository in Repository.objects.all():
        os.chdir(repository.path)
        # Clean up broken symlinks
        find('-L', '_ByHostname', '-type', 'l', '-delete')
        git.add('-A', '_ByHostname')
        if cls._git_commit('Symlink updates'):
            git.push()
    for device_group in DeviceGroup.objects.all():
        if device_group.repository:
            os.chdir(device_group.repository.path)
            git.push()
    # TODO: make the repository ID configurable
    os.chdir(Repository.objects.get(id=1).path)
    for device_type in DeviceType.objects.all():
        if not device_type.config_filter:
            continue
        # Meta/<FirstWordOfName>_filter.txt inside repository 1.
        filename = os.path.join(
            Repository.objects.get(id=1).path, 'Meta',
            "{}_filter.txt".format(
                RE_MATCH_FIRST_WORD.findall(
                    device_type.name.replace(" ", "_"))[0]))
        with codecs.open(filename, 'w', 'utf8') as f:
            f.write("# Automatically generated from database\n\n")
            f.write(device_type.config_filter)
        git.add("--", quote(filename))
        if cls._git_commit('Config filter for device type "%s" changed'
                           % device_type.name):
            git.push()
    for repository in Repository.objects.all():
        repository.lock.release()
def git_push(self): """Push current version to git.""" # switch to project folder cd(self.paths['root']) # push changes to remote print git.push('origin')
print("load changes")
# Hard-sync the working copy to origin/master, discarding local edits.
git.pull("--all")
git.reset("origin/master", "--hard")
git.pull()
print("Finish download")

# Pull the first capture group out of every non-empty matching line of
# `l` (defined earlier); the header line is skipped.
for line in l.split("\n")[1:]:
    hit = regex.search(line) if line else None
    if hit:
        ips.append(hit.groups()[0])

# Plain one-entry-per-line dump.
with open("rawlist.txt", "w") as raw_file:
    for addr in ips:
        raw_file.write(addr + "\n")

# AutoProxy-style list: each entry prefixed with "||".
with open("list.txt", 'w') as list_file:
    for addr in ips:
        list_file.write("||{}\n".format(addr))

# Commit and push only when the working tree actually changed.
if git.status("--short") != "":
    print("updating")
    git.add(".")
    git.commit("-m", "update by robot")
    git.push()
    print("updating finished")
print("load changes")
# Reset local state to match origin/master exactly before regenerating.
git.pull("--all")
git.reset("origin/master", "--hard")
git.pull()
print("Finish download")

# Collect the first capture group from each non-empty matching line
# (header skipped); `l` and `ips` are defined earlier in the script.
matches = (regex.search(entry) for entry in l.split("\n")[1:] if entry)
ips.extend(m.groups()[0] for m in matches if m)

# Raw list: one entry per line.
with open("rawlist.txt", "w") as fp:
    fp.writelines("{}\n".format(ip) for ip in ips)

# AutoProxy-style list: "||" prefix per entry.
with open("list.txt", 'w') as fp:
    fp.writelines("||{}\n".format(ip) for ip in ips)

# Only commit/push when something actually changed.
if git.status("--short") != "":
    print("updating")
    git.add(".")
    git.commit("-m", "update by robot")
    git.push()
    print("updating finished")
# NOTE(review): this `exit()` is presumably the tail of an earlier branch
# that is out of view -- confirm against the full script.
exit()
time_str = datetime.now().strftime('%Y-%m-%d %H:%M:%S %z')
try:
    # Publish generated docs onto the gh-pages branch.
    git.checkout('gh-pages')
    for dir_name in dir_names:
        # Remove the current documentation folders (v4, v5, current)
        try:
            rmtree(dir_name)
        except:
            pass
        # Move generated folders (in 'tmp/') into the root folder
        move('tmp/' + dir_name, '.')
    # Remove 'tmp/'
    rmtree('tmp')
except:
    raise
if args.commit or args.push_gh_pages:
    # Commit the regenerated docs with a timestamped message.
    git.add(dir_names)
    git.commit(m='Auto-generated docs %s' % time_str)
if args.push_gh_pages:
    # Push gh-pages to the requested remote.
    print 'Pushing to "%s"' % args.push_gh_pages
    git.push([args.push_gh_pages, 'gh-pages'])
def add_to_blacklist(**kwargs):
    """Add an item to one of SmokeDetector's blacklist files via git.

    Keyword args: blacklist ("website"/"keyword"/"username"),
    item_to_blacklist, username, chat_profile_link, code_permissions.

    With code permissions the change is merged and pushed to master
    directly; without, a branch is pushed and a GitHub pull request is
    opened. Returns a (success, message) tuple in every path.

    NOTE(review): this block contains scrubbed/corrupted source (the
    ``******`` runs below); the original code around them cannot be
    reconstructed from here -- kept verbatim.
    """
    if 'windows' in str(platform.platform()).lower():
        log('warning', "Git support not available in Windows.")
        return (False, "Git support not available in Windows.")
    blacklist = kwargs.get("blacklist", "")
    item_to_blacklist = kwargs.get("item_to_blacklist", "")
    username = kwargs.get("username", "")
    chat_profile_link = kwargs.get("chat_profile_link",
                                   "http://chat.stackexchange.com/users")
    code_permissions = kwargs.get("code_permissions", False)
    # Make sure git credentials are set up
    if git.config("--global", "--get", "user.name", _ok_code=[0, 1]) == "":
        return (False, "Tell someone to run `git config --global user.name \"SmokeDetector\"`")
    if git.config("--global", "--get", "user.email", _ok_code=[0, 1]) == "":
        return (False, "Tell someone to run `git config --global user.email \"[email protected]\"`")
    if blacklist == "":
        # If we broke the code, and this isn't assigned, error out before doing anything, but do
        # so gracefully with a nice error message.
        return (False, "Programming Error - Critical information missing for GitManager: blacklist")
    if item_to_blacklist == "":
        # If we broke the code, and this isn't assigned, error out before doing anything, but do
        # so gracefully with a nice error message.
        return (False, "Programming Error - Critical information missing for GitManager: item_to_blacklist")
    item_to_blacklist = item_to_blacklist.replace("\s", " ")
    # Pick the blacklist file and the matching Metasmoke search parameter.
    if blacklist == "website":
        blacklist_file_name = "blacklisted_websites.txt"
        ms_search_option = "&body_is_regex=1&body="
    elif blacklist == "keyword":
        blacklist_file_name = "bad_keywords.txt"
        ms_search_option = "&body_is_regex=1&body="
    elif blacklist == "username":
        blacklist_file_name = "blacklisted_usernames.txt"
        # NOTE(review): the next line is corrupted in the source (scrubber
        # damage) -- it presumably once assigned ms_search_option and had an
        # `else:` branch returning the "Invalid blacklist type" error.
        ms_search_option = "&username_is_regex=1&username="******"Invalid blacklist type specified, something has broken badly!")
    git.checkout("master")
    try:
        git.pull()
    except:
        pass
    # Check that we're up-to-date with origin (GitHub)
    git.remote.update()
    if git("rev-parse", "refs/remotes/origin/master").strip() != git("rev-parse", "master").strip():
        return (False, "HEAD isn't at tip of origin's master branch")
    # Check that blacklisted_websites.txt isn't modified locally. That could get ugly fast
    if blacklist_file_name in git.status():  # Also ugly
        return (False, "{0} is modified locally. This is probably bad.".format(blacklist_file_name))
    # Add item to file
    with open(blacklist_file_name, "a+") as blacklist_file:
        # Ensure the file ends with a newline before appending.
        last_character = blacklist_file.read()[-1:]
        if last_character != "\n":
            blacklist_file.write("\n")
        blacklist_file.write(item_to_blacklist + "\n")
    # Checkout a new branch (for PRs for non-code-privileged people)
    branch = "auto-blacklist-{0}".format(str(time.time()))
    git.checkout("-b", branch)
    # Clear HEAD just in case
    git.reset("HEAD")
    git.add(blacklist_file_name)
    # NOTE(review): the author email below is scrubbed in the source.
    git.commit("--author='SmokeDetector <*****@*****.**>'", "-m",
               u"Auto blacklist of {0} by {1} --autopull".format(item_to_blacklist, username))
    if code_permissions:
        # Privileged path: merge straight into master and push.
        git.checkout("master")
        git.merge(branch)
        git.push("origin", "master")
        git.branch('-D', branch)  # Delete the branch in the local git tree since we're done with it.
    else:
        # Unprivileged path: push the branch and open a pull request.
        git.push("origin", branch)
        git.checkout("master")
        if GlobalVars.github_username is None or GlobalVars.github_password is None:
            return (False, "Tell someone to set a GH password")
        payload = {"title": u"{0}: Blacklist {1}".format(username, item_to_blacklist),
                   "body": u"[{0}]({1}) requests the blacklist of the {2} {3}. See the Metasmoke search [here]"
                           "(https://metasmoke.erwaysoftware.com/search?utf8=%E2%9C%93{4}{5})\n"
                           u"<!-- METASMOKE-BLACKLIST-{6} {3} -->".format(username, chat_profile_link,
                                                                          blacklist, item_to_blacklist,
                                                                          ms_search_option,
                                                                          item_to_blacklist.replace(" ", "+"),
                                                                          blacklist.upper()),
                   "head": branch,
                   "base": "master"}
        response = requests.post("https://api.github.com/repos/Charcoal-SE/SmokeDetector/pulls",
                                 auth=HTTPBasicAuth(GlobalVars.github_username, GlobalVars.github_password),
                                 data=json.dumps(payload))
        log('debug', response.json())
        try:
            git.checkout("deploy")  # Return to deploy, pending the accept of the PR in Master.
            git.branch('-D', branch)  # Delete the branch in the local git tree since we're done with it.
            return (True, "You don't have code privileges, but I've [created a pull request for you]({0}).".format(
                response.json()["html_url"]))
        except KeyError:
            git.checkout("deploy")  # Return to deploy
            # Delete the branch in the local git tree, we'll create it again if the
            # command is run again. This way, we keep things a little more clean in
            # the local git tree
            git.branch('-D', branch)
            # Error capture/checking for any "invalid" GH reply without an 'html_url' item,
            # which will throw a KeyError.
            if "bad credentials" in str(response.json()['message']).lower():
                # Capture the case when GH credentials are bad or invalid
                return (False, "Something is wrong with the GH credentials, tell someone to check them.")
            else:
                # Capture any other invalid response cases.
                return (False, "A bad or invalid reply was received from GH, the message was: %s"
                        % response.json()['message'])
    git.checkout("deploy")  # Return to deploy to await CI.
    return (True, "Blacklisted {0}".format(item_to_blacklist))