def remove_study(self, study_id, branch, author="OpenTree API <*****@*****.**>"):
    """Remove a study from the repository.

    Given a study_id, a branch name and optionally an author, delete the
    study's directory on the given branch and attribute the resulting
    commit to *author*.

    Returns the SHA of the new (or existing) commit on *branch*.
    """
    # Work from inside the repository so the relative git paths resolve.
    os.chdir(self.repo)
    study_dir = "study/%s" % study_id
    # (Dropped an unused `study_filename` local that was computed here.)
    if self.branch_exists(branch):
        git.checkout(branch)
        if not os.path.isdir(study_dir):
            # Branch already exists locally with the study removed,
            # so just return the current commit SHA.
            return git("rev-parse", "HEAD").strip()
    else:
        # Create this new branch off of master, NOT the
        # currently-checked-out branch!
        git.checkout("master")
        git.checkout("-b", branch)
    git.rm("-rf", study_dir)
    # sh maps author=/message= keyword args onto git's long options.
    git.commit(author=author,
               message="Delete Study #%s via OpenTree API" % study_id)
    return git("rev-parse", "HEAD").strip()
def rm(name, **kwargs):
    """Remove a note by name.

    Stages the removal with git and commits it.  Raises FileNotFoundError
    (chained to the underlying git error) when git does not know the path.
    """
    try:
        git.rm(name)
    except ErrorReturnCode_128 as exc:
        # git exits 128 for pathspec-not-found; surface it with the name
        # and keep the original error chained for debugging.
        raise FileNotFoundError(name) from exc
    git.commit("-m", "rm {}".format(name))
def mv(self, sender, page, old_path, author, message, commit=True):
    """React to a page move: drop old_path from git if it shows as
    deleted or modified, then optionally commit the page."""
    porcelain = git.status("--porcelain", old_path).stdout.decode("utf8")
    worktree_state = porcelain[1:2]
    removed = worktree_state in ("D", "M")
    if removed:
        git.rm(old_path)
    if commit:
        self.commit(page, author=author, message=message,
                    extra_path=old_path if removed else "")
def mv(self, sender, page, old_path, author, message, commit=True):
    """Handle a page move signal.

    If git reports the old path as deleted ('D') or modified ('M'),
    stage its removal and include it in the commit as extra_path.
    """
    out = git.status('--porcelain', old_path).stdout
    extra_path = ''
    if out.decode('utf8')[1:2] in ('D', 'M'):
        git.rm(old_path)
        extra_path = old_path
    if not commit:
        return
    self.commit(page, author=author, message=message, extra_path=extra_path)
def pagesBranch():
    """Create/refresh the orphan gh-pages branch seeded with the
    coming-soon template, push it, then switch back to dev."""
    import glob
    try:
        git.checkout("--orphan", "gh-pages")
        git.rm("-r", "-f", "--ignore-unmatch", ".")
    except sh.ErrorReturnCode:
        # was a bare `except:` — narrowed to command failures
        print("No files exist in the gh-pages branch")
    try:
        # `sh` does not perform shell glob expansion, so a literal "*"
        # would be passed to cp and fail; expand the template files here.
        pattern = os.path.dirname(os.path.realpath(__file__)) + "/www/coming-soon/*"
        sh.cp("-R", *(glob.glob(pattern) + ["./"]))
    except sh.ErrorReturnCode:
        print("Template not found")
    pushAll("Creating the first instance of the coming-soon site")
    try:
        git.checkout("dev")
    except sh.ErrorReturnCode:
        # dev branch does not exist yet — create it
        git.checkout("-b", "dev")
def remove_study(self, study_id, branch, author="OpenTree API <*****@*****.**>"):
    """Remove a study

    Given a study_id, branch and optionally an author, remove a study
    on the given branch and attribute the commit to author.

    Returns the SHA of the commit on branch.
    """
    # Work from inside the repository so relative git paths resolve.
    os.chdir(self.repo)
    study_dir = "study/%s" % study_id
    # NOTE(review): study_filename is computed but never used below.
    study_filename = "%s/%s.json" % (study_dir, study_id)
    if self.branch_exists(branch):
        git.checkout(branch)
        if not os.path.isdir(study_dir):
            # branch already exists locally with study removed
            # so just return the commit SHA
            return git("rev-parse", "HEAD").strip()
    else:
        # Create this new branch off of master, NOT the currently-checked out branch!
        git.checkout("master")
        git.checkout("-b", branch)
    git.rm("-rf", study_dir)
    # presumably sh maps author=/message= kwargs to git's --author/--message
    git.commit(author=author, message="Delete Study #%s via OpenTree API" % study_id)
    new_sha = git("rev-parse", "HEAD")
    return new_sha.strip()
def test_remote_delete_a_page(self):
    """Deleting the fixture page directly in git and then POSTing to
    self.url should leave no Page rows in the database."""
    # Simulate a remote deletion committed outside the app.
    git.rm(self.page.path)
    git.commit('-m', 'delete page')
    # Trigger the view under test; the empty dict is the POST body.
    response = self.client.post(self.url, {})
    self.assertEqual(Page.objects.count(), 0)
def install_app(self, appid, pdpobject): print "Analyzing artifacts.." plan = pdpobject.plan for a in plan.artifacts: h, c = self.handler_map.get(a.type) if h is None: raise NotImplementedError("No handler for artifact type : %s" % a.type) print "Detected app type : %s, Matching cartridge : %s" % (a.type, c) ho = h(pdpobject, a) renv = ho.handle_artifact() print "Creating application.." r = self._create_app(appid, h.cartridge) if r.ok: print "Application %s created" % appid else: print "Error creating application %s" % appid print r.status_code, r.text return # Set env variables on the app using rhc for now from sh import rhc for k, v in renv.items(): rhc.env.set("%s=%s" % (k, v), "-a", appid) print "Set environment %s=%s on the app" % (k, v) resp = r.json() print "Received response : %s" % resp data = resp["data"] # Start staging it. git_url = data["git_url"] web_url = data["app_url"] print "Staging PDP archive.." print "PDP archive is at : %s" % pdpobject.pdpdir print "Configuring git.." # Empty out any samples given by the cartridge from sh import git import sh import tempfile import shutil import subprocess tmpdir = tempfile.mkdtemp() subprocess.check_call(["git", "clone", git_url, tmpdir]) print "Cloned cartridge sample to %s" % tmpdir cwd = os.getcwd() os.chdir(tmpdir) git.rm("-r", "*") print "Removed all contents from cartridge repo" # Copy pdpdir contents subprocess.check_call("cp -rp %s/* ." % pdpobject.pdpdir, shell=True) print "Copied app contents to %s" % tmpdir sh.ls("-la", tmpdir) print "Adding and committing app code" git.add("*") git.commit("-m", "Adding app contents") print "Configuring git remote.." git.remote.add("rhc", git_url) print "Pushing to openshift" git.push("-u", "-f", "rhc", "master") print "Uploaded app successfully.." print "App is available at : %s" % web_url return appid, git_url, web_url
"stm32", "esp32", "cc3200", "teensy", "windows", "zephyr", "minimal", "esp8266", "pic16bit", "qemu-arm", "bare-arm", "rp2", ] for p in ports_to_delete: try: git.rm("-rf", "ports/" + p) except sh.ErrorReturnCode_128: pass # We inherit stm32 changes into stm because we did a git rename. git.status("--porcelain=1", "ports/stm", _out=out_buf) out_buf.seek(0) line = out_buf.readline() while line: state, path = line.split() if state == "UU": git.checkout("--ours", path) git.add(path) elif state == "UA": git.rm(path) line = out_buf.readline()
def mv(self, page, old_path, author, message):
    """Stage removal of old_path when git reports it deleted, then
    commit the page with the given author and message."""
    porcelain_output = git.status('--porcelain', old_path).stdout
    worktree_state = porcelain_output.decode('utf8')[1:2]
    if worktree_state == 'D':
        git.rm(old_path)
    self.commit(page, author=author, message=message)
def migrate_to_git():
    """Replay wiki page revisions into a git repository.

    Reads page version history from the data directory, writes each
    revision's content (.txt, plus rst/markdown renderings) into the
    target repo, and stages/removes files per revision, committing each
    one with the revision's metadata passed as git options.
    """
    # User map: either a pre-built JSON file or parsed from wiki data.
    if arguments['--users-file']:
        users = json.loads(open(arguments['<users_file>']).read())
    else:
        users = parse_users()
    git_repo = arguments['<git_repo>']
    if not os.path.exists(git_repo):
        os.makedirs(git_repo)
    # Initialize the repository only once.
    if not os.path.exists(os.path.join(git_repo, '.git')):
        git.init(git_repo)
    data_dir = os.path.abspath(arguments['<data_dir>'])
    root = os.path.join(data_dir, 'pages')
    pages = os.listdir(root)
    os.chdir(git_repo)
    for page in pages:
        if page in CONVERSION_BLACKLIST:
            continue
        versions = get_versions(page, users=users, data_dir=data_dir)
        if not versions:
            print("### ignoring %s (no revisions found)" % page)
            continue
        path = _hyphenize(_unquote(page)) + '.txt'
        print("### Creating %s\n" % path)
        dirname, basename = os.path.split(path)
        if dirname and not os.path.exists(dirname):
            os.makedirs(dirname)
        for version in versions:
            revision = version.pop('revision')
            # Handle attachment revisions
            if revision == '99999999':
                continue
            print("revision %s" % revision)
            try:
                # Each content key is pop()ed on every path so that the
                # remaining `version` dict holds only git commit options.
                if version['content']:
                    with open(path, 'w') as f:
                        print("Opening %s" % path)
                        f.write(version.pop('content'))
                    print("Adding %s" % path)
                    git.add(path)
                else:
                    # Empty content means the page was deleted in this
                    # revision.
                    print("Removing %s" % path)
                    git.rm(path)
                    version.pop('content')
                if version['rst_content']:
                    with open(path.replace('txt', 'rst'), 'w') as f:
                        print("Opening %s" % path.replace('txt', 'rst'))
                        f.write(version.pop('rst_content'))
                    # Convert the written rst file to GitHub markdown.
                    # NOTE(review): assumes pandoc runs after the file is
                    # closed — nesting was ambiguous in the original.
                    pandoc(path.replace('txt', 'rst'), f="rst",
                           t="markdown_github",
                           o=path.replace('txt', 'md'))
                    print("Adding %s" % path.replace('txt', 'rst'))
                    git.add(path.replace('txt', 'rst'))
                    print("Adding %s" % path.replace('txt', 'md'))
                    git.add(path.replace('txt', 'md'))
                elif os.path.isfile(path.replace('txt', 'rst')):
                    # rst content disappeared: remove the derived files.
                    print("Removing %s" % path.replace('txt', 'rst'))
                    git.rm(path.replace('txt', 'rst'))
                    print("Removing %s" % path.replace('txt', 'md'))
                    git.rm(path.replace('txt', 'md'))
                    version.pop('rst_content')
                else:
                    version.pop('rst_content')
                print("Committing %s" % path)
                print(version['m'])
                # git refuses empty messages; synthesize one from the date.
                if not version['m'].strip():
                    version['m'] = "Change made on %s" % version[
                        'date'].strftime('%x')
                git.commit(path.replace('txt', '*'), **version)
            except Exception as e:
                # Best-effort migration: log and continue with the next
                # revision rather than aborting the whole run.
                print(e)
def main():
    """Mirror MediaWiki page history into a git repository.

    With --new, replays the full revision/archive history from the wiki
    database into a fresh repository, interleaving page deletions from the
    deletion log in timestamp order.  Otherwise updates the existing
    repository incrementally from the recentchanges API.

    Returns 1 when a page in an RDF-relevant namespace changed (signals
    that RDF should be regenerated), else 0.
    """
    params = get_parameters()
    setup_logging(params.verbose)
    if (not params.dbuser or not params.dbpw):
        logging.error(u'Error: incomplete database login credentials')
        sys.exit(-1)
    # connect via API to retrieve namespaces
    logging.info('Connecting to wiki %s://%s%s/ as %s via API',
                 params.protocol, params.wikiserver, params.scriptpath,
                 params.wikiuser)
    session = requests.Session()
    session.verify = params.certificate
    session.auth = None
    session.headers['User-Agent'] = 'MwClient/' + mwclient.client.__ver__ + \
        ' (https://github.com/mwclient/mwclient)'
    site = mwclient.Site((params.protocol, params.wikiserver),
                         path=params.scriptpath + '/', pool=session)
    site.login(params.wikiuser, params.wikipw)
    result = site.api('query', meta='siteinfo', siprop='namespaces')
    namespaces = {}
    try:
        for nsid, nsdict in result['query']['namespaces'].iteritems():
            if nsid == u'0':
                namespaces[nsid] = 'default'
            else:
                namespaces[nsid] = nsdict['canonical']
    except Exception:
        # was a bare except: — keep the log-and-continue behavior but let
        # SystemExit/KeyboardInterrupt propagate
        logging.error(u'Error in api result: %s', pprint.pformat(result))
    # connect via database to retrieve page/revision content
    con = MySQLdb.connect(unix_socket=params.socket, user=params.dbuser,
                          passwd=params.dbpw, db=params.dbname,
                          charset='utf8', use_unicode=True)
    cursor = con.cursor()
    returncode = 0
    # pages in these namespaces are used to generate the RDF
    rdfnamespaces = set(['Metaphor', 'Frame', 'Metaphor_family',
                         'Frame_family', 'MetaRC', 'CxnMP'])
    if params.new:
        # populate a new git repository; returncode = 1 signals that RDF
        # should be regenerated
        returncode = 1
        # query written so as not to return revisions in which the wiki
        # text has not changed at all
        revq = """\
(SELECT r.rev_id as 'rev_id', p.page_namespace, p.page_title, t.old_text,
        r.rev_comment, r.rev_timestamp, u.user_name, u.user_email, 'revision'
 FROM revision AS r
 INNER JOIN page AS p ON r.rev_page = p.page_id
 INNER JOIN text AS t ON r.rev_text_id = t.old_id
 INNER JOIN user AS u ON r.rev_user=u.user_id
 LEFT JOIN revision AS oldr ON r.rev_parent_id = oldr.rev_id
 WHERE r.rev_parent_id=0 OR
       (t.old_text <> (SELECT t2.old_text FROM revision AS r2
                       INNER JOIN text AS t2 ON r2.rev_text_id=t2.old_id
                       WHERE r2.rev_id=r.rev_parent_id)))
UNION ALL
(SELECT a.ar_rev_id as 'rev_id', a.ar_namespace, a.ar_title, t.old_text,
        a.ar_comment, a.ar_timestamp, u.user_name, u.user_email, 'archive'
 FROM archive AS a
 INNER JOIN text AS t ON a.ar_text_id = t.old_id
 INNER JOIN user AS u ON a.ar_user=u.user_id
 LEFT JOIN archive AS olda ON a.ar_parent_id = olda.ar_rev_id
 WHERE a.ar_parent_id=0 OR
       (t.old_text <> (SELECT t2.old_text FROM archive AS a2
                       INNER JOIN text AS t2 ON a2.ar_text_id=t2.old_id
                       WHERE a2.ar_rev_id=a.ar_parent_id)))
ORDER BY rev_id ASC"""
        # retrieve deletion log events
        deltimequery = """\
SELECT l.log_timestamp, u.user_name, u.user_email, l.log_comment, l.log_action
FROM logging AS l, user AS u
WHERE log_type = "delete" AND log_namespace=%s AND log_title=%s
      AND l.log_user=u.user_id
ORDER BY log_timestamp DESC"""
        deletionfiles = set()
        deletionqueue = []
        deletiontimes = []
        cursor.execute(revq)
        for row in cursor:
            rev = dict(zip(('id', 'nsid', 'title', 'text', 'comment',
                            'tstamp', 'username', 'email', 'type'), row))
            rev['ns'] = namespaces[str(rev['nsid'])].replace(u' ', u'_')
            rev = processrevision(rev)
            # check if a deletion should be executed before this revision
            while deletiontimes and (rev['datetime'] > deletiontimes[0]):
                delitem = deletionqueue.pop(0)
                deldatetime = deletiontimes.pop(0)
                logging.info(u'running deletion on %s', delitem['pfpath'])
                git.rm(delitem['pfpath'])
                git.commit(delitem['pfpath'], author=delitem['author'],
                           date=delitem['authordate'], m=delitem['comment'])
            addgitcommit(rev)
            if (rev['type'] == 'archive'):
                if rev['pfpath'] not in deletionfiles:
                    # look up a deletion log event for this archived page
                    cursor.execute(deltimequery,
                                   (int(rev['nsid']), rev['title']))
                    (deltstamp, username, email,
                     comment, action) = cursor.fetchone()
                    if action == 'delete':
                        deldatetime = datetime.strptime(deltstamp,
                                                        '%Y%m%d%H%M%S')
                        author = username + u' <' + email + u'>'
                        authordate = deldatetime.isoformat() + 'Z'
                        delcomment = comment.decode('utf-8')
                        # keep the deletion queue sorted by timestamp
                        del_idx = bisect(deletiontimes, deldatetime)
                        deletiontimes.insert(del_idx, deldatetime)
                        deletionqueue.insert(del_idx,
                                             {'datetime': deldatetime,
                                              'author': author,
                                              'authordate': authordate,
                                              'comment': delcomment,
                                              'pfpath': rev['pfpath']})
                        deletionfiles.add(rev['pfpath'])
        # process outstanding deletions
        while deletiontimes:
            delitem = deletionqueue.pop(0)
            deldatetime = deletiontimes.pop(0)
            git.rm(delitem['pfpath'])
            git.commit(delitem['pfpath'], author=delitem['author'],
                       date=delitem['authordate'], m=delitem['comment'])
    else:
        # incremental update: start one second after the last commit
        tstampstr = check_output(['git', 'log', '-n', '1',
                                  '--format=%at']).strip()
        logging.info('Time stamp is %s', tstampstr)
        dtime = datetime.utcfromtimestamp(int(tstampstr))
        logging.info('Repository last log entry time: %s',
                     dtime.isoformat() + 'Z')
        dtime += timedelta(seconds=1)
        logging.info('Searching for log entries from time: %s',
                     dtime.strftime('%Y%m%d%H%M%S'))
        # update the existing repository using the recentchanges API
        result = site.api('query', list='recentchanges', rcdir='newer',
                          rcstart=dtime.strftime('%Y%m%d%H%M%S'),
                          rcprop='loginfo|ids|userid|comment|title|timestamp')
        recentchanges = result['query']['recentchanges']
        while 'query-continue' in result:
            continueval = result['query-continue']['recentchanges']['rccontinue']
            result = site.api('query', list='recentchanges', rcdir='newer',
                              rcstart=dtime.strftime('%Y%m%d%H%M%S'),
                              rcprop='loginfo|ids|userid|comment|title|timestamp',
                              rccontinue=continueval)
            recentchanges.extend(result['query']['recentchanges'])
        revq = """\
SELECT r.rev_id, p.page_namespace, p.page_title, t.old_text,
       r.rev_comment, r.rev_timestamp, u.user_name, u.user_email, 'revision'
FROM revision AS r
INNER JOIN page AS p ON r.rev_page = p.page_id
INNER JOIN text AS t ON r.rev_text_id = t.old_id
INNER JOIN user AS u ON r.rev_user=u.user_id
WHERE r.rev_id=%s"""
        userq = """\
SELECT u.user_name, u.user_email
FROM user as u
WHERE u.user_id=%s
"""
        pageq = """\
SELECT r.rev_id, p.page_namespace, p.page_title, t.old_text,
       r.rev_comment, r.rev_timestamp, u.user_name, u.user_email, 'revision'
FROM revision AS r
INNER JOIN page AS p ON r.rev_id = p.page_latest
INNER JOIN text AS t ON r.rev_text_id = t.old_id
INNER JOIN user AS u ON r.rev_user=u.user_id
WHERE p.page_id=%s"""
        for rc in recentchanges:
            logging.info(u'processing change: %s', pprint.pformat(rc))
            if rc['type'] in ('new', 'edit'):
                cursor.execute(revq, (int(rc['revid']),))
                row = cursor.fetchone()
                rev = dict(zip(('id', 'nsid', 'title', 'text', 'comment',
                                'tstamp', 'username', 'email', 'type'), row))
                rev['ns'] = namespaces[str(rev['nsid'])].replace(u' ', u'_')
                if rev['ns'] in rdfnamespaces:
                    returncode = 1
                rev = processrevision(rev)
                addgitcommit(rev)
            elif rc['type'] == 'log':
                ns = namespaces[str(rc['ns'])].replace(u' ', u'_')
                pfname = rc['title'].replace(u'/', u'___')
                pfpath = ns + u'/' + pfname
                revdatetime = datetime.strptime(rc['timestamp'],
                                                '%Y-%m-%dT%H:%M:%SZ')
                authordate = revdatetime.isoformat() + 'Z'
                cursor.execute(userq, (int(rc['userid']),))
                (user_name, user_email) = cursor.fetchone()
                author = user_name + u' <' + user_email + u'>'
                gitcomment = rc['comment'].decode('utf-8')
                # these events can be delete, restore, or move
                if rc['logaction'] in ['move', 'delete', 'restore']:
                    if ns in rdfnamespaces:
                        returncode = 1
                if rc['logaction'] == 'move':
                    if os.path.exists(pfpath):
                        newns = namespaces[str(rc['move']['new_ns'])].replace(u' ', u'_')
                        if newns in rdfnamespaces:
                            returncode = 1
                        newpfname = rc['move']['new_title'].replace(u'/', u'___')
                        newpfpath = newns + u'/' + newpfname
                        git.mv(pfpath, newpfpath)
                        mtime = calendar.timegm(time.strptime(authordate,
                                                              '%Y-%m-%dT%H:%M:%SZ'))
                        os.utime(newpfpath, (time.time(), mtime))
                        if not gitcomment:
                            gitcomment = u'%s renamed to %s' % (pfname, newpfname)
                        git.commit(newpfpath, author=author,
                                   date=authordate, m=gitcomment)
                elif rc['logaction'] == 'delete':
                    if os.path.exists(pfpath):
                        git.rm(pfpath)
                        if not gitcomment:
                            gitcomment = u'%s deleted' % (pfname)
                        # FIX: the original committed `newpfpath`, which is
                        # only bound in the move branch (NameError / wrong
                        # path); the deleted file's path is pfpath.
                        git.commit(pfpath, author=author,
                                   date=authordate, m=gitcomment)
                elif rc['logaction'] == 'restore':
                    cursor.execute(pageq, (int(rc['pageid']),))
                    row = cursor.fetchone()
                    rev = dict(zip(('id', 'nsid', 'title', 'text', 'comment',
                                    'tstamp', 'username', 'email', 'type'),
                                   row))
                    rev['ns'] = namespaces[str(rev['nsid'])].replace(u' ', u'_')
                    if rev['ns'] in rdfnamespaces:
                        returncode = 1
                    rev = processrevision(rev)
                    rev['gitcomment'] = gitcomment
                    rev['author'] = author
                    rev['authordate'] = authordate
                    addgitcommit(rev)
    return returncode
# Reconcile the repository's git submodules with the `repos` list:
# remove submodules no longer present in `repos`, add the missing ones,
# then initialize/update everything and fetch tags from all remotes.
existing_submodules = GetExistingSubmodules()
repos_submodules = [repo['name'] for repo in repos]
# Submodules wanted but not yet present.
submodules_to_add = \
    [s for s in repos_submodules if s not in existing_submodules]
# Submodules present but no longer wanted.
submodules_to_remove = \
    [s for s in existing_submodules if s not in repos_submodules]
print "To add: ", submodules_to_add
print "To remove: ", submodules_to_remove
for s in submodules_to_remove:
    print 'Removing ' + s + '...'
    # Deinit first so the working tree entry is released, then drop it.
    git.submodule.deinit('--force', s)
    git.rm(s)
for repo in repos:
    if repo['name'] not in submodules_to_add:
        continue
    print 'Adding ' + repo['full_name'] + '...'
    print repo['clone_url']
    print repo['name']
    git.submodule.add('--force', repo['clone_url'], repo['name'])
    print 'Updating ' + repo['full_name'] + '...'
    git.submodule.update('--recursive', '--init')
# Refresh the list after the adds/removes above.
existing_submodules = GetExistingSubmodules()
print 'Fetching remotes...'
git.submodule.foreach('git', 'fetch', '--tags')