def get_yaml_from_mercurial(vcs_address, vcs_subdir):
    from mercurial import ui, commands
    from urllib2 import HTTPError
    import hglib
    vtemp = mkdtemp(prefix='multipkg-vcs-')
    try:
        commands.clone(ui.ui(), str(vcs_address), dest=vtemp)
        client = hglib.open(vtemp)
        # get index.yaml
        path_to_yaml = path_join(vtemp, vcs_subdir, 'index.yaml')
        yaml = yaml_load(file(path_to_yaml).read())
        recent_changes = []
        for entry in client.log('tip:tip^^'):
            num, rev, none, branch, author, msg, date = entry
            date = date.strftime('%Y-%m-%d %H:%M:%S')
            recent_changes.append("commit %s | Author: %s | Date: %s \n%s\n"
                                  % (rev, author, date, msg))
        yaml['.'] = dict(recent_changes="\n".join(recent_changes))
        return yaml
    except HTTPError:
        raise RemotePackageNotFoundError(vcs_address)
    except IOError as e:
        if e.errno == errno.ENOENT and e.filename.find('.yaml') > -1:
            raise IndexNotFoundError('index.yaml not found in your repository')
        raise
    except:
        raise
    finally:
        if isdir(vtemp):
            rmtree(vtemp)
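# The function above mixes the internal mercurial.commands API with hglib for
# the same clone; below is a minimal hglib-only sketch of the clone-and-read
# step (the helper name and relative-path argument are illustrative, not from
# the original code).
def clone_and_read_file(vcs_address, relpath):
    import hglib
    from os.path import join
    from shutil import rmtree
    from tempfile import mkdtemp
    vtemp = mkdtemp(prefix='multipkg-vcs-')
    try:
        # hglib.clone shells out to `hg clone`, avoiding mercurial internals
        hglib.clone(source=str(vcs_address), dest=vtemp)
        with open(join(vtemp, relpath)) as fh:
            return fh.read()
    finally:
        rmtree(vtemp)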
def execute(self, job, previous_state):
    p = job.publish
    # Write JSON output file
    json_out = {}
    json_out["name"] = p.table_name
    json_out["job_id"] = job.id
    json_out["job_batch_id"] = job.batch_id
    json_out["schema"] = p.workspace.publish_schema
    json_out["data_schema"] = p.workspace.publish_data_schema
    json_out["outdated_schema"] = p.workspace.publish_outdated_schema
    json_out["workspace"] = p.workspace.name
    json_out["channel"] = p.workspace.publish_channel.name
    json_out["spatial_data"] = SpatialTable.check_spatial(job.publish.spatial_type)
    json_out["spatial_type"] = SpatialTable.get_spatial_type_desc(job.publish.spatial_type)
    json_out["sync_postgres_data"] = p.workspace.publish_channel.sync_postgres_data
    json_out["sync_geoserver_data"] = p.workspace.publish_channel.sync_geoserver_data
    json_out["dump_path"] = "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, p.pgdump_file.path)
    json_out["data_md5"] = file_md5(p.pgdump_file.path)
    json_out["preview_path"] = "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, settings.PREVIEW_ROOT)
    json_out["applications"] = ["{0}:{1}".format(o.application, o.order) for o in Application_Layers.objects.filter(publish=p)]
    json_out["title"] = p.title
    json_out["abstract"] = p.abstract
    json_out["allow_authenticated"] = p.workspace.allow_authenticated
    if p.geoserver_setting:
        json_out["geoserver_setting"] = json.loads(p.geoserver_setting)
    if p.workspace.publish_channel.sync_geoserver_data and p.style_file:
        json_out["style_path"] = "{}{}".format(BorgConfiguration.MASTER_PATH_PREFIX, p.style_file.path)
        json_out["style_md5"] = file_md5(p.style_file.path)

    # bbox
    if SpatialTable.check_spatial(job.publish.spatial_type):
        cursor = connection.cursor()
        st = SpatialTable.get_instance(cursor, p.workspace.schema, p.table_name, True)
        if st.geometry_columns:
            json_out["bbox"] = st.geometry_columns[0][2]
        elif st.geography_columns:
            json_out["bbox"] = st.geography_columns[0][2]

    # create the dir if required
    if not os.path.exists(os.path.dirname(p.output_filename_abs)):
        os.makedirs(os.path.dirname(p.output_filename_abs))

    with open(p.output_filename_abs, "wb") as output:
        json.dump(json_out, output, indent=4)

    # Try to add the file to the repository; if there are no changes, continue
    hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
    try:
        hg.add(files=[p.output_filename_abs])
        hg.commit(include=[p.output_filename_abs], addremove=True,
                  user=BorgConfiguration.BORG_STATE_USER,
                  message="{} - updated {}.{}".format(p.job_batch_id, p.workspace.name, p.name))
    except hglib.error.CommandError as e:
        if e.out != "nothing changed\n":
            return (HarvestStateOutcome.failed, self.get_exception_message())
    finally:
        hg.close()
    return (HarvestStateOutcome.succeed, None)
def __init__(self, url, mirror_path):
    # TODO: shared repositories in Mercurial are only possible
    # through an extension, and it's not clear how to use those in
    # this context. So here, we always make full clones for
    # each of the environments.
    self._path = os.path.abspath(mirror_path)
    self._pulled = False
    if hglib is None:
        raise ImportError("hglib")
    if self.is_local_repo(url):
        # Local repository, no need for mirror
        self._path = os.path.abspath(url)
        self._pulled = True
    elif not self.is_local_repo(self._path):
        if os.path.exists(self._path):
            self._raise_bad_mirror_error(self._path)
        # Clone is missing
        log.info("Cloning project")
        if url.startswith("hg+"):
            url = url[3:]
        # Mercurial branches are global, so there is no need for
        # an analog of git --mirror
        hglib.clone(url, dest=self._path, noupdate=True)
    self._repo = hglib.open(self._path)
def unpublish(self):
    """
    Remove the store's json reference (if it exists) from the repository.
    Return True if the store was removed from the repository; return False
    if it was not present in the repository.
    """
    json_files = [self.json_filename_abs(action) for action in ['publish']]
    # keep only the files that actually exist
    json_files = [f for f in json_files if os.path.exists(f)]
    if json_files:
        # file exists, the store is published, remove it
        try_set_push_owner("liveserver")
        hg = None
        try:
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.remove(files=json_files)
            hg.commit(include=json_files, addremove=True, user="******",
                      message="Remove live store {}.{}".format(self.workspace.name, self.name))
            increase_committed_changes()
            try_push_to_repository("liveserver", hg)
        finally:
            if hg:
                hg.close()
            try_clear_push_owner("liveserver")
        return True
    else:
        return False
def empty_gwc(self):
    """
    Update the layer group's "empty gwc" json in the repository.
    """
    if self.status not in [ResourceStatus.PUBLISHED, ResourceStatus.UPDATED]:
        # layer is not published, no need to empty gwc
        return
    json_filename = self.json_filename_abs
    try_set_push_owner("layergroup")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.name
        json_out["workspace"] = self.workspace.name
        json_out["action"] = "empty_gwc"
        json_out["empty_time"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f")
        if self.geoserver_setting:
            json_out["geoserver_setting"] = json.loads(self.geoserver_setting)
        # create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))
        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)
        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Empty GWC of layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()
        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
def unpublish(self):
    """
    Remove the layer's json references (if they exist) from the repository.
    Return True if the layer was removed from the repository; return False
    if it was not present in the repository.
    """
    # remove it from the catalogue service
    res = requests.delete(
        "{}/catalogue/api/records/{}:{}/".format(settings.CSW_URL, self.datasource.workspace.name, self.kmi_name),
        auth=(settings.CSW_USER, settings.CSW_PASSWORD))
    if res.status_code != 404:
        res.raise_for_status()

    json_files = [self.json_filename_abs(action) for action in ['publish', 'empty_gwc']]
    # keep only the files that actually exist
    json_files = [f for f in json_files if os.path.exists(f)]
    if json_files:
        # files exist, the layer is published, remove them
        try_set_push_owner("livelayer")
        hg = None
        try:
            hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
            hg.remove(files=json_files)
            hg.commit(include=json_files, addremove=True, user="******",
                      message="Remove live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
            increase_committed_changes()
            try_push_to_repository("livelayer", hg)
        finally:
            if hg:
                hg.close()
            try_clear_push_owner("livelayer")
        return True
    else:
        return False
def get_revision_hglib():
    # The following only works if python-hglib is installed.
    import hglib
    hgclient = hglib.open(getHgRoot())
    tip = hgclient.tip()
    branch = hgclient.branch()
    return (tip.rev.replace('+', ''), tip.node[:12], branch)
def get_hlib_client_and_path():
    try:
        client = hglib.open()
        repopath = client.root()
        return client, repopath
    except Exception:
        raise NoVCSError(ERR_MSG)
def empty_gwc(self):
    """
    Update the layer's "empty gwc" json in the repository.
    """
    if self.publish_status.unpublished:
        # layer is not published, no need to empty gwc
        raise ValidationError("The wms layer({0}) has not been published yet.".format(self.kmi_name))

    json_filename = self.json_filename_abs('empty_gwc')
    try_set_push_owner("livelayer")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.kmi_name
        json_out["workspace"] = self.datasource.workspace.name
        json_out["store"] = self.datasource.name
        json_out["action"] = "empty_gwc"
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")
        # create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))
        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)
        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Empty GWC of live layer {}.{}".format(self.datasource.workspace.name, self.kmi_name))
        increase_committed_changes()
        try_push_to_repository("livelayer", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("livelayer")
def get_revision_hglib():
    # The following only works if python-hglib is installed.
    import hglib
    hgclient = hglib.open(getHgRoot())
    parent = hgclient.parents()[0]
    branch = hgclient.branch()
    return (parent.rev.replace('+', ''), parent.node[:12], branch)
def handle(self, **options):
    self.verbosity = options.pop('verbosity', 1)
    self.handleOptions(**options)
    import hglib
    repos = self.repos_for_names(*options['repos'])
    for dbrepo in repos:
        repopath = str(resolve(dbrepo.name))
        if not os.path.isdir(os.path.join(repopath, '.hg')):
            self.minimal(("\n  Cannot process %s, "
                          "there's no local clone\n\n") % dbrepo.name)
            continue
        hgrepo = hglib.open(repopath)
        try:
            self.handleRepo(dbrepo, hgrepo)
        except StopIteration:
            # allow subclass to stop our loop over repositories
            break
        except Exception:
            self.stdout.write('')
            logging.error('%s\tError while processing' % dbrepo.name,
                          exc_info=True)
            self._needsNewline = False
    if self._needsNewline:
        self.stdout.write('')
def _hg_log(repo_path, max_depth, since_date):
    repo = hglib.open(repo_path)
    commits = repo.log()
    since_date = date_parser(since_date)
    since_date = time.mktime(since_date.timetuple())
    ret = []
    for i, commit in enumerate(commits):
        (rev, node, tags, branch, author, desc, date) = commit
        # convert the commit time to a Unix timestamp
        date = int(time.mktime(date.timetuple()))
        if i > max_depth:
            break
        if date < since_date:
            break
        ret.append({
            'hash': node,
            'author': author,
            'time': date,
            'commit': desc,
        })
    return ret
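# A hypothetical driver for _hg_log above; the repository path and cutoff date
# are placeholders, and date_parser is assumed to behave like
# dateutil.parser.parse.
if __name__ == '__main__':
    for change in _hg_log('/path/to/repo', max_depth=50,
                          since_date='2019-01-01'):
        print(change['hash'], change['author'], change['commit'])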
def hg_open_or_clone_repo(hg_repo):
    """Open a local repo, cloning first when a non-local path is given."""
    with hg_tmp_clone(hg_repo) as hg_repo_path:
        try:
            yield hglib.open(hg_repo_path)
        except hglib.error.ServerError as e:
            error(u'hg: {} could not open {}'.format(e, hg_repo))
def test_old_revision(self):
    c = hglib.open(self.tmpdir)
    with open(self.tmppath('foo'), 'wb') as fh:
        fh.write('foo initial')
    c.add(self.tmppath('foo'))
    c.commit('initial')

    with open(self.tmppath('foo'), 'wb') as fh:
        fh.write('foo second')
    with open(self.tmppath('bar'), 'wb') as fh:
        fh.write('bar second')
    c.add(self.tmppath('bar'))
    c.commit('second')
    # This wipes out the working directory, ensuring the finder isn't
    # finding anything from the filesystem.
    c.rawcommand(['update', 'null'])

    finder = self._get_finder(self.tmpdir, 0)
    f = finder.get('foo')
    self.assertEqual(f.read(), 'foo initial')
    self.assertEqual(f.read(), 'foo initial', 'read again for good measure')
    self.assertIsNone(finder.get('bar'))

    finder = MercurialRevisionFinder(self.tmpdir, rev=1)
    f = finder.get('foo')
    self.assertEqual(f.read(), 'foo second')
    f = finder.get('bar')
    self.assertEqual(f.read(), 'bar second')
def update_hg(path, skip_rebuild=False):
    try:
        import hglib
    except ImportError:
        print("Updating requires python-hglib to be installed.")
        print("Try: pip install python-hglib")
        return -1
    f = open(os.path.join(path, "yt_updater.log"), "a")
    repo = hglib.open(path)
    repo.pull()
    ident = repo.identify().decode("utf-8")
    if "+" in ident:
        print("Can't rebuild modules by myself.")
        print("You will have to do this yourself. Here are some sample commands:")
        print("")
        print("    $ cd %s" % (path))
        print("    $ hg up")
        print("    $ %s setup.py develop" % (sys.executable))
        return 1
    print("Updating the repository")
    f.write("Updating the repository\n\n")
    repo.update(check=True)
    f.write("Updated from %s to %s\n\n" % (ident, repo.identify()))
    if skip_rebuild:
        return
    f.write("Rebuilding modules\n\n")
    p = subprocess.Popen([sys.executable, "setup.py", "build_ext", "-i"],
                         cwd=path, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    stdout, stderr = p.communicate()
    f.write(stdout.decode('utf-8'))
    f.write("\n\n")
    if p.returncode:
        print("BROKEN: See %s" % (os.path.join(path, "yt_updater.log")))
        sys.exit(1)
    f.write("Successful!\n")
    print("Updated successfully.")
def publish(self):
    """
    Publish the store's json reference to the repository.
    """
    json_filename = self.json_filename_abs
    try_set_push_owner("wmsserver")
    hg = None
    try:
        json_out = {}
        json_out["name"] = self.name
        json_out["capability_url"] = self.get_capability_url
        json_out["username"] = self.user or ""
        json_out["password"] = self.password or ""
        json_out["workspace"] = self.workspace.name
        json_out["publish_time"] = timezone.now().strftime("%Y-%m-%d %H:%M:%S.%f")
        if self.geoserver_setting:
            json_out["geoserver_setting"] = json.loads(self.geoserver_setting)
        # create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))
        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)
        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
        hg.commit(include=[json_filename], addremove=True, user="******",
                  message="Update wms store {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()
        try_push_to_repository("wmsserver", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("wmsserver")
def RepoMock(tmpdir):
    '''Mock a local mercurial repo'''
    # Init empty repo
    repo_dir = str(tmpdir.realpath())
    hglib.init(repo_dir)

    # Add default pull in Mercurial config
    hgrc = tmpdir.join('.hg').join('hgrc')
    hgrc.write('[paths]\ndefault = {}'.format(repo_dir))

    # Open repo with config
    repo = hglib.open(repo_dir)

    # Commit a file on central
    readme = tmpdir.join('README.md')
    readme.write('Hello World')
    repo.add(str(readme.realpath()).encode('utf-8'))
    repo.branch(name=b'central', force=True)
    repo.commit(message=b'Readme', user='******')

    # Mock push to avoid reaching try server
    repo.push = MagicMock(return_value=True)

    return repo
def checkout_existing():
    with hglib.open(self._encode_filename(path)) as subrepo:
        subrepo.pull()
        subrepo.update(self._encode(commit_hash), clean=True)
        subrepo.rawcommand([b"--config", b"extensions.purge=",
                            b"purge", b"--all"])
def __init__(self, repo_root, path):
    self.repo_root = repo_root
    self.path = path
    self.repo = hglib.open(repo_root)
    self.num_of_revs = len(self.repo.log())
    if path != repo_root:
        self.prefix = path.replace(repo_root + '/', '')
def GetGeckoRevision(geckoDir):
    print "\nGetting Gecko Revision from: " + str(geckoDir)
    client = hglib.open(geckoDir)
    tip = client.tip()
    commit = str(tip.rev) + ":" + str(tip.node)
    print "Basing off Gecko master commit: " + str(commit)
    return commit
def push_hg(store, path):
    """Pushes the local documents via hg."""
    storedir, _ = os.path.split(path)
    client = hglib.open(storedir)
    # nothing to do when the working copy is clean
    if len(client.status(modified=True, unknown=True, added=True)) == 0:
        return
    client.commit(message='regolith auto-commit', addremove=True)
    client.push()
def test_default_revision(self):
    self.prepare_match_test()
    c = hglib.open(self.tmpdir)
    c.commit('initial commit')
    self.finder = self._get_finder(self.tmpdir)
    self.do_match_test()

    self.assertIsNone(self.finder.get('does-not-exist'))
    self.assertIsInstance(self.finder.get('bar'), MercurialFile)
def tmp_hg(url):
    from django.conf import settings
    tmp_repo_path = os.path.join(settings.POOTLE_FS_PATH, "__tmp_hg_src__")
    if os.path.exists(tmp_repo_path):
        shutil.rmtree(tmp_repo_path)
    hglib.clone(url, tmp_repo_path)
    yield tmp_repo_path, hglib.open(tmp_repo_path)
    shutil.rmtree(tmp_repo_path)
def _client(self):
    configs = (
        'ui.username="******"',
    )
    client = hglib.open(self.tmpdir, encoding='UTF-8', configs=configs)
    self._clients.append(client)
    return client
def transplant(tree, destination, rev, trysyntax=None, push_bookmark=False):
    """Transplant a specified revision and ancestors to the specified tree.

    If ``trysyntax`` is specified, a Try commit will be created using the
    syntax specified.
    """
    with hglib.open(get_repo_path(tree)) as client:
        return _transplant(client, tree, destination, rev,
                           trysyntax=trysyntax, push_bookmark=push_bookmark)
def __init__(self, root):
    super(Mercurial, self).__init__(root)
    hgext = resource_filename('dxr', 'hgext/previous_revisions.py')
    with hglib.open(root,
                    configs=['extensions.previous_revisions=%s' % hgext]) as client:
        tip = client.tip()
        self.revision = tip.node
        self.previous_revisions = self.find_previous_revisions(client)
    self.upstream = self._construct_upstream_url()
def compare(sourceBranch="default", targetBranch="stable"):
    excludeFile = None
    excludes = []
    if os.path.isfile('graft_exclude.conf'):
        excludeFile = 'graft_exclude.conf'
    elif os.path.isfile('buildscripts/graft_exclude.conf'):
        excludeFile = 'buildscripts/graft_exclude.conf'
    if excludeFile:
        for L in open(excludeFile, 'r'):
            L = L.strip()
            if L:
                if L.startswith('#'):
                    continue
                Ls = L.split()
                if len(Ls[0]) > 12:
                    Ls[0] = Ls[0][:12]
                excludes.append(Ls[0])
        print 'Loaded %s exclusions from %s\n' % (len(excludes), excludeFile)

    try:
        # Try if cwd is the buildscripts/ folder
        c = hglib.open('../')
    except:
        # Try whether we are in the hg root folder
        c = hglib.open('.')

    # Difference in changesets between branches
    cDiff = c.log("ancestors(%s) and not ancestors(%s)"
                  % (sourceBranch, targetBranch))

    # Filter out already grafted commits
    stdOut = c.rawcommand(cmdbuilder('log', debug=True, b=targetBranch))
    grafted = []
    r = re.compile('.*source=([a-zA-Z0-9]*)')
    for outL in stdOut.split('\n'):
        if outL.strip().startswith('extra') and ' source=' in outL:
            sourceRev = r.match(outL).groups()[0]
            grafted.append(sourceRev)

    # Filtered result
    # Also filter out merge commits (which are skipped by graft anyway)
    return [cs for cs in cDiff if (cs.node not in grafted and
                                   cs.node[:12] not in excludes and
                                   not isMergeCommit(c, cs))]
def publish(self):
    """
    Only publish the member layers which are already published.
    """
    json_filename = self.json_filename_abs('publish')
    try_set_push_owner("layergroup")
    hg = None
    try:
        json_out = self.update_catalogue_service(
            extra_datas={"publication_date": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")})
        layers = []
        for group_layer in LayerGroupLayers.objects.filter(group=self).order_by("order"):
            if group_layer.layer and group_layer.layer.is_published:
                layers.append({"type": "wms_layer",
                               "name": group_layer.layer.name,
                               "store": group_layer.layer.server.name,
                               "workspace": group_layer.layer.server.workspace.name})
            elif group_layer.publish and group_layer.publish.is_published:
                layers.append({"type": "publish",
                               "name": group_layer.publish.name,
                               "workspace": group_layer.publish.workspace.name})
            elif group_layer.sub_group and group_layer.sub_group.is_published:
                layers.append({"type": "group",
                               "name": group_layer.sub_group.name,
                               "workspace": group_layer.sub_group.workspace.name})
        if not layers:
            # layer group is empty, refuse to publish it
            raise LayerGroupEmpty("Layer group can't be empty.")
        json_out["layers"] = layers
        json_out["srs"] = self.srs or None
        json_out["publish_time"] = timezone.localtime(timezone.now()).strftime("%Y-%m-%d %H:%M:%S.%f")

        inclusions = self.get_inclusions()
        dependent_groups = []
        for group in inclusions[2].keys():
            if group.is_published:
                dependent_groups.append({"name": group.name,
                                         "workspace": group.workspace.name})
        json_out["dependent_groups"] = dependent_groups

        # create the dir if required
        if not os.path.exists(os.path.dirname(json_filename)):
            os.makedirs(os.path.dirname(json_filename))

        with open(json_filename, "wb") as output:
            json.dump(json_out, output, indent=4)

        hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)

        # remove other related json files
        json_files = [self.json_filename_abs(action) for action in ['empty_gwc']]
        # keep only the files that actually exist
        json_files = [f for f in json_files if os.path.exists(f)]
        if json_files:
            hg.remove(files=json_files)

        json_files.append(json_filename)
        hg.commit(include=json_files, user="******", addremove=True,
                  message="Update layer group {}.{}".format(self.workspace.name, self.name))
        increase_committed_changes()
        try_push_to_repository("layergroup", hg)
    finally:
        if hg:
            hg.close()
        try_clear_push_owner("layergroup")
def __init__(self, hgbasedir, remoteUrl="", branchname=None, cleandir=None):
    """
    @param hgbasedir: base dir where the local hg repository will be stored
    @param remoteUrl: remote url of the hg repository; can be empty if a local repo is created
    @param branchname: "" means the tip, None means the branchname will be fetched from the basedir
    @param cleandir: if True, files in that directory will be deleted before cloning
        (if it wasn't a mercurial repo); if False, an exception will be raised if the
        directory has files; if None, the user will be asked interactively.
    """
    self.remoteUrl = remoteUrl.strip()
    self.basedir = hgbasedir
    self.branchname = branchname
    if not self.branchname:
        self.branchname = 'default'
    if remoteUrl != "":
        self._log("mercurial remoteurl:%s" % (remoteUrl), category="config")
    if (not isinstance(hgbasedir, basestring) or not isinstance(remoteUrl, basestring)) \
            or (branchname and not isinstance(branchname, basestring)):
        raise ValueError("Input to hgclient needs to be all strings")
    if not self.isInitialized() and not self.remoteUrl:
        raise RuntimeError(".hg not found and remote url is not supplied")
    if j.system.fs.exists(self.basedir) and not self.isInitialized():
        if len(j.system.fs.listFilesInDir(self.basedir, recursive=True)) == 0:
            self._clone()
        else:
            # did not find the mercurial dir
            if j.application.interactive:
                if cleandir is None and self.remoteUrl != "":
                    cleandir = j.gui.dialog.askYesNo(
                        "\nDid find a directory but there was no mercurial metadata inside."
                        "\n\tdir: %s\n\turl:%s\n\tIs it ok to remove all files from the "
                        "target destination before cloning the repository?"
                        % (self.basedir, self.remoteUrl))
            if cleandir:
                j.system.fs.removeDirTree(self.basedir)
                j.system.fs.createDir(self.basedir)
                self._clone()
            else:
                self._raise("Could not clone %s to %s, target dir was not empty"
                            % (self.remoteUrl, self.basedir))
    elif not j.system.fs.exists(self.basedir):
        j.system.fs.createDir(self.basedir)
        self._clone()
    else:
        self.client = hglib.open(self.basedir, configs=self._configs)
    self.remoteUrl = self.getUrl()
    currentbranchname = self.getbranchname()
    if branchname and branchname != currentbranchname:
        self.switchbranch(branchname)
        currentbranchname = branchname
    self.branchname = currentbranchname
    self.reponame, self.repokey = self._getRepoNameAndKey()
def handle(self, **options):
    update = options.get('update', False)
    all = options.get('all', False)
    pull_args = {}
    if update:
        pull_args['update'] = True
    from life.models import Repository, Changeset
    import hglib
    import os.path
    from django.conf import settings

    def resolve(path):
        return os.path.join(settings.REPOSITORY_BASE, *path.split('/'))

    # check for the last-push helper file
    if not all and os.path.isfile(resolve('.latest_cs')):
        latest_cs = int(open(resolve('.latest_cs')).read())
        repos = (Repository.objects
                 .filter(changesets__id__gt=latest_cs)
                 .distinct())
    else:
        repos = Repository.objects.all()
    latest_cs = Changeset.objects.order_by('-pk')[0].id
    for repo in repos:
        repopath = str(repo.local_path())
        self.stdout.write(repo.name + '\n')
        if not os.path.isdir(os.path.join(repopath, '.hg')):
            # new repo, need to clone
            if os.path.isdir(repopath):
                self.stdout.write(("\n\nCannot clone %s, "
                                   "existing directory in the way\n\n") % repo.name)
                continue
            _parent = os.path.dirname(repopath)
            if not os.path.isdir(_parent):
                try:
                    os.makedirs(_parent)
                except Exception as e:
                    self.stdout.write(
                        "\n\nFailed to prepare for clone, %s\n\n" % str(e))
                    continue
            try:
                hglib.clone(str(repo.url), repopath, noupdate=not update)
            except hglib.error.CommandError as e:
                self.stdout.write('Clone problems, %s' % str(e))
        else:
            with hglib.open(repopath) as client:
                try:
                    client.pull(**pull_args)
                except hglib.error.CommandError as e:
                    self.stdout.write('Pull problems, %s' % str(e))
    open(resolve('.latest_cs'), 'w').write('%i\n' % latest_cs)
def checkout(self, branch):
    """
    Robust checkout of the repository, using the configured mercurial client
    with extensions.
    """
    assert isinstance(branch, bytes)

    # Build command line
    repo_dir = os.path.join(self.directory, 'repo')
    shared_dir = os.path.join(self.directory, 'shared')
    logger.info('Updating repo', dir=repo_dir, branch=branch)
    cmd = hglib.util.cmdbuilder('robustcheckout',
                                self.url,
                                repo_dir,
                                purge=True,
                                sharebase=shared_dir,
                                branch=branch)
    cmd.insert(0, hglib.HGPATH)

    # Run command
    proc = hglib.util.popen(cmd)
    out, err = proc.communicate()
    if proc.returncode:
        raise hglib.error.CommandError(cmd, proc.returncode, out, err)

    # Use the new high level mercurial client
    self.client = hglib.open(repo_dir)

    # Setup callback prompt
    def _cb(max_length, data):
        logger.info('Received data from HG', data=data)
        # Use the new file when it exists
        if b'(c)hanged' in data:
            return b'c\n'
        # Send unresolved
        return b'u\n'
    self.client.setcbprompt(_cb)

    # Check that the branch was checked out successfully
    identify = self.client.identify().decode('utf-8')
    parent, _, current_branch = REGEX_TIP.search(identify).groups()
    assert current_branch == branch.decode('utf-8'), \
        'Current branch {} is not expected branch {}'.format(current_branch, branch)  # noqa
    logger.info('Checkout success', branch=branch, tip=parent)
    return parent
def handle(self, **options):
    update = options.get('update', False)
    all = options.get('all', False)
    pull_args = {}
    if update:
        pull_args['update'] = True
    from life.models import Repository, Changeset
    import hglib
    import os.path
    from django.conf import settings

    def resolve(path):
        return os.path.join(settings.REPOSITORY_BASE, *path.split('/'))

    # check for the last-push helper file
    if not all and os.path.isfile(resolve('.latest_cs')):
        latest_cs = int(open(resolve('.latest_cs')).read())
        repos = (Repository.objects.filter(
            changesets__id__gt=latest_cs).distinct())
    else:
        repos = Repository.objects.all()
    latest_cs = Changeset.objects.order_by('-pk')[0].id
    for name, url in repos.values_list('name', 'url'):
        repopath = str(resolve(name))
        self.stdout.write(name + '\n')
        if not os.path.isdir(os.path.join(repopath, '.hg')):
            # new repo, need to clone
            if os.path.isdir(repopath):
                self.stdout.write(("\n\nCannot clone %s, "
                                   "existing directory in the way\n\n") % name)
                continue
            _parent = os.path.dirname(repopath)
            if not os.path.isdir(_parent):
                try:
                    os.makedirs(_parent)
                except Exception as e:
                    self.stdout.write(
                        "\n\nFailed to prepare for clone, %s\n\n" % str(e))
                    continue
            hglib.clone(str(url), repopath)
        else:
            with hglib.open(repopath) as client:
                client.pull(**pull_args)
    open(resolve('.latest_cs'), 'w').write('%i\n' % latest_cs)
def retrieve_commits(self):
    shared_dir = self.repo_dir + "-shared"
    cmd = hglib.util.cmdbuilder(
        "robustcheckout",
        "https://hg.mozilla.org/mozilla-central",
        self.repo_dir,
        purge=True,
        sharebase=shared_dir,
        networkattempts=7,
        branch=b"tip",
    )
    cmd.insert(0, hglib.HGPATH)
    proc = hglib.util.popen(cmd)
    out, err = proc.communicate()
    if proc.returncode:
        raise hglib.error.CommandError(cmd, proc.returncode, out, err)
    logger.info("mozilla-central cloned")

    try:
        os.remove(os.path.join(self.repo_dir, ".hg", "pushlog2.db"))
    except FileNotFoundError:
        logger.info("pushlog database doesn't exist")

    # Pull and update, to make sure the pushlog is generated.
    hg = hglib.open(self.repo_dir)
    hg.pull(update=True)
    hg.close()

    db.download_version(repository.COMMITS_DB)
    if not db.is_old_version(repository.COMMITS_DB):
        db.download(repository.COMMITS_DB, support_files_too=True)
        # iterate to reach the last commit already stored in the DB
        for commit in repository.get_commits():
            pass
        rev_start = f"children({commit['node']})"
    else:
        rev_start = 0

    repository.download_commits(self.repo_dir, rev_start)
    logger.info("commit data extracted from repository")

    self.compress_file("data/commits.json")
    self.compress_file("data/commit_experiences.pickle")
def download_commits(repo_dir, rev_start=0, ret=False, save=True):
    hg = hglib.open(repo_dir)

    revs = get_revs(hg, rev_start)
    if len(revs) == 0:
        print("No commits to analyze")
        return []

    first_pushdate = hg_log(hg, [b"0"])[0].pushdate

    hg.close()

    print(f"Mining {len(revs)} commits using {os.cpu_count()} processes...")
    commits = hg_log_multi(repo_dir, revs)

    print("Downloading file->component mapping...")
    download_component_mapping()

    commits_to_ignore = get_commits_to_ignore(repo_dir, commits)
    print(f"{len(commits_to_ignore)} commits to ignore")

    calculate_experiences(commits, commits_to_ignore, first_pushdate, save)

    # Exclude commits to ignore.
    commits = [commit for commit in commits if commit not in commits_to_ignore]

    commits_num = len(commits)

    print(f"Mining {commits_num} commits using {os.cpu_count()} processes...")

    global rs_parsepatch
    import rs_parsepatch

    with concurrent.futures.ProcessPoolExecutor(
        initializer=_init, initargs=(repo_dir,)
    ) as executor:
        commits = executor.map(_transform, commits, chunksize=64)
        commits = tqdm(commits, total=commits_num)
        if ret:
            commits = list(commits)
        if save:
            db.append(COMMITS_DB, commits)

    if ret:
        return commits
def __init__(self, cache_root, reporters, analyzers):
    assert isinstance(analyzers, list)
    assert len(analyzers) > 0, \
        'No analyzers specified, will not run.'
    self.analyzers = analyzers
    self.cache_root = cache_root
    assert os.path.isdir(self.cache_root), \
        'Cache root {} is not a dir.'.format(self.cache_root)
    assert 'MOZCONFIG' in os.environ, \
        'Missing MOZCONFIG in environment'

    # Save Taskcluster ID for logging
    if 'TASK_ID' in os.environ and 'RUN_ID' in os.environ:
        self.taskcluster_task_id = os.environ['TASK_ID']
        self.taskcluster_run_id = os.environ['RUN_ID']
        self.taskcluster_results_dir = '/tmp/results'
    else:
        self.taskcluster_task_id = 'local instance'
        self.taskcluster_run_id = 0
        self.taskcluster_results_dir = tempfile.mkdtemp()
    if not os.path.isdir(self.taskcluster_results_dir):
        os.makedirs(self.taskcluster_results_dir)

    # Load reporters to use
    self.reporters = reporters
    if not self.reporters:
        logger.warn('No reporters configured, this analysis will not be published')

    # Clone mozilla-central
    self.repo_dir = os.path.join(cache_root, 'central')
    shared_dir = os.path.join(cache_root, 'central-shared')
    logger.info('Clone mozilla central', dir=self.repo_dir)
    cmd = hglib.util.cmdbuilder('robustcheckout',
                                REPO_CENTRAL,
                                self.repo_dir,
                                purge=True,
                                sharebase=shared_dir,
                                branch=b'tip')
    cmd.insert(0, hglib.HGPATH)
    proc = hglib.util.popen(cmd)
    out, err = proc.communicate()
    if proc.returncode:
        raise hglib.error.CommandError(cmd, proc.returncode, out, err)

    # Open new hg client
    self.hg = hglib.open(self.repo_dir)
def test_remote(self):
    self.append('a', 'a')
    rev, node = self.client.commit(b('first'), addremove=True)

    self.client.clone(dest=b('other'))
    other = hglib.open('other')

    d = {
        b('parent'): [(0, node[:12], b('tip'), b('first'))],
        b('branch'): b('default'),
        b('commit'): True,
        b('update'): 0,
        b('remote'): (0, 0, 0, 0),
    }
    self.assertEquals(other.summary(remote=True), d)

    self.append('a', 'a')
    self.client.commit(b('second'))

    d[b('remote')] = (1, 0, 0, 0)
    self.assertEquals(other.summary(remote=True), d)

    self.client.bookmark(b('bm'))
    d[b('remote')] = (1, 1, 0, 0)
    self.assertEquals(other.summary(remote=True), d)

    other.bookmark(b('bmother'))
    d[b('remote')] = (1, 1, 0, 1)
    if self.client.version < (2, 0, 0):
        d[b('parent')] = [(0, node[:12], b('tip bmother'), b('first'))]
    else:
        d[b('bookmarks')] = b('*bmother')
    self.assertEquals(other.summary(remote=True), d)

    self.append('other/a', 'a')
    rev, node = other.commit(b('second in other'))
    d[b('remote')] = (1, 1, 1, 1)
    if self.client.version < (2, 0, 0):
        tags = b('tip bmother')
    else:
        tags = b('tip')
    d[b('parent')] = [(1, node[:12], tags, b('second in other'))]
    if self.client.version >= (3, 5):
        d[b('phases')] = b('1 draft')
    self.assertEquals(other.summary(remote=True), d)
def test_backout_removed_file(mock_secrets, fake_hg_repo):
    hg, local, remote = fake_hg_repo

    add_file(hg, local, "file", "1\n2\n3\n4\n5\n6\n7\n")
    revision1 = commit(hg, 1)

    hg.remove(files=[bytes(os.path.join(local, "file"), "ascii")])
    revision2 = commit(hg, 2)

    hg.backout(rev=revision2, message=f"Backout {revision2[:12]}", user="******")
    revision3 = hg.log(limit=1)[0][1].decode("ascii")

    hg.push(dest=bytes(remote, "ascii"))

    copy_pushlog_database(remote, local)

    phabricator = PhabricatorUploader(local, revision3)
    report = covdir_report(
        {"source_files": [{"name": "file", "coverage": [None, 0, 1, 1, 1, 1, 0]}]}
    )
    with hgmo.HGMO(local) as hgmo_server:
        stack = changesets(hgmo_server, revision3)
    with hglib.open(local) as hg:
        results = phabricator.generate(hg, report, stack)

    assert results == {
        revision1: {
            "revision_id": 1,
            "paths": {
                "file": {
                    "coverage": "NUCCCCU",
                    "lines_added": 6,
                    "lines_covered": 4,
                    "lines_unknown": 0,
                }
            },
        },
        revision2: {"revision_id": 2, "paths": {}},
    }
def main():
    if 'BUILD_URL' not in os.environ:
        print('Warning: This script should be called on jenkins only')
        return -1
    if len(sys.argv) > 2:
        print('Unknown parameter: {}'.format(sys.argv))
        return -1

    patch = b(sys.argv[1]) if len(sys.argv) == 2 else b('../patch.diff')
    if not os.path.isfile(patch):
        if patchy is not None:
            print('Try to ask Review Board for patch file')
            patchy.save(patch)
    if not os.path.isfile(patch):
        print('Patch file "{}" does not exist'.format(patch))
        return -1

    cfg = ['extensions.hgext.purge=',
           'extensions.hgext.strip=',
           'phases.new-commit=draft']
    client = hglib.open(configs=cfg)

    print('Revert workspace...')
    client.revert(b('.'), all=True, nobackup=True)

    print('Purge workspace...')
    client.rawcommand([b('purge'), b('--all')])

    revs = len(client.log(revrange='secret() or draft()'))
    print('Found secret/draft changesets: {}'.format(revs))
    if revs > 0:
        print('Strip secret and draft changesets...')
        client.rawcommand([b('strip'), b('-r'), b('secret() or draft()'),
                           b('--no-backup'), b('--force')])

    print('Import patch: {}'.format(patch))
    client.import_([patch], user='******', date='today',
                   message='jenkins patch review')
    return 0
def download_commits(repo_dir, rev_start=0, save=True):
    with hglib.open(repo_dir) as hg:
        revs = get_revs(hg, rev_start)
        if len(revs) == 0:
            print("No commits to analyze")
            return []
        first_pushdate = hg_log(hg, [b"0"])[0].pushdate

    print(f"Mining {len(revs)} commits using {os.cpu_count()} processes...")
    commits = hg_log_multi(repo_dir, revs)

    print("Downloading file->component mapping...")
    download_component_mapping()

    set_commits_to_ignore(repo_dir, commits)

    commits_num = len(commits)

    print(f"Mining {commits_num} commits using {os.cpu_count()} processes...")

    global rs_parsepatch
    import rs_parsepatch

    from bugbug import rust_code_analysis_server

    global code_analysis_server
    code_analysis_server = rust_code_analysis_server.RustCodeAnalysisServer()

    with concurrent.futures.ProcessPoolExecutor(
        initializer=_init, initargs=(repo_dir,)
    ) as executor:
        commits = executor.map(_transform, commits, chunksize=64)
        commits = tqdm(commits, total=commits_num)
        commits = list(commits)

    code_analysis_server.terminate()

    calculate_experiences(commits, first_pushdate, save)

    commits = [commit.to_dict() for commit in commits if not commit.ignored]

    if save:
        db.append(COMMITS_DB, commits)

    return commits
def maybe_push_hg():
    if not hg_push_url:
        return

    with hglib.open(hg_repo_path) as hrepo:
        logger.warn('checking for outgoing changesets to %s' % hg_push_url)
        outgoing = hrepo.outgoing(path=hg_push_url)
        if not outgoing:
            logger.warn('all changesets already in remote; no push necessary')
            return

        # We may want to add force=True and newbranch=True here. But
        # until they are needed, go with the safe defaults.
        out = hrepo.rawcommand([b'push', hg_push_url])
        logger.warn(out)
def get_state():
    with hglib.open(repo_path) as repo:
        tip = repo[b'tip']
        tip_rev = tip.rev()
        tip_node = tip.node()

    hashes = {
        path: hash_path(os.path.join(b'.hg', path))
        for path in hg_paths
    }
    return {
        'tip_rev': tip_rev,
        'tip_node': tip_node,
        'hashes': hashes,
    }
def _hg_repository_sync(repopath, url, do_update=False):
    configpath = os.path.join(repopath, '.hg', 'hgrc')
    if not os.path.isfile(configpath):
        if not os.path.isdir(os.path.dirname(repopath)):
            os.makedirs(os.path.dirname(repopath))
        hgrepo = hglib.clone(source=str(url), dest=str(repopath))
        cfg = open(configpath, 'a')
        cfg.write('default-push = ssh%s\n' % str(url)[4:])
        cfg.close()
        # hglib.clone returns an unconnected client; connect it now
        hgrepo.open()
    else:
        hgrepo = hglib.open(repopath)
        hgrepo.pull(source=str(url))
        if do_update:
            hgrepo.update()
    return hgrepo
def __init__(self, repo_uri):
    '''Initialize a hg repo (or open it if it already exists)'''
    self.repo_uri = repo_uri
    cachedir = os.path.join(__opts__['cachedir'], 'hg_pillar')
    hash_type = getattr(hashlib, __opts__.get('hash_type', 'md5'))
    if six.PY2:
        repo_hash = hash_type(repo_uri).hexdigest()
    else:
        repo_hash = hash_type(
            salt.utils.stringutils.to_bytes(repo_uri)).hexdigest()
    self.working_dir = os.path.join(cachedir, repo_hash)
    if not os.path.isdir(self.working_dir):
        self.repo = hglib.clone(repo_uri, self.working_dir)
        self.repo.open()
    else:
        self.repo = hglib.open(self.working_dir)
def hg_get(self, port):
    if not os.path.exists(port.sources_root() + '/.hg'):
        logging.debug('Cloning Mercurial repository {0} into {1}'.format(
            port.portname, port.sources_root()))
        hglib.clone(port.source.get('hg'), port.sources_root())
    else:
        logging.debug('Using existing repository in {0}'.format(
            port.sources_root()))
    client = hglib.open(port.sources_root())
    version = ask_version(client.tags(), port)
    if version is None:
        raise Exception('No version selected and no default version found')
    logging.debug('Version {0} was selected'.format(version))
    client.update(version.encode())
    logging.debug('Source is now at Version {0}'.format(version))
    port.version = version
async def clone(self):
    # Start by updating the repo in a separate process
    loop = asyncio.get_running_loop()
    with ProcessPoolExecutor() as pool:
        logger.info('Checking out tip in a separate process', repo=self.url)
        await loop.run_in_executor(
            pool,
            batch_checkout,
            self.url,
            self.dir,
            b'tip',
            self.batch_size,
        )
        logger.info('Batch checkout finished')

    # Setup repo in main process
    self.repo = hglib.open(self.dir)
    self.repo.setcbout(lambda msg: logger.info('Mercurial', stdout=msg))
    self.repo.setcberr(lambda msg: logger.info('Mercurial', stderr=msg))
def test_basic(self):
    self.append('a', 'a')
    rev, node = self.client.commit(b('first'), addremove=True)
    self.client.tag(b('my tag'))
    self.client.tag(b('local tag'), rev=rev, local=True)

    # filecache that was introduced in 2.0 makes us see the local tag; for
    # now we have to reconnect
    if self.client.version < (2, 0, 0):
        self.client = hglib.open()

    tags = self.client.tags()
    self.assertEquals(tags,
                      [(b('tip'), 1, self.client.tip().node[:12], False),
                       (b('my tag'), 0, node[:12], False),
                       (b('local tag'), 0, node[:12], True)])
def test_bookmarks(self):
    self.append('a', 'a')
    self.client.commit(b('first'), addremove=True)
    self.append('a', 'a')
    self.client.commit(b('second'))

    self.client.clone(dest=b('other'))
    other = hglib.open(b('other'))

    self.client.bookmark(b('bm1'), 1)

    self.assertEquals(other.incoming(bookmarks=True),
                      [(b('bm1'), self.client.tip().node[:12])])
    self.assertEquals(self.client.outgoing(path=b('other'), bookmarks=True),
                      [(b('bm1'), self.client.tip().node[:12])])
def execute(self, job, previous_state):
    if not job.publish.workspace.publish_channel.sync_geoserver_data:
        # no need to update geoserver
        return (JobStateOutcome.succeed, None)

    workspaces = Workspace.objects.filter(
        publish_channel=job.publish.workspace.publish_channel).order_by('name')

    # Generate the layer access rules through the template
    latest_data = render_to_string("layers.properties",
                                   {"workspaces": workspaces})
    old_data = None
    output_filename = os.path.join(
        BorgConfiguration.BORG_STATE_REPOSITORY,
        job.publish.workspace.publish_channel.name,
        "layers.properties")

    # read the current rules if present, otherwise create the dir if required
    if os.path.exists(output_filename):
        with open(output_filename, "rb") as output_file:
            old_data = output_file.read()
    elif not os.path.exists(os.path.dirname(output_filename)):
        os.makedirs(os.path.dirname(output_filename))

    if old_data and old_data == latest_data:
        # layer access rules have not changed
        return (JobStateOutcome.succeed, None)

    # Write the output layer access rules, then commit + push
    with open(output_filename, "wb") as output:
        output.write(latest_data)

    # Try to commit to the repository; if there are no changes, continue
    hg = hglib.open(BorgConfiguration.BORG_STATE_REPOSITORY)
    try:
        hg.commit(include=[output_filename], addremove=True,
                  user=BorgConfiguration.BORG_STATE_USER,
                  message="{} - layer access rules updated".format(
                      job.publish.job_batch_id))
    except hglib.error.CommandError as e:
        if e.out != "nothing changed\n":
            return (HarvestStateOutcome.failed, self.get_exception_message())
    finally:
        hg.close()
    return (JobStateOutcome.succeed, None)
def test_schedule(
    branch,
    revision,
    result,
    final_log,
    patch_resources,
    mock_hgmo,
    mock_repo,
    mock_component_taskcluster_artifact,
    mock_schedule_tests_classify,
):
    # The repo should only have the base commits
    repo_dir, _ = mock_repo
    repo = hglib.open(str(repo_dir))
    logs = repo.log(follow=True)
    assert len(logs) == 4
    assert [l.desc.decode("utf-8") for l in logs] == [
        "Base history 3",
        "Base history 2",
        "Base history 1",
        "Base history 0",
    ]

    mock_schedule_tests_classify({"test-label1": 0.9}, {"test-group2": 0.9})

    # Schedule tests for the parametrized revision
    assert models.schedule_tests(branch, revision) == result

    # Now check the log has evolved
    assert final_log == [l.desc.decode("utf-8") for l in repo.log(follow=True)]

    if result == "OK":
        # Assert the test selection result is stored in Redis.
        assert json.loads(
            models.redis.get(f"bugbug:job_result:schedule_tests:{branch}_{revision}")
        ) == {
            "tasks": {"test-label1": 0.9},
            "groups": {"test-group2": 0.9},
            "reduced_tasks": {"test-label1": 0.9},
        }
def main(packrat_url, phabricator_api_key, repo_callsign, update_revision, rev):
    """Pack Rat test client."""
    submit_url = packrat_url + '/request-review'

    s = requests.session()
    s.headers['X-API-Key'] = phabricator_api_key

    hg = hglib.open()
    log = hg.log(revrange=rev)
    first = log[0].node
    last = log[-1].node

    data = {
        'repository_callsign': repo_callsign,
        'first': first,
        'last': last,
    }
    print 'first: {}'.format(first)
    print 'last: {}'.format(last)
    if update_revision:
        data['revision_id'] = update_revision

    bundle = tempfile.NamedTemporaryFile(suffix='.bundle', prefix='tmp-',
                                         delete=False)
    bundle.close()
    print bundle.name
    try:
        assert hg.bundle(bundle.name, rev='last({})'.format(rev))
        with open(bundle.name, 'rb') as b:
            req = requests.Request(
                'POST', submit_url, data=data,
                files=[('bundle',
                        ('uploaded.bundle', b, 'application/octet-stream'))])
            prep = s.prepare_request(req)
            resp = s.send(prep)
    finally:
        os.unlink(bundle.name)

    j = resp.json()
    if 'diff' in j:
        print j['diff']['uri']
    if 'revision_result' in j:
        print 'Revision D{}'.format(j['revision_result']['object']['id'])
def handle(self, orig, fork, dry_run=False, **options):
    orig = self.repo_or_forest(orig)
    fork = self.repo_or_forest(fork)
    if type(orig) is not type(fork):
        raise CommandError(
            '%s and %s need to be both Repository or Forest' % (orig, fork))
    if dry_run:
        confirm = 'yes'
    else:
        confirm = input(
            '''Declare %s to be a fork of %s
and pull all changesets of the first into the latter? [yes/no] '''
            % (self.style.WARNING(fork), self.style.WARNING(orig)))
    if confirm != 'yes':
        self.stdout.write('Aborting...')
        return

    # pulling mercurial changesets into orig
    repos = {orig.name: orig}
    forks = {orig.name: fork}
    if type(orig) is Forest:
        repos = {
            r.locale.code: r
            for r in orig.repositories.select_related('locale__code')
        }
        forks = {
            r.locale.code: r
            for r in fork.repositories.select_related('locale__code')
        }
        missing = set(forks.keys()) - set(repos.keys())
        if missing:
            raise CommandError("""Cannot fork: %s
They don't exist in %s""" % (', '.join(sorted(missing)), orig))
    for name in sorted(forks.keys()):
        hgrepo = hglib.open(repos[name].local_path())
        inc = hgrepo.incoming(path=forks[name].local_path())
        if not dry_run:
            hgrepo.pull(source=forks[name].local_path())
        self.stdout.write('{:<10}: {:>3}'.format(name, len(inc)))
        hgrepo.close()
    if not dry_run:
        fork.fork_of = orig
        fork.save()
def fake_hg_repo(tmpdir):
    tmp_path = tmpdir.strpath
    dest = os.path.join(tmp_path, "repos")
    local = os.path.join(dest, "local")
    remote = os.path.join(dest, "remote")
    for d in [local, remote]:
        os.makedirs(d)
        hglib.init(d)

    os.environ["USER"] = "******"
    hg = hglib.open(local)

    responses.add_passthru("http://localhost:8000")

    yield hg, local, remote

    hg.close()
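# A hypothetical pytest test consuming the fake_hg_repo fixture above (the
# @pytest.fixture decorator is assumed to be applied where the generator is
# defined): commit one file and check that it shows up in the log.
def test_fake_hg_repo_commit(fake_hg_repo):
    import os
    hg, local, remote = fake_hg_repo
    path = os.path.join(local, "file.txt")
    with open(path, "w") as f:
        f.write("hello")
    hg.add(files=[bytes(path, "ascii")])
    hg.commit(message=b"initial", user=b"tester")
    assert len(hg.log()) == 1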
def fake_hg_repo(tmpdir):
    tmp_path = tmpdir.strpath
    dest = os.path.join(tmp_path, 'repos')
    local = os.path.join(dest, 'local')
    remote = os.path.join(dest, 'remote')
    for d in [local, remote]:
        os.makedirs(d)
        hglib.init(d)

    os.environ['USER'] = '******'
    hg = hglib.open(local)

    responses.add_passthru('http://localhost:8000')

    yield hg, local, remote

    hg.close()
def clean(repo_dir):
    with hglib.open(repo_dir) as hg:
        hg.revert(repo_dir.encode("utf-8"), all=True)

        try:
            cmd = hglib.util.cmdbuilder(
                b"strip", rev=b"roots(outgoing())", force=True, backup=False
            )
            hg.rawcommand(cmd)
        except hglib.error.CommandError as e:
            if b"abort: empty revision set" not in e.err:
                raise

        # Pull and update.
        logger.info("Pulling and updating mozilla-central")
        hg.pull(update=True)
        logger.info("mozilla-central pulled and updated")
def repository(self):
    """Returns the hglib.hgclient instance.

    If the config has changed, recreate the instance.
    """
    configs = [f"{key}={value}" for key, value in self._get_config_options()]
    configs.sort()

    # return the existing repo instance if the config has not changed
    if self._repo is not None and self._configs == configs:
        return self._repo

    if self._repo:
        self._repo.close()

    self._configs = configs
    self._repo = hglib.open(self._repo_path, encoding="UTF-8", configs=configs)
    return self._repo
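# For reference, hglib.open's configs parameter takes "section.key=value"
# strings, which is the format _get_config_options above is assumed to
# produce. A minimal sketch with an illustrative path and value:
import hglib
client = hglib.open('/path/to/repo', encoding="UTF-8",
                    configs=["ui.username=Jane Doe <jane@example.com>"])
client.close()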
def clone_new_repo(source=None):
    """Clones a new copy of yt_analysis/yt and returns a path to it"""
    path = tempfile.mkdtemp()
    dest_repo_path = path + '/yt-backport'
    if source is None:
        source = YT_REPO
    hglib.clone(source=source, dest=dest_repo_path)
    with hglib.open(dest_repo_path) as client:
        # Changesets that are on the yt branch but aren't topological
        # ancestors of whichever changeset the experimental bookmark is
        # pointing at
        bookmarks, _ = client.bookmarks()
        bookmark_names = [b[0] for b in bookmarks]
        if 'experimental' in bookmark_names:
            client.update('heads(branch(yt) - ::bookmark(experimental))')
        else:
            client.update('heads(branch(yt))')
    return dest_repo_path
class HgRepository(FileRepository):
    """
    Class for hg repositories.
    """
    DESCRIPTION = ("A non-empty mercurial repository (created with 'hg init' or 'hg clone'). "
                   "The 1st argument contains the mercurial repository location, which can be a directory or an URL. "
                   "The 2nd argument is ignored.")

    USABLE = HAVE_HG

    def __init__(self, *args):
        """
        Initialize mercurial client to None (will be set later).
        All the real logic is in the setup_repo and create_working_copy methods.
        """
        self.client = None
        FileRepository.__init__(self, *args)

    def setup_repo(self):
        """
        Set up mercurial repository.
        """
        if not HAVE_HG:
            raise EasyBuildError("python-hglib is not available, which is required for Mercurial support.")
        self.wc = tempfile.mkdtemp(prefix='hg-wc-')

    def create_working_copy(self):
        """
        Create mercurial working copy.
        """
        # try to get a copy of the repository
        try:
            client = hglib.clone(self.repo, self.wc)
            self.log.debug("repo %s cloned in %s" % (self.repo, self.wc))
        except (HgCommandError, OSError), err:
            # it might already have existed
            self.log.warning("Mercurial local repo initialization failed, it might already exist: %s" % err)

        # the local repo should now exist, let's connect to it again
        try:
            self.log.debug("connecting to mercurial repo in %s" % self.wc)
            self.client = hglib.open(self.wc)
        except HgServerError, err:
            raise EasyBuildError("Could not connect to local mercurial repo: %s", err)
def __init__(self):
    self.model_class = RegressorModel

    self.repo_dir = get_login_info()["repo_dir"]

    if not os.path.exists(self.repo_dir):
        cmd = hglib.util.cmdbuilder(
            "robustcheckout",
            "https://hg.mozilla.org/mozilla-central",
            self.repo_dir,
            purge=True,
            sharebase=self.repo_dir + "-shared",
            networkattempts=7,
            branch=b"tip",
        )
        cmd.insert(0, hglib.HGPATH)
        proc = hglib.util.popen(cmd)
        out, err = proc.communicate()
        if proc.returncode:
            raise hglib.error.CommandError(cmd, proc.returncode, out, err)
        logger.info("mozilla-central cloned")

        # Remove pushlog DB to make sure it's regenerated.
        try:
            os.remove(os.path.join(self.repo_dir, ".hg", "pushlog2.db"))
        except FileNotFoundError:
            logger.info("pushlog database doesn't exist")

    logger.info("Pulling and updating mozilla-central")
    with hglib.open(self.repo_dir) as hg:
        hg.pull(update=True)
    logger.info("mozilla-central pulled and updated")

    db.download_version(repository.COMMITS_DB)
    if db.is_old_version(repository.COMMITS_DB) or not os.path.exists(
            repository.COMMITS_DB):
        db.download(repository.COMMITS_DB, force=True, support_files_too=True)

    super().__init__()

    self.model = self.model_class.load(self.retrieve_model())
def __init__(self, cache_root, emails, mozreview, mozreview_enabled=False,
             client_id=None, access_token=None):  # noqa
    self.emails = emails
    self.mozreview = mozreview
    self.mozreview_enabled = mozreview_enabled
    self.cache_root = cache_root
    assert os.path.isdir(self.cache_root), \
        'Cache root {} is not a dir.'.format(self.cache_root)
    assert 'MOZCONFIG' in os.environ, \
        'Missing MOZCONFIG in environment'

    # Load TC services & secrets
    self.notify = get_service(
        'notify',
        client_id=client_id,
        access_token=access_token,
    )

    # Clone mozilla-central
    self.repo_dir = os.path.join(cache_root, 'central')
    shared_dir = os.path.join(cache_root, 'central-shared')
    logger.info('Clone mozilla central', dir=self.repo_dir)
    cmd = hglib.util.cmdbuilder('robustcheckout',
                                REPO_CENTRAL,
                                self.repo_dir,
                                purge=True,
                                sharebase=shared_dir,
                                branch=b'tip')
    cmd.insert(0, hglib.HGPATH)
    proc = hglib.util.popen(cmd)
    out, err = proc.communicate()
    if proc.returncode:
        raise hglib.error.CommandError(cmd, proc.returncode, out, err)

    # Open new hg client
    self.hg = hglib.open(self.repo_dir)

    # Setup clang
    self.clang = ClangTidy(self.repo_dir, settings.target)
def clone_mozilla_central(revision):
    shared_dir = REPO_DIR + '-shared'
    cmd = hglib.util.cmdbuilder('robustcheckout',
                                REPO_CENTRAL,
                                REPO_DIR,
                                purge=True,
                                sharebase=shared_dir,
                                branch=b'tip')
    cmd.insert(0, hglib.HGPATH)
    proc = hglib.util.popen(cmd)
    out, err = proc.communicate()
    if proc.returncode:
        raise hglib.error.CommandError(cmd, proc.returncode, out, err)

    hg = hglib.open(REPO_DIR)
    hg.update(rev=revision, clean=True)
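# The robustcheckout invocations in the snippets above all repeat the same
# cmdbuilder/popen/returncode boilerplate; a small helper like this (a sketch,
# not part of any of the original codebases) can factor the pattern out.
def run_robustcheckout(url, repo_dir, shared_dir, branch=b'tip'):
    import hglib
    cmd = hglib.util.cmdbuilder('robustcheckout', url, repo_dir,
                                purge=True, sharebase=shared_dir,
                                branch=branch)
    cmd.insert(0, hglib.HGPATH)
    proc = hglib.util.popen(cmd)
    out, err = proc.communicate()
    if proc.returncode:
        raise hglib.error.CommandError(cmd, proc.returncode, out, err)
    # return a connected client for the fresh checkout
    return hglib.open(repo_dir)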