def status(self, verbose=False):
    """Report that this remote's backing gist is available.

    With `verbose`, also pretty-prints the full gist record
    returned by the API.
    """
    remote_util.remote_activity("Getting %s status", self.name)
    gist = self._repo_gist()
    out = sys.stdout
    out.write("%s (gist %s) is available\n" % (self.name, gist["id"]))
    if verbose:
        out.write(pprint.pformat(gist))
        out.write("\n")
def stop_details(self):
    """Return a warning describing what stopping this remote deletes.

    Returns None when the backing gist does not exist (nothing to
    delete), otherwise a message naming the gist that would be removed.
    """
    remote_util.remote_activity("Getting %s status", self.name)
    try:
        gist = self._repo_gist()
    except NoSuchGist:
        return None
    return "gist %s will be deleted - THIS CANNOT BE UNDONE!" % gist["id"]
def filtered_runs(self, **filters):
    """Return remote runs matching `filters` as a list of RunProxy objects.

    Filters are translated to `guild runs list` command options; the
    command's JSON output is decoded and wrapped per run.
    """
    remote_util.remote_activity("Getting run info on %s", self.name)
    opts = _filtered_runs_filter_opts(**filters)
    out = self._guild_cmd_output("runs list", opts)
    data = json.loads(out.decode()) if out else []
    assert isinstance(data, list), (data, self.name)
    return [remotelib.RunProxy(run_data) for run_data in data]
def start(self):
    """Create the backing gist for this remote.

    If the gist already exists, raises `remotelib.OperationError`;
    otherwise creates it and syncs run metadata locally.
    """
    remote_util.remote_activity("Getting %s status", self.name)
    try:
        gist = self._repo_gist()
    except NoSuchGist:
        # Expected path for `start` - the gist isn't there yet, so
        # create it and prime the local run metadata cache.
        log.info("Creating gist")
        gist = self._create_gist()
        log.info(
            "Created %s (gist %s) for user %s",
            self.name,
            gist["id"],
            self.user,
        )
        self._sync_runs_meta()
        return
    raise remotelib.OperationError(
        "%s (gist %s) already exists for user %s"
        % (self.name, gist["id"], self.user)
    )
def _sync_runs_meta(self, force=False):
    """Refresh the local cache of run metadata from Azure storage.

    Skips the sync when the cached meta ID is still current unless
    `force` is given.
    """
    remote_util.remote_activity("Refreshing run info for %s" % self.name)
    if not force and meta_sync.meta_current(self.local_sync_dir, self._remote_meta_id):
        return
    _ensure_azure_local_dir(self.local_sync_dir)
    meta_sync.clear_local_meta_id(self.local_sync_dir)
    # TODO: This is a terribly inefficient approach - we copy
    # everything just to obtain run metadata. azcopy sync has limited
    # include/exclude pattern support, which makes it hard to use for
    # this application. Copying metadata per remote run would likely
    # also be inefficient, though less impactful on local storage.
    args = [
        self._container_path(),
        self.local_sync_dir,
        "--delete-destination",
        "true",
    ]
    self._azcopy("sync", args, quiet=True)
def _sync_runs_meta(self, force=False):
    """Refresh the local cache of run metadata from S3.

    Only the per-run metadata files (opref, attrs, lock markers) and
    the remote meta-id are synced. Skipped when the cache is current
    unless `force` is given.
    """
    remote_util.remote_activity("Refreshing run info for %s" % self.name)
    if not force and self._meta_current():
        return
    meta_sync.clear_local_meta_id(self.local_sync_dir)
    args = [self._s3_uri(), self.local_sync_dir, "--exclude", "*"]
    for pattern in (
        "*/.guild/opref",
        "*/.guild/attrs/*",
        "*/.guild/LOCK*",
        "meta-id",
    ):
        args.extend(["--include", pattern])
    args.append("--delete")
    self._s3_cmd("sync", args, quiet=True)
def _sync_runs_meta(self, force=False):
    """Refresh the local cache of run metadata from the gist repo.

    Ensures the gist is cloned locally, then delegates the actual
    metadata sync; `force` is passed through to bypass currency checks.
    """
    remote_util.remote_activity("Refreshing run info for %s" % self.name)
    self._ensure_local_gist_repo()
    self._sync_runs_meta_for_gist(force)
def one_run(self, run_id_prefix):
    """Return a RunProxy for the single run matching `run_id_prefix`.

    Runs `guild runs info` remotely with JSON output (including
    private attrs) and wraps the decoded result.
    """
    # Announce the activity BEFORE running the remote command -- the
    # original called remote_activity() after _guild_cmd_output(), so
    # the progress message only appeared once the (potentially slow)
    # remote operation had already finished. Every other operation in
    # this file announces first.
    remote_util.remote_activity("Resolving run on %s", self.name)
    out = self._guild_cmd_output(
        "runs info", [run_id_prefix, "--private-attrs", "--json"]
    )
    run_data = self._run_data_for_json(out)
    return remotelib.RunProxy(run_data)