def _init_target_path(self, path):
    """Return a unique target path for `path` under the run's generated dir.

    Creates `<target_dir>/.guild/generated/` if needed, makes a fresh
    temp directory inside it, and returns the path of `path`'s
    basename within that temp directory.
    """
    generated_root = os.path.join(
        self.resource.ctx.target_dir, ".guild", "generated")
    util.ensure_dir(generated_root)
    tmp_dir = tempfile.mkdtemp(suffix="", prefix="", dir=generated_root)
    return os.path.join(tmp_dir, os.path.basename(path))
def export(selected):
    """Export `selected` runs to `args.location`.

    Each run is moved or copied depending on `args.move`. With
    `--copy-resources`, resource symlinks are dereferenced (copied);
    otherwise links are preserved. Runs already present at the
    destination are skipped with a warning.

    Fixes: removed a dead empty-string concatenation in the warning
    message and corrected the grammar of the confirmation prompt.
    """
    if args.copy_resources and not args.yes:
        cli.out(
            "WARNING: you have specified --copy-resources, which will "
            "copy resources used by each run!"
        )
        if not cli.confirm("Really copy resources for exported runs?"):
            return
    util.ensure_dir(args.location)
    # Mark the export location so Guild never snapshots it as source.
    util.touch(os.path.join(args.location, ".guild-nocopy"))
    exported = 0
    for run in selected:
        dest = os.path.join(args.location, run.id)
        if os.path.exists(dest):
            log.warning("%s exists, skipping", dest)
            continue
        if args.move:
            cli.out("Moving {}".format(run.id))
            if args.copy_resources:
                # Copy with symlinks dereferenced, then drop the original.
                shutil.copytree(run.path, dest)
                shutil.rmtree(run.path)
            else:
                shutil.move(run.path, dest)
        else:
            cli.out("Copying {}".format(run.id))
            symlinks = not args.copy_resources
            shutil.copytree(run.path, dest, symlinks)
        exported += 1
    cli.out("Exported %i run(s)" % exported)
def start(self):
    """Provision and start the environment via Terraform.

    Order matters: AWS credentials and the terraform binary are
    verified before any filesystem or terraform state is touched.
    """
    self._verify_aws_creds()
    self._verify_terraform()
    util.ensure_dir(self.working_dir)
    self._refresh_config()
    self._ensure_terraform_init()
    self._terraform_apply()
def __init__(
        self,
        logdir,
        max_queue_size=10,
        flush_secs=120,
        filename_base=None,
        filename_suffix=""):
    """Async writer of TF event files under `logdir`.

    Writes `events.out.tfevents.<base><suffix>` using tensorboard's
    private writer classes and immediately logs a file-version event.

    NOTE(review): relies on tensorboard internals (`_AsyncWriter`,
    `_global_uid`) that may change between tensorboard releases.
    """
    with warnings.catch_warnings():
        # tensorboard emits warnings on import we don't want to
        # surface to users.
        warnings.simplefilter("ignore", Warning)
        # pylint: disable=no-name-in-module
        from tensorboard.summary.writer import event_file_writer as writelib
    util.ensure_dir(logdir)
    # Default base mirrors tensorboard's convention:
    # <time>.<host>.<pid>.<uid> — unique per writer instance.
    filename_base = filename_base or ("%010d.%s.%s.%s" % (
        time.time(),
        socket.gethostname(),
        os.getpid(),
        writelib._global_uid.get()))
    filename = (
        os.path.join(logdir, "events.out.tfevents.%s" % filename_base)
        + filename_suffix)
    self._writer = writelib._AsyncWriter(
        writelib.RecordWriter(open(filename, "wb")),
        max_queue_size,
        flush_secs)
    # First record identifies the file format version to readers.
    event = writelib.event_pb2.Event(
        wall_time=time.time(),
        file_version="brain.Event:2")
    self.add_event(event)
    self.flush()
def resolve(self, resolve_context): from guild import pip_util # expensive if self.resource.config: return _resolve_config_path(self.resource.config, self.source.resdef.name) download_dir = url_source_download_dir(self.source) util.ensure_dir(download_dir) try: source_path = pip_util.download_url(self.source.uri, download_dir, self.source.sha256) except pip_util.HashMismatch as e: raise ResolutionError( "bad sha256 for '%s' (expected %s but got %s)" % (e.path, e.expected, e.actual)) except Exception as e: if log.getEffectiveLevel() <= logging.DEBUG: log.exception(self.source.uri) raise ResolutionError(e) else: unpack_dir = self._unpack_dir(source_path, resolve_context.unpack_dir) resolved = resolve_source_files(source_path, self.source, unpack_dir) post_process(self.source, unpack_dir or os.path.dirname(source_path)) return resolved
def _init_tfevent_link(tfevent_src, tfevent_link, run, state):
    """Symlink a run tfevent file, initializing hparams first if enabled."""
    parent = os.path.dirname(tfevent_link)
    util.ensure_dir(parent)
    if state.log_hparams:
        _init_hparam_session(run, parent, state)
    log.debug("Creating link from '%s' to '%s'", tfevent_src, tfevent_link)
    util.symlink(tfevent_src, tfevent_link)
def _symlink(source_path, link):
    """Create `link` pointing at `source_path` unless it already exists.

    `link` must be absolute; its parent directory is created as needed.
    """
    assert os.path.isabs(link), link
    if not os.path.exists(link):
        util.ensure_dir(os.path.dirname(link))
        log.debug("resolving source %s as link %s", source_path, link)
        util.symlink(source_path, link)
    else:
        log.warning("%s already exists, skipping link", link)
def _link_to_source(self, source_path):
    """Create a symlink to `source_path` at its link path.

    Skips with a warning when something already exists at the link
    path. Uses `os.path.lexists` (as the sibling module-level
    `_link_to_source` does) so a broken symlink is detected and
    skipped instead of causing `os.symlink` to fail with EEXIST.
    """
    link = self._link_path(source_path)
    if os.path.lexists(link):
        log.warning("source '%s' already exists, skipping link", link)
        return
    util.ensure_dir(os.path.dirname(link))
    log.debug("resolving source '%s' as link '%s'", source_path, link)
    os.symlink(source_path, link)
def _open_file(path):
    """Open `path` for writing; '-' means stdout.

    Exits with a CLI error if the file cannot be opened.
    """
    if path != "-":
        util.ensure_dir(os.path.dirname(path))
        try:
            return open(path, "w")
        except (OSError, IOError) as e:
            cli.error("error opening %s: %s" % (path, e))
    else:
        return util.StdIOContextManager(sys.stdout)
def _write_completion_script_to_user_config(shell, script):
    """Save `script` as the user-config completion file for `shell`.

    Returns the written path.
    """
    path = os.path.join(config.user_config_home(), "%s_completion" % shell)
    cli.out("Writing completion script to %s" % util.format_dir(path), err=True)
    util.ensure_dir(os.path.dirname(path))
    with open(path, "w") as out:
        out.write(script + "\n")
    return path
def _write_package_metadata(pkgdef, setup_kw):
    """Write Guild's PACKAGE metadata into the package egg-info dir."""
    egg_info = "%s.egg-info" % setup_kw["name"]
    util.ensure_dir(egg_info)
    with open(os.path.join(egg_info, "PACKAGE"), "w") as out:
        yaml.dump(_pkg_metadata(pkgdef), out, default_flow_style=False, width=9999)
def _init_export_dir(dir):
    """Create export dir `dir` and mark it with `.guild-nocopy`.

    The marker keeps Guild from treating exported runs as project
    source. The try now also covers `util.ensure_dir`, so failures
    creating the directory (e.g. a file at `dir`, permission denied)
    surface as CLI errors instead of raw tracebacks; OSError is
    included for Python 3, where file errors are not always IOError.
    """
    try:
        util.ensure_dir(dir)
        util.touch(os.path.join(dir, ".guild-nocopy"))
    except (IOError, OSError) as e:
        if e.errno == errno.ENOTDIR:
            cli.error("'%s' is not a directory" % dir)
        else:
            cli.error("error initializing export directory '%s': %s" % (dir, e))
def generate(self, dest, vars):
    """Render this generator's file templates into `dest`.

    Files without a template are copied verbatim; templates are
    rendered with `vars`.
    """
    util.ensure_dir(dest)
    for relpath, src, template in self._file_templates:
        target = os.path.join(dest, relpath)
        util.ensure_dir(os.path.dirname(target))
        if template is not None:
            _render_template(template, vars, target)
        else:
            shutil.copyfile(src, target)
def _pull_run(self, run, delete):
    """Sync a remote run from the container into the local runs dir."""
    src = self._container_path(*RUNS_PATH + [run.id])
    dest = os.path.join(var.runs_dir(), run.id)
    cmd_args = [src, dest]
    if delete:
        # The delete flag must precede the source/dest paths.
        cmd_args = ["--delete-destination", "true"] + cmd_args
    log.info("Copying %s from %s", run.id, self.name)
    util.ensure_dir(dest)
    self._azcopy("sync", cmd_args)
def simulate_batch():
    """Make the current queue run look like a batch run to Guild.

    Creates an empty proto dir to mimic the appearance of a batch,
    so the queue doesn't show up in compare and other facilities
    that ignore batch runs by default.
    """
    run = op_util.current_run()
    util.ensure_dir(run.guild_path("proto"))
def resolve(self):
    """Download this source's URL into the cache, verifying its sha256.

    Returns the downloaded path. Raises ResolutionError when the
    download's hash does not match the expected sha256.
    """
    dest = self._source_download_dir()
    util.ensure_dir(dest)
    try:
        return pip_util.download_url(self.source.uri, dest, self.source.sha256)
    except pip_util.HashMismatch as e:
        raise ResolutionError(
            "bad sha256 for '%s' (expected %s but got %s)"
            % (self.source.uri, e.expected, e.actual))
def _link_to_source(source_path, link):
    """Create `link` as a relative symlink to `source_path`.

    Skips with a warning when anything — including a broken symlink —
    already exists at `link`. `os.path.lexists` alone suffices: any
    path for which `os.path.exists` is true is also `lexists`-true,
    so the original `lexists(...) or exists(...)` second test was
    dead code and is removed.
    """
    assert os.path.isabs(link), link
    source_path = util.strip_trailing_sep(source_path)
    if os.path.lexists(link):
        log.warning("%s already exists, skipping link", link)
        return
    util.ensure_dir(os.path.dirname(link))
    log.debug("resolving source %s as link %s", source_path, link)
    source_rel_path = _source_rel_path(source_path, link)
    util.symlink(source_rel_path, link)
def _copy_source(src_base, source_config, dest_base):
    """Copy configured source files from `src_base` under `dest_base`."""
    to_copy = _source_to_copy(src_base, source_config)
    if not to_copy:
        log.debug("no source to copy")
        return
    for src, rel_path in to_copy:
        dest = os.path.join(dest_base, rel_path)
        log.debug("copying source %s to %s", src, dest)
        util.ensure_dir(os.path.dirname(dest))
        _try_copy_file(src, dest)
def _init_index(self):
    """Open the index at `self.path`, creating it when missing or empty."""
    try:
        return index.open_dir(self.path)
    except index.EmptyIndexError:
        return self._create_index()
    except OSError as e:
        # Only a missing directory is recoverable — create the dir
        # and a fresh index; re-raise anything else.
        if e.errno != errno.ENOENT:
            raise
        util.ensure_dir(self.path)
        return self._create_index()
def _copy_source(source_path, dest_path):
    """Copy `source_path` (file or directory) to absolute `dest_path`.

    Skips with a warning when anything already exists at the
    destination, including a broken symlink.
    """
    assert os.path.isabs(dest_path), dest_path
    if os.path.lexists(dest_path) or os.path.exists(dest_path):
        log.warning("%s already exists, skipping copy", dest_path)
        return
    util.ensure_dir(os.path.dirname(dest_path))
    log.debug("resolving source %s as copy %s", source_path, dest_path)
    copy = util.copytree if os.path.isdir(source_path) else util.copyfile
    copy(source_path, dest_path)
def _init_published_run(state):
    """Ensure an empty target directory for a published run.

    As a side effect, lazily creates `state.dest_home` with a
    `.guild-nocopy` marker so the published runs home is never
    considered by Guild for source snapshots, then replaces any
    existing run destination with an empty directory.
    """
    dest_home = state.dest_home
    util.ensure_dir(dest_home)
    util.touch(os.path.join(dest_home, ".guild-nocopy"))
    run_dest = state.run_dest
    if os.path.exists(run_dest):
        util.safe_rmtree(run_dest)
    os.mkdir(run_dest)
def _validated_dir(path, abs=False, create=False, guild_nocopy=False):
    """Return `path` validated (and optionally created) as a directory.

    Expands `~`. With `abs`, converts to an absolute path. With
    `create`, a missing directory is created rather than erroring.
    With `guild_nocopy` (new; default False for backward
    compatibility), ensures a `.guild-nocopy` marker file in the
    directory — this generalizes the function to match the sibling
    `_validated_dir` defined elsewhere in the project.

    Exits with a CLI error when the path is missing (and not created)
    or exists but is not a directory.
    """
    path = os.path.expanduser(path)
    if abs:
        path = os.path.abspath(path)
    if not os.path.exists(path):
        if create:
            util.ensure_dir(path)
        else:
            cli.error("directory '%s' does not exist" % path)
    if not os.path.isdir(path):
        cli.error("'%s' is not a directory" % path)
    if guild_nocopy:
        util.ensure_file(os.path.join(path, ".guild-nocopy"))
    return path
def init_project(project_dir, src, params):
    """Initialize a project at `project_dir` from the template at `src`.

    When the template has Guild metadata, `params` are validated
    before any files are copied and applied afterward.
    """
    meta = _try_guild_meta(src)
    if meta:
        _validate_params(params, meta)
    for file_src, file_dest in _safe_project_init_copies(src, project_dir):
        log.info("Copying %s to %s", os.path.relpath(file_src, src), file_dest)
        util.ensure_dir(os.path.dirname(file_dest))
        shutil.copy2(file_src, file_dest)
    if meta:
        _apply_params(project_dir, params, meta)
def main(args):
    """Download `args.url` into the cache and print its sha256 and path."""
    resdef = resourcedef.ResourceDef("download", {})
    source = resourcedef.ResourceSource(resdef, args.url)
    download_dir = resolver.url_source_download_dir(source)
    util.ensure_dir(download_dir)
    try:
        path = pip_util.download_url(source.uri, download_dir)
    except Exception as e:
        _handle_download_error(e, source)
    else:
        # use_cache=False forces a fresh digest of the downloaded file.
        digest = util.file_sha256(path, use_cache=False)
        print("{} {}".format(digest, path))
def _refresh_image_summaries(run, run_logdir, state):
    """Write image summaries for run images updated since last summary.

    No-op unless image logging is enabled; caps the number of image
    summaries per run logdir at MAX_IMAGE_SUMMARIES.
    """
    if not state.log_images:
        return
    images_logdir = os.path.join(run_logdir, ".images")
    for path, relpath in _iter_images(run.dir):
        if _count_images(images_logdir) >= MAX_IMAGE_SUMMARIES:
            break
        digest = _path_digest(relpath)
        tfevent_path = _image_tfevent_path(images_logdir, digest)
        if not _image_updated_since_summary(path, tfevent_path):
            continue
        # Create the logdir lazily — only when there is something to write.
        util.ensure_dir(images_logdir)
        with _image_writer(images_logdir, digest) as writer:
            writer.add_image(relpath, path)
def _init_resource_cache(guild_dir, local):
    """Initialize the resource cache for a Guild environment.

    With `local`, creates a real cache directory inside the
    environment (removing any existing symlink first); otherwise
    links the environment cache to the shared cache.

    Fixes: the skip message passed no argument for its `%s`
    placeholder, so the path never appeared in the log.
    """
    env_cache = os.path.join(guild_dir, "cache", "resources")
    if os.path.exists(env_cache):
        log.info("Resource cache %s exists, skipping", env_cache)
        return
    if local:
        if os.path.islink(env_cache):
            os.unlink(env_cache)
        util.ensure_dir(env_cache)
    else:
        shared_cache = _shared_resource_cache(guild_dir)
        util.ensure_dir(shared_cache)
        os.symlink(shared_cache, env_cache)
def _validated_dir(path, abs=False, create=False, guild_nocopy=False):
    """Expand, optionally create, and validate `path` as a directory.

    With `abs`, the path is made absolute. With `create`, a missing
    directory is created instead of erroring. With `guild_nocopy`, a
    `.guild-nocopy` marker file is ensured in the directory. Exits
    with a CLI error on a missing or non-directory path.
    """
    path = os.path.expanduser(path)
    if abs:
        path = os.path.abspath(path)
    if not os.path.exists(path):
        if not create:
            cli.error("directory '%s' does not exist" % path)
        util.ensure_dir(path)
    if not os.path.isdir(path):
        cli.error("'%s' is not a directory" % path)
    if guild_nocopy:
        util.ensure_file(os.path.join(path, ".guild-nocopy"))
    return path
def obtain(self, dest):
    """Download and extract the repo archive for `self.url` into `dest`.

    Fetches `<url>/archive/<rev>.zip` (GitHub-style archive URL —
    confirm for other hosts) and extracts it, stripping the archive's
    top-level path component from each member.

    Fixes: the ZipFile and each member stream are now closed via
    context managers — the original leaked both handles.
    """
    url, rev = self.get_url_rev(self.url)
    archive_url = "{}/archive/{}.zip".format(url, rev)
    util.ensure_dir(dest)
    downloaded_file = pip_util.download_url(archive_url, dest)
    with zipfile.ZipFile(downloaded_file) as zf:
        for name in zf.namelist():
            # Drop the leading '<name>-<rev>/' component.
            dest_path = os.path.join(*([dest] + name.split("/")[1:]))
            if name.endswith("/"):
                util.ensure_dir(dest_path)
            else:
                with zf.open(name) as fsrc, open(dest_path, "wb") as fdst:
                    shutil.copyfileobj(fsrc, fdst)
def _start(name, f, log):
    """Start `f` as a daemonized background process named `name`.

    Raises Running when a pidfile for `name` already exists.
    """
    import daemonize
    pidfile = var.pidfile(name)
    if os.path.exists(pidfile):
        raise Running(name, pidfile)
    util.ensure_dir(os.path.dirname(pidfile))
    # Save original log level to work around an issue with
    # daemonization (see note in _run).
    log_level = log.getEffectiveLevel()
    daemonize.Daemonize(
        app=name,
        action=lambda: _run(f, log, log_level),
        pid=pidfile,
        keep_fds=_log_fds(log),
    ).start()
def _append_to_init_script(init_script, lines):
    """Append Guild completion-support `lines` to `init_script`.

    Creates the script's directory as needed and separates any
    existing content with a blank line.

    Fixes: write "\\n" rather than `os.linesep` — the file is open in
    text mode, which already translates "\\n" to the platform line
    ending, so writing `os.linesep` produced "\\r\\r\\n" on Windows.
    """
    cli.out(
        "Updating %s to support Guild command completion"
        % util.format_dir(init_script),
        err=True,
    )
    util.ensure_dir(os.path.dirname(init_script))
    exists = os.path.exists(init_script)
    with open(init_script, "a") as f:
        if exists:
            f.write("\n")
        for line in lines:
            f.write(line)
            f.write("\n")