def run_postinstall(self):
    """Run cygwin postinstall scripts.

    Scripts found under etc/postinstall are processed in sorted order:
    .sh scripts run via bash, .dash scripts via dash. Each processed
    script is renamed to <name>.done so it is not run again. All script
    output is captured in postinstall.log.
    """
    with open(self.cyglog('postinstall.log'), 'wb') as fd:
        os.chdir(self.root_dir)

        # Compute the list of postinstall scripts
        pscripts = []
        for ext in ('.sh', '.bat', '.cmd', '.dash'):
            pscripts += ls('etc/postinstall/*' + ext)
        pscripts.sort()

        # Set some env variables needed by the postinstall scripts.
        # TERM is the standard 'dumb' terminal type (the previous value
        # 'dump' was a typo and is not a valid terminal name).
        os.environ['SHELL'] = '/bin/bash'
        os.environ['CYGWINROOT'] = self.root_dir
        os.environ['TERM'] = 'dumb'
        for p in (('usr', 'bin'), ('bin', ), ('usr', 'sbin'), ('sbin', )):
            os.environ['PATH'] = os.path.join(self.root_dir, *p) + ';' + \
                os.environ['PATH']

        # run postinstall scripts
        for index, ps in enumerate(pscripts):
            # index + 1 so the progress counter reads 1/N .. N/N instead
            # of starting at 0.
            logging.info('Run postinstall (%s/%s) %s' %
                         (index + 1, len(pscripts), os.path.basename(ps)))
            fd.write('run %s\n' % ps)
            if ps.endswith('.dash'):
                Run([os.path.join(self.root_dir, 'bin', 'dash.exe'), ps],
                    output=fd)
            elif ps.endswith('.sh'):
                Run([os.path.join(self.root_dir, 'bin', 'bash.exe'),
                     '--norc', '--noprofile', ps], output=fd)
            # NOTE(review): .bat/.cmd scripts are collected and marked
            # done but never executed — confirm this is intentional.
            mv(ps, ps + '.done')
def set(self, uid: str, value: Any, timeout: int = DEFAULT_TIMEOUT) -> bool:
    """Store *value* in the cache under key *uid*.

    The entry is first written to a private temporary file in the cache
    directory (expiry timestamp followed by the pickled value) and then
    renamed to its final location, so concurrent readers never observe a
    partially written entry.

    :param uid: cache key
    :param value: any picklable object
    :param timeout: entry lifetime, forwarded to get_expiry_time
    :return: True on success, False if the entry could not be pickled
    """
    # Make sure that the cache dir exists
    self._create_cache_dir()
    dest_file = self.uid_to_file(uid)

    # Temp file lives in cache_dir so the final rename stays on the same
    # filesystem (atomic on POSIX).
    tmp_file = tempfile.NamedTemporaryFile(dir=self.cache_dir, delete=False)
    try:
        # Entry layout: expiry time first, then the payload.
        tmp_file.write(
            pickle.dumps(self.get_expiry_time(timeout), pickle.HIGHEST_PROTOCOL))
        tmp_file.write(pickle.dumps(value, pickle.HIGHEST_PROTOCOL))
    except Exception as err:
        # Pickling failed: log and report failure so callers can treat
        # the cache as best-effort instead of crashing.
        tmp_file.close()
        e3.log.debug("error when setting %s in %s:\n%s", uid, dest_file, err)
        return False
    else:
        tmp_file.close()
        if sys.platform == "win32":  # unix: no cover
            # atomic rename does not work on windows if the dest file
            # already exist
            rm(dest_file)
        mv(tmp_file.name, dest_file)
        return True
    finally:
        # On success the temp file has already been renamed away and this
        # is a no-op; on failure it removes the leftover temp file.
        rm(tmp_file.name)
def remove(path):
    """Delete the file or directory subtree designated by PATH."""
    print("from : %s" % os.getcwd())
    print("remove : %s" % path)

    # Safety net: instead of removing PATH directly, first rename it to a
    # fixed local name and delete that. The local name involves no
    # computation, so it is safer to manipulate, and the local rename
    # would fail outright for obviously bogus arguments such as anything
    # leading to a parent of the current dir (e.g. "/", or ...).
    local_name = "./old_stuff_to_be_removed"

    # What we remove may be a regular file or an entire directory
    # subtree, and rm(recursive=True) is not guaranteed to work for
    # regular files, so pick the recursion mode from the actual kind.
    # Symlinks to directories are treated as plain files.
    def is_real_dir(p):
        return os.path.isdir(p) and not os.path.islink(p)

    # Clean up any leftover from a previous removal that failed or was
    # interrupted somehow.
    if os.path.exists(local_name):
        rm(local_name, recursive=is_real_dir(local_name))

    if os.path.exists(path):
        mv(path, local_name)
        rm(local_name, recursive=is_real_dir(local_name))
def __init__(self, ini, reset=False):
    """Initialize the cygwin installation manager.

    :param ini: configuration object; must provide download_dir and the
        data required by CygwinDB
    :param reset: when True and an installation already exists at the
        root dir, move it aside to a timestamped backup directory first
    """
    self.root_dir = 'c:\\cygwin'
    self.prev_dir = None

    if reset:
        # Save previous installation
        if os.path.isdir(self.root_dir):
            self.prev_dir = self.root_dir + \
                '-prev-' + strftime('%Y%m%d-%H%M')
            # Log the real (timestamped) destination; the old message
            # claimed '<root>-prev' which is not where the data goes.
            logging.warning('move previous installation to %s'
                            % self.prev_dir)
            mv(self.root_dir, self.prev_dir)

    self.ini = ini

    # with the installation logs
    self.tmpdir = self.ini.download_dir

    # Ensure our tmp dir is used
    os.environ['TMP'] = self.ini.download_dir
    os.environ['TMPDIR'] = self.ini.download_dir

    # User required packages
    self.to_be_removed = set()
    self.to_be_installed = set()

    # Initialize db
    self.db = CygwinDB(self.root_dir, self.ini)
    self.required = set(self.db.installed)
def setup_result_dir(self):
    """Create the output directory in which the results are stored."""
    # Rotate: drop the previous "old" results, then demote the current
    # results to "old", then start from a fresh output directory.
    if os.path.isdir(self.old_output_dir):
        rm(self.old_output_dir, True)
    if os.path.isdir(self.output_dir):
        mv(self.output_dir, self.old_output_dir)
    mkdir(self.output_dir)

    # Optionally record the environment as a sourceable shell script.
    if self.main.args.dump_environ:
        env_script = os.path.join(self.output_dir, 'environ.sh')
        with open(env_script, 'w') as f:
            f.writelines(
                'export %s=%s\n' % (name, quote_arg(os.environ[name]))
                for name in sorted(os.environ))
def __relocate_into(self, dir, part):
    """Move the kit item for PART into directory DIR, replacing any
    copy already present there."""
    item = self.kititem_for(part=part)
    source = os.path.join(self.itemsdir(), item)

    # Clear any previous instance at the destination first.
    remove(os.path.join(dir, item))

    print("move : %s" % source)
    print("into : %s" % dir)
    mv(source, dir)
def setup_result_dir(self) -> None:
    """Create the output directory in which the results are stored."""
    assert self.main.args

    # Rotate result directories: previous "old" results are dropped and
    # the current results become the new "old" ones.
    if os.path.isdir(self.old_output_dir):
        rm(self.old_output_dir, True)
    if os.path.isdir(self.output_dir):
        mv(self.output_dir, self.old_output_dir)
    mkdir(self.output_dir)

    # Optionally record the environment as a sourceable shell script.
    if self.main.args.dump_environ:
        env_file = os.path.join(self.output_dir, "environ.sh")
        with open(env_file, "w") as f:
            for var_name in sorted(os.environ):
                line = "export {}={}\n".format(
                    var_name, quote_arg(os.environ[var_name]))
                f.write(line)
def migrate_v1_5() -> None:
    """Migrate to API 1.5.

    Move all .yaml files in subdirs, when the name of a yaml file matches
    a spec name move the yaml file in <spec name>/config.yaml

    When there is an additional suffix, move it to <spec name>/<suffix>.yaml

    Make sure to run e3-plan-checker after running this script.
    """
    from glob import glob
    import os

    from e3.fs import mv

    for f in glob("*.yaml"):
        print(f"looking at {f}")
        name, _ = os.path.splitext(os.path.basename(f))
        if os.path.exists(name + ".anod"):
            # <name>.yaml belongs directly to spec <name>: it becomes
            # that spec's config.yaml.
            try:
                os.mkdir(name)
            except OSError:
                # directory already exists
                pass
            try:
                mv(name + ".yaml", os.path.join(name, "config.yaml"))
            except OSError:  # defensive code
                print(f"error for {name}")
        elif "-" in name:
            print(f"suffix detected in {f}")
            prefix, suffix = name.rsplit("-", 1)
            if not os.path.exists(prefix + ".anod") and "-" in prefix:
                # The spec name itself may contain a dash: retry with a
                # shorter prefix and a longer suffix. The '-' guard
                # avoids an uncaught ValueError on names like "a-b.yaml"
                # when neither "a-b.anod" nor "a.anod" exists.
                prefix, suffix2 = prefix.rsplit("-", 1)
                suffix = suffix2 + "-" + suffix
            try:
                try:
                    os.mkdir(prefix)
                except OSError:  # defensive code
                    pass
                mv(name + ".yaml", os.path.join(prefix, f"{suffix}.yaml"))
            except Exception as er:  # defensive code
                print(f"error for {name}.yaml {prefix} {suffix}")
                print(er)
        else:
            print(f"unknown yaml file {name}.yaml")
def __latch_into(self, dir, part, toplevel, copy_from=None):
    """Latch the build results for PART into DIR.

    html output is a whole directory tree; other formats are a single
    file (e.g. TOR.pdf). With COPY_FROM, results are copied from that
    build dir; otherwise the local build results are moved.
    """
    target_is_tree = self.this_docformat == 'html'

    # Target for our copy: DIR itself for a toplevel tree, otherwise the
    # kit item for PART inside DIR.
    if toplevel and target_is_tree:
        target = dir
    else:
        target = os.path.join(dir, self.kititem_for(part=part))

    # Source for our copy: the local or provided sphinx build subdir,
    # with an html or pdf sub-subdir depending on the doc format. For
    # file outputs the builders produce PART.<docformat>, e.g. TOR.pdf.
    build_subdir = os.path.join(
        copy_from if copy_from is not None else "build",
        sphinx_target_for[self.this_docformat])
    if target_is_tree:
        source = build_subdir
    else:
        source = os.path.join(
            build_subdir, part.upper() + ".%s" % self.this_docformat)

    # Delete an old version of latched results that might already be
    # there if we're running with --work-dir.
    remove(target)

    # Copy when the source must be preserved (single-file output or an
    # external build dir), move otherwise.
    if not target_is_tree:
        cp(source, target, recursive=False)
    elif copy_from:
        cp(source, target, recursive=True)
    else:
        mv(build_subdir, target)

    print("%s %s available in %s %s" % (
        self.this_docformat, part.upper(), target,
        "(toplevel)" if toplevel else ""))
def setup_result_dirs(self) -> None:
    """Create the output directory in which the results are stored."""
    assert self.main.args
    args = self.main.args

    # Both the actual new/old directories to use depend on both
    # --output-dir and --old-output-dir options.
    base = os.path.abspath(args.output_dir)
    if args.old_output_dir:
        self.output_dir = base
        old_output_dir = os.path.abspath(args.old_output_dir)
    else:
        self.output_dir = os.path.join(base, "new")
        old_output_dir = os.path.join(base, "old")

    # Rotate results directories if requested. In both cases, make sure
    # the new results dir is clean.
    if args.rotate_output_dirs:
        if os.path.isdir(old_output_dir):
            rm(old_output_dir, recursive=True)
        if os.path.isdir(self.output_dir):
            mv(self.output_dir, old_output_dir)
    elif os.path.isdir(self.output_dir):
        rm(self.output_dir, recursive=True)
    mkdir(self.output_dir)

    # Remember about the old output directory only if it exists and does
    # contain results. If not, this info will be unused at best, or lead
    # to incorrect behavior.
    self.old_output_dir = None
    index_path = os.path.join(old_output_dir, ReportIndex.INDEX_FILENAME)
    if os.path.exists(old_output_dir) and os.path.exists(index_path):
        self.old_output_dir = old_output_dir

    if args.dump_environ:
        with open(os.path.join(self.output_dir, "environ.sh"), "w") as f:
            for var_name in sorted(os.environ):
                f.write("export {}={}\n".format(
                    var_name, quote_arg(os.environ[var_name])))
import e3.fs as fs # Build the library p = Popen([ 'gprbuild', '-XLIBRARY_TYPE=relocatable', '-XBUILD_MODE=dev', '-Pgen', ], stdout=PIPE, stderr=PIPE) output, error = p.communicate() if p.returncode != 0: # Build failed # Remove some patterns from the error to make it resilient to changes in # the library. error = re.sub("^.*instantiation error at .*", "", error) # Print the errors print error # Exit sys.exit(1) else: # Rename libgen so that it's named according to python native modules # conventions. fs.mv(P.join('test', 'libgen.so'), P.join('test', 'gen.so'))