def download_dependencies(download_dir, pips_to_download, output_filename):
    """Download the given pip requirements into ``download_dir``.

    The directory is emptied of plain files first (pip misbehaves with
    already-downloaded files) and a fresh ``.build`` scratch dir is made.
    Command output is captured into ``output_filename``.
    """
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if sh.isdir(download_dir):
        for filename in sh.listdir(download_dir, files_only=True):
            sh.unlink(filename)
    else:
        sh.mkdir(download_dir)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', build_path,
        # Don't download wheels since we lack the ability to create
        # rpms from them (until future when we will have it, if ever)...
        "--no-use-wheel",
    ]
    # Each requirement may expand into several command line segments.
    for p in pips_to_download:
        for p_seg in _split(p):
            if p_seg:
                cmdline.append(p_seg)
    sh.execute_save_output(cmdline, output_filename)
def download_dependencies(download_dir, pips_to_download, output_filename):
    """Download the given pip requirements into ``download_dir``.

    Plain files in the download dir are removed first (pip misbehaves with
    already-downloaded files); a ``.build`` scratch dir is recreated and a
    persistent ``.cache`` dir is kept between runs. Output is captured into
    ``output_filename``.
    """
    if not pips_to_download:
        return
    # NOTE(aababilov): pip has issues with already downloaded files
    if sh.isdir(download_dir):
        for filename in sh.listdir(download_dir, files_only=True):
            sh.unlink(filename)
    else:
        sh.mkdir(download_dir)
    # Clean out any previous paths that we don't want around.
    build_path = sh.joinpths(download_dir, ".build")
    if sh.isdir(build_path):
        sh.deldir(build_path)
    sh.mkdir(build_path)
    # Ensure certain directories exist that we want to exist (but we don't
    # want to delete them run after run).
    cache_path = sh.joinpths(download_dir, ".cache")
    if not sh.isdir(cache_path):
        sh.mkdir(cache_path)
    cmdline = [
        PIP_EXECUTABLE, '-v',
        'install', '-I', '-U',
        '--download', download_dir,
        '--build', build_path,
        '--download-cache', cache_path,
    ]
    # Don't download wheels...
    #
    # See: https://github.com/pypa/pip/issues/1439
    if dist_version.StrictVersion(PIP_VERSION) >= dist_version.StrictVersion('1.5'):
        # --no-use-wheel only exists on pip >= 1.5.
        cmdline.append("--no-use-wheel")
    cmdline.extend([str(p) for p in pips_to_download])
    sh.execute_save_output(cmdline, output_filename)
def _move_rpm_files(self, source_dir, target_dir):
    """Move every ``*.rpm`` found under ``source_dir`` into ``target_dir``."""
    # NOTE(imelnikov): we should create target_dir even if we have
    # nothing to move, because later we rely on its existence
    if not sh.isdir(target_dir):
        sh.mkdirslist(target_dir, tracewriter=self.tracewriter)
    if not sh.isdir(source_dir):
        return
    rpm_paths = (path
                 for path in sh.listdir(source_dir, recursive=True, files_only=True)
                 if path.lower().endswith(".rpm"))
    for rpm_path in rpm_paths:
        sh.move(rpm_path, target_dir, force=True)
def download(self):
    """Clone (or refresh) the git repo at ``self.uri`` into ``self.store_where``.

    The uri may carry ``?branch=...`` / ``?tag=...`` query parameters; a tag
    wins over a branch and the branch defaults to 'master'. An existing
    checkout is cleaned and reset rather than re-cloned, then moved onto the
    requested tag/branch.
    """
    branch = None
    tag = None
    uri = self.uri
    if uri.find("?") != -1:
        # If we use urlparser here it doesn't seem to work right??
        # TODO(harlowja), why??
        (uri, params) = uri.split("?", 1)
        params = parse_qs(params)
        if 'branch' in params:
            branch = params['branch'][0].strip()
        if 'tag' in params:
            tag = params['tag'][0].strip()
        uri = uri.strip()
    if not branch:
        branch = 'master'
    if tag:
        # Avoid 'detached HEAD state' message by moving to a
        # $tag-anvil branch for that tag
        new_branch = "%s-%s" % (tag, 'anvil')
        checkout_what = [tag, '-b', new_branch]
    else:
        # Set it up to track the remote branch correctly
        new_branch = branch
        checkout_what = ['-t', '-b', new_branch, 'origin/%s' % branch]
    if sh.isdir(self.store_where) and sh.isdir(sh.joinpths(self.store_where, '.git')):
        LOG.info("Existing git directory located at %s, leaving it alone.",
                 colorizer.quote(self.store_where))
        # do git clean -xdfq and git reset --hard to undo possible changes
        cmd = ["git", "clean", "-xdfq"]
        sh.execute(cmd, cwd=self.store_where)
        cmd = ["git", "reset", "--hard"]
        sh.execute(cmd, cwd=self.store_where)
    else:
        LOG.info("Downloading %s (%s) to %s.", colorizer.quote(uri),
                 branch, colorizer.quote(self.store_where))
        cmd = ["git", "clone", uri, self.store_where]
        sh.execute(cmd)
    if tag:
        LOG.info("Adjusting to tag %s.", colorizer.quote(tag))
    else:
        LOG.info("Adjusting branch to %s.", colorizer.quote(branch))
    # detach, drop new_branch if it exists, and checkout to new_branch
    # newer git allows branch resetting: git checkout -B $new_branch
    # so, all these are for compatibility with older RHEL git
    cmd = ["git", "rev-parse", "HEAD"]
    git_head = sh.execute(cmd, cwd=self.store_where)[0].strip()
    cmd = ["git", "checkout", git_head]
    sh.execute(cmd, cwd=self.store_where)
    cmd = ["git", "branch", "-D", new_branch]
    sh.execute(cmd, cwd=self.store_where, check_exit_code=False)
    cmd = ["git", "checkout"] + checkout_what
    sh.execute(cmd, cwd=self.store_where)
def download(self):
    """Clone (or refresh) the git repo at ``self.uri`` into ``self.store_where``.

    The desired ref comes from ``self.versions[self.name]`` formatted as
    ``branch=NAME`` or ``tag=NAME``; a tag wins, the branch defaults to
    'master'. An existing checkout is cleaned and reset rather than
    re-cloned, then moved onto the requested tag/branch.
    """
    branch = None
    tag = None
    uri = self.uri
    try:
        version = self.versions[self.name].split("=", 1)
        if version[0] == "branch":
            branch = version[1].strip()
        elif version[0] == "tag":
            tag = version[1].strip()
    except (KeyError, IndexError):
        # No (or malformed) version entry -> fall through to the default.
        pass
    # ensure that we will download at least the master branch
    branch = branch or "master"
    if tag:
        # Avoid 'detached HEAD state' message by moving to a
        # $tag-anvil branch for that tag
        new_branch = "%s-%s" % (tag, 'anvil')
        checkout_what = [tag, '-b', new_branch]
    else:
        # Set it up to track the remote branch correctly
        new_branch = branch
        checkout_what = ['-t', '-b', new_branch, 'origin/%s' % branch]
    if sh.isdir(self.store_where) and sh.isdir(sh.joinpths(self.store_where, '.git')):
        LOG.info("Existing git directory located at %s, leaving it alone.",
                 colorizer.quote(self.store_where))
        # do git clean -xdfq and git reset --hard to undo possible changes
        cmd = ["git", "clean", "-xdfq"]
        sh.execute(cmd, cwd=self.store_where)
        cmd = ["git", "reset", "--hard"]
        sh.execute(cmd, cwd=self.store_where)
    else:
        LOG.info("Downloading %s (%s) to %s.", colorizer.quote(uri),
                 branch, colorizer.quote(self.store_where))
        cmd = ["git", "clone", uri, self.store_where]
        sh.execute(cmd)
    if tag:
        LOG.info("Adjusting to tag %s.", colorizer.quote(tag))
    else:
        LOG.info("Adjusting branch to %s.", colorizer.quote(branch))
    # detach, drop new_branch if it exists, and checkout to new_branch
    # newer git allows branch resetting: git checkout -B $new_branch
    # so, all these are for compatibility with older RHEL git
    cmd = ["git", "rev-parse", "HEAD"]
    git_head = sh.execute(cmd, cwd=self.store_where)[0].strip()
    cmd = ["git", "checkout", git_head]
    sh.execute(cmd, cwd=self.store_where)
    cmd = ["git", "branch", "-D", new_branch]
    sh.execute(cmd, cwd=self.store_where, check_exit_code=False)
    cmd = ["git", "checkout"] + checkout_what
    sh.execute(cmd, cwd=self.store_where)
def pre_build():
    """Install build requirements, then prebuild SRPMs that satisfy them.

    Returns the set of build-requirement names that were NOT matched to a
    local SRPM (i.e. still outstanding). NOTE(review): this is a closure —
    it reads ``self``, ``build`` and ``list_src_rpms`` from the enclosing
    scope; confirm against the surrounding function.
    """
    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    # Re-purpose the name: now it holds the requirements file contents.
    build_requirements = ''
    try:
        build_requirements = sh.load_file(self.rpm_build_requires_filename)
    except IOError as e:
        # A missing file just means there is nothing to prebuild.
        if e.errno != errno.ENOENT:
            raise
    build_requirements = set(pkg_resources.yield_lines(build_requirements))
    for repo_name in self.REPOS:
        repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
        matched_paths = []
        paths = list_src_rpms(repo_dir)
        envra_details = self.envra_helper.explode(*paths)
        for (path, envra_detail) in zip(paths, envra_details):
            package_name = envra_detail.get('name')
            if package_name in build_requirements:
                matched_paths.append(path)
                build_requirements.discard(package_name)
        if matched_paths:
            with sh.remove_before(self.prebuild_dir) as prebuild_dir:
                if not sh.isdir(prebuild_dir):
                    sh.mkdirslist(prebuild_dir, tracewriter=self.tracewriter)
                for path in matched_paths:
                    sh.move(path, sh.joinpths(prebuild_dir, sh.basename(path)))
                build(prebuild_dir, repo_name,
                      'Prebuilding %s RPM packages from their SRPMs'
                      ' for repo %s using %s jobs')
    return build_requirements
def build_all_srpms(self, package_files, tracewriter, jobs):
    """Render the source makefile template and build all SRPM packages."""
    (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                         "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    # Flags for py2rpm: everything after the executable, plus our extras.
    py2rpm_flags = self._start_cmdline(escape_values=True)[1:]
    py2rpm_flags += [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self._rpmbuild_dir,
        "--debug",
    ]
    py2rpm_executable = " ".join(self._start_cmdline()[0:1])
    template_params = {
        "DOWNLOADS_DIR": self._download_dir,
        "LOGS_DIR": self._log_dir,
        "PY2RPM": py2rpm_executable,
        "PY2RPM_FLAGS": " ".join(py2rpm_flags),
    }
    marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=tracewriter)
    makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
    sh.write_file(makefile_path,
                  utils.expand_template(content, template_params),
                  tracewriter=tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir, jobs)
def ensure_image_storage(img_store_dir):
    """Create the image file-system store directory when it is missing."""
    if not sh.isdir(img_store_dir):
        LOG.debug("Ensuring file system store directory %r exists.", img_store_dir)
        sh.mkdirslist(img_store_dir, tracewriter=self.installer.tracewriter)
def stop(self, app_name):
    """Kill a previously forked app and remove its pid/stdio/trace files.

    Raises StopException when no trace dir exists, no pid can be read,
    or the process refuses to die.
    """
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        raise excp.StopException(
            "No trace directory found from which to stop: %s" % (app_name))
    with sh.Rooted(True):
        fn_name = FORK_TEMPL % (app_name)
        (pid_file, stderr_fn, stdout_fn) = self._form_file_names(fn_name)
        pid = self._extract_pid(pid_file)
        if not pid:
            raise excp.StopException(
                "Could not extract a valid pid from %s" % (pid_file))
        (killed, attempts) = sh.kill(pid)
        if not killed:
            raise excp.StopException(
                "Could not stop %r after %s attempts" % (app_name, attempts))
        # Trash the files if it worked
        LOG.debug("Killed pid %s after %s attempts." % (pid, attempts))
        LOG.debug("Removing pid file %s" % (pid_file))
        sh.unlink(pid_file)
        LOG.debug("Removing stderr file %r" % (stderr_fn))
        sh.unlink(stderr_fn)
        LOG.debug("Removing stdout file %r" % (stdout_fn))
        sh.unlink(stdout_fn)
        trace_fn = tr.trace_filename(trace_dir, fn_name)
        if sh.isfile(trace_fn):
            LOG.debug("Removing %r trace file %r" % (app_name, trace_fn))
            sh.unlink(trace_fn)
def _config_adjust_api_reg(self, contents, fn):
    """Adjust a glance api/registry config and return the new text.

    :param contents: raw config bytes to parse and rewrite
    :param fn: the target config filename; selects registry vs api settings
    :returns: the adjusted config serialized as a string
    """
    gparams = ghelper.get_shared_params(**self.options)
    with io.BytesIO(contents) as stream:
        config = cfg.create_parser(cfg.RewritableConfigParser, self)
        config.readfp(stream)
        # NOTE(review): 'debug' is driven by the 'verbose' option — confirm
        # that is intended.
        config.set('DEFAULT', 'debug', self.get_bool_option('verbose'))
        config.set('DEFAULT', 'verbose', self.get_bool_option('verbose'))
        if fn in [REG_CONF]:
            config.set('DEFAULT', 'bind_port',
                       gparams['endpoints']['registry']['port'])
        else:
            config.set('DEFAULT', 'bind_port',
                       gparams['endpoints']['public']['port'])
        config.set('DEFAULT', 'sql_connection',
                   dbhelper.fetch_dbdsn(dbname=DB_NAME,
                                        utf8=True,
                                        dbtype=self.get_option('db', 'type'),
                                        **utils.merge_dicts(self.get_option('db'),
                                                            dbhelper.get_shared_passwords(self))))
        # Let logging go wherever the daemon defaults send it.
        config.remove_option('DEFAULT', 'log_file')
        config.set('paste_deploy', 'flavor', self.get_option('paste_flavor'))
        for (k, v) in self._fetch_keystone_params().items():
            config.set('keystone_authtoken', k, v)
        if fn in [API_CONF]:
            # API-only: point the file store at a fresh images directory.
            config.set('DEFAULT', 'default_store', 'file')
            img_store_dir = sh.joinpths(self.get_option('component_dir'), 'images')
            config.set('DEFAULT', 'filesystem_store_datadir', img_store_dir)
            LOG.debug("Ensuring file system store directory %r exists and is empty." % (img_store_dir))
            if sh.isdir(img_store_dir):
                sh.deldir(img_store_dir)
            sh.mkdirslist(img_store_dir, tracewriter=self.tracewriter,
                          adjust_suids=True)
        return config.stringify(fn)
def stop(self, app_name):
    """Stop a forked app by pid, then clean up the files it left behind.

    Raises StopException if the trace dir or pid is missing, or if the
    process could not be killed.
    """
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        msg = "No trace directory found from which to stop: %s" % (app_name)
        raise excp.StopException(msg)
    with sh.Rooted(True):
        fn_name = FORK_TEMPL % (app_name)
        (pid_file, stderr_fn, stdout_fn) = self._form_file_names(fn_name)
        pid = self._extract_pid(pid_file)
        if not pid:
            msg = "Could not extract a valid pid from %s" % (pid_file)
            raise excp.StopException(msg)
        (killed, attempts) = sh.kill(pid)
        if not killed:
            msg = "Could not stop %r after %s attempts" % (app_name, attempts)
            raise excp.StopException(msg)
        # Trash the files if it worked
        LOG.debug("Killed pid %s after %s attempts." % (pid, attempts))
        LOG.debug("Removing pid file %s" % (pid_file))
        sh.unlink(pid_file)
        LOG.debug("Removing stderr file %r" % (stderr_fn))
        sh.unlink(stderr_fn)
        LOG.debug("Removing stdout file %r" % (stdout_fn))
        sh.unlink(stdout_fn)
        trace_fn = tr.trace_fn(trace_dir, fn_name)
        if sh.isfile(trace_fn):
            LOG.debug("Removing %r trace file %r" % (app_name, trace_fn))
            sh.unlink(trace_fn)
def _create_repo(self, repo_name):
    """Build binary + source yum repos and install the .repo file system-wide."""
    repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
    src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
    for directory in (repo_dir, src_repo_dir):
        if not sh.isdir(directory):
            sh.mkdirslist(directory, tracewriter=self.tracewriter)
        LOG.info("Creating repo at %s", directory)
        sh.execute(["createrepo", directory])
    repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
    LOG.info("Writing %s", repo_filename)
    (_fn, content) = utils.load_template("packaging", "common.repo")
    params = {
        "repo_name": repo_name,
        "baseurl_bin": "file://%s" % repo_dir,
        "baseurl_src": "file://%s" % src_repo_dir,
    }
    sh.write_file(repo_filename, utils.expand_template(content, params),
                  tracewriter=self.tracewriter)
    # Install *.repo file so that anvil deps will be available
    # when building OpenStack
    system_repo_filename = sh.joinpths(self.YUM_REPO_DIR, "%s.repo" % repo_name)
    sh.copy(repo_filename, system_repo_filename)
    LOG.info("Copying to %s", system_repo_filename)
    self.tracewriter.file_touched(system_repo_filename)
def run_tests(self):
    """Run the component's test suite, with an optional setup command first."""
    app_dir = self.get_option('app_dir')
    if not sh.isdir(app_dir):
        LOG.warn("Unable to find application directory at %s, can not run %s tests.",
                 colorizer.quote(app_dir), colorizer.quote(self.name))
        return
    pre_cmd = self._get_pre_test_command()
    cmd = self._get_test_command()
    if not cmd:
        LOG.warn("Unable to determine test command for %s, can not run tests.",
                 colorizer.quote(self.name))
        return
    env = self._get_env()

    def _run(cmdline):
        # Stream both stdout and stderr of the child to our stdout.
        sh.execute(cmdline, stdout_fh=sys.stdout, stderr_fh=sys.stdout,
                   cwd=app_dir, env_overrides=env)

    try:
        if pre_cmd:
            LOG.info("Running test setup via: %s",
                     utils.truncate_text(" ".join(pre_cmd), 80))
            _run(pre_cmd)
        LOG.info("Running tests via: %s", utils.truncate_text(" ".join(cmd), 80))
        _run(cmd)
    except excp.ProcessExecutionError as e:
        if self.ignore_test_failures:
            LOG.warn("Ignoring test failure of component %s: %s",
                     colorizer.quote(self.name), e)
        else:
            raise
def _yyoom(self, arglist, on_completed=None):
    """Run the yyoom helper with ``arglist`` and return its parsed JSON output.

    :param arglist: extra command line arguments for yyoom
    :param on_completed: callback invoked as ``on_completed(data, errored)``
        with the parsed output on success, and best-effort on failure
        before the original error is re-raised.
    :returns: parsed JSON data (only on success)
    """
    if not on_completed:
        on_completed = lambda data, errored: None
    if not sh.isdir(self._logs_dir):
        sh.mkdirslist(self._logs_dir)
    # yyoom writes its machine-readable result into this temp file.
    with tempfile.NamedTemporaryFile(suffix=".json") as fh:
        cmdline = [
            self.yyoom_executable,
            "--output-file", fh.name,
            "--verbose",
        ]
        cmdline.extend(arglist)
        log_filename = sh.joinpths(self._logs_dir, _generate_log_filename(arglist))
        LOG.debug("Running yyoom: log output will be placed in %s", log_filename)
        try:
            sh.execute_save_output(cmdline, log_filename)
        except excp.ProcessExecutionError:
            # Still try to hand partial output to the callback, then re-raise.
            with excp.reraise():
                try:
                    fh.seek(0)
                    data = utils.parse_json(fh.read())
                except Exception:
                    LOG.exception("Failed to parse YYOOM output")
                else:
                    on_completed(data, True)
        else:
            fh.seek(0)
            data = utils.parse_json(fh.read())
            on_completed(data, False)
            return data
def _create_repo(self, repo_name):
    """Generate yum metadata for a repo pair and publish its .repo file."""
    repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
    src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
    for a_dir in (repo_dir, src_repo_dir):
        if not sh.isdir(a_dir):
            sh.mkdirslist(a_dir, tracewriter=self.tracewriter)
        LOG.info("Creating repo at %s", a_dir)
        sh.execute(["createrepo", a_dir])
    repo_filename = sh.joinpths(self.anvil_repo_dir, "%s.repo" % repo_name)
    LOG.info("Writing %s", repo_filename)
    (_fn, content) = utils.load_template("packaging", "common.repo")
    expanded = utils.expand_template(content, {
        "repo_name": repo_name,
        "baseurl_bin": "file://%s" % repo_dir,
        "baseurl_src": "file://%s" % src_repo_dir,
    })
    sh.write_file(repo_filename, expanded, tracewriter=self.tracewriter)
    # NOTE(harlowja): Install *.repo file so that anvil deps will be available
    # when building openstack core project packages.
    system_repo_filename = sh.joinpths(self.YUM_REPO_DIR, "%s.repo" % repo_name)
    sh.copy(repo_filename, system_repo_filename, tracewriter=self.tracewriter)
    LOG.info("Copied to %s", system_repo_filename)
def run_tests(self):
    """Run the component's tests, optionally silencing stderr.

    When the 'verbose' option is set, stderr goes to the console; otherwise
    it is discarded. Failures are swallowed (with a warning) when the
    'ignore-test-failures' option is set.
    """
    app_dir = self.get_option('app_dir')
    if not sh.isdir(app_dir):
        LOG.warn(
            "Unable to find application directory at %s, can not run %s tests.",
            colorizer.quote(app_dir), colorizer.quote(self.name))
        return
    cmd = self._get_test_command()
    env = self._get_env()
    with open(os.devnull, 'wb') as null_fh:
        if self.get_bool_option("verbose", default_value=False):
            # Verbose: leave stderr attached to the console instead.
            null_fh = None
        try:
            sh.execute(cmd, stdout_fh=None, stderr_fh=null_fh,
                       cwd=app_dir, env_overrides=env)
        except excp.ProcessExecutionError as e:
            if self.get_bool_option("ignore-test-failures", default_value=False):
                LOG.warn("Ignoring test failure of component %s: %s",
                         colorizer.quote(self.name), e)
            else:
                # Bare raise preserves the original traceback; the previous
                # "raise e" reset it (Python 2 semantics).
                raise
def _move_rpm_files(source_dir, target_dir):
    """Move all ``*.rpm`` files found (recursively) under source_dir."""
    if not sh.isdir(source_dir):
        return
    for path in sh.listdir(source_dir, recursive=True, files_only=True):
        if path.lower().endswith(".rpm"):
            sh.move(path, target_dir, force=True)
def _configure_instances_path(self, instances_path, nova_conf):
    """Record ``instances_path`` in nova's config and ensure the dir exists."""
    nova_conf.add('instances_path', instances_path)
    if not sh.isdir(instances_path):
        LOG.debug("Attempting to create instance directory: %r", instances_path)
        sh.mkdirslist(instances_path, tracewriter=self.tracewriter)
        # NOTE(review): chmod assumed to apply only to a freshly created
        # directory — confirm; world-writable (0777) so the hypervisor
        # user can write instance data here.
        LOG.debug("Adjusting permissions of instance directory: %r", instances_path)
        sh.chmod(instances_path, 0777)
def build_all_srpms(self, package_files, tracewriter, jobs):
    """Write the SRPM makefile from its template and drive the build."""
    (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                         "source.mk")
    scripts_dir = sh.abspth(sh.joinpths(settings.TEMPLATE_DIR,
                                        "packaging", "scripts"))
    extra_flags = [
        "--scripts-dir", scripts_dir,
        "--source-only",
        "--rpm-base", self._rpmbuild_dir,
    ]
    cmdline = self._start_cmdline(escape_values=True)[1:] + extra_flags
    executable = " ".join(self._start_cmdline()[0:1])
    params = {
        "DOWNLOADS_DIR": self._download_dir,
        "LOGS_DIR": self._log_dir,
        "PY2RPM": executable,
        "PY2RPM_FLAGS": " ".join(cmdline),
    }
    marks_dir = sh.joinpths(self._deps_dir, "marks-deps")
    if not sh.isdir(marks_dir):
        sh.mkdirslist(marks_dir, tracewriter=tracewriter)
    makefile_path = sh.joinpths(self._deps_dir, "deps.mk")
    sh.write_file(makefile_path,
                  utils.expand_template(content, params),
                  tracewriter=tracewriter)
    utils.log_iterable(package_files,
                       header="Building %s SRPM packages using %s jobs" %
                              (len(package_files), jobs),
                       logger=LOG)
    self._execute_make(makefile_path, marks_dir, jobs)
def status(self, app_name):
    """Best-effort status of a forked app from its pid and stdio captures.

    Returns a (status, details) tuple where details maps 'STDOUT'/'STDERR'
    to whatever output could be recovered.
    """
    trace_dir = self.runtime.get_option('trace_dir')
    if not sh.isdir(trace_dir):
        return (STATUS_UNKNOWN, '')
    fork_fns = self._form_file_names(app_name)
    pid = fork_fns.extract_pid()

    def _slurp(path):
        # Missing or unreadable capture files count as empty output.
        try:
            return sh.load_file(path)
        except (IOError, ValueError, TypeError):
            return ''

    details = {
        'STDOUT': _slurp(fork_fns.stdout),
        'STDERR': _slurp(fork_fns.stderr),
    }
    if pid is not None and sh.is_running(pid):
        return (STATUS_STARTED, details)
    return (STATUS_UNKNOWN, details)
def get_directory_details(path):
    """Return (cached) egg-info details for the python project at ``path``.

    :param path: directory containing a python project
    :returns: dict with req, dependencies, name, pkg_info, dependency_links
        and version keys
    :raises IOError: if ``path`` is not a directory
    """
    if not sh.isdir(path):
        raise IOError("Can not detail non-existent directory %s" % (path))
    # Check if we already got the details of this dir previously
    path = sh.abspth(path)
    cache_key = "d:%s" % (path)
    if cache_key in EGGS_DETAILED:
        return EGGS_DETAILED[cache_key]
    req = extract(path)
    req.source_dir = path
    req.run_egg_info()
    dependencies = []
    for d in req.requirements():
        # Skip editable requirements; they are not installable dependencies.
        if d.startswith("-e"):
            continue
        # Strip trailing comments. (The old check used the truthiness of
        # str.find("#"), which skipped stripping when '#' was the first
        # character — letting pure-comment lines through as dependencies.)
        if "#" in d:
            d = d.split("#", 1)[0]
        d = d.strip()
        if d:
            dependencies.append(d)
    details = {
        'req': req.req,
        'dependencies': dependencies,
        'name': req.name,
        'pkg_info': req.pkg_info(),
        'dependency_links': req.dependency_links,
        'version': req.installed_version,
    }
    EGGS_DETAILED[cache_key] = details
    return details
def post_bootstrap(self):
    """Relocate RPMs/SRPMs built during bootstrap into the anvil-deps repo."""
    bs_rpmbuild_dir = sh.joinpths('.bootstrap', 'rpmbuild')
    if not sh.isdir(bs_rpmbuild_dir):
        return
    LOG.info("Moving RPMS build on bootstrap to deps repo")
    self._move_srpms("anvil-deps", bs_rpmbuild_dir)
    self._move_rpm_files(bs_rpmbuild_dir,
                         sh.joinpths(self.anvil_repo_dir, 'anvil-deps'))
    sh.deldir(bs_rpmbuild_dir)
def build_binary(self):
    """Build binary RPMs from each repo's SRPMs, then (re)create the repos.

    For every configured repo: render a per-repo makefile, rebuild the
    SRPMs inside a throwaway rpmbuild tree, collect the resulting RPMs
    into the repo directory and regenerate the repo metadata.
    """

    def is_src_rpm(path):
        # True only for an existing file whose name ends with .src.rpm.
        if not path:
            return False
        if not sh.isfile(path):
            return False
        if not path.lower().endswith('.src.rpm'):
            return False
        return True

    def list_src_rpms(path):
        # Sorted SRPM paths under path (empty when path isn't a directory).
        path_files = []
        if sh.isdir(path):
            path_files = sh.listdir(path, filter_func=is_src_rpm)
        return sorted(path_files)

    build_requirements = self.requirements.get("build-requires")
    if build_requirements:
        utils.log_iterable(build_requirements,
                           header="Installing build requirements",
                           logger=LOG)
        self.helper.transaction(install_pkgs=build_requirements,
                                tracewriter=self.tracewriter)
    for repo_name in self.REPOS:
        src_repo_dir = sh.joinpths(self.anvil_repo_dir, self.SRC_REPOS[repo_name])
        src_repo_files = list_src_rpms(src_repo_dir)
        if not src_repo_files:
            continue
        utils.log_iterable(src_repo_files,
                           header=('Building %s RPM packages from their'
                                   ' SRPMs for repo %s using %s jobs') %
                                  (len(src_repo_files),
                                   self.SRC_REPOS[repo_name], self._jobs),
                           logger=LOG)
        makefile_path = sh.joinpths(self.deps_dir, "binary-%s.mk" % repo_name)
        marks_dir = sh.joinpths(self.deps_dir, "marks-binary")
        if not sh.isdir(marks_dir):
            sh.mkdirslist(marks_dir, tracewriter=self.tracewriter)
        rpmbuild_flags = "--rebuild"
        if self.opts.get("usr_only", False):
            rpmbuild_flags += " --define 'usr_only 1'"
        params = {
            "SRC_REPO_DIR": src_repo_dir,
            "RPMBUILD_FLAGS": rpmbuild_flags,
            "LOGS_DIR": self.log_dir,
            'RPMTOP_DIR': self.rpmbuild_dir,
        }
        (_fn, content) = utils.load_template(sh.joinpths("packaging", "makefiles"),
                                             "binary.mk")
        sh.write_file(makefile_path,
                      utils.expand_template(content, params),
                      tracewriter=self.tracewriter)
        # Build inside a scratch rpmbuild tree that is wiped before/after;
        # RPMs must be moved out before the context exits.
        with sh.remove_before_after(self.rpmbuild_dir):
            self._create_rpmbuild_subdirs()
            self._execute_make(makefile_path, marks_dir)
            repo_dir = sh.joinpths(self.anvil_repo_dir, repo_name)
            for d in sh.listdir(self.rpmbuild_dir, dirs_only=True):
                self._move_rpm_files(sh.joinpths(d, "RPMS"), repo_dir)
            self._move_rpm_files(sh.joinpths(self.rpmbuild_dir, "RPMS"), repo_dir)
        self._create_repo(repo_name)
def ensure_anvil_dirs(root_dir):
    """Create the standard anvil directories (plus ``root_dir``) when missing."""
    wanted_dirs = ["/etc/anvil/", "/usr/share/anvil/"]
    if root_dir and root_dir not in wanted_dirs:
        wanted_dirs.append(root_dir)
    for d in wanted_dirs:
        if not sh.isdir(d):
            LOG.info("Creating anvil directory at path: %s", d)
            sh.mkdir(d)
def ensure_anvil_dirs(root_dir):
    """Make sure every ANVIL_DIRS entry (plus ``root_dir``) exists."""
    wanted_dirs = list(ANVIL_DIRS)
    if root_dir and root_dir not in wanted_dirs:
        wanted_dirs.append(root_dir)
    for d in wanted_dirs:
        if not sh.isdir(d):
            LOG.info("Creating anvil directory at path: %s", d)
            sh.mkdir(d)
def run_tests(self):
    """Run the component's test command with output going to the console."""
    app_dir = self.get_option('app_dir')
    if not sh.isdir(app_dir):
        LOG.warn("Unable to find application directory at %s, can not run %s tests.",
                 colorizer.quote(app_dir), colorizer.quote(self.name))
        return
    cmd = self._get_test_command()
    env = self._get_env()
    sh.execute(*cmd,
               stdout_fh=None, stderr_fh=None,
               cwd=app_dir, env_overrides=env)
def post_bootstrap(self):
    """Move bootstrap-built RPMs/SRPMs into the anvil-deps repository."""
    bs_rpmbuild_dir = sh.joinpths('.bootstrap', 'rpmbuild')
    if sh.isdir(bs_rpmbuild_dir):
        LOG.info("Moving RPMS build on bootstrap to deps repo")
        self._move_srpms("anvil-deps", bs_rpmbuild_dir)
        deps_repo_dir = sh.joinpths(self.anvil_repo_dir, 'anvil-deps')
        self._move_rpm_files(bs_rpmbuild_dir, deps_repo_dir)
        sh.deldir(bs_rpmbuild_dir)
def ensure_anvil_dirs(root_dir):
    """Create any missing anvil system directories (and ``root_dir``)."""
    wanted_dirs = ["/etc/anvil/", '/usr/share/anvil/']
    if root_dir and root_dir not in wanted_dirs:
        wanted_dirs.append(root_dir)
    for wanted_dir in wanted_dirs:
        if sh.isdir(wanted_dir):
            continue
        LOG.info("Creating anvil directory at path: %s", wanted_dir)
        sh.mkdir(wanted_dir)
def ensure_anvil_dirs(root_dir):
    """Create missing anvil dirs as root, adjusting ownership afterwards."""
    wanted_dirs = ["/etc/anvil/", '/usr/share/anvil/']
    if root_dir and root_dir not in wanted_dirs:
        wanted_dirs.append(root_dir)
    for d in wanted_dirs:
        if not sh.isdir(d):
            LOG.info("Creating anvil directory at path: %s", d)
            # Root is needed to create under /etc and /usr/share.
            with sh.Rooted(True):
                sh.mkdir(d, adjust_suids=True)
def status(self, app_name):
    """Report STATUS_STARTED when the app's recorded pid is still running."""
    trace_dir = self.runtime.get_option('trace_dir')
    if sh.isdir(trace_dir):
        (pid_file, _stderr_fn, _stdout_fn) = \
            self._form_file_names(FORK_TEMPL % (app_name))
        pid = self._extract_pid(pid_file)
        if pid and sh.is_running(pid):
            return constants.STATUS_STARTED
    return constants.STATUS_UNKNOWN
def configure(self):
    """Configure files, then hand /etc/<name> ownership to the service user."""
    files = self._configure_files()
    conf_dir = "/etc/%s" % self.name
    if sh.isdir(conf_dir):
        owner = "%s:%s" % (self.name, self.name)
        # Best-effort; ignore chown failures.
        sh.execute(["chown", "-R", owner, conf_dir], check_exit_code=False)
    return files
def build_paths(self):
    """Lazily (re)create the RPM build directories; return a defensive copy."""
    if self._build_paths is None:
        created = {}
        for name in RPM_DIR_NAMES:
            final_path = sh.joinpths(self.package_dir, name.upper())
            created[name] = final_path
            if sh.isdir(final_path):
                # Start each build from a clean slate.
                sh.deldir(final_path, True)
            sh.mkdirslist(final_path, tracewriter=self.tracewriter)
        self._build_paths = created
    # Hand out a deep copy so callers can't mutate the cached mapping.
    return copy.deepcopy(self._build_paths)
def build_paths(self):
    """Lazily (re)create package build dirs, tracing any dirs we made."""
    if self._build_paths is None:
        bpaths = {}
        for name in ['sources', 'specs', 'srpms', 'rpms', 'build']:
            final_path = sh.joinpths(self.package_dir, name.upper())
            bpaths[name] = final_path
            if sh.isdir(final_path):
                # Wipe stale contents from any previous build.
                sh.deldir(final_path, True)
            self.tracewriter.dirs_made(*sh.mkdirslist(final_path))
        self._build_paths = bpaths
    # Shallow copy keeps callers from mutating our cached dict.
    return dict(self._build_paths)
def package_instance(self, instance):
    """Build SRPMs for one component inside a throwaway rpmbuild tree."""
    with sh.remove_before_after(self.rpmbuild_dir):
        self._create_rpmbuild_subdirs()
        if instance.name in ["general"]:
            self._build_dependencies()
            self._move_srpms("anvil-deps")
            return
        # Meta packages don't get built.
        app_dir = instance.get_option("app_dir")
        if sh.isdir(app_dir):
            self._build_openstack_package(instance)
            self._move_srpms("anvil")
def _config_adjust_api(self, config):
    """Layer glance-api specific settings on top of the shared registry ones."""
    self._config_adjust_api_reg(config)
    gparams = ghelper.get_shared_params(**self.installer.options)
    public_port = gparams['endpoints']['public']['port']
    config.add('bind_port', public_port)
    config.add('default_store', 'file')
    img_store_dir = sh.joinpths(self.installer.get_option('component_dir'),
                                'images')
    config.add('filesystem_store_datadir', img_store_dir)
    LOG.debug("Ensuring file system store directory %r exists and is empty." % (img_store_dir))
    if sh.isdir(img_store_dir):
        sh.deldir(img_store_dir)
    sh.mkdirslist(img_store_dir, tracewriter=self.installer.tracewriter)
def download(self):
    """Clone the git repo at ``self.uri`` and check out the requested ref.

    The uri may carry ``?branch=...`` / ``?tag=...`` query parameters; a tag
    wins over a branch and the branch defaults to 'master'. Uses the
    distro-configured git clone/checkout commands. An existing checkout is
    left untouched (no clean/reset) before the checkout step.
    """
    branch = None
    tag = None
    uri = self.uri
    if uri.find("?") != -1:
        # If we use urlparser here it doesn't seem to work right??
        # TODO(harlowja), why??
        (uri, params) = uri.split("?", 1)
        params = parse_qs(params)
        if 'branch' in params:
            branch = params['branch'][0].strip()
        if 'tag' in params:
            tag = params['tag'][0].strip()
        uri = uri.strip()
    if not branch:
        branch = 'master'
    if sh.isdir(self.store_where) and sh.isdir(sh.joinpths(self.store_where, '.git')):
        LOG.info("Existing git directory located at %s, leaving it alone.",
                 colorizer.quote(self.store_where))
    else:
        LOG.info("Downloading %s (%s) to %s.", colorizer.quote(uri),
                 branch, colorizer.quote(self.store_where))
        cmd = list(self.distro.get_command('git', 'clone'))
        cmd += [uri, self.store_where]
        sh.execute(*cmd)
    # NOTE(review): branch is always set by this point, so this condition
    # is always true — confirm whether that is intended.
    if branch or tag:
        checkout_what = []
        if tag:
            # Avoid 'detached HEAD state' message by moving to a
            # $tag-anvil branch for that tag
            checkout_what = [tag, '-b', "%s-%s" % (tag, 'anvil')]
            LOG.info("Adjusting to tag %s.", colorizer.quote(tag))
        else:
            if branch.lower() == 'master':
                checkout_what = ['master']
            else:
                # Set it up to track the remote branch correctly
                checkout_what = ['--track', '-b', branch, 'origin/%s' % (branch)]
            LOG.info("Adjusting branch to %s.", colorizer.quote(branch))
        cmd = list(self.distro.get_command('git', 'checkout'))
        cmd += checkout_what
        sh.execute(*cmd, cwd=self.store_where)
def expand_patches(paths, patch_ext='.patch'):
    """Expand files/directories into the list of valid patch files inside."""
    if not paths:
        return []
    # Expand patch files/dirs
    candidates = []
    for path in paths:
        path = sh.abspth(path)
        if sh.isdir(path):
            candidates.extend(sh.listdir(path, files_only=True))
        else:
            candidates.append(path)
    # Now filter on valid patches
    return [p for p in candidates if _is_patch(p, patch_ext=patch_ext)]
def patch(self, section):
    """Apply this section's configured patches to the download target dir."""
    what_patches = self.get_option('patches', section)
    (_from_uri, target_dir) = self._get_download_location()
    if not what_patches:
        what_patches = []
    found_patches = []
    for path in what_patches:
        if sh.isdir(path):
            # Directories contribute all their files, sorted for stability.
            found_patches.extend(sorted(sh.listdir(path, files_only=True)))
        elif sh.isfile(path):
            found_patches.append(path)
    if found_patches:
        patcher.apply_patches(found_patches, target_dir)
def run_tests(self):
    """Run the component's tests; stderr is silenced unless tests_verbose."""
    app_dir = self.get_option('app_dir')
    if not sh.isdir(app_dir):
        LOG.warn("Unable to find application directory at %s, can not run %s tests.",
                 colorizer.quote(app_dir), colorizer.quote(self.name))
        return
    cmd = self._get_test_command()
    env = self._get_env()
    with open(os.devnull, 'wb') as null_fh:
        stderr_fh = null_fh
        if self.get_bool_option("tests_verbose", default_value=False):
            # Verbose mode keeps stderr attached to the console.
            stderr_fh = None
        sh.execute(*cmd, stdout_fh=None, stderr_fh=stderr_fh,
                   cwd=app_dir, env_overrides=env)
def _uninstall_python(self):
    """Run the distro's python unsetup command in every recorded setup dir."""
    py_listing = self.tracereader.py_listing()
    if not py_listing:
        return
    py_listing_dirs = set(where for (_name, where) in py_listing)
    utils.log_iterable(py_listing_dirs, logger=LOG,
                       header="Uninstalling %s python setups" % (len(py_listing_dirs)))
    unsetup_cmd = self.distro.get_command('python', 'unsetup')
    for where in py_listing_dirs:
        if sh.isdir(where):
            sh.execute(*unsetup_cmd, cwd=where, run_as_root=True)
        else:
            LOG.warn("No python directory found at %s - skipping",
                     colorizer.quote(where, quote_color='red'))
def apply_patches(patch_files, working_dir):
    """Apply each (expanded) patch file inside ``working_dir``."""
    apply_files = expand_patches(patch_files)
    if not apply_files:
        return
    if not sh.isdir(working_dir):
        LOG.warn("Can only apply %s patches 'inside' a directory and not '%s'",
                 len(apply_files), working_dir)
        return
    with utils.chdir(working_dir):
        for patch_file in apply_files:
            LOG.debug("Applying patch %s in directory %s", patch_file, working_dir)
            patch_contents = sh.load_file(patch_file)
            # Empty patch files are silently skipped.
            if patch_contents:
                sh.execute(PATCH_CMD, process_input=patch_contents)
def list_patches(self, section):
    """List patch files for ``section`` (defaulting to the component dir)."""
    what_patches = self.get_option('patches', section)
    if not what_patches:
        default_dir = sh.joinpths(settings.CONFIG_DIR, 'patches',
                                  self.name, section)
        what_patches = [default_dir]
    canon_what_patches = []
    for path in what_patches:
        if sh.isdir(path):
            # NOTE(review): this matches any filename *ending* in 'patch'
            # (not only '.patch') — confirm that is intended.
            found = sorted(fn for fn in sh.listdir(path, files_only=True)
                           if fn.endswith('patch'))
            canon_what_patches.extend(found)
        elif sh.isfile(path):
            canon_what_patches.append(path)
    return canon_what_patches
def run_tests(self):
    """Execute the component's test command inside its application dir."""
    app_dir = self.get_option('app_dir')
    if sh.isdir(app_dir):
        cmd = self._get_test_command()
        env = self._get_env()
        sh.execute(*cmd, stdout_fh=None, stderr_fh=None,
                   cwd=app_dir, env_overrides=env)
    else:
        LOG.warn(
            "Unable to find application directory at %s, can not run %s tests.",
            colorizer.quote(app_dir), colorizer.quote(self.name))
def _config_adjust_api(self, config):
    """Add glance-api settings (port, file store) after the shared ones."""
    self._config_adjust_api_reg(config)
    gparams = ghelper.get_shared_params(**self.installer.options)
    config.add('bind_port', gparams['endpoints']['public']['port'])
    config.add('default_store', 'file')
    component_dir = self.installer.get_option('component_dir')
    img_store_dir = sh.joinpths(component_dir, 'images')
    config.add('filesystem_store_datadir', img_store_dir)
    LOG.debug("Ensuring file system store directory %r exists and is empty." % (img_store_dir))
    # Recreate the store empty so stale images never leak between installs.
    if sh.isdir(img_store_dir):
        sh.deldir(img_store_dir)
    sh.mkdirslist(img_store_dir, tracewriter=self.installer.tracewriter)