def _git(self, workdir, *args):
    with interim_working_dir(workdir):
        out = []
        if not execute(['git'] + list(args), capture=out, critical=False):
            print(['git'] + list(args))
            print('\n'.join(out))
            assert False, 'failed to issue git command'
        return '\n'.join(out)
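# Hypothetical usage of the helper above inside a test case (the attribute
# names and arguments are illustrative only, not taken from the project):
#
#     head = self._git(self.repo_dir, 'rev-parse', 'HEAD')
#     self.assertTrue(head)
#
# On failure the helper prints the attempted command and its captured output
# before asserting, which keeps failing tests easy to diagnose.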
def exists(self):
    """
    return whether or not the host tool exists

    Returns whether or not the tool is available on the host for use.

    Returns:
        ``True``, if the tool exists; ``False`` otherwise
    """
    if self.tool in RelengTool.detected:
        return RelengTool.detected[self.tool]

    found = False
    tool = self.tool

    if execute([tool] + self.exists_args, quiet=True, critical=False):
        found = True
    # if windows and a non-path entry, try to find the interpreter on the
    # local system
    elif sys.platform == 'win32' and os.path.basename(tool) == tool:
        debug('{} tool not available in path; '
              'attempting to search the system...', tool)

        alt_tool = find_win32_python_interpreter(tool)
        if alt_tool:
            debug('{} tool to be replaced by: {}', tool, alt_tool)

            if execute([alt_tool] + self.exists_args, quiet=True,
                    critical=False):
                found = True

                # adjust the tool for this instance to the newly detected
                # interpreter path
                tool = alt_tool
                self.tool = tool

    if found:
        debug('{} tool is detected on this system', tool)
        RelengTool.detected[tool] = True
    else:
        debug('{} tool is not detected on this system', tool)
        RelengTool.detected[tool] = False

    return RelengTool.detected[tool]
def test_utilio_execution(self):
    result = execute(None, quiet=True, critical=False)
    self.assertFalse(result)

    result = execute([], quiet=True, critical=False)
    self.assertFalse(result)

    test_cmd = ['python', '-c', 'print("Hello")']
    result = execute(test_cmd, quiet=True, critical=False)
    self.assertTrue(result)

    result = execute(['an_unknown_command'], quiet=True, critical=False)
    self.assertFalse(result)

    # skip output checks if verbose mode is enabled
    if is_verbose():
        raise unittest.SkipTest(
            'ignoring execution output checks while in verbose mode')

    # verify output
    with redirect_stdout() as stream:
        test_cmd = [sys.executable, '-c', 'print("Hello")']
        result = execute(test_cmd, critical=False)
        self.assertTrue(result)
    self.assertEqual(stream.getvalue().strip(), 'Hello')

    # verify quiet mode
    with redirect_stdout() as stream:
        test_cmd = [sys.executable, '-c', 'print("Hello")']
        result = execute(test_cmd, quiet=True, critical=False)
        self.assertTrue(result)
    self.assertEqual(stream.getvalue().strip(), '')

    # verify capture mode which will be silent
    out = []
    with redirect_stdout() as stream:
        test_cmd = [sys.executable, '-c', 'print("Hello")']
        result = execute(test_cmd, critical=False, capture=out)
        self.assertTrue(result)
    self.assertEqual(''.join(out), 'Hello')
    self.assertEqual(stream.getvalue().strip(), '')

    # verify capture mode that is also verbose
    out = []
    with redirect_stdout() as stream:
        test_cmd = [sys.executable, '-c', 'print("Hello")']
        result = execute(test_cmd, quiet=False, critical=False, capture=out)
        self.assertTrue(result)
    self.assertEqual(''.join(out), 'Hello')
    self.assertEqual(stream.getvalue().strip(), 'Hello')
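# The redirect_stdout() helper used above appears to be a project-local test
# utility, since contextlib.redirect_stdout() requires an explicit target.
# A minimal stand-in, assuming the helper only needs to yield a readable
# stream, could look like this (sketch only; not the project's implementation):
import contextlib
import io

@contextlib.contextmanager
def _redirect_stdout_sketch():
    stream = io.StringIO()
    with contextlib.redirect_stdout(stream):
        yield stream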
def configure(opts):
    """
    support configuration for autotools projects

    With provided configuration options (``RelengConfigureOptions``), the
    configuration stage will be processed.

    Args:
        opts: configuration options

    Returns:
        ``True`` if the configuration stage is completed; ``False`` otherwise
    """

    # check if autoreconf
    if opts._autotools_autoreconf:
        verbose('configured to run autoreconf')

        if not AUTORECONF.exists():
            err('unable to configure package; autoreconf is not installed')
            return False

        if not AUTORECONF.execute(['--verbose']):
            err('failed to prepare autotools project (autoreconf): {}',
                opts.name)
            return False

    # definitions
    autotools_defs = {
        '--prefix': opts.prefix,
        '--exec-prefix': opts.prefix,
    }
    if opts.conf_defs:
        autotools_defs.update(expand(opts.conf_defs))

    # default options
    autotools_opts = {
    }
    if opts.conf_opts:
        autotools_opts.update(expand(opts.conf_opts))

    # argument building
    autotools_args = []
    autotools_args.extend(prepare_definitions(autotools_defs))
    autotools_args.extend(prepare_arguments(autotools_opts))

    if not execute(['./configure'] + autotools_args,
            env_update=expand(opts.conf_env), critical=False):
        err('failed to prepare autotools project (configure): {}', opts.name)
        return False

    return True
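# A minimal sketch (not part of the project) of the argument list the
# configure stage above assembles. It assumes prepare_definitions() renders
# `key=value` pairs and prepare_arguments() renders flags, which may not match
# the real helpers exactly.
def _example_configure_args():
    defs = {'--prefix': '/usr', '--exec-prefix': '/usr'}
    opts = {'--enable-static': ''}

    args = []
    for key, value in defs.items():
        args.append('{}={}'.format(key, value))
    for key, value in opts.items():
        args.append('{}={}'.format(key, value) if value else key)

    # e.g. ['./configure', '--prefix=/usr', '--exec-prefix=/usr',
    #       '--enable-static']
    return ['./configure'] + args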
def exists(self):
    """
    return whether or not the host tool exists

    Returns whether or not the tool is available on the host for use.

    Returns:
        ``True``, if the tool exists; ``False`` otherwise
    """
    if self.tool in RelengTool.detected:
        return RelengTool.detected[self.tool]

    if execute([self.tool] + self.exists_args, quiet=True, critical=False):
        debug('{} tool is detected on this system', self.tool)
        RelengTool.detected[self.tool] = True
    else:
        debug('{} tool is not detected on this system', self.tool)
        RelengTool.detected[self.tool] = False

    return RelengTool.detected[self.tool]
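# A standalone sketch of the detection-cache pattern used above: the first
# exists() probe runs a check, later calls reuse the shared cached result.
# shutil.which() stands in for the real execute() probe and the class name is
# illustrative only.
import shutil

class _ToolProbe:
    detected = {}

    def __init__(self, tool):
        self.tool = tool

    def exists(self):
        if self.tool in _ToolProbe.detected:
            return _ToolProbe.detected[self.tool]

        _ToolProbe.detected[self.tool] = shutil.which(self.tool) is not None
        return _ToolProbe.detected[self.tool]

# _ToolProbe('git').exists()  # probes the system once
# _ToolProbe('git').exists()  # served from the shared cache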
def extract(opts):
    """
    support extraction of an archive into a build directory

    With provided extraction options (``RelengExtractOptions``), the
    extraction stage will be processed. The archive's extension will be used
    in attempt to finding a matching tool/implementation which can be used to
    extract the contents of the file. In the event that the method of
    extraction cannot be determined, it will be assumed that the file is in
    fact not extractable. Files which are not extracted are just copied into
    the build directly (e.g. single resource files).

    Args:
        opts: the extraction options

    Returns:
        ``True`` if the extraction stage is completed; ``False`` otherwise
    """
    assert opts
    cache_file = opts.cache_file
    strip_count = opts.strip_count
    work_dir = opts.work_dir

    cache_basename = os.path.basename(cache_file)
    __, cache_ext = interpret_stem_extension(cache_basename)

    is_extractable = False
    if cache_ext:
        cache_ext = cache_ext.lower()

        # if the user defines a tool override for this extension type, use
        # whatever the user wants to use (passing the file and directory to
        # extract to)
        extract_override = getattr(opts, '_extract_override', None)
        if extract_override and cache_ext in extract_override:
            is_extractable = True

            tool_cmd = extract_override[cache_ext].format(
                file=cache_file, dir=work_dir)
            if not execute(tool_cmd.split(), cwd=work_dir, critical=False):
                err('unable to extract with tool override\n'
                    ' (command: {})', tool_cmd)
                return False

        # attempt to extract the (compressed) tar archive with the host's
        # tar tool; if it does not exist, we'll fallback to using python's
        # internal implementation (tarfile)
        elif cache_ext.startswith(TAR_SUPPORTED):
            is_extractable = True

            # before attempting to use an external tar command, only allow
            # using it if the `force-local` option is available whenever a
            # colon character is provided, to prevent tar from assuming the
            # path is a remote target
            needs_force_local = False
            if ':' in cache_file:
                needs_force_local = True

            has_extracted = False
            if TAR.exists() and (TAR.force_local or not needs_force_local):
                tar_args = [
                    '--extract',
                    '--file=' + cache_file,
                    '--strip-components={}'.format(strip_count),
                    '--verbose',
                ]

                if needs_force_local:
                    tar_args.append('--force-local')

                if TAR.execute(tar_args, cwd=work_dir):
                    has_extracted = True
                else:
                    warn('unable to extract archive with host tar; '
                         'will use fallback')

            if not has_extracted:
                try:
                    def tar_extract(members, strip_count):
                        for member in members:
                            # strip members from package defined count
                            if strip_count > 0:
                                np = os.path.normpath(member.name)
                                parts = np.split(os.path.sep, strip_count)
                                if len(parts) <= strip_count:
                                    continue
                                member.name = parts[-1]

                            # notify the user of the target member to extract
                            print(member.name)
                            yield member

                    with tarfile.open(cache_file, 'r') as tar:
                        tar.extractall(path=work_dir,
                            members=tar_extract(tar, strip_count))
                except Exception as e:
                    err('unable to extract tar file\n'
                        '    {}\n'
                        ' (file: {})\n'
                        ' (target: {})', e, cache_file, work_dir)
                    return False

        # extract a zip-extension cache file using python's internal
        # implementation (zipfile)
        elif cache_ext == 'zip':
            is_extractable = True

            try:
                with ZipFile(cache_file, 'r') as zip_:
                    for member in zip_.namelist():
                        # strip members from package defined count
                        member_s = member
                        if strip_count > 0:
                            np = os.path.normpath(member_s)
                            parts = np.split(os.path.sep, strip_count)
                            if len(parts) <= strip_count:
                                continue
                            member_s = parts[-1]
                        dest = os.path.join(work_dir, member_s)

                        # notify the user of the target member to extract
                        print(member)

                        # if this is a directory entry, ensure the directory
                        # exists for the destination
                        if not os.path.basename(member):
                            ensure_dir_exists(dest)
                        else:
                            # always ensure the container directory for a file
                            # exists before attempting to extract a member into
                            # it, as not all processed zip files may process
                            # a directory entry (to be created) ahead of time
                            ensure_dir_exists(os.path.dirname(dest))

                            with zip_.open(member) as s, open(dest, 'wb') as f:
                                shutil.copyfileobj(s, f)
            except Exception as e:
                err('unable to extract zip file\n'
                    '    {}\n'
                    ' (file: {})\n'
                    ' (target: {})', e, cache_file, work_dir)
                return False

    if not is_extractable:
        debug('file not considered extractable: ' + cache_file)
        try:
            shutil.copy2(cache_file, work_dir)
        except IOError as e:
            err('unable to copy over cache file\n'
                '    {}\n'
                ' (file: {})\n'
                ' (target: {})', e, cache_file, work_dir)
            return False

    return True
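# A small, standalone illustration (assumption: POSIX-style separators) of the
# strip-count handling shared by the tar and zip paths above: the first
# `strip_count` path components are dropped and members that are entirely
# consumed are skipped.
import os

def _strip_member(name, strip_count):
    if strip_count <= 0:
        return name
    parts = os.path.normpath(name).split(os.path.sep, strip_count)
    if len(parts) <= strip_count:
        return None  # member fully stripped; it would be skipped
    return parts[-1]

# _strip_member('pkg-1.0/src/main.c', 1) -> 'src/main.c'
# _strip_member('pkg-1.0', 1)            -> None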