Example #1
def set_ds_test_env(test_name, hosts, test_env):
    TEMP_FILE = test_env["TEST_DIR"] + test_name
    hosts.all.shell(cmd="mkdir -p {0}".format(test_env["TEST_DIR"]))
    results = hosts.all.shell(cmd='hlq')
    for result in results.contacted.values():
        hlq = result.get("stdout")
    if len(hlq) > 8:
        hlq = hlq[:8]
    test_env["DS_NAME"] = hlq + "." + test_name.upper() + "." + test_env["DS_TYPE"]

    try:
        hosts.all.zos_data_set(name=test_env["DS_NAME"], type=test_env["DS_TYPE"])
        hosts.all.shell(cmd="echo \"{0}\" > {1}".format(test_env["TEST_CONT"], TEMP_FILE))
        if test_env["DS_TYPE"] in ["PDS", "PDSE"]:
            test_env["DS_NAME"] = test_env["DS_NAME"] + "(MEM)"
            hosts.all.zos_data_set(name=test_env["DS_NAME"], state="present", type="member")
            cmdStr = "cp -CM {0} \"//'{1}'\"".format(quote(TEMP_FILE), test_env["DS_NAME"])
        else:
            cmdStr = "cp {0} \"//'{1}'\" ".format(quote(TEMP_FILE), test_env["DS_NAME"])

        if test_env["ENCODING"] != "IBM-1047":
            hosts.all.zos_encode(src=TEMP_FILE, dest=test_env["DS_NAME"], from_encoding="IBM-1047", to_encoding=test_env["ENCODING"])
            # cmdStr = "/u/behnam/tools/cp_withencoding/bin/cp2 {0} {1} {2}".format(test_env["ENCODING"], quote(TEMP_FILE), quote(test_env["DS_NAME"]))
            # hosts.all.shell(cmd=cmdStr)
        else:
            hosts.all.shell(cmd=cmdStr)
        hosts.all.shell(cmd="rm -rf " + test_env["TEST_DIR"])
        cmdStr = "cat \"//'{0}'\" | wc -l ".format(test_env["DS_NAME"])
        results = hosts.all.shell(cmd=cmdStr)
        pprint(vars(results))
        for result in results.contacted.values():
            assert int(result.get("stdout")) != 0
    except Exception:
        clean_ds_test_env(test_env["DS_NAME"], hosts)
        assert False, "Failed to set the test env"
Example #2
 def print_discovered_devices(self, devices):
     self._stream.write(_SHELL_VAR_PREFIX + 'DEVICES=%u\n' % len(devices))
     for device_n, device in zip(itertools.count(), devices):
         self._stream.write(_SHELL_VAR_PREFIX + 'DEVICE_%u_NAME=%s\n' %
                            (device_n, shellescape.quote(device['name'])))
         self._stream.write(
             _SHELL_VAR_PREFIX + 'DEVICE_%u_ADDRESS=%s\n' %
             (device_n, shellescape.quote(device['address'])))
     self._stream.flush()
Example #3
    def encrypt_file(self,
                     inpath,
                     force_nocompress=False,
                     force_compress=False,
                     armored=False,
                     checksum=False):
        """public method for single file encryption with optional compression, ASCII armored formatting, and file hash digest generation"""
        if armored:
            if force_compress:
                command_stub = self.command_maxcompress_armored
            elif force_nocompress:
                command_stub = self.command_nocompress_armored
            else:
                if self._is_compress_filetype(inpath):
                    command_stub = self.command_default_armored
                else:
                    command_stub = self.command_nocompress_armored
        else:
            if force_compress:
                command_stub = self.command_maxcompress
            elif force_nocompress:
                command_stub = self.command_nocompress
            else:
                if self._is_compress_filetype(inpath):
                    command_stub = self.command_default
                else:
                    command_stub = self.command_nocompress

        encrypted_outpath = self._create_outfilepath(inpath)
        system_command = command_stub + encrypted_outpath + " --passphrase " + quote(
            self.passphrase) + " --symmetric " + quote(inpath)

        try:
            response = muterun(system_command)
            # check returned status code
            if response.exitcode == 0:
                stdout(encrypted_outpath + " was generated from " + inpath)
                if checksum:  # add a SHA256 hash digest of the encrypted file - requested by user --hash flag in command
                    from crypto.library import hash
                    encrypted_file_hash = hash.generate_hash(encrypted_outpath)
                    if len(encrypted_file_hash) == 64:
                        stdout("SHA256 hash digest for " + encrypted_outpath +
                               " :")
                        stdout(encrypted_file_hash)
                    else:
                        stdout(
                            "Unable to generate a SHA256 hash digest for the file "
                            + encrypted_outpath)
            else:
                stderr(response.stderr, 0)
                stderr("Encryption failed")
                sys.exit(1)
        except Exception as e:
            stderr(
                "There was a problem with the execution of gpg. Encryption failed. Error: ["
                + str(e) + "]")
            sys.exit(1)
Example #4
 def print_discovered_devices(self, devices):
     self._stream.write(_SHELL_VAR_PREFIX + 'DEVICES=%u\n' % len(devices))
     for device_n, device in zip(itertools.count(), devices):
         self._stream.write(
                 _SHELL_VAR_PREFIX + 'DEVICE_%u_NAME=%s\n' % (
                     device_n, shellescape.quote(device['name'])))
         self._stream.write(
                 _SHELL_VAR_PREFIX + 'DEVICE_%u_ADDRESS=%s\n' % (
                     device_n, shellescape.quote(device['address'])))
     self._stream.flush()
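The device printers in Examples #2 and #4 emit eval-able shell assignments. A minimal, self-contained sketch of the same pattern; the _SHELL_VAR_PREFIX value and the sample device are assumptions, since the listing does not show them:

    import io
    import itertools

    import shellescape

    _SHELL_VAR_PREFIX = 'EQ3_'  # assumed prefix, not shown in the listing

    stream = io.StringIO()
    devices = [{'name': 'Living Room', 'address': '00:1A:22:0A:BB:CC'}]
    stream.write(_SHELL_VAR_PREFIX + 'DEVICES=%u\n' % len(devices))
    for device_n, device in zip(itertools.count(), devices):
        stream.write(_SHELL_VAR_PREFIX + 'DEVICE_%u_NAME=%s\n' %
                     (device_n, shellescape.quote(device['name'])))
        stream.write(_SHELL_VAR_PREFIX + 'DEVICE_%u_ADDRESS=%s\n' %
                     (device_n, shellescape.quote(device['address'])))
    print(stream.getvalue(), end='')
    # EQ3_DEVICES=1
    # EQ3_DEVICE_0_NAME='Living Room'
    # EQ3_DEVICE_0_ADDRESS=00:1A:22:0A:BB:CC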
Example #5
 def print_temperatures(self, value):
     for var_name in ('current_temp', 'manual_temp', 'target_temp_l',
                      'target_temp_h', 'offset_temp'):
         val_str = '%f' % value[var_name]
         self._stream.write(_SHELL_VAR_PREFIX + '%s=%s\n' %
                            (var_name.upper(), shellescape.quote(val_str)))
     for var_name in ('window_open_detection', 'window_open_minutes'):
         val_str = '%u' % value[var_name]
         self._stream.write(_SHELL_VAR_PREFIX + '%s=%s\n' %
                            (var_name.upper(), shellescape.quote(val_str)))
     self._stream.flush()
Example #6
def envDump(shell, log):  # pylint: disable=invalid-name,missing-param-doc,missing-type-doc
    """Dump environment to a .fuzzmanagerconf file."""
    # Platform and OS detection for the spec, part of which is in:
    #   https://wiki.mozilla.org/Security/CrashSignatures
    fmconf_platform = "x86" if shell.build_opts.enable32 else "x86-64"

    fmconf_os = None
    if platform.system() == "Linux":
        fmconf_os = "linux"
    elif platform.system() == "Darwin":
        fmconf_os = "macosx"
    elif platform.system() == "Windows":
        fmconf_os = "windows"

    with io.open(str(log), "a", encoding="utf-8", errors="replace") as f:
        f.write("# Information about shell:\n# \n")

        f.write("# Create another shell in shell-cache like this one:\n")
        f.write('# %s -u -m %s -b "%s" -r %s\n# \n' % (
            # Perhaps this can go into a separate function. See ensureBuild in bot.py
            "python" +
            re.search("python.*[2-3]", os.__file__).group(0).replace(
                "/", "").split("python")[-1],
            "funfuzz.js.compile_shell",
            shell.build_opts.build_options_str,
            shell.get_hg_hash()))

        f.write("# Full environment is:\n")
        f.write("# %s\n# \n" % str(shell.get_env_full()))

        f.write(
            "# Full configuration command with needed environment variables is:\n"
        )
        f.write("# %s %s\n# \n" %
                (" ".join(quote(str(x))
                          for x in shell.get_env_added()), " ".join(
                              quote(str(x))
                              for x in shell.get_cfg_cmd_excl_env())))

        # .fuzzmanagerconf details
        f.write("\n")
        f.write("[Main]\n")
        f.write("platform = %s\n" % fmconf_platform)
        f.write("product = %s\n" % shell.get_repo_name())
        f.write("product_version = %s\n" % shell.get_hg_hash())
        f.write("os = %s\n" % fmconf_os)

        f.write("\n")
        f.write("[Metadata]\n")
        f.write("buildFlags = %s\n" % shell.build_opts.build_options_str)
        f.write("majorVersion = %s\n" % shell.get_version().split(".")[0])
        f.write("pathPrefix = %s/\n" % shell.get_repo_dir())
        f.write("version = %s\n" % shell.get_version())
Example #7
 def print_holidays(self, value):
     for holiday_n, holiday in zip(itertools.count(), value):
         for var_name in 'start', 'end':
             var_val = ('' if holiday[var_name] is None else
                        holiday[var_name].isoformat())
             self._stream.write(
                 _SHELL_VAR_PREFIX + 'HOLIDAY_%u_%s=%s\n' %
                 (holiday_n, var_name.upper(), shellescape.quote(var_val)))
         self._stream.write(
             _SHELL_VAR_PREFIX + 'HOLIDAY_%u_TEMP=%s\n' %
             (holiday_n,
              shellescape.quote('' if holiday['temp'] is None else '%f' %
                                holiday['temp'])))
     self._stream.flush()
Example #8
 def print_holidays(self, value):
     for holiday_n, holiday in zip(itertools.count(), value):
         for var_name in 'start', 'end':
             var_val = ('' if holiday[var_name] is None
                        else holiday[var_name].isoformat())
             self._stream.write(
                     _SHELL_VAR_PREFIX + 'HOLIDAY_%u_%s=%s\n' % (
                         holiday_n, var_name.upper(),
                         shellescape.quote(var_val)))
         self._stream.write(
                 _SHELL_VAR_PREFIX + 'HOLIDAY_%u_TEMP=%s\n' % (
                     holiday_n, shellescape.quote(
                             '' if holiday['temp'] is None
                             else '%f' % holiday['temp'])))
     self._stream.flush()
Example #9
 def _is_compress_filetype(self, inpath):
     """private method that performs magic number and size check on file to determine whether to compress the file"""
     # check for common file type suffixes in order to avoid the need for file reads to check magic number for binary vs. text file
     if self._is_common_binary(inpath):
         return False
     elif self._is_common_text(inpath):
         return True
     else:
         # files > 10kB get checked for compression (arbitrary decision to skip compression on small files)
         the_file_size = file_size(inpath)
         if the_file_size > 10240:
             if the_file_size > 512000:  # seems to be a break point at ~ 500kb where file compression offset by additional file read, so limit tests to files > 500kB
                 try:
                     system_command = "file --mime-type -b " + quote(inpath)
                     response = muterun(system_command)
                     if response.stdout[0:5] == "text/":  # check for a text file mime type
                         return True   # appropriate size, appropriate file mime type
                     else:
                         return False  # appropriate size, inappropriate file mime type
                 except Exception:
                     return False
             else:
                 return True  # if file size is < 500kB, skip the additional file read and just go with compression
         else:
             return False  # below minimum size to consider compression, do not compress
Example #10
def normalize_shell_variable(myvar):
    return quote(myvar)
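For reference, a minimal sketch of what quote itself returns; shellescape mirrors the old pipes.quote behavior, so exact output may vary slightly across versions:

    from shellescape import quote

    print(quote('simple'))     # simple        (only safe characters, passed through)
    print(quote('two words'))  # 'two words'   (wrapped in single quotes)
    print(quote("it's"))       # 'it'"'"'s'    (embedded single quote escaped)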
Example #11
def run_lithium(lithArgs, logPrefix, targetTime):  # pylint: disable=invalid-name,missing-param-doc,missing-return-doc
    # pylint: disable=missing-return-type-doc,missing-type-doc
    """Run Lithium as a subprocess: reduce to the smallest file that has at least the same unhappiness level.

    Returns a tuple of (lithlogfn, LITH_*, details).
    """
    deletableLithTemp = None  # pylint: disable=invalid-name
    if targetTime:
        # FIXME: this could be based on whether bot has a remoteHost  # pylint: disable=fixme
        # loop is being used by bot
        deletableLithTemp = tempfile.mkdtemp(prefix="fuzzbot-lithium")  # pylint: disable=invalid-name
        lithArgs = ["--maxruntime=" + str(targetTime), "--tempdir=" + deletableLithTemp] + lithArgs
    else:
        # loop is being run standalone
        lithtmp = logPrefix.parent / (logPrefix.stem + "-lith-tmp")
        Path.mkdir(lithtmp)
        lithArgs = ["--tempdir=" + str(lithtmp)] + lithArgs
    lithlogfn = (logPrefix.parent / (logPrefix.stem + "-lith-out")).with_suffix(".txt")
    print("Preparing to run Lithium, log file %s" % lithlogfn)
    print(" ".join(quote(str(x)) for x in runlithiumpy + lithArgs))
    with io.open(str(lithlogfn), "w", encoding="utf-8", errors="replace") as f:
        subprocess.run(runlithiumpy + lithArgs, stderr=subprocess.STDOUT, stdout=f)
    print("Done running Lithium")
    if deletableLithTemp:
        shutil.rmtree(deletableLithTemp)
    r = readLithiumResult(lithlogfn)  # pylint: disable=invalid-name
    subprocess.run(["gzip", "-f", str(lithlogfn)], check=True)
    return r
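Several examples in this listing (#11, #17, #29, #35) log a reproducible command line by quoting each argv element individually. A standalone sketch of that pattern:

    from shellescape import quote

    # Quoting element by element keeps arguments with spaces as single
    # shell words, so the printed line can be copy-pasted into a shell.
    cmd = ['python', '-u', '-m', 'lithium', '--strategy=check-only', 'file name.js']
    print(' '.join(quote(str(x)) for x in cmd))
    # python -u -m lithium --strategy=check-only 'file name.js'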
Example #12
 def _is_compress_filetype(self, inpath):
     """private method that performs magic number and size check on file to determine whether to compress the file"""
     # check for common file type suffixes in order to avoid the need for file reads to check magic number for binary vs. text file
     if self._is_common_binary(inpath):
         return False
     elif self._is_common_text(inpath):
         return True
     else:
         # files > 10kB get checked for compression (arbitrary decision to skip compression on small files)
         the_file_size = file_size(inpath)
         if the_file_size > 10240:
             if the_file_size > 512000:  # seems to be a break point at ~ 500kb where file compression offset by additional file read, so limit tests to files > 500kB
                 try:
                     system_command = "file --mime-type -b " + quote(inpath)
                     response = muterun(system_command)
                     if response.stdout[0:5] == "text/":  # check for a text file mime type
                         return True   # appropriate size, appropriate file mime type
                     else:
                         return False  # appropriate size, inappropriate file mime type
                 except Exception:
                     return False
             else:
                 return True  # if file size is < 500kB, skip the additional file read and just go with compression
         else:
             return False  # below minimum size to consider compression, do not compress
Example #13
def reactionStop(request):
    ctx = {}
    if request.POST:
        # Get the parameters needed for the computation
        stop = request.POST['stop']

        tmp = os.popen('docker stop ' + quote(stop)).readlines()
        tmp_for_all = os.popen('docker ps -a').readlines()
        try:
            if stop == tmp[0].strip():
                # Stopped successfully
                ctx['statusStop'] = 'Success'
                ctx['id'] = ''
                ctx['name'] = ''
                ctx['statusCMD'] = ''
                ctx['output'] = ''
                ctx['allContainer'] = ''.join(tmp_for_all)
            else:
                ctx['statusStop'] = 'Fail'
                ctx['status'] = ''
                ctx['output'] = 'No output'
                ctx['allContainer'] = '\n' + ''.join(tmp_for_all)
        except Exception:
            ctx['statusStop'] = 'Fail'
            ctx['status'] = ''
            ctx['output'] = 'No output'
            ctx['allContainer'] = '\n' + ''.join(tmp_for_all)

    return render(request, "reaction.html", ctx)
Example #14
def react(request):
    ctx = {}
    if request.POST:
        # Get the parameters needed for the computation
        command = request.POST['command']
        tmp_for_all = os.popen('docker ps -a').readlines()
        # name = request.POST['name']
        # Grab the most recent container run
        name = ((os.popen('docker ps -l').readlines()[1].strip()).split())[-1]
        idNow = ((os.popen('docker ps -l').readlines()[1].strip()).split())[0]
        # os.system('docker start -i '+ tmp_ctx['name'])
        cmd = 'docker exec -i ' + name + ' ' + quote(command)
        try:
            tmp = os.popen(cmd).readlines()
            ctx['id'] = idNow
            ctx['name'] = name
            ctx['statusCMD'] = 'Success'
            ctx['output'] = ''.join(tmp)
            ctx['allContainer'] = ''.join(tmp_for_all)
        except Exception:
            ctx['status'] = 'Fail'
            ctx['output'] = 'No output'
            ctx['allContainer'] = '\n' + ''.join(tmp_for_all)
        print(ctx)
    else:
        print('No Request')
    return render(request, "reaction.html", ctx)
Example #15
def main():

    title = argv[2] if len(argv) > 2 else os.path.splitext(
        os.path.basename(argv[1]))[0]
    tmpdir = os.path.dirname(os.path.abspath(__file__)) + '/tmp'
    command = command_generator(quote(os.path.abspath(argv[1])))

    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    os.mkdir(tmpdir)
    os.chdir(tmpdir)
    print('ffmpeg %s' % command)
    os.system('ffmpeg %s' % command)

    i = 0
    with open('out.m3u8', 'r') as f:
        lines = f.read()
    executor = ThreadPoolExecutor(max_workers=10)
    futures = {
        executor.submit(upload_yuque, chunk): chunk
        for chunk in glob.glob('*.ts')
    }

    for future in as_completed(futures):
        lines = lines.replace(futures[future], future.result())

        i += 1
        print('[%s/%s] Uploaded %s to %s' %
              (i, len(futures), futures[future], future.result()))

    print('This video has been published to: %s' % publish(lines, title))
Example #16
def store_jcl(localhost, jcl_filename):
    with open("tests/functional/files/{0}".format(jcl_filename)) as f:
        jcl_file_content = f.read()

    localhost.file(path=TEMP_PATH, state="directory")
    localhost.shell(cmd="echo {0} > {1}/SAMPLE".format(
        quote(JOB_CARD_CONTENTS + jcl_file_content), TEMP_PATH))
Example #17
    def lith_reduce(strategy):  # pylint: disable=invalid-name,missing-param-doc,missing-return-doc
        # pylint: disable=missing-return-type-doc,missing-type-doc
        """Lithium reduction commands accepting various strategies.

        Args:
            strategy (str): Intended strategy to use

        Returns:
            (tuple): The finished Lithium run result and details
        """
        reductionCount[0] += 1
        # Remove empty elements
        full_lith_args = [x for x in (strategy + lithArgs) if x]
        print(" ".join(
            quote(str(x))
            for x in [sys.executable, "-u", "-m", "lithium"] + full_lith_args))

        desc = "-chars" if strategy == "--char" else "-lines"
        (lith_result, lith_details) = run_lithium(  # pylint: disable=invalid-name
            full_lith_args,
            (logPrefix.parent / ("%s-%s%s" %
                                 (logPrefix.stem, reductionCount[0], desc))),
            targetTime)
        if lith_result == LITH_FINISHED:
            shutil.copy2(str(infilename), str(backup_file))

        return lith_result, lith_details
Example #18
    def process(self):
        progress = Progress('hashing')

        __LOG__.debug(
            'hashing %d files (%s)',
            len(self._queue),
            sizeof_fmt(self.bytes_to_hash)
        )
        progress.start(self, maxval=self.bytes_to_hash)
        for (file_path, file_size, dest) in self._queue:
            __LOG__.debug(
                'hashing %s (%s)...',
                quote(file_path),
                sizeof_fmt(file_size)
            )
            try:
                hash_values = hashfile(file_path)
            except Exception as err:
                __LOG__.exception('hashing of %s failed: ', file_path)
                self.errors.append(err)
            else:
                dest.update(hash_values)
                self.processed.append(dest)
            self.bytes_processed = self.bytes_processed + file_size
            progress.update(self, val=self.bytes_processed)
        progress.finish(self)
        __LOG__.debug('%d files hashed', len(self.processed))
        return self.processed
Example #19
def main():

    video = quote(os.path.abspath(sys.argv[1]))
    title = sys.argv[2] if len(sys.argv) > 2 else os.path.splitext(
        os.path.basename(sys.argv[1]))[0]
    stime = float(sys.argv[3]) if len(sys.argv) > 3 else segment_time(video)
    tmpdir = os.path.dirname(os.path.abspath(__file__)) + '/tmp'

    if os.path.isdir(tmpdir):
        shutil.rmtree(tmpdir)
    os.mkdir(tmpdir)
    os.chdir(tmpdir)

    # os.system('ffmpeg -i %s -codec copy -map 0 -f segment -segment_list out.m3u8 -segment_list_flags +live -segment_time 5 out%%03d.ts' % video)
    # os.system('ffmpeg -i %s -vcodec copy -acodec aac -hls_list_size 0 -hls_segment_size 3000000 -f hls out.m3u8' % video)
    os.system(
        'ffmpeg -i %s -vcodec copy -acodec aac -map 0 -f segment -segment_list out.m3u8 -segment_time %d out%%03d.ts'
        % (video, stime))

    i = 0
    with open('out.m3u8', 'r') as f:
        lines = f.read()
    executor = ThreadPoolExecutor(max_workers=10)
    futures = {
        executor.submit(upload_yuque, chunk): chunk
        for chunk in glob.glob('*.ts')
    }

    for future in as_completed(futures):
        lines = lines.replace(futures[future], future.result())

        i += 1
        print('[%s/%s] Uploaded %s to %s' %
              (i, len(futures), futures[future], future.result()))

    print('This video has been published to: %s' % publish(lines, title))
Example #20
 def _compute_binding(
     self,
     processed_token: Any,
     context: MutableMapping[Text, Any],
     bindings_map: MutableMapping[int, MutableSequence[Any]],
     is_shell_command: bool = False,
     full_js: bool = False,
     expression_lib: Optional[MutableSequence[Text]] = None
 ) -> MutableMapping[int, MutableSequence[Any]]:
     # Obtain token value
     value = _get_value(processed_token, self.item_separator)
     # If token value is null or an empty array, skip the command token
     if value is None:
         return bindings_map
     # Otherwise
     else:
         # Obtain prefix if present
         if self.prefix is not None:
             if isinstance(value, bool):
                 value = [self.prefix if value else '']
             elif self.separate:
                 if isinstance(value, MutableSequence):
                     value = [
                         self.prefix, " ".join([str(v) for v in value])
                     ]
                 else:
                     value = [self.prefix, value]
             elif isinstance(value, MutableSequence):
              value = [self.prefix] + list(value)  # list.extend() returns None; concatenate instead
             else:
                 value = [self.prefix + str(value)]
         # If value is a boolean with no prefix, skip it
         if isinstance(value, bool):
             return bindings_map
         # Ensure value is a list
         if not isinstance(value, MutableSequence):
             value = [value]
         # Process shell escape
         if is_shell_command and self.shell_quote:
             value = [shellescape.quote(v) for v in value]
         # Obtain token position
         if isinstance(self.position,
                       str) and not self.position.isnumeric():
             context['self'] = processed_token
             position = eval_expression(expression=self.position,
                                        context=context,
                                        full_js=full_js,
                                        expression_lib=expression_lib)
             try:
                 position = int(position) if position is not None else 0
             except ValueError:
                 position = 0
         else:
             position = int(self.position)
         if position not in bindings_map:
             bindings_map[position] = []
         # Place value in proper position
         bindings_map[position].append(value)
         return bindings_map
Example #21
def validate(ctx, service_name):
    service = Service(yml=Config(filename=ctx.obj['CONFIG_FILE'], env_file=ctx.obj['ENV_FILE']).get_service(service_name))
    host, name, user, passwd, port = _get_db_parameters(service)
    cmd = "/usr/bin/mysql --host={} --user={} --password={} --port={} --execute='select version(), current_date;'"
    cmd = cmd.format(host, user, quote(passwd), port)
    success, output = service.run_remote_script([cmd])
    print(success)
    print(output)
Example #22
    def escapePathForShell(path):
        """
		Escapes a filesystem path for use as a command-line argument
		"""
        if platform.system() == 'Windows':
            return '"{}"'.format(path.replace('"', '""'))
        else:
            return shellescape.quote(path)
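A standalone restatement of the helper above with a usage line (the input path is hypothetical): Windows cmd doubles embedded double quotes inside a quoted string, while POSIX escaping is delegated to shellescape.quote.

    import platform

    import shellescape

    def escape_path_for_shell(path):
        # Windows: wrap in double quotes and double any embedded ones;
        # elsewhere: defer to shellescape.quote.
        if platform.system() == 'Windows':
            return '"{}"'.format(path.replace('"', '""'))
        return shellescape.quote(path)

    print(escape_path_for_shell('/tmp/my file.txt'))
    # POSIX output: '/tmp/my file.txt'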
Example #23
 def print_temperatures(self, value):
     for var_name in ('current_temp',
                      'manual_temp',
                      'target_temp_l',
                      'target_temp_h',
                      'offset_temp'):
         val_str = '%f' % value[var_name]
         self._stream.write(
                 _SHELL_VAR_PREFIX + '%s=%s\n' % (
                     var_name.upper(), shellescape.quote(val_str)))
     for var_name in ('window_open_detection',
                      'window_open_minutes'):
         val_str = '%u' % value[var_name]
         self._stream.write(
                 _SHELL_VAR_PREFIX + '%s=%s\n' % (
                     var_name.upper(), shellescape.quote(val_str)))
     self._stream.flush()
Example #24
    def append_file(self, file_name, data):
        """Appends a block of data to a file through the shell.

        Args:
            file_name: The name of the file to write to.
            data: The string of data to write.
        """
        return self.run('echo %s >> %s' % (shellescape.quote(data), file_name))
Example #25
 def command(self):
     args = self.command_format(self.properties)
     sanitized_args = []
     for arg in args:
          if isinstance(arg, str):
             sanitized_args.append(shellescape.quote(arg))
         else:
             sanitized_args.append(arg)
     cmd = self.CMD.format(*sanitized_args)
     return cmd
Example #26
 def add_package_modifier(self, name, modifiers):
     for modifier in modifiers:
         slug = re.sub(r'\W+', '_', modifier.split(None, 1)[0])
         self.engine.run_cmd(
             self.build_container,
             "chroot-{arch}-docker -c \"test -f /etc/portage/package.{name} && "
             "echo {modifier} >>/etc/portage/package.{name} || "
             "mkdir -p /etc/portage/package.{name} && "
             "echo {modifier} >>/etc/portage/package.{name}/{slug}\"".format(
                 arch=self.arch, modifier=quote(modifier), slug=slug, name=name))
Example #27
def escape_shell(inp):
    """
    Shell-escapes input param
    :param inp: string to escape
    :return: the shell-escaped string
    """
    try:
        return shellescape.quote(inp)
    except Exception:
        # Fall back to the locally imported quote() if shellescape fails.
        return quote(inp)
Example #28
 def print_days(self, value):
     for day_n, day in zip(itertools.count(), value):
         for period_n, period in zip(itertools.count(), day):
             for var_name in 'start', 'end':
                 var_val = ('' if period[var_name] is None
                            else period[var_name].isoformat())
                 self._stream.write(
                         _SHELL_VAR_PREFIX + 'DAY_%u_PERIOD_%u_%s=%s\n' % (
                             day_n, period_n, var_name.upper(),
                             shellescape.quote(var_val)))
     self._stream.flush()
Example #29
def pinpoint(itest, logPrefix, jsEngine, engineFlags, infilename,  # pylint: disable=invalid-name,missing-param-doc
             bisectRepo, build_options_str, targetTime, suspiciousLevel):
    # pylint: disable=missing-return-doc,missing-return-type-doc,missing-type-doc,too-many-arguments,too-many-locals
    """Run Lithium and autobisectjs.

    itest must be an array of the form [module, ...] where module is an interestingness module.
    The module's "interesting" function must accept [...] + [jsEngine] + engineFlags + infilename
    (If it's not prepared to accept engineFlags, engineFlags must be empty.)
    """
    lithArgs = itest + [str(jsEngine)] + engineFlags + [str(infilename)]  # pylint: disable=invalid-name

    (lithResult, lithDetails) = reduction_strat(  # pylint: disable=invalid-name
        logPrefix, infilename, lithArgs, targetTime, suspiciousLevel)

    print()
    print("Done running Lithium on the part in between DDBEGIN and DDEND. To reproduce, run:")
    print(" ".join(quote(str(x)) for x in [sys.executable, "-u", "-m", "lithium", "--strategy=check-only"] + lithArgs))
    print()

    if (bisectRepo != "none" and targetTime >= 3 * 60 * 60 and
            build_options_str is not None and testJsShellOrXpcshell(jsEngine) != "xpcshell"):
        autobisectCmd = (  # pylint: disable=invalid-name
            [sys.executable, "-u", "-m", "funfuzz.autobisectjs"] +
            ["-b", build_options_str] +
            ["-p", " ".join(engineFlags + [str(infilename)])] +
            ["-i"] + [str(x) for x in itest]
        )
        print(" ".join(quote(str(x)) for x in autobisectCmd))
        autobisect_log = (logPrefix.parent / (logPrefix.stem + "-autobisect")).with_suffix(".txt")
        with io.open(str(autobisect_log), "w", encoding="utf-8", errors="replace") as f:
            subprocess.run(autobisectCmd, stderr=subprocess.STDOUT, stdout=f)
        print("Done running autobisectjs. Log: %s" % autobisect_log)

        with io.open(str(autobisect_log), "r", encoding="utf-8", errors="replace") as f:
            lines = f.readlines()
            autobisect_log_trunc = file_manipulation.truncateMid(lines, 50, ["..."])
    else:
        autobisect_log_trunc = []

    return (lithResult, lithDetails, autobisect_log_trunc)
Example #30
    def menu_activate_cb(self, menu, locations):
        # self.debug( 'type of location: ' + str( type(locations) ) )

        for f in locations:
            try:
                path = f.get_location().get_path()
                escaped_path = shellescape.quote(path)
                command = "nodejs /composer/node/ocdownload/ocdownload.js " + escaped_path
                self.debug('menu_activate_cb runs ' + command)
                exit_code = os.system(command)
                self.debug('exit code: ' + str(exit_code))
            except Exception as e:
                self.debug('Exception: ' + str(e))
Example #31
def test_job_submit_USS(ansible_zos_module):
    hosts = ansible_zos_module
    hosts.all.file(path=TEMP_PATH, state="directory")
    hosts.all.shell(cmd="echo {0} > {1}/SAMPLE".format(
        quote(JCL_FILE_CONTENTS), TEMP_PATH))
    results = hosts.all.zos_job_submit(src="{0}/SAMPLE".format(TEMP_PATH),
                                       location="USS",
                                       wait=True,
                                       volume=None)
    hosts.all.file(path=TEMP_PATH, state="absent")
    for result in results.contacted.values():
        assert result.get("jobs")[0].get("ret_code").get("msg_code") == "0000"
        assert result.get("jobs")[0].get("ret_code").get("code") == 0
        assert result.get("changed") is True
Example #32
def test_job_submit_USS(ansible_zos_module):
    hosts = ansible_zos_module
    hosts.all.file(path=TEMP_PATH, state='directory')
    hosts.all.shell(
        cmd='echo {} > {}/SAMPLE'.format(quote(JCL_FILE_CONTENTS), TEMP_PATH))
    results = hosts.all.zos_job_submit(src='{}/SAMPLE'.format(TEMP_PATH),
                                       location="USS",
                                       wait=True,
                                       volume=None)
    hosts.all.file(path=TEMP_PATH, state='absent')
    for result in results.contacted.values():
        assert result.get('jobs')[0].get('ret_code').get('msg_code') == '0000'
        assert result.get('jobs')[0].get('ret_code').get('code') == 0
        assert result.get('changed') is True
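In Examples #31 and #32, quote is what keeps the multi-line JCL intact through the remote echo. A minimal sketch of the command those tests build; the JCL_FILE_CONTENTS and TEMP_PATH values here are assumptions, since the listing does not define them:

    from shellescape import quote

    JCL_FILE_CONTENTS = "//SAMPLE  JOB\n//STEP1   EXEC PGM=IEFBR14"  # assumed sample
    TEMP_PATH = "/tmp/jcl"  # assumed path

    # quote() wraps the whole block in single quotes, so the remote echo
    # receives one argument and the embedded newlines survive into SAMPLE.
    print("echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH))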
Example #33
def test_zos_job_output_job_exists(ansible_zos_module):
    hosts = ansible_zos_module
    hosts.all.file(path=TEMP_PATH, state="directory")
    hosts.all.shell(
        cmd="echo {0} > {1}/SAMPLE".format(quote(JCL_FILE_CONTENTS), TEMP_PATH)
    )
    hosts.all.zos_job_submit(
        src="{0}/SAMPLE".format(TEMP_PATH), location="USS", wait=True, volume=None
    )
    hosts.all.file(path=TEMP_PATH, state="absent")
    results = hosts.all.zos_job_output(job_name="SAMPLE")
    for result in results.contacted.values():
        assert result.get("changed") is False
        assert result.get("jobs") is not None
Example #34
def test_zos_job_output_job_exists(ansible_zos_module):
    hosts = ansible_zos_module
    hosts.all.file(path=TEMP_PATH, state='directory')
    hosts.all.shell(
        cmd='echo {} > {}/SAMPLE'.format(quote(JCL_FILE_CONTENTS), TEMP_PATH))
    hosts.all.zos_job_submit(src='{}/SAMPLE'.format(TEMP_PATH),
                             location="USS",
                             wait=True,
                             volume=None)
    hosts.all.file(path=TEMP_PATH, state='absent')
    results = hosts.all.zos_job_output(job_name='SAMPLE')
    for result in results.contacted.values():
        assert result.get('changed') is False
        assert result.get('zos_job_output') is not None
Example #35
def testBinary(shellPath, args, useValgrind):  # pylint: disable=invalid-name,missing-param-doc,missing-return-doc
    # pylint: disable=missing-return-type-doc,missing-type-doc
    """Test the given shell with the given args."""
    test_cmd = (constructVgCmdList() if useValgrind else []) + [str(shellPath)] + args
    sps.vdump("The testing command is: " + " ".join(quote(str(x)) for x in test_cmd))
    test_cmd_result = subprocess.run(
        test_cmd,
        cwd=os.getcwdu() if sys.version_info.major == 2 else os.getcwd(),  # pylint: disable=no-member
        env=env_with_path(str(shellPath.parent)),
        stderr=subprocess.STDOUT,
        stdout=subprocess.PIPE,
        timeout=999)
    out, return_code = test_cmd_result.stdout.decode("utf-8", errors="replace"), test_cmd_result.returncode
    sps.vdump("The exit code is: " + str(return_code))
    return out, return_code
Example #36
    def search_file(self, search_string, file_name):
        """Searches through a file for a string.

        Args:
            search_string: The string or pattern to look for.
            file_name: The name of the file to search.

        Returns:
            True if the string or pattern was found, False otherwise.
        """
        try:
            self.run('grep %s %s' % (shellescape.quote(search_string),
                                     file_name))
            return True
        except job.Error:
            return False
Example #37
    def encrypt_file(self, inpath, force_nocompress=False, force_compress=False, armored=False, checksum=False):
        """public method for single file encryption with optional compression, ASCII armored formatting, and file hash digest generation"""
        if armored:
            if force_compress:
                command_stub = self.command_maxcompress_armored
            elif force_nocompress:
                command_stub = self.command_nocompress_armored
            else:
                if self._is_compress_filetype(inpath):
                    command_stub = self.command_default_armored
                else:
                    command_stub = self.command_nocompress_armored
        else:
            if force_compress:
                command_stub = self.command_maxcompress
            elif force_nocompress:
                command_stub = self.command_nocompress
            else:
                if self._is_compress_filetype(inpath):
                    command_stub = self.command_default
                else:
                    command_stub = self.command_nocompress

        encrypted_outpath = self._create_outfilepath(inpath)
        system_command = command_stub + encrypted_outpath + " --passphrase " + quote(self.passphrase) + " --symmetric " + quote(inpath)

        try:
            response = muterun(system_command)
            # check returned status code
            if response.exitcode == 0:
                stdout(encrypted_outpath + " was generated from " + inpath)
                if checksum:  # add a SHA256 hash digest of the encrypted file - requested by user --hash flag in command
                    from crypto.library import hash
                    encrypted_file_hash = hash.generate_hash(encrypted_outpath)
                    if len(encrypted_file_hash) == 64:
                        stdout("SHA256 hash digest for " + encrypted_outpath + " :")
                        stdout(encrypted_file_hash)
                    else:
                        stdout("Unable to generate a SHA256 hash digest for the file " + encrypted_outpath)
            else:
                stderr(response.stderr, 0)
                stderr("Encryption failed")
                sys.exit(1)
        except Exception as e:
            stderr("There was a problem with the execution of gpg. Encryption failed. Error: [" + str(e) + "]")
            sys.exit(1)
Example #38
 def _step0_scan_db(self):
     progress = Progress('scanning db')
     __LOG__.debug('scanning db...')
     db_entries_count = self.file_registry.count()
     old_files_deleted = 0
     mtime_changed = 0
     size_changed = 0
     index = 0
     progress.start(self, maxval=db_entries_count)
     for db_file in self.file_registry.find_all():
         index = index + 1
         progress.update(self, val=index)
         abs_path = join(self.base_dir, db_file.path)
         try:
             changed = False
             stat = os.stat(abs_path)
             if db_file.mtime != int(stat.st_mtime):
                 mtime_changed = mtime_changed + 1
                 changed = True
             if db_file.size != stat.st_size:
                 size_changed = size_changed + 1
                 changed = True
             if changed:
                 db_file.mtime = int(stat.st_mtime)
                 db_file.size = stat.st_size
                 self.hash_queue.append(abs_path, stat.st_size, db_file)
             self.visited_files.append(s(db_file.path))
         except OSError as err:
             if err.errno == errno.ENOENT:
                 __LOG__.debug('deleting %s', quote(db_file.path))
                 self.file_registry.delete(db_file)
                 old_files_deleted = old_files_deleted + 1
             else:
                 raise  # pragma: no cover
     progress.finish(self)
     __LOG__.debug('mtime of %d files changed', mtime_changed)
     __LOG__.debug('size of %d files changed', size_changed)
     __LOG__.debug('%d old files deleted', old_files_deleted)
     return len(self.visited_files)
Example #39
 def _step1_scan_fs(self):
     progress = Progress('scanning fs')
     __LOG__.debug('scanning %s', quote(self.base_dir))
     new_files_found = 0
     progress.start(self)
     for entry in files_of_dir(self.base_dir, self.is_excluded):
         progress.update(self)
         rel_path = os.path.relpath(entry.path, self.base_dir)
         if rel_path not in self.visited_files:
             new_files_found = new_files_found + 1
             db_file = model.File(
                 path=u(rel_path),
                 mtime=int(entry.stats.st_mtime),
                 size=entry.stats.st_size,
             )
             self.hash_queue.append(
                 entry.path,
                 entry.stats.st_size,
                 db_file
             )
     progress.finish(self)
     __LOG__.debug('%d new files found', new_files_found)
     return new_files_found
Example #40
 def format_path(path):
     return quote(path).encode('utf-8')
Example #41
 def format_path(path):
     return quote(path)
Example #42
def main():
    import os
    import sys
    from time import sleep
    import getpass
    import tarfile
    from Naked.commandline import Command
    from Naked.toolshed.shell import execute, muterun
    from Naked.toolshed.system import dir_exists, file_exists, list_all_files, make_path, stdout, stderr, is_dir
    from shellescape import quote

    # ------------------------------------------------------------------------------------------
    # [ Instantiate command line object ]
    #   used for all subsequent conditional logic in the CLI application
    # ------------------------------------------------------------------------------------------
    c = Command(sys.argv[0], sys.argv[1:])
    # ------------------------------------------------------------------------------------------
    # [ VALIDATION LOGIC ] - early validation of appropriate command syntax
    # Test that user entered at least one argument to the executable, print usage if not
    # ------------------------------------------------------------------------------------------
    if not c.command_suite_validates():
        from crypto.settings import usage as crypto_usage

        print(crypto_usage)
        sys.exit(1)
    # ------------------------------------------------------------------------------------------
    # [ HELP, VERSION, USAGE LOGIC ]
    # Naked framework provides default help, usage, and version commands for all applications
    #   --> settings for user messages are assigned in the lib/crypto/settings.py file
    # ------------------------------------------------------------------------------------------
    if c.help():  # User requested crypto help information
        from crypto.settings import help as crypto_help

        print(crypto_help)
        sys.exit(0)
    elif c.usage():  # User requested crypto usage information
        from crypto.settings import usage as crypto_usage

        print(crypto_usage)
        sys.exit(0)
    elif c.version():  # User requested crypto version information
        from crypto.settings import app_name, major_version, minor_version, patch_version

        version_display_string = app_name + " " + major_version + "." + minor_version + "." + patch_version
        print(version_display_string)
        sys.exit(0)
    # ------------------------------------------------------------------------------------------
    # [ APPLICATION LOGIC ]
    #
    # ------------------------------------------------------------------------------------------
    elif c.argc > 1:
        # code for multi-file processing and commands that include options
        use_standard_output = False  # print to stdout flag
        use_file_overwrite = False  # overwrite existing file
        untar_archives = True  # untar decrypted tar archives, true by default

        # set user option flags
        if c.option("--stdout") or c.option("-s"):
            use_standard_output = True
        if c.option("--overwrite") or c.option("-o"):
            use_file_overwrite = True
        if c.option("--nountar"):
            untar_archives = False

        directory_list = []  # directory paths included in the user entered paths from the command line
        file_list = (
            []
        )  # file paths included in the user entered paths from the command line (and inside directories entered)

        for argument in c.argv:
            if file_exists(argument):  # user included a file, add it to the file_list for decryption
                if argument.endswith(".crypt"):
                    file_list.append(argument)  # add .crypt files to the list of files for decryption
                elif argument.endswith(".gpg"):
                    file_list.append(argument)
                elif argument.endswith(".asc"):
                    file_list.append(argument)
                elif argument.endswith(".pgp"):
                    file_list.append(argument)
                else:
                    # cannot identify as an encrypted file, give it a shot anyways but warn user
                    file_list.append(argument)
                    stdout(
                        "Could not confirm that '"
                        + argument
                        + "' is encrypted based upon the file type.  Attempting decryption.  Keep your fingers crossed..."
                    )
            elif dir_exists(argument):  # user included a directory, add it to the directory_list
                directory_list.append(argument)
            else:
                if argument[0] == "-":
                    pass  # if it is an option, do nothing
                else:
                    stderr(
                        "'"
                        + argument
                        + "' does not appear to be an existing file or directory.  Aborting decryption attempt for this request."
                    )

        # unroll the contained directory files into the file_list IF they are encrypted file types
        if len(directory_list) > 0:
            for directory in directory_list:
                directory_file_list = list_all_files(directory)
                for contained_file in directory_file_list:
                    if contained_file.endswith(".crypt"):
                        file_list.append(
                            make_path(directory, contained_file)
                        )  # include the file with a filepath 'directory path/contained_file path'
                    elif contained_file.endswith(".gpg"):
                        file_list.append(make_path(directory, contained_file))
                    elif contained_file.endswith("asc"):
                        file_list.append(make_path(directory, contained_file))
                    elif contained_file.endswith(".pgp"):
                        file_list.append(make_path(directory, contained_file))

        # confirm that there are files for decryption, if not abort
        if len(file_list) == 0:
            stderr("Could not identify files for decryption")
            sys.exit(1)

        # get passphrase used to symmetrically decrypt the file
        passphrase = getpass.getpass("Please enter your passphrase: ")
        if len(passphrase) == 0:  # confirm that user entered a passphrase
            stderr("You did not enter a passphrase. Please repeat your command and try again.")
            sys.exit(1)
        passphrase_confirm = getpass.getpass("Please enter your passphrase again: ")

        if passphrase == passphrase_confirm:
            # begin decryption of each requested file.  the directory path was already added to the file path above
            for encrypted_file in file_list:
                # create the decrypted file name
                decrypted_filename = ""
                if encrypted_file.endswith(".crypt"):
                    decrypted_filename = encrypted_file[0:-6]
                elif (
                    encrypted_file.endswith(".gpg")
                    or encrypted_file.endswith(".asc")
                    or encrypted_file.endswith(".pgp")
                ):
                    decrypted_filename = encrypted_file[0:-4]
                else:
                    decrypted_filename = (
                        encrypted_file + ".decrypt"
                    )  # if it was a file without a known encrypted file type, add the .decrypt suffix

                # determine whether file overwrite will take place with the decrypted file
                skip_file = False  # flag that indicates this file should not be encrypted
                created_tmp_files = False
                if not use_standard_output:  # if not writing a file, no need to check for overwrite
                    if file_exists(decrypted_filename):
                        if (
                            use_file_overwrite
                        ):  # rename the existing file to temp file which will be erased or replaced (on decryption failures) below
                            tmp_filename = decrypted_filename + ".tmp"
                            os.rename(decrypted_filename, tmp_filename)
                            created_tmp_files = True
                        else:
                            stdout(
                                "The file path '"
                                + decrypted_filename
                                + "' already exists.  This file was not decrypted."
                            )
                            skip_file = True

                # begin decryption
                if not skip_file:
                    if (
                        use_standard_output
                    ):  # using --quiet flag to suppress stdout messages from gpg, just want the file data in stdout stream
                        system_command = (
                            "gpg --batch --quiet --passphrase " + quote(passphrase) + " -d " + quote(encrypted_file)
                        )
                        successful_execution = execute(
                            system_command
                        )  # use naked execute function to directly push to stdout, rather than return stdout

                        if not successful_execution:
                            stderr("Unable to decrypt file '" + encrypted_file + "'", 0)
                            if created_tmp_files:  # restore the moved tmp file to original if decrypt failed
                                tmp_filename = decrypted_filename + ".tmp"
                                if file_exists(tmp_filename):
                                    os.rename(tmp_filename, decrypted_filename)
                        else:  # decryption successful but we are in stdout flag so do not include any other output from decrypto
                            pass
                    else:
                        system_command = (
                            "gpg --batch -o "
                            + quote(decrypted_filename)
                            + " --passphrase "
                            + quote(passphrase)
                            + " -d "
                            + quote(encrypted_file)
                        )
                        response = muterun(system_command)

                        if response.exitcode == 0:
                            stdout("'" + encrypted_file + "' decrypted to '" + decrypted_filename + "'")
                        else:  # failed decryption
                            if created_tmp_files:  # restore the moved tmp file to original if decrypt failed
                                tmp_filename = decrypted_filename + ".tmp"
                                if file_exists(tmp_filename):
                                    os.rename(tmp_filename, decrypted_filename)
                            # report the error
                            stderr(response.stderr)
                            stderr("Decryption failed for " + encrypted_file)

                # cleanup: remove the tmp file
                if created_tmp_files:
                    tmp_filename = decrypted_filename + ".tmp"
                    if file_exists(tmp_filename):
                        os.remove(tmp_filename)

                # untar/extract any detected archive file(s)
                if untar_archives is True:
                    if decrypted_filename.endswith(".tar") and tarfile.is_tarfile(decrypted_filename):
                        untar_path_tuple = os.path.split(decrypted_filename)
                        untar_path = untar_path_tuple[0]
                        if use_file_overwrite:
                            with tarfile.open(decrypted_filename) as tar:
                                if len(untar_path) > 0:
                                    tar.extractall(
                                        path=untar_path
                                    )  # use dir path from the decrypted_filename if not CWD
                                    stdout(
                                        "'"
                                        + decrypted_filename
                                        + "' unpacked in the directory path '"
                                        + untar_path
                                        + "'"
                                    )
                                else:
                                    tar.extractall()  # else use CWD
                                    stdout("'" + decrypted_filename + "' unpacked in the current working directory")
                        else:
                            with tarfile.TarFile(decrypted_filename, "r", errorlevel=1) as tar:
                                for tarinfo in tar:
                                    t_file = tarinfo.name
                                    if len(untar_path) > 0:
                                        t_file_path = os.path.join(untar_path, t_file)
                                    else:
                                        t_file_path = t_file
                                    if not os.path.exists(t_file_path):
                                        try:
                                            if len(untar_path) > 0:
                                                tar.extract(t_file, path=untar_path)  # write to the appropriate dir
                                            else:
                                                tar.extract(t_file)  # write to CWD
                                        except IOError as e:
                                            stderr("Failed to unpack the file '" + t_file_path + "' [" + str(e) + "]")
                                    elif is_dir(t_file_path):
                                        pass  # do nothing if it exists and is a directory, no need to warn
                                    else:  # it is a file and it already exists, provide user error message
                                        stderr(
                                            "Failed to unpack the file '"
                                            + t_file_path
                                            + "'. File already exists. Use the --overwrite flag to replace existing files."
                                        )

                        # remove the decrypted tar archive file
                        os.remove(decrypted_filename)

            # overwrite the entered passphrases after file decryption is complete for all files
            passphrase = ""
            passphrase_confirm = ""

            # add a short pause to hinder brute force pexpect style password attacks with decrypto
            sleep(0.2)  # 200ms pause

        else:  # passphrases did not match
            passphrase = ""
            passphrase_confirm = ""
            stderr("The passphrases did not match.  Please enter your command again.")
            sys.exit(1)

    elif c.argc == 1:
        # simple single file or directory processing with default settings
        path = c.arg0
        if file_exists(path):  # SINGLE FILE
            check_existing_file = False  # check for a file with the name of new decrypted filename in the directory

            if path.endswith(".crypt"):
                decrypted_filename = path[0:-6]  # remove the .crypt suffix
                check_existing_file = True
            elif path.endswith(".gpg") or path.endswith(".pgp") or path.endswith(".asc"):
                decrypted_filename = path[0:-4]
                check_existing_file = True
            else:
                decrypted_filename = (
                    path + ".decrypt"
                )  # if there is not a standard file type, then add a .decrypt suffix to the decrypted file name
                stdout(
                    "Could not confirm that the requested file is encrypted based upon the file type.  Attempting decryption.  Keep your fingers crossed..."
                )

            # confirm that the decrypted path does not already exist, if so abort with warning message to user
            if check_existing_file is True:
                if file_exists(decrypted_filename):
                    stderr(
                        "Your file will be decrypted to '"
                        + decrypted_filename
                        + "' and this file path already exists.  Please move the file or use the --overwrite option with your command if you intend to replace the current file."
                    )
                    sys.exit(1)

            # get passphrase used to symmetrically decrypt the file
            passphrase = getpass.getpass("Please enter your passphrase: ")
            if len(passphrase) == 0:  # confirm that user entered a passphrase
                stderr("You did not enter a passphrase. Please repeat your command and try again.")
                sys.exit(1)
            passphrase_confirm = getpass.getpass("Please enter your passphrase again: ")

            # confirm that the passphrases match
            if passphrase == passphrase_confirm:
                system_command = (
                    "gpg --batch -o "
                    + quote(decrypted_filename)
                    + " --passphrase "
                    + quote(passphrase)
                    + " -d "
                    + quote(path)
                )
                response = muterun(system_command)

                if response.exitcode == 0:
                    # unpack tar archive generated from the decryption, if present
                    if decrypted_filename.endswith(".tar") and tarfile.is_tarfile(decrypted_filename):
                        untar_path_tuple = os.path.split(decrypted_filename)
                        untar_path = untar_path_tuple[0]

                        with tarfile.TarFile(decrypted_filename, "r", errorlevel=1) as tar:
                            for tarinfo in tar:
                                t_file = tarinfo.name
                                if len(untar_path) > 0:
                                    t_file_path = os.path.join(untar_path, t_file)
                                else:
                                    t_file_path = t_file
                                if not os.path.exists(t_file_path):
                                    try:
                                        if len(untar_path) > 0:
                                            tar.extract(t_file, path=untar_path)  # write to the appropriate dir
                                        else:
                                            tar.extract(t_file)  # write to CWD
                                    except IOError as e:
                                        stderr("Failed to unpack the file '" + t_file_path + "' [" + str(e) + "]")
                                elif is_dir(t_file_path):
                                    pass  # do nothing if it exists and is a directory, no need to warn
                                else:  # it is a file and it already exists, provide user error message
                                    stderr(
                                        "Failed to unpack the file '"
                                        + t_file_path
                                        + "'. File already exists. Use the --overwrite flag to replace existing files."
                                    )

                        # remove the decrypted tar archive
                        os.remove(decrypted_filename)

                    stdout("Decryption complete")
                    # overwrite user entered passphrases
                    passphrase = ""
                    passphrase_confirm = ""
                    sys.exit(0)
                else:
                    stderr(response.stderr)
                    stderr("Decryption failed")
                    # overwrite user entered passphrases
                    passphrase = ""
                    passphrase_confirm = ""
                    # add a short pause to hinder brute force pexpect style password attacks with decrypto
                    sleep(0.2)  # 200ms pause
                    sys.exit(1)
            else:
                stderr("The passphrases did not match.  Please enter your command again.")
                sys.exit(1)
        elif dir_exists(path):  # SINGLE DIRECTORY
            dirty_directory_file_list = list_all_files(path)
            directory_file_list = [
                x
                for x in dirty_directory_file_list
                if (x.endswith(".crypt") or x.endswith(".gpg") or x.endswith(".pgp") or x.endswith(".asc"))
            ]

            # if there are no encrypted files found, warn and abort
            if len(directory_file_list) == 0:
                stderr("There are no encrypted files in the directory")
                sys.exit(1)

            # prompt for the passphrase
            passphrase = getpass.getpass("Please enter your passphrase: ")
            if len(passphrase) == 0:  # confirm that user entered a passphrase
                stderr("You did not enter a passphrase. Please repeat your command and try again.")
                sys.exit(1)
            passphrase_confirm = getpass.getpass("Please enter your passphrase again: ")

            if passphrase == passphrase_confirm:
                # decrypt all of the encrypted files in the directory
                for filepath in directory_file_list:
                    absolute_filepath = make_path(
                        path, filepath
                    )  # combine the directory path and file name into absolute path

                    # remove file suffix from the decrypted file path that writes to disk
                    if absolute_filepath.endswith(".crypt"):
                        decrypted_filepath = absolute_filepath[0:-6]  # remove the .crypt suffix
                    elif (
                        absolute_filepath.endswith(".gpg")
                        or absolute_filepath.endswith(".pgp")
                        or absolute_filepath.endswith(".asc")
                    ):
                        decrypted_filepath = absolute_filepath[0:-4]

                    # confirm that the file does not already exist
                    if file_exists(decrypted_filepath):
                        stdout(
                            "The file path '" + decrypted_filepath + "' already exists.  This file was not decrypted."
                        )
                    else:
                        system_command = (
                            "gpg --batch -o "
                            + quote(decrypted_filepath)
                            + " --passphrase "
                            + quote(passphrase)
                            + " -d "
                            + quote(absolute_filepath)
                        )
                        response = muterun(system_command)

                        if response.exitcode == 0:
                            stdout("'" + absolute_filepath + "' decrypted to '" + decrypted_filepath + "'")
                        else:
                            stderr(response.stderr)
                            stderr("Decryption failed for " + absolute_filepath)
                # overwrite user entered passphrases
                passphrase = ""
                passphrase_confirm = ""

                # add a short pause to hinder brute force pexpect style password attacks with decrypto
                sleep(0.2)  # 200ms pause
            else:
                # overwrite user entered passphrases
                passphrase = ""
                passphrase_confirm = ""
                stderr("The passphrases did not match.  Please enter your command again.")
                sys.exit(1)
        else:
            # error message, not a file or directory.  user entry error
            stderr("The path that you entered does not appear to be an existing file or directory.  Please try again.")
            sys.exit(1)

    # ------------------------------------------------------------------------------------------
    # [ DEFAULT MESSAGE FOR MATCH FAILURE ]
    #  Message to provide to the user when all above conditional logic fails to meet a true condition
    # ------------------------------------------------------------------------------------------
    else:
        print("Could not complete your request.  Please try again.")
        sys.exit(1)
Beispiel #43
0
def maybe_quote(arg):
    return shellescape.quote(arg) if needs_shell_quoting(arg) else arg
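Beispiel #43 quotes an argument only when a predicate flags it as unsafe, leaving plain tokens readable. A self-contained sketch of that idea, with a hypothetical regex-based needs_shell_quoting (the predicate in the source project may differ):

import re

import shellescape

# Hypothetical predicate: flag the empty string, whitespace, and common
# shell metacharacters.
_UNSAFE = re.compile(r"""(^$|[\s|&;()<>'"$@*?\[\]\\])""")

def needs_shell_quoting(arg):
    return _UNSAFE.search(arg) is not None

def maybe_quote(arg):
    return shellescape.quote(arg) if needs_shell_quoting(arg) else arg

print(maybe_quote("plain-token"))  # unchanged: plain-token
print(maybe_quote("two words"))    # quoted:    'two words'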
Beispiel #44
0
    def _escape(self, value):
        """Escape given value unless it is safe."""
        if isinstance(value, SafeString):
            return value

        return shellescape.quote(value)
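Beispiel #44 passes through values already marked as safe. A minimal sketch, assuming SafeString is simply a str subclass used as a marker (the real class may carry more behavior):

import shellescape

class SafeString(str):
    """Marker: the value is already shell-safe and must not be re-quoted."""

class Escaper:
    def _escape(self, value):
        """Escape given value unless it is safe."""
        if isinstance(value, SafeString):
            return value
        return shellescape.quote(value)

escaper = Escaper()
print(escaper._escape("a b"))                # 'a b'
print(escaper._escape(SafeString("$HOME")))  # $HOME, untouched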
Beispiel #45
0
    def job(self, joborder, input_basedir, output_callback, **kwargs):
        builder = self._init_job(joborder, input_basedir, **kwargs)

        if self.tool["baseCommand"]:
            for n, b in enumerate(aslist(self.tool["baseCommand"])):
                builder.bindings.append({
                    "position": [-1000000, n],
                    "valueFrom": b
                })

        if self.tool.get("arguments"):
            for i, a in enumerate(self.tool["arguments"]):
                if isinstance(a, dict):
                    a = copy.copy(a)
                    if a.get("position"):
                        a["position"] = [a["position"], i]
                    else:
                        a["position"] = [0, i]
                    a["do_eval"] = a["valueFrom"]
                    a["valueFrom"] = None
                    builder.bindings.append(a)
                else:
                    builder.bindings.append({
                        "position": [0, i],
                        "valueFrom": a
                    })

        builder.bindings.sort(key=lambda a: a["position"])

        reffiles = set((f["path"] for f in builder.files))

        j = self.makeJobRunner()
        j.joborder = builder.job
        j.stdin = None
        j.stdout = None
        j.successCodes = self.tool.get("successCodes")
        j.temporaryFailCodes = self.tool.get("temporaryFailCodes")
        j.permanentFailCodes = self.tool.get("permanentFailCodes")
        j.requirements = self.requirements
        j.hints = self.hints
        j.name = uniquename(kwargs.get("name", str(id(j))))

        _logger.debug("[job %s] initializing from %s%s",
                     j.name,
                     self.tool.get("id", ""),
                     " as part of %s" % kwargs["part_of"] if "part_of" in kwargs else "")
        _logger.debug("[job %s] %s", j.name, json.dumps(joborder, indent=4))


        builder.pathmapper = None

        if self.tool.get("stdin"):
            j.stdin = builder.do_eval(self.tool["stdin"])
            if isinstance(j.stdin, dict) and "ref" in j.stdin:
                j.stdin = builder.job[j.stdin["ref"][1:]]["path"]
            reffiles.add(j.stdin)

        if self.tool.get("stdout"):
            j.stdout = builder.do_eval(self.tool["stdout"])
            if os.path.isabs(j.stdout) or ".." in j.stdout:
                raise validate.ValidationException("stdout must be a relative path")

        builder.pathmapper = self.makePathMapper(reffiles, input_basedir, **kwargs)
        builder.requirements = j.requirements

        for f in builder.files:
            f["path"] = builder.pathmapper.mapper(f["path"])[1]

        _logger.debug("[job %s] command line bindings is %s", j.name, json.dumps(builder.bindings, indent=4))
        _logger.debug("[job %s] path mappings is %s", j.name, json.dumps({p: builder.pathmapper.mapper(p) for p in builder.pathmapper.files()}, indent=4))

        dockerReq, _ = self.get_requirement("DockerRequirement")
        if dockerReq and kwargs.get("use_container"):
            out_prefix = kwargs.get("tmp_outdir_prefix")
            j.outdir = kwargs.get("outdir") or tempfile.mkdtemp(prefix=out_prefix)
            tmpdir_prefix = kwargs.get('tmpdir_prefix')
            j.tmpdir = kwargs.get("tmpdir") or tempfile.mkdtemp(prefix=tmpdir_prefix)
        else:
            j.outdir = builder.outdir
            j.tmpdir = builder.tmpdir

        createFiles, _ = self.get_requirement("CreateFileRequirement")
        j.generatefiles = {}
        if createFiles:
            for t in createFiles["fileDef"]:
                j.generatefiles[builder.do_eval(t["filename"])] = copy.deepcopy(builder.do_eval(t["fileContent"]))

        j.environment = {}
        evr, _ = self.get_requirement("EnvVarRequirement")
        if evr:
            for t in evr["envDef"]:
                j.environment[t["envName"]] = builder.do_eval(t["envValue"])

        shellcmd, _ = self.get_requirement("ShellCommandRequirement")
        if shellcmd:
            cmd = []
            for b in builder.bindings:
                arg = builder.generate_arg(b)
                if b.get("shellQuote", True):
                    arg = [shellescape.quote(a) for a in aslist(arg)]
                cmd.extend(aslist(arg))
            j.command_line = ["/bin/sh", "-c", " ".join(cmd)]
        else:
            j.command_line = flatten(map(builder.generate_arg, builder.bindings))

        j.pathmapper = builder.pathmapper
        j.collect_outputs = functools.partial(self.collect_output_ports, self.tool["outputs"], builder)
        j.output_callback = output_callback

        yield j
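Beispiele #45, #46, and #51 share the same tail: with ShellCommandRequirement active, every binding whose shellQuote flag is not explicitly False is quoted, and the pieces are joined into a single /bin/sh -c payload. A reduced sketch of just that step, with hand-written arguments standing in for builder.generate_arg output:

import shellescape

def aslist(x):
    return x if isinstance(x, list) else [x]

# Hypothetical pre-generated arguments; shellQuote=False marks fragments
# (such as redirections) that must reach the shell unquoted.
bindings = [
    {"args": ["grep", "-c"], "shellQuote": True},
    {"args": "some pattern", "shellQuote": True},
    {"args": "> counts.txt", "shellQuote": False},
]

cmd = []
for b in bindings:
    arg = b["args"]
    if b.get("shellQuote", True):
        arg = [shellescape.quote(a) for a in aslist(arg)]
    cmd.extend(aslist(arg))

command_line = ["/bin/sh", "-c", " ".join(cmd)]
print(command_line)  # ['/bin/sh', '-c', "grep -c 'some pattern' > counts.txt"]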
Beispiel #46
0
    def job(self, joborder, output_callback, **kwargs):
        # type: (Dict[str, str], Callable[..., Any], **Any) -> Generator[Union[CommandLineJob, CallbackJob], None, None]

        jobname = uniquename(kwargs.get("name", shortname(self.tool.get("id", "job"))))

        if kwargs.get("cachedir"):
            cacheargs = kwargs.copy()
            cacheargs["outdir"] = "/out"
            cacheargs["tmpdir"] = "/tmp"
            cachebuilder = self._init_job(joborder, **cacheargs)
            cachebuilder.pathmapper = PathMapper(set((f["path"] for f in cachebuilder.files)),
                                                 kwargs["basedir"])

            cmdline = flatten(map(cachebuilder.generate_arg, cachebuilder.bindings))
            (docker_req, docker_is_req) = self.get_requirement("DockerRequirement")
            if docker_req and kwargs.get("use_container") is not False:
                dockerimg = docker_req.get("dockerImageId") or docker_req.get("dockerPull")
                cmdline = ["docker", "run", dockerimg] + cmdline
            keydict = {"cmdline": cmdline}

            for _, f in cachebuilder.pathmapper.items():
                st = os.stat(f[0])
                keydict[f[0]] = [st.st_size, int(st.st_mtime * 1000)]

            interesting = {"DockerRequirement",
                           "EnvVarRequirement",
                           "CreateFileRequirement",
                           "ShellCommandRequirement"}
            for rh in (self.requirements, self.hints):
                for r in reversed(rh):
                    if r["class"] in interesting and r["class"] not in keydict:
                        keydict[r["class"]] = r

            keydictstr = json.dumps(keydict, separators=(',', ':'), sort_keys=True)
            cachekey = hashlib.md5(keydictstr.encode('utf-8')).hexdigest()  # encode first: md5 requires bytes on Python 3

            _logger.debug("[job %s] keydictstr is %s -> %s", jobname, keydictstr, cachekey)

            jobcache = os.path.join(kwargs["cachedir"], cachekey)
            jobcachepending = jobcache + ".pending"

            if os.path.isdir(jobcache) and not os.path.isfile(jobcachepending):
                if docker_req and kwargs.get("use_container") is not False:
                    cachebuilder.outdir = kwargs.get("docker_outdir") or "/var/spool/cwl"
                else:
                    cachebuilder.outdir = jobcache

                _logger.info("[job %s] Using cached output in %s", jobname, jobcache)
                yield CallbackJob(self, output_callback, cachebuilder, jobcache)
                return
            else:
                _logger.info("[job %s] Output of job will be cached in %s", jobname, jobcache)
                shutil.rmtree(jobcache, True)
                os.makedirs(jobcache)
                kwargs["outdir"] = jobcache
                open(jobcachepending, "w").close()
                def rm_pending_output_callback(output_callback, jobcachepending,
                                               outputs, processStatus):
                    if processStatus == "success":
                        os.remove(jobcachepending)
                    output_callback(outputs, processStatus)
                output_callback = cast(
                        Callable[..., Any],  # known bug in mypy
                        # https://github.com/python/mypy/issues/797
                        partial(rm_pending_output_callback, output_callback,
                            jobcachepending))

        builder = self._init_job(joborder, **kwargs)

        reffiles = set((f["path"] for f in builder.files))

        j = self.makeJobRunner()
        j.builder = builder
        j.joborder = builder.job
        j.stdin = None
        j.stdout = None
        j.successCodes = self.tool.get("successCodes")
        j.temporaryFailCodes = self.tool.get("temporaryFailCodes")
        j.permanentFailCodes = self.tool.get("permanentFailCodes")
        j.requirements = self.requirements
        j.hints = self.hints
        j.name = jobname

        _logger.debug(u"[job %s] initializing from %s%s",
                     j.name,
                     self.tool.get("id", ""),
                     u" as part of %s" % kwargs["part_of"] if "part_of" in kwargs else "")
        _logger.debug(u"[job %s] %s", j.name, json.dumps(joborder, indent=4))


        builder.pathmapper = None

        if self.tool.get("stdin"):
            j.stdin = builder.do_eval(self.tool["stdin"])
            reffiles.add(j.stdin)

        if self.tool.get("stdout"):
            j.stdout = builder.do_eval(self.tool["stdout"])
            if os.path.isabs(j.stdout) or ".." in j.stdout:
                raise validate.ValidationException("stdout must be a relative path")

        builder.pathmapper = self.makePathMapper(reffiles, **kwargs)
        builder.requirements = j.requirements

        # map files to assigned path inside a container. We need to also explicitly
        # walk over input as implicit reassignment doesn't reach everything in builder.bindings
        def _check_adjust(f):  # type: (Dict[str,Any]) -> Dict[str,Any]
            if not f.get("containerfs"):
                f["path"] = builder.pathmapper.mapper(f["path"])[1]
                f["containerfs"] = True
            return f

        _logger.debug(u"[job %s] path mappings is %s", j.name, json.dumps({p: builder.pathmapper.mapper(p) for p in builder.pathmapper.files()}, indent=4))

        adjustFileObjs(builder.files, _check_adjust)
        adjustFileObjs(builder.bindings, _check_adjust)

        _logger.debug(u"[job %s] command line bindings is %s", j.name, json.dumps(builder.bindings, indent=4))

        dockerReq = self.get_requirement("DockerRequirement")[0]
        if dockerReq and kwargs.get("use_container"):
            out_prefix = kwargs.get("tmp_outdir_prefix")
            j.outdir = kwargs.get("outdir") or tempfile.mkdtemp(prefix=out_prefix)
            tmpdir_prefix = kwargs.get('tmpdir_prefix')
            j.tmpdir = kwargs.get("tmpdir") or tempfile.mkdtemp(prefix=tmpdir_prefix)
        else:
            j.outdir = builder.outdir
            j.tmpdir = builder.tmpdir

        createFiles = self.get_requirement("CreateFileRequirement")[0]
        j.generatefiles = {}
        if createFiles:
            for t in createFiles["fileDef"]:
                j.generatefiles[builder.do_eval(t["filename"])] = copy.deepcopy(builder.do_eval(t["fileContent"]))

        j.environment = {}
        evr = self.get_requirement("EnvVarRequirement")[0]
        if evr:
            for t in evr["envDef"]:
                j.environment[t["envName"]] = builder.do_eval(t["envValue"])

        shellcmd = self.get_requirement("ShellCommandRequirement")[0]
        if shellcmd:
            cmd = []  # type: List[str]
            for b in builder.bindings:
                arg = builder.generate_arg(b)
                if b.get("shellQuote", True):
                    arg = [shellescape.quote(a) for a in aslist(arg)]
                cmd.extend(aslist(arg))
            j.command_line = ["/bin/sh", "-c", " ".join(cmd)]
        else:
            j.command_line = flatten(map(builder.generate_arg, builder.bindings))

        j.pathmapper = builder.pathmapper
        j.collect_outputs = partial(
                self.collect_output_ports, self.tool["outputs"], builder)
        j.output_callback = output_callback

        yield j
Beispiel #47
0
    def run(self, dry_run=False, pull_image=True, rm_container=True, rm_tmpdir=True, move_outputs=True, **kwargs):
        #_logger.info("[job %s] starting with outdir %s", id(self), self.outdir)

        if not os.path.exists(self.outdir):
            os.makedirs(self.outdir)

        #with open(os.path.join(outdir, "cwl.input.json"), "w") as fp:
        #    json.dump(self.joborder, fp)

        runtime = []
        env = {"TMPDIR": self.tmpdir}

        (docker_req, docker_is_req) = get_feature(self, "DockerRequirement")

        for f in self.pathmapper.files():
            if not os.path.exists(self.pathmapper.mapper(f)[0]):
                raise WorkflowException("Required input file %s not found" % self.pathmapper.mapper(f)[0])

        img_id = None
        if docker_req and kwargs.get("use_container") is not False:
            env = os.environ
            img_id = docker.get_from_requirements(docker_req, docker_is_req, pull_image)

        if docker_is_req and img_id is None:
            raise WorkflowException("Docker is required for running this tool.")

        if img_id:
            runtime = ["docker", "run", "-i"]
            for src in self.pathmapper.files():
                vol = self.pathmapper.mapper(src)
                runtime.append("--volume=%s:%s:ro" % vol)
            runtime.append("--volume=%s:%s:rw" % (os.path.abspath(self.outdir), "/tmp/job_output"))
            runtime.append("--volume=%s:%s:rw" % (os.path.abspath(self.tmpdir), "/tmp/job_tmp"))
            runtime.append("--workdir=%s" % ("/tmp/job_output"))
            runtime.append("--user=%s" % (os.geteuid()))

            if rm_container:
                runtime.append("--rm")

            runtime.append("--env=TMPDIR=/tmp/job_tmp")

            for t, v in self.environment.items():
                runtime.append("--env=%s=%s" % (t, v))

            runtime.append(img_id)
        else:
            env = self.environment
            if not os.path.exists(self.tmpdir):
                os.makedirs(self.tmpdir)
            env["TMPDIR"] = self.tmpdir

        stdin = None
        stdout = None

        _logger.info("[job %s] exec %s%s%s",
                     id(self),
                     " ".join([shellescape.quote(arg) if needs_shell_quoting(arg) else arg for arg in (runtime + self.command_line)]),
                     ' < %s' % (self.stdin) if self.stdin else '',
                     ' > %s' % os.path.join(self.outdir, self.stdout) if self.stdout else '')

        if dry_run:
            return (self.outdir, {})

        outputs = {}

        try:
            for t in self.generatefiles:
                if isinstance(self.generatefiles[t], dict):
                    os.symlink(self.generatefiles[t]["path"], os.path.join(self.outdir, t))
                else:
                    with open(os.path.join(self.outdir, t), "w") as f:
                        f.write(self.generatefiles[t])

            if self.stdin:
                stdin = open(self.pathmapper.mapper(self.stdin)[0], "rb")
            else:
                stdin = subprocess.PIPE

            if self.stdout:
                absout = os.path.join(self.outdir, self.stdout)
                dn = os.path.dirname(absout)
                if dn and not os.path.exists(dn):
                    os.makedirs(dn)
                stdout = open(absout, "wb")
            else:
                stdout = sys.stderr

            sp = subprocess.Popen(runtime + self.command_line,
                                  shell=False,
                                  close_fds=True,
                                  stdin=stdin,
                                  stdout=stdout,
                                  env=env,
                                  cwd=self.outdir)

            if stdin == subprocess.PIPE:
                sp.stdin.close()

            rcode = sp.wait()

            if stdin != subprocess.PIPE:
                stdin.close()

            if stdout is not sys.stderr:
                stdout.close()

            if self.successCodes and rcode in self.successCodes:
                processStatus = "success"
            elif self.temporaryFailCodes and rcode in self.temporaryFailCodes:
                processStatus = "temporaryFail"
            elif self.permanentFailCodes and rcode in self.permanentFailCodes:
                processStatus = "permanentFail"
            elif rcode == 0:
                processStatus = "success"
            else:
                processStatus = "permanentFail"

            for t in self.generatefiles:
                if isinstance(self.generatefiles[t], dict):
                    os.remove(os.path.join(self.outdir, t))
                    os.symlink(self.pathmapper.reversemap(self.generatefiles[t]["path"])[1], os.path.join(self.outdir, t))

            outputs = self.collect_outputs(self.outdir)

        except Exception:
            _logger.exception("Exception while running job")
            processStatus = "permanentFail"

        if processStatus != "success":
            _logger.warning("[job %s] completed %s", id(self), processStatus)
        else:
            _logger.debug("[job %s] completed %s", id(self), processStatus)
        _logger.debug("[job %s] %s", id(self), json.dumps(outputs, indent=4))

        self.output_callback(outputs, processStatus)

        if rm_tmpdir:
            _logger.debug("[job %s] Removing temporary directory %s", id(self), self.tmpdir)
            shutil.rmtree(self.tmpdir, True)

        if move_outputs and empty_subtree(self.outdir):
            _logger.debug("[job %s] Removing empty output directory %s", id(self), self.tmpdir)
            shutil.rmtree(self.outdir, True)
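Note that Beispiel #47 quotes arguments only for the log line; the actual subprocess.Popen call receives the raw argv list with shell=False, so no shell ever parses the quoted form. A sketch of that display-only quoting, with a hypothetical predicate:

import re
import subprocess

import shellescape

needs_shell_quoting = re.compile(r"[\s|&;()<>'\"$@]").search  # hypothetical

command_line = ["echo", "hello world", "a&b"]

# The quoted string is what a user could paste into a shell by hand.
printable = " ".join(
    shellescape.quote(a) if needs_shell_quoting(a) else a for a in command_line
)
print("exec " + printable)  # exec echo 'hello world' 'a&b'

# The real call passes the list directly; no quoting is needed here.
subprocess.run(command_line, check=True)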
Beispiel #48
0
    def _execute(self, runtime, env, rm_tmpdir=True, move_outputs="move"):
        # type: (List[Text], MutableMapping[Text, Text], bool, Text) -> None

        scr, _ = get_feature(self, "ShellCommandRequirement")

        shouldquote = None  # type: Callable[[Any], Any]
        if scr:
            shouldquote = lambda x: False
        else:
            shouldquote = needs_shell_quoting_re.search

        _logger.info(u"[job %s] %s$ %s%s%s%s",
                     self.name,
                     self.outdir,
                     " \\\n    ".join([shellescape.quote(Text(arg)) if shouldquote(Text(arg)) else Text(arg) for arg in
                                       (runtime + self.command_line)]),
                     u' < %s' % self.stdin if self.stdin else '',
                     u' > %s' % os.path.join(self.outdir, self.stdout) if self.stdout else '',
                     u' 2> %s' % os.path.join(self.outdir, self.stderr) if self.stderr else '')

        outputs = {}  # type: Dict[Text,Text]

        try:
            stdin_path = None
            if self.stdin:
                stdin_path = self.pathmapper.reversemap(self.stdin)[1]

            stderr_path = None
            if self.stderr:
                abserr = os.path.join(self.outdir, self.stderr)
                dnerr = os.path.dirname(abserr)
                if dnerr and not os.path.exists(dnerr):
                    os.makedirs(dnerr)
                stderr_path = abserr

            stdout_path = None
            if self.stdout:
                absout = os.path.join(self.outdir, self.stdout)
                dn = os.path.dirname(absout)
                if dn and not os.path.exists(dn):
                    os.makedirs(dn)
                stdout_path = absout

            commands = [Text(x) for x in (runtime + self.command_line)]
            job_script_contents = None  # type: Optional[Text]
            builder = getattr(self, "builder", None)  # type: Builder
            if builder is not None:
                job_script_contents = builder.build_job_script(commands)
            rcode = _job_popen(
                commands,
                stdin_path=stdin_path,
                stdout_path=stdout_path,
                stderr_path=stderr_path,
                env=env,
                cwd=self.outdir,
                job_script_contents=job_script_contents,
            )

            if self.successCodes and rcode in self.successCodes:
                processStatus = "success"
            elif self.temporaryFailCodes and rcode in self.temporaryFailCodes:
                processStatus = "temporaryFail"
            elif self.permanentFailCodes and rcode in self.permanentFailCodes:
                processStatus = "permanentFail"
            elif rcode == 0:
                processStatus = "success"
            else:
                processStatus = "permanentFail"

            if self.generatefiles["listing"]:
                relink_initialworkdir(self.generatemapper, self.outdir, self.builder.outdir, inplace_update=self.inplace_update)

            outputs = self.collect_outputs(self.outdir)
            outputs = bytes2str_in_dicts(outputs)  # type: ignore

        except OSError as e:
            if e.errno == 2:
                if runtime:
                    _logger.error(u"'%s' not found", runtime[0])
                else:
                    _logger.error(u"'%s' not found", self.command_line[0])
            else:
                _logger.exception("Exception while running job")
            processStatus = "permanentFail"
        except WorkflowException as e:
            _logger.error(u"[job %s] Job error:\n%s" % (self.name, e))
            processStatus = "permanentFail"
        except Exception:
            _logger.exception("Exception while running job")
            processStatus = "permanentFail"

        if processStatus != "success":
            _logger.warning(u"[job %s] completed %s", self.name, processStatus)
        else:
            _logger.info(u"[job %s] completed %s", self.name, processStatus)

        if _logger.isEnabledFor(logging.DEBUG):
            _logger.debug(u"[job %s] %s", self.name, json.dumps(outputs, indent=4))

        self.output_callback(outputs, processStatus)

        if self.stagedir and os.path.exists(self.stagedir):
            _logger.debug(u"[job %s] Removing input staging directory %s", self.name, self.stagedir)
            shutil.rmtree(self.stagedir, True)

        if rm_tmpdir:
            _logger.debug(u"[job %s] Removing temporary directory %s", self.name, self.tmpdir)
            shutil.rmtree(self.tmpdir, True)
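Beispiele #48 and #49 disable the quoting predicate entirely when ShellCommandRequirement applies, because at that point the command line is already a /bin/sh -c wrapper whose payload contains intentional shell syntax. A compressed sketch of the predicate selection:

import re

needs_shell_quoting_re = re.compile(r"[\s|&;()<>'\"$@]")  # hypothetical definition

def pick_shouldquote(shell_requirement_active):
    # With a shell requirement nothing is re-quoted for the log line;
    # otherwise quote whatever the regex flags.
    if shell_requirement_active:
        return lambda x: False
    return needs_shell_quoting_re.search

print(bool(pick_shouldquote(False)("a b")))  # True  -> would be quoted
print(bool(pick_shouldquote(True)("a b")))   # False -> logged verbatim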
Beispiel #49
0
    def _execute(self,
                 runtime,                # type: List[Text]
                 env,                    # type: MutableMapping[Text, Text]
                 runtimeContext,         # type: RuntimeContext
                 monitor_function=None,  # type: Optional[Callable]
                 ):                      # type: (...) -> None

        scr, _ = self.get_requirement("ShellCommandRequirement")

        shouldquote = needs_shell_quoting_re.search  # type: Callable[[Any], Any]
        if scr is not None:
            shouldquote = lambda x: False

        _logger.info(u"[job %s] %s$ %s%s%s%s",
                     self.name,
                     self.outdir,
                     " \\\n    ".join([shellescape.quote(Text(arg)) if shouldquote(Text(arg)) else Text(arg) for arg in
                                       (runtime + self.command_line)]),
                     u' < %s' % self.stdin if self.stdin else '',
                     u' > %s' % os.path.join(self.outdir, self.stdout) if self.stdout else '',
                     u' 2> %s' % os.path.join(self.outdir, self.stderr) if self.stderr else '')
        if self.joborder is not None and runtimeContext.research_obj is not None:
            job_order = self.joborder
            assert runtimeContext.process_run_id is not None
            assert runtimeContext.prov_obj is not None
            runtimeContext.prov_obj.used_artefacts(
                job_order, runtimeContext.process_run_id, str(self.name))
        outputs = {}  # type: Dict[Text,Text]
        try:
            stdin_path = None
            if self.stdin is not None:
                rmap = self.pathmapper.reversemap(self.stdin)
                if rmap is None:
                    raise WorkflowException(
                        "{} missing from pathmapper".format(self.stdin))
                else:
                    stdin_path = rmap[1]

            stderr_path = None
            if self.stderr is not None:
                abserr = os.path.join(self.outdir, self.stderr)
                dnerr = os.path.dirname(abserr)
                if dnerr and not os.path.exists(dnerr):
                    os.makedirs(dnerr)
                stderr_path = abserr

            stdout_path = None
            if self.stdout is not None:
                absout = os.path.join(self.outdir, self.stdout)
                dnout = os.path.dirname(absout)
                if dnout and not os.path.exists(dnout):
                    os.makedirs(dnout)
                stdout_path = absout

            commands = [Text(x) for x in runtime + self.command_line]
            if runtimeContext.secret_store is not None:
                commands = runtimeContext.secret_store.retrieve(commands)
                env = runtimeContext.secret_store.retrieve(env)

            job_script_contents = None  # type: Optional[Text]
            builder = getattr(self, "builder", None)  # type: Builder
            if builder is not None:
                job_script_contents = builder.build_job_script(commands)
            rcode = _job_popen(
                commands,
                stdin_path=stdin_path,
                stdout_path=stdout_path,
                stderr_path=stderr_path,
                env=env,
                cwd=self.outdir,
                job_dir=tempfile.mkdtemp(prefix=getdefault(runtimeContext.tmp_outdir_prefix, DEFAULT_TMP_PREFIX)),
                job_script_contents=job_script_contents,
                timelimit=self.timelimit,
                name=self.name,
                monitor_function=monitor_function
            )

            if rcode in self.successCodes:
                processStatus = "success"
            elif rcode in self.temporaryFailCodes:
                processStatus = "temporaryFail"
            elif rcode in self.permanentFailCodes:
                processStatus = "permanentFail"
            elif rcode == 0:
                processStatus = "success"
            else:
                processStatus = "permanentFail"

            if 'listing' in self.generatefiles:
                assert self.generatemapper is not None
                relink_initialworkdir(
                    self.generatemapper, self.outdir, self.builder.outdir,
                    inplace_update=self.inplace_update)

            outputs = self.collect_outputs(self.outdir)
            outputs = bytes2str_in_dicts(outputs)  # type: ignore
        except OSError as e:
            if e.errno == 2:
                if runtime:
                    _logger.error(u"'%s' not found: %s", runtime[0], e)
                else:
                    _logger.error(u"'%s' not found: %s", self.command_line[0], e)
            else:
                _logger.exception(u"Exception while running job")
            processStatus = "permanentFail"
        except WorkflowException as err:
            _logger.error(u"[job %s] Job error:\n%s", self.name, err)
            processStatus = "permanentFail"
        except Exception:
            _logger.exception(u"Exception while running job")
            processStatus = "permanentFail"
        if runtimeContext.research_obj is not None \
                and self.prov_obj is not None \
                and runtimeContext.process_run_id is not None:
            # creating entities for the outputs produced by each step (in the provenance document)
            self.prov_obj.record_process_end(str(self.name), runtimeContext.process_run_id,
                                             outputs, datetime.datetime.now())
        if processStatus != "success":
            _logger.warning(u"[job %s] completed %s", self.name, processStatus)
        else:
            _logger.info(u"[job %s] completed %s", self.name, processStatus)

        if _logger.isEnabledFor(logging.DEBUG):
            _logger.debug(u"[job %s] %s", self.name,
                          json_dumps(outputs, indent=4))

        if self.generatemapper is not None and runtimeContext.secret_store is not None:
            # Delete any runtime-generated files containing secrets.
            for _, p in self.generatemapper.items():
                if p.type == "CreateFile":
                    if runtimeContext.secret_store.has_secret(p.resolved):
                        host_outdir = self.outdir
                        container_outdir = self.builder.outdir
                        host_outdir_tgt = p.target
                        if p.target.startswith(container_outdir + "/"):
                            host_outdir_tgt = os.path.join(
                                host_outdir, p.target[len(container_outdir) + 1:])
                        os.remove(host_outdir_tgt)

        if runtimeContext.workflow_eval_lock is None:
            raise WorkflowException("runtimeContext.workflow_eval_lock must not be None")

        with runtimeContext.workflow_eval_lock:
            self.output_callback(outputs, processStatus)

        if self.stagedir is not None and os.path.exists(self.stagedir):
            _logger.debug(u"[job %s] Removing input staging directory %s", self.name, self.stagedir)
            shutil.rmtree(self.stagedir, True)

        if runtimeContext.rm_tmpdir:
            _logger.debug(u"[job %s] Removing temporary directory %s", self.name, self.tmpdir)
            shutil.rmtree(self.tmpdir, True)
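The secret-cleanup block near the end of Beispiel #49 translates a container path back to its host equivalent before deleting the generated file. A standalone sketch of that translation, with hypothetical directories:

import os

host_outdir = "/tmp/job123"            # hypothetical host output dir
container_outdir = "/var/spool/cwl"    # hypothetical in-container output dir
target = "/var/spool/cwl/creds/token"  # file created inside the container

host_outdir_tgt = target
if target.startswith(container_outdir + "/"):
    host_outdir_tgt = os.path.join(host_outdir, target[len(container_outdir) + 1:])

print(host_outdir_tgt)  # /tmp/job123/creds/token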
Beispiel #50
0
    def run(self, dry_run=False, pull_image=True, rm_container=True, rm_tmpdir=True, move_outputs=True, **kwargs):
        if not os.path.exists(self.outdir):
            os.makedirs(self.outdir)

        #with open(os.path.join(outdir, "cwl.input.json"), "w") as fp:
        #    json.dump(self.joborder, fp)

        runtime = []
        env = {"TMPDIR": self.tmpdir}

        (docker_req, docker_is_req) = get_feature(self, "DockerRequirement")

        for f in self.pathmapper.files():
            if not os.path.isfile(self.pathmapper.mapper(f)[0]):
                raise WorkflowException("Required input file %s not found or is not a regular file." % self.pathmapper.mapper(f)[0])

        img_id = None
        if docker_req and kwargs.get("use_container") is not False:
            env = os.environ
            img_id = docker.get_from_requirements(docker_req, docker_is_req, pull_image)

        if docker_is_req and img_id is None:
            raise WorkflowException("Docker is required for running this tool.")

        if img_id:
            runtime = ["docker", "run", "-i"]
            for src in self.pathmapper.files():
                vol = self.pathmapper.mapper(src)
                runtime.append("--volume=%s:%s:ro" % vol)
            runtime.append("--volume=%s:%s:rw" % (os.path.abspath(self.outdir), "/var/spool/cwl"))
            runtime.append("--volume=%s:%s:rw" % (os.path.abspath(self.tmpdir), "/tmp"))
            runtime.append("--workdir=%s" % ("/var/spool/cwl"))
            runtime.append("--read-only=true")
            runtime.append("--net=none")
            euid = docker_vm_uid() or os.geteuid()
            runtime.append("--user=%s" % (euid))

            if rm_container:
                runtime.append("--rm")

            runtime.append("--env=TMPDIR=/tmp")

            for t, v in self.environment.items():
                runtime.append("--env=%s=%s" % (t, v))

            runtime.append(img_id)
        else:
            env = self.environment
            if not os.path.exists(self.tmpdir):
                os.makedirs(self.tmpdir)
            env["TMPDIR"] = self.tmpdir
            vars_to_preserve = kwargs.get("preserve_environment")
            if vars_to_preserve is not None:
                for key, value in os.environ.items():
                    if key in vars_to_preserve and key not in env:
                        env[key] = value

        stdin = None
        stdout = None

        scr, _ = get_feature(self, "ShellCommandRequirement")

        if scr:
            shouldquote = lambda x: False
        else:
            shouldquote = needs_shell_quoting_re.search

        _logger.info("[job %s] %s$ %s%s%s",
                     self.name,
                     self.outdir,
                     " ".join([shellescape.quote(str(arg)) if shouldquote(str(arg)) else str(arg) for arg in (runtime + self.command_line)]),
                     ' < %s' % (self.stdin) if self.stdin else '',
                     ' > %s' % os.path.join(self.outdir, self.stdout) if self.stdout else '')

        if dry_run:
            return (self.outdir, {})

        outputs = {}

        try:
            for t in self.generatefiles:
                if isinstance(self.generatefiles[t], dict):
                    src = self.generatefiles[t]["path"]
                    dst = os.path.join(self.outdir, t)
                    if os.path.dirname(self.pathmapper.reversemap(src)[1]) != self.outdir:
                        _logger.debug("symlinking %s to %s", dst, src)
                        os.symlink(src, dst)
                else:
                    with open(os.path.join(self.outdir, t), "w") as f:
                        f.write(self.generatefiles[t])

            if self.stdin:
                stdin = open(self.pathmapper.mapper(self.stdin)[0], "rb")
            else:
                stdin = subprocess.PIPE

            if self.stdout:
                absout = os.path.join(self.outdir, self.stdout)
                dn = os.path.dirname(absout)
                if dn and not os.path.exists(dn):
                    os.makedirs(dn)
                stdout = open(absout, "wb")
            else:
                stdout = sys.stderr

            sp = subprocess.Popen([str(x) for x in runtime + self.command_line],
                                  shell=False,
                                  close_fds=True,
                                  stdin=stdin,
                                  stdout=stdout,
                                  env=env,
                                  cwd=self.outdir)

            if stdin == subprocess.PIPE:
                sp.stdin.close()

            rcode = sp.wait()

            if stdin != subprocess.PIPE:
                stdin.close()

            if stdout is not sys.stderr:
                stdout.close()

            if self.successCodes and rcode in self.successCodes:
                processStatus = "success"
            elif self.temporaryFailCodes and rcode in self.temporaryFailCodes:
                processStatus = "temporaryFail"
            elif self.permanentFailCodes and rcode in self.permanentFailCodes:
                processStatus = "permanentFail"
            elif rcode == 0:
                processStatus = "success"
            else:
                processStatus = "permanentFail"

            for t in self.generatefiles:
                if isinstance(self.generatefiles[t], dict):
                    src = self.generatefiles[t]["path"]
                    dst = os.path.join(self.outdir, t)
                    if os.path.dirname(self.pathmapper.reversemap(src)[1]) != self.outdir:
                        os.remove(dst)
                        os.symlink(self.pathmapper.reversemap(src)[1], dst)

            outputs = self.collect_outputs(self.outdir)

        except OSError as e:
            if e.errno == 2:
                if runtime:
                    _logger.error("'%s' not found", runtime[0])
                else:
                    _logger.error("'%s' not found", self.command_line[0])
            else:
                _logger.exception("Exception while running job")
            processStatus = "permanentFail"
        except WorkflowException as e:
            _logger.error("Error while running job: %s" % e)
            processStatus = "permanentFail"
        except Exception:
            _logger.exception("Exception while running job")
            processStatus = "permanentFail"

        if processStatus != "success":
            _logger.warning("[job %s] completed %s", self.name, processStatus)
        else:
            _logger.debug("[job %s] completed %s", self.name, processStatus)
        _logger.debug("[job %s] %s", self.name, json.dumps(outputs, indent=4))

        self.output_callback(outputs, processStatus)

        if rm_tmpdir:
            _logger.debug("[job %s] Removing temporary directory %s", self.name, self.tmpdir)
            shutil.rmtree(self.tmpdir, True)

        if move_outputs and empty_subtree(self.outdir):
            _logger.debug("[job %s] Removing empty output directory %s", self.name, self.outdir)
            shutil.rmtree(self.outdir, True)
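Beispiele #47 and #50 build the docker run prefix with plain %s formatting and no shell quoting; that is safe because the finished list goes straight to Popen with shell=False. A sketch of the prefix construction with hypothetical paths:

import os

outdir = "/tmp/out dir"  # hypothetical; the space is harmless since no shell parses this
tmpdir = "/tmp/job_tmp"
img_id = "ubuntu:20.04"

runtime = ["docker", "run", "-i"]
runtime.append("--volume=%s:%s:rw" % (os.path.abspath(outdir), "/var/spool/cwl"))
runtime.append("--volume=%s:%s:rw" % (os.path.abspath(tmpdir), "/tmp"))
runtime.append("--workdir=%s" % "/var/spool/cwl")
runtime.append("--user=%s" % os.geteuid())
runtime.append("--env=TMPDIR=/tmp")
runtime.append(img_id)
print(runtime)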
Beispiel #51
0
    def job(self,
            job_order,  # type: Dict[Text, Text]
            output_callbacks,  # type: Callable[[Any, Any], Any]
            **kwargs  # type: Any
            ):
        # type: (...) -> Generator[Union[JobBase, CallbackJob], None, None]

        jobname = uniquename(kwargs.get("name", shortname(self.tool.get("id", "job"))))
        if kwargs.get("cachedir"):
            cacheargs = kwargs.copy()
            cacheargs["outdir"] = "/out"
            cacheargs["tmpdir"] = "/tmp"
            cacheargs["stagedir"] = "/stage"
            cachebuilder = self._init_job(job_order, **cacheargs)
            cachebuilder.pathmapper = PathMapper(cachebuilder.files,
                                                 kwargs["basedir"],
                                                 cachebuilder.stagedir,
                                                 separateDirs=False)
            _check_adjust = partial(check_adjust, cachebuilder)
            visit_class([cachebuilder.files, cachebuilder.bindings],
                       ("File", "Directory"), _check_adjust)

            cmdline = flatten(list(map(cachebuilder.generate_arg, cachebuilder.bindings)))
            (docker_req, docker_is_req) = self.get_requirement("DockerRequirement")
            if docker_req and kwargs.get("use_container"):
                dockerimg = docker_req.get("dockerImageId") or docker_req.get("dockerPull")
            elif kwargs.get("default_container", None) is not None and kwargs.get("use_container"):
                dockerimg = kwargs.get("default_container")
            else:
                dockerimg = None

            if dockerimg:
                cmdline = ["docker", "run", dockerimg] + cmdline
            keydict = {u"cmdline": cmdline}

            if "stdout" in self.tool:
                keydict["stdout"] = self.tool["stdout"]
            for location, f in cachebuilder.pathmapper.items():
                if f.type == "File":
                    checksum = next((e['checksum'] for e in cachebuilder.files
                            if 'location' in e and e['location'] == location
                            and 'checksum' in e
                            and e['checksum'] != 'sha1$hash'), None)
                    st = os.stat(f.resolved)
                    if checksum:
                        keydict[f.resolved] = [st.st_size, checksum]
                    else:
                        keydict[f.resolved] = [st.st_size, int(st.st_mtime * 1000)]

            interesting = {"DockerRequirement",
                           "EnvVarRequirement",
                           "CreateFileRequirement",
                           "ShellCommandRequirement"}
            for rh in (self.requirements, self.hints):
                for r in reversed(rh):
                    if r["class"] in interesting and r["class"] not in keydict:
                        keydict[r["class"]] = r

            keydictstr = json.dumps(keydict, separators=(',', ':'), sort_keys=True)
            cachekey = hashlib.md5(keydictstr.encode('utf-8')).hexdigest()

            _logger.debug("[job %s] keydictstr is %s -> %s", jobname,
                          keydictstr, cachekey)

            jobcache = os.path.join(kwargs["cachedir"], cachekey)
            jobcachepending = jobcache + ".pending"

            if os.path.isdir(jobcache) and not os.path.isfile(jobcachepending):
                if docker_req and kwargs.get("use_container"):
                    cachebuilder.outdir = kwargs.get("docker_outdir") or "/var/spool/cwl"
                else:
                    cachebuilder.outdir = jobcache

                _logger.info("[job %s] Using cached output in %s", jobname, jobcache)
                yield CallbackJob(self, output_callbacks, cachebuilder, jobcache)
                return
            else:
                _logger.info("[job %s] Output of job will be cached in %s", jobname, jobcache)
                shutil.rmtree(jobcache, True)
                os.makedirs(jobcache)
                kwargs["outdir"] = jobcache
                open(jobcachepending, "w").close()

                def rm_pending_output_callback(output_callbacks, jobcachepending,
                                               outputs, processStatus):
                    if processStatus == "success":
                        os.remove(jobcachepending)
                    output_callbacks(outputs, processStatus)

                output_callbacks = cast(
                    Callable[..., Any],  # known bug in mypy
                    # https://github.com/python/mypy/issues/797
                    partial(rm_pending_output_callback, output_callbacks,
                            jobcachepending))

        builder = self._init_job(job_order, **kwargs)

        reffiles = copy.deepcopy(builder.files)

        j = self.makeJobRunner(**kwargs)
        j.builder = builder
        j.joborder = builder.job
        j.make_pathmapper = self.makePathMapper
        j.stdin = None
        j.stderr = None
        j.stdout = None
        j.successCodes = self.tool.get("successCodes")
        j.temporaryFailCodes = self.tool.get("temporaryFailCodes")
        j.permanentFailCodes = self.tool.get("permanentFailCodes")
        j.requirements = self.requirements
        j.hints = self.hints
        j.name = jobname

        debug = _logger.isEnabledFor(logging.DEBUG)

        if debug:
            _logger.debug(u"[job %s] initializing from %s%s",
                          j.name,
                          self.tool.get("id", ""),
                          u" as part of %s" % kwargs["part_of"] if "part_of" in kwargs else "")
            _logger.debug(u"[job %s] %s", j.name, json.dumps(job_order, indent=4))

        builder.pathmapper = None
        make_path_mapper_kwargs = kwargs
        if "stagedir" in make_path_mapper_kwargs:
            make_path_mapper_kwargs = make_path_mapper_kwargs.copy()
            del make_path_mapper_kwargs["stagedir"]

        builder.pathmapper = self.makePathMapper(reffiles, builder.stagedir, **make_path_mapper_kwargs)
        builder.requirements = j.requirements

        _check_adjust = partial(check_adjust, builder)

        visit_class([builder.files, builder.bindings], ("File", "Directory"), _check_adjust)

        initialWorkdir = self.get_requirement("InitialWorkDirRequirement")[0]
        j.generatefiles = {"class": "Directory", "listing": [], "basename": ""}
        if initialWorkdir:
            ls = []  # type: List[Dict[Text, Any]]
            if isinstance(initialWorkdir["listing"], (str, Text)):
                ls = builder.do_eval(initialWorkdir["listing"])
            else:
                for t in initialWorkdir["listing"]:
                    if "entry" in t:
                        et = {u"entry": builder.do_eval(t["entry"])}
                        if "entryname" in t:
                            et["entryname"] = builder.do_eval(t["entryname"])
                        else:
                            et["entryname"] = None
                        et["writable"] = t.get("writable", False)
                        ls.append(et)
                    else:
                        ls.append(builder.do_eval(t))
            for i, t in enumerate(ls):
                if "entry" in t:
                    if isinstance(t["entry"], string_types):
                        ls[i] = {
                            "class": "File",
                            "basename": t["entryname"],
                            "contents": t["entry"],
                            "writable": t.get("writable")
                        }
                    else:
                        if t.get("entryname") or t.get("writable"):
                            t = copy.deepcopy(t)
                            if t.get("entryname"):
                                t["entry"]["basename"] = t["entryname"]
                            t["entry"]["writable"] = t.get("writable")
                        ls[i] = t["entry"]
            j.generatefiles[u"listing"] = ls
            for l in ls:
                self.updatePathmap(builder.outdir, builder.pathmapper, l)
            visit_class([builder.files, builder.bindings], ("File", "Directory"), _check_adjust)

        if debug:
            _logger.debug(u"[job %s] path mappings is %s", j.name,
                          json.dumps({p: builder.pathmapper.mapper(p) for p in builder.pathmapper.files()}, indent=4))

        if self.tool.get("stdin"):
            with SourceLine(self.tool, "stdin", validate.ValidationException, debug):
                j.stdin = builder.do_eval(self.tool["stdin"])
                reffiles.append({"class": "File", "path": j.stdin})

        if self.tool.get("stderr"):
            with SourceLine(self.tool, "stderr", validate.ValidationException, debug):
                j.stderr = builder.do_eval(self.tool["stderr"])
                if os.path.isabs(j.stderr) or ".." in j.stderr:
                    raise validate.ValidationException("stderr must be a relative path, got '%s'" % j.stderr)

        if self.tool.get("stdout"):
            with SourceLine(self.tool, "stdout", validate.ValidationException, debug):
                j.stdout = builder.do_eval(self.tool["stdout"])
                if os.path.isabs(j.stdout) or ".." in j.stdout or not j.stdout:
                    raise validate.ValidationException("stdout must be a relative path, got '%s'" % j.stdout)

        if debug:
            _logger.debug(u"[job %s] command line bindings is %s", j.name, json.dumps(builder.bindings, indent=4))

        dockerReq = self.get_requirement("DockerRequirement")[0]
        if dockerReq and kwargs.get("use_container"):
            out_prefix = kwargs.get("tmp_outdir_prefix")
            j.outdir = kwargs.get("outdir") or tempfile.mkdtemp(prefix=out_prefix)
            tmpdir_prefix = kwargs.get('tmpdir_prefix')
            j.tmpdir = kwargs.get("tmpdir") or tempfile.mkdtemp(prefix=tmpdir_prefix)
            j.stagedir = tempfile.mkdtemp(prefix=tmpdir_prefix)
        else:
            j.outdir = builder.outdir
            j.tmpdir = builder.tmpdir
            j.stagedir = builder.stagedir

        inplaceUpdateReq = self.get_requirement("http://commonwl.org/cwltool#InplaceUpdateRequirement")[0]

        if inplaceUpdateReq:
            j.inplace_update = inplaceUpdateReq["inplaceUpdate"]
        normalizeFilesDirs(j.generatefiles)

        readers = {}
        muts = set()

        if builder.mutation_manager:
            def register_mut(f):
                muts.add(f["location"])
                builder.mutation_manager.register_mutation(j.name, f)

            def register_reader(f):
                if f["location"] not in muts:
                    builder.mutation_manager.register_reader(j.name, f)
                    readers[f["location"]] = f

            for li in j.generatefiles["listing"]:
                li = cast(Dict[Text, Any], li)
                if li.get("writable") and j.inplace_update:
                    adjustFileObjs(li, register_mut)
                    adjustDirObjs(li, register_mut)
                else:
                    adjustFileObjs(li, register_reader)
                    adjustDirObjs(li, register_reader)

            adjustFileObjs(builder.files, register_reader)
            adjustFileObjs(builder.bindings, register_reader)
            adjustDirObjs(builder.files, register_reader)
            adjustDirObjs(builder.bindings, register_reader)

        j.environment = {}
        evr = self.get_requirement("EnvVarRequirement")[0]
        if evr:
            for t in evr["envDef"]:
                j.environment[t["envName"]] = builder.do_eval(t["envValue"])

        shellcmd = self.get_requirement("ShellCommandRequirement")[0]
        if shellcmd:
            cmd = []  # type: List[Text]
            for b in builder.bindings:
                arg = builder.generate_arg(b)
                if b.get("shellQuote", True):
                    arg = [shellescape.quote(a) for a in aslist(arg)]
                cmd.extend(aslist(arg))
            j.command_line = ["/bin/sh", "-c", " ".join(cmd)]
        else:
            j.command_line = flatten(list(map(builder.generate_arg, builder.bindings)))

        j.pathmapper = builder.pathmapper
        j.collect_outputs = partial(
            self.collect_output_ports, self.tool["outputs"], builder,
            compute_checksum=kwargs.get("compute_checksum", True),
            jobname=jobname,
            readers=readers)
        j.output_callback = output_callbacks

        yield j
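
A minimal standalone sketch of the ShellCommandRequirement branch above, for reference. The aslist helper here is a local stand-in, and generate_arg is assumed to return a string or list of strings, as in cwltool:

import shellescape

def aslist(x):
    # local stand-in for cwltool's aslist helper
    return x if isinstance(x, list) else [x]

def join_shell_command(bindings, generate_arg):
    # Quote each generated argument unless its binding sets shellQuote: false,
    # then hand the joined string to /bin/sh -c.
    cmd = []
    for b in bindings:
        arg = generate_arg(b)
        if b.get("shellQuote", True):
            arg = [shellescape.quote(a) for a in aslist(arg)]
        cmd.extend(aslist(arg))
    return ["/bin/sh", "-c", " ".join(cmd)]
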
Beispiel #52
0
    def job(self, joborder, output_callback, **kwargs):
        # type: (Dict[Text, Text], Callable[..., Any], **Any) -> Generator[Union[CommandLineJob, CallbackJob], None, None]

        jobname = uniquename(kwargs.get("name", shortname(self.tool.get("id", "job"))))

        if kwargs.get("cachedir"):
            cacheargs = kwargs.copy()
            cacheargs["outdir"] = "/out"
            cacheargs["tmpdir"] = "/tmp"
            cacheargs["stagedir"] = "/stage"
            cachebuilder = self._init_job(joborder, **cacheargs)
            cachebuilder.pathmapper = PathMapper(cachebuilder.files,
                                                 kwargs["basedir"],
                                                 cachebuilder.stagedir,
                                                 separateDirs=False)
            _check_adjust = partial(check_adjust, cachebuilder)
            adjustFileObjs(cachebuilder.files, _check_adjust)
            adjustFileObjs(cachebuilder.bindings, _check_adjust)
            adjustDirObjs(cachebuilder.files, _check_adjust)
            adjustDirObjs(cachebuilder.bindings, _check_adjust)
            cmdline = flatten(map(cachebuilder.generate_arg, cachebuilder.bindings))
            (docker_req, docker_is_req) = self.get_requirement("DockerRequirement")
            if docker_req and kwargs.get("use_container") is not False:
                dockerimg = docker_req.get("dockerImageId") or docker_req.get("dockerPull")
                cmdline = ["docker", "run", dockerimg] + cmdline
            keydict = {u"cmdline": cmdline}

            for _, f in cachebuilder.pathmapper.items():
                if f.type == "File":
                    st = os.stat(f.resolved)
                    keydict[f.resolved] = [st.st_size, int(st.st_mtime * 1000)]

            interesting = {"DockerRequirement",
                           "EnvVarRequirement",
                           "CreateFileRequirement",
                           "ShellCommandRequirement"}
            for rh in (self.requirements, self.hints):
                for r in reversed(rh):
                    if r["class"] in interesting and r["class"] not in keydict:
                        keydict[r["class"]] = r

            keydictstr = json.dumps(keydict, separators=(',', ':'), sort_keys=True)
            cachekey = hashlib.md5(keydictstr).hexdigest()
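            # sorted keys and fixed separators give a canonical JSON string,
            # so identical jobs always hash to the same cache key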

            _logger.debug("[job %s] keydictstr is %s -> %s", jobname,
                    keydictstr, cachekey)

            jobcache = os.path.join(kwargs["cachedir"], cachekey)
            jobcachepending = jobcache + ".pending"

            if os.path.isdir(jobcache) and not os.path.isfile(jobcachepending):
                if docker_req and kwargs.get("use_container") is not False:
                    cachebuilder.outdir = kwargs.get("docker_outdir") or "/var/spool/cwl"
                else:
                    cachebuilder.outdir = jobcache

                _logger.info("[job %s] Using cached output in %s", jobname, jobcache)
                yield CallbackJob(self, output_callback, cachebuilder, jobcache)
                return
            else:
                _logger.info("[job %s] Output of job will be cached in %s", jobname, jobcache)
                shutil.rmtree(jobcache, True)
                os.makedirs(jobcache)
                kwargs["outdir"] = jobcache
                open(jobcachepending, "w").close()

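                # wrap the original callback so the ".pending" marker is
                # removed only once the job reports success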
                def rm_pending_output_callback(output_callback, jobcachepending,
                                               outputs, processStatus):
                    if processStatus == "success":
                        os.remove(jobcachepending)
                    output_callback(outputs, processStatus)
                output_callback = cast(
                    Callable[..., Any],  # known bug in mypy
                    # https://github.com/python/mypy/issues/797
                    partial(rm_pending_output_callback, output_callback,
                        jobcachepending))

        builder = self._init_job(joborder, **kwargs)

        reffiles = copy.deepcopy(builder.files)

        j = self.makeJobRunner()
        j.builder = builder
        j.joborder = builder.job
        j.stdin = None
        j.stderr = None
        j.stdout = None
        j.successCodes = self.tool.get("successCodes")
        j.temporaryFailCodes = self.tool.get("temporaryFailCodes")
        j.permanentFailCodes = self.tool.get("permanentFailCodes")
        j.requirements = self.requirements
        j.hints = self.hints
        j.name = jobname

        _logger.debug(u"[job %s] initializing from %s%s",
                     j.name,
                     self.tool.get("id", ""),
                     u" as part of %s" % kwargs["part_of"] if "part_of" in kwargs else "")
        _logger.debug(u"[job %s] %s", j.name, json.dumps(joborder, indent=4))


        builder.pathmapper = None
        make_path_mapper_kwargs = kwargs
        if "stagedir" in make_path_mapper_kwargs:
            make_path_mapper_kwargs = make_path_mapper_kwargs.copy()
            del make_path_mapper_kwargs["stagedir"]
        builder.pathmapper = self.makePathMapper(reffiles, builder.stagedir, **make_path_mapper_kwargs)
        builder.requirements = j.requirements

        _logger.debug(u"[job %s] path mappings is %s", j.name, json.dumps({p: builder.pathmapper.mapper(p) for p in builder.pathmapper.files()}, indent=4))

        _check_adjust = partial(check_adjust, builder)

        adjustFileObjs(builder.files, _check_adjust)
        adjustFileObjs(builder.bindings, _check_adjust)
        adjustDirObjs(builder.files, _check_adjust)
        adjustDirObjs(builder.bindings, _check_adjust)

        if self.tool.get("stdin"):
            j.stdin = builder.do_eval(self.tool["stdin"])
            reffiles.append({"class": "File", "path": j.stdin})

        if self.tool.get("stderr"):
            j.stderr = builder.do_eval(self.tool["stderr"])
            if os.path.isabs(j.stderr) or ".." in j.stderr:
                raise validate.ValidationException("stderr must be a relative path")

        if self.tool.get("stdout"):
            j.stdout = builder.do_eval(self.tool["stdout"])
            if os.path.isabs(j.stdout) or ".." in j.stdout or not j.stdout:
                raise validate.ValidationException("stdout must be a relative path")

        _logger.debug(u"[job %s] command line bindings is %s", j.name, json.dumps(builder.bindings, indent=4))

        dockerReq = self.get_requirement("DockerRequirement")[0]
        if dockerReq and kwargs.get("use_container"):
            out_prefix = kwargs.get("tmp_outdir_prefix")
            j.outdir = kwargs.get("outdir") or tempfile.mkdtemp(prefix=out_prefix)
            tmpdir_prefix = kwargs.get('tmpdir_prefix')
            j.tmpdir = kwargs.get("tmpdir") or tempfile.mkdtemp(prefix=tmpdir_prefix)
            j.stagedir = tempfile.mkdtemp(prefix=tmpdir_prefix)
        else:
            j.outdir = builder.outdir
            j.tmpdir = builder.tmpdir
            j.stagedir = builder.stagedir

        initialWorkdir = self.get_requirement("InitialWorkDirRequirement")[0]
        j.generatefiles = {"class": "Directory", "listing": [], "basename": ""}
        if initialWorkdir:
            ls = []  # type: List[Dict[Text, Any]]
            if isinstance(initialWorkdir["listing"], (str, Text)):
                ls = builder.do_eval(initialWorkdir["listing"])
            else:
                for t in initialWorkdir["listing"]:
                    if "entry" in t:
                        et = {u"entry": builder.do_eval(t["entry"])}
                        if "entryname" in t:
                            et["entryname"] = builder.do_eval(t["entryname"])
                        else:
                            et["entryname"] = None
                        et["writable"] = t.get("writable", False)
                        ls.append(et)
                    else:
                        ls.append(builder.do_eval(t))
            for i, t in enumerate(ls):
                if "entry" in t:
                    if isinstance(t["entry"], basestring):
                        ls[i] = {
                            "class": "File",
                            "basename": t["entryname"],
                            "contents": t["entry"],
                            "writable": t.get("writable")
                        }
                    else:
                        if t["entryname"]:
                            t = copy.deepcopy(t)
                            t["entry"]["basename"] = t["entryname"]
                            t["entry"]["writable"] = t.get("writable")
                        ls[i] = t["entry"]
            j.generatefiles[u"listing"] = ls

        normalizeFilesDirs(j.generatefiles)

        j.environment = {}
        evr = self.get_requirement("EnvVarRequirement")[0]
        if evr:
            for t in evr["envDef"]:
                j.environment[t["envName"]] = builder.do_eval(t["envValue"])

        shellcmd = self.get_requirement("ShellCommandRequirement")[0]
        if shellcmd:
            cmd = []  # type: List[Text]
            for b in builder.bindings:
                arg = builder.generate_arg(b)
                if b.get("shellQuote", True):
                    arg = [shellescape.quote(a) for a in aslist(arg)]
                cmd.extend(aslist(arg))
            j.command_line = ["/bin/sh", "-c", " ".join(cmd)]
        else:
            j.command_line = flatten(map(builder.generate_arg, builder.bindings))

        j.pathmapper = builder.pathmapper
        j.collect_outputs = partial(
            self.collect_output_ports, self.tool["outputs"], builder,
            compute_checksum=kwargs.get("compute_checksum", True))
        j.output_callback = output_callback

        yield j
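
The cache key above fingerprints the command line plus the size and mtime of every input file; a condensed sketch of the idea (the helper name and signature are illustrative, not cwltool's):

import hashlib
import json
import os

def cache_key(cmdline, input_paths):
    # Hash the command line together with (size, mtime-in-ms) per input,
    # using canonical JSON so the digest is stable across runs.
    keydict = {"cmdline": cmdline}
    for path in input_paths:
        st = os.stat(path)
        keydict[path] = [st.st_size, int(st.st_mtime * 1000)]
    keydictstr = json.dumps(keydict, separators=(',', ':'), sort_keys=True)
    return hashlib.md5(keydictstr.encode('utf-8')).hexdigest()

A matching cache directory is then looked up under cachedir/<key>, with a ".pending" marker guarding partially written results.
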
Beispiel #53
0
    def job(self,
            job_order,         # type: Mapping[Text, Text]
            output_callbacks,  # type: Callable[[Any, Any], Any]
            runtimeContext     # RuntimeContext
           ):
        # type: (...) -> Generator[Union[JobBase, CallbackJob], None, None]

        workReuse, _ = self.get_requirement("WorkReuse")
        enableReuse = workReuse.get("enableReuse", True) if workReuse else True

        jobname = uniquename(runtimeContext.name or shortname(self.tool.get("id", "job")))
        if runtimeContext.cachedir and enableReuse:
            cachecontext = runtimeContext.copy()
            cachecontext.outdir = "/out"
            cachecontext.tmpdir = "/tmp"
            cachecontext.stagedir = "/stage"
            cachebuilder = self._init_job(job_order, cachecontext)
            cachebuilder.pathmapper = PathMapper(cachebuilder.files,
                                                 runtimeContext.basedir,
                                                 cachebuilder.stagedir,
                                                 separateDirs=False)
            _check_adjust = partial(check_adjust, cachebuilder)
            visit_class([cachebuilder.files, cachebuilder.bindings],
                        ("File", "Directory"), _check_adjust)

            cmdline = flatten(list(map(cachebuilder.generate_arg, cachebuilder.bindings)))
            docker_req, _ = self.get_requirement("DockerRequirement")
            if docker_req is not None and runtimeContext.use_container:
                dockerimg = docker_req.get("dockerImageId") or docker_req.get("dockerPull")
            elif runtimeContext.default_container is not None and runtimeContext.use_container:
                dockerimg = runtimeContext.default_container
            else:
                dockerimg = None

            if dockerimg is not None:
                cmdline = ["docker", "run", dockerimg] + cmdline
                # not really run using docker, just for hashing purposes
            keydict = {u"cmdline": cmdline}

            for shortcut in ["stdout", "stderr"]:  # later, add "stdin"
                if shortcut in self.tool:
                    keydict[shortcut] = self.tool[shortcut]

            for location, fobj in cachebuilder.pathmapper.items():
                if fobj.type == "File":
                    checksum = next(
                        (e['checksum'] for e in cachebuilder.files
                         if 'location' in e and e['location'] == location
                         and 'checksum' in e
                         and e['checksum'] != 'sha1$hash'), None)
                    fobj_stat = os.stat(fobj.resolved)
                    if checksum is not None:
                        keydict[fobj.resolved] = [fobj_stat.st_size, checksum]
                    else:
                        keydict[fobj.resolved] = [fobj_stat.st_size,
                                                  int(fobj_stat.st_mtime * 1000)]

            interesting = {"DockerRequirement",
                           "EnvVarRequirement",
                           "CreateFileRequirement",
                           "ShellCommandRequirement"}
            for rh in (self.original_requirements, self.original_hints):
                for r in reversed(rh):
                    if r["class"] in interesting and r["class"] not in keydict:
                        keydict[r["class"]] = r

            keydictstr = json_dumps(keydict, separators=(',', ':'),
                                    sort_keys=True)
            cachekey = hashlib.md5(keydictstr.encode('utf-8')).hexdigest()

            _logger.debug("[job %s] keydictstr is %s -> %s", jobname,
                          keydictstr, cachekey)

            jobcache = os.path.join(runtimeContext.cachedir, cachekey)
            jobcachepending = "{}.{}.pending".format(
                jobcache, threading.current_thread().ident)

            if os.path.isdir(jobcache) and not os.path.isfile(jobcachepending):
                if docker_req and runtimeContext.use_container:
                    cachebuilder.outdir = runtimeContext.docker_outdir or random_outdir()
                else:
                    cachebuilder.outdir = jobcache

                _logger.info("[job %s] Using cached output in %s", jobname, jobcache)
                yield CallbackJob(self, output_callbacks, cachebuilder, jobcache)
                return
            else:
                _logger.info("[job %s] Output of job will be cached in %s", jobname, jobcache)
                shutil.rmtree(jobcache, True)
                os.makedirs(jobcache)
                runtimeContext = runtimeContext.copy()
                runtimeContext.outdir = jobcache
                open(jobcachepending, "w").close()

                def rm_pending_output_callback(output_callbacks, jobcachepending,
                                               outputs, processStatus):
                    if processStatus == "success":
                        os.remove(jobcachepending)
                    output_callbacks(outputs, processStatus)

                output_callbacks = partial(
                    rm_pending_output_callback, output_callbacks, jobcachepending)

        builder = self._init_job(job_order, runtimeContext)

        reffiles = copy.deepcopy(builder.files)

        j = self.make_job_runner(runtimeContext)(
            builder, builder.job, self.make_path_mapper, self.requirements,
            self.hints, jobname)
        j.prov_obj = self.prov_obj

        j.successCodes = self.tool.get("successCodes", [])
        j.temporaryFailCodes = self.tool.get("temporaryFailCodes", [])
        j.permanentFailCodes = self.tool.get("permanentFailCodes", [])

        debug = _logger.isEnabledFor(logging.DEBUG)

        if debug:
            _logger.debug(u"[job %s] initializing from %s%s",
                          j.name,
                          self.tool.get("id", ""),
                          u" as part of %s" % runtimeContext.part_of
                          if runtimeContext.part_of else "")
            _logger.debug(u"[job %s] %s", j.name, json_dumps(builder.job,
                                                             indent=4))

        builder.pathmapper = self.make_path_mapper(
            reffiles, builder.stagedir, runtimeContext, True)
        builder.requirements = j.requirements

        _check_adjust = partial(check_adjust, builder)

        visit_class([builder.files, builder.bindings], ("File", "Directory"), _check_adjust)

        initialWorkdir, _ = self.get_requirement("InitialWorkDirRequirement")
        if initialWorkdir is not None:
            ls = []  # type: List[Dict[Text, Any]]
            if isinstance(initialWorkdir["listing"], string_types):
                ls = builder.do_eval(initialWorkdir["listing"])
            else:
                for t in initialWorkdir["listing"]:
                    if isinstance(t, Mapping) and "entry" in t:
                        entry_exp = builder.do_eval(t["entry"], strip_whitespace=False)
                        for entry in aslist(entry_exp):
                            et = {u"entry": entry}
                            if "entryname" in t:
                                et["entryname"] = builder.do_eval(t["entryname"])
                            else:
                                et["entryname"] = None
                            et["writable"] = t.get("writable", False)
                            if et[u"entry"]:
                                ls.append(et)
                    else:
                        initwd_item = builder.do_eval(t)
                        if not initwd_item:
                            continue
                        if isinstance(initwd_item, MutableSequence):
                            ls.extend(initwd_item)
                        else:
                            ls.append(initwd_item)
            for i, t in enumerate(ls):
                if "entry" in t:
                    if isinstance(t["entry"], string_types):
                        ls[i] = {
                            "class": "File",
                            "basename": t["entryname"],
                            "contents": t["entry"],
                            "writable": t.get("writable")
                        }
                    else:
                        if t.get("entryname") or t.get("writable"):
                            t = copy.deepcopy(t)
                            if t.get("entryname"):
                                t["entry"]["basename"] = t["entryname"]
                            t["entry"]["writable"] = t.get("writable")
                        ls[i] = t["entry"]
            j.generatefiles["listing"] = ls
            for entry in ls:
                self.updatePathmap(builder.outdir, builder.pathmapper, entry)
            visit_class([builder.files, builder.bindings], ("File", "Directory"), _check_adjust)

        if debug:
            _logger.debug(u"[job %s] path mappings is %s", j.name,
                          json_dumps({p: builder.pathmapper.mapper(p)
                                      for p in builder.pathmapper.files()},
                                     indent=4))

        if self.tool.get("stdin"):
            with SourceLine(self.tool, "stdin", validate.ValidationException, debug):
                j.stdin = builder.do_eval(self.tool["stdin"])
                assert j.stdin is not None
                reffiles.append({"class": "File", "path": j.stdin})

        if self.tool.get("stderr"):
            with SourceLine(self.tool, "stderr", validate.ValidationException, debug):
                j.stderr = builder.do_eval(self.tool["stderr"])
                assert j.stderr is not None
                if os.path.isabs(j.stderr) or ".." in j.stderr:
                    raise validate.ValidationException(
                        "stderr must be a relative path, got '%s'" % j.stderr)

        if self.tool.get("stdout"):
            with SourceLine(self.tool, "stdout", validate.ValidationException, debug):
                j.stdout = builder.do_eval(self.tool["stdout"])
                assert j.stdout is not None
                if os.path.isabs(j.stdout) or ".." in j.stdout or not j.stdout:
                    raise validate.ValidationException(
                        "stdout must be a relative path, got '%s'" % j.stdout)

        if debug:
            _logger.debug(u"[job %s] command line bindings is %s", j.name,
                          json_dumps(builder.bindings, indent=4))
        dockerReq, _ = self.get_requirement("DockerRequirement")
        if dockerReq is not None and runtimeContext.use_container:
            out_dir, out_prefix = os.path.split(
                runtimeContext.tmp_outdir_prefix)
            j.outdir = runtimeContext.outdir or \
                tempfile.mkdtemp(prefix=out_prefix, dir=out_dir)
            tmpdir_dir, tmpdir_prefix = os.path.split(
                runtimeContext.tmpdir_prefix)
            j.tmpdir = runtimeContext.tmpdir or \
                tempfile.mkdtemp(prefix=tmpdir_prefix, dir=tmpdir_dir)
            j.stagedir = tempfile.mkdtemp(prefix=tmpdir_prefix, dir=tmpdir_dir)
        else:
            j.outdir = builder.outdir
            j.tmpdir = builder.tmpdir
            j.stagedir = builder.stagedir

        inplaceUpdateReq, _ = self.get_requirement("InplaceUpdateRequirement")
        if inplaceUpdateReq is not None:
            j.inplace_update = inplaceUpdateReq["inplaceUpdate"]
        normalizeFilesDirs(j.generatefiles)

        readers = {}  # type: Dict[Text, Any]
        muts = set()  # type: Set[Text]

        if builder.mutation_manager is not None:
            def register_mut(f):
                muts.add(f["location"])
                builder.mutation_manager.register_mutation(j.name, f)

            def register_reader(f):
                if f["location"] not in muts:
                    builder.mutation_manager.register_reader(j.name, f)
                    readers[f["location"]] = copy.deepcopy(f)

            for li in j.generatefiles["listing"]:
                li = cast(Dict[Text, Any], li)
                if li.get("writable") and j.inplace_update:
                    adjustFileObjs(li, register_mut)
                    adjustDirObjs(li, register_mut)
                else:
                    adjustFileObjs(li, register_reader)
                    adjustDirObjs(li, register_reader)

            adjustFileObjs(builder.files, register_reader)
            adjustFileObjs(builder.bindings, register_reader)
            adjustDirObjs(builder.files, register_reader)
            adjustDirObjs(builder.bindings, register_reader)

        timelimit, _ = self.get_requirement("TimeLimit")
        if timelimit is not None:
            with SourceLine(timelimit, "timelimit", validate.ValidationException, debug):
                j.timelimit = builder.do_eval(timelimit["timelimit"])
                if not isinstance(j.timelimit, int) or j.timelimit < 0:
                    raise Exception("timelimit must be an integer >= 0, got: %s" % j.timelimit)

        networkaccess, _ = self.get_requirement("NetworkAccess")
        if networkaccess is not None:
            with SourceLine(networkaccess, "networkAccess", validate.ValidationException, debug):
                j.networkaccess = builder.do_eval(networkaccess["networkAccess"])
                if not isinstance(j.networkaccess, bool):
                    raise Exception("networkAccess must be a boolean, got: %s" % j.networkaccess)

        j.environment = {}
        evr, _ = self.get_requirement("EnvVarRequirement")
        if evr is not None:
            for t in evr["envDef"]:
                j.environment[t["envName"]] = builder.do_eval(t["envValue"])

        shellcmd, _ = self.get_requirement("ShellCommandRequirement")
        if shellcmd is not None:
            cmd = []  # type: List[Text]
            for b in builder.bindings:
                arg = builder.generate_arg(b)
                if b.get("shellQuote", True):
                    arg = [shellescape.quote(a) for a in aslist(arg)]
                cmd.extend(aslist(arg))
            j.command_line = ["/bin/sh", "-c", " ".join(cmd)]
        else:
            j.command_line = flatten(list(map(builder.generate_arg, builder.bindings)))

        j.pathmapper = builder.pathmapper
        j.collect_outputs = partial(
            self.collect_output_ports, self.tool["outputs"], builder,
            compute_checksum=getdefault(runtimeContext.compute_checksum, True),
            jobname=jobname,
            readers=readers)
        j.output_callback = output_callbacks

        yield j
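
The stdout/stderr checks above reduce to one small predicate; a minimal sketch, with ValueError standing in for validate.ValidationException:

import os

def validate_redirect(path):
    # Redirection targets must be non-empty relative paths that cannot
    # escape the output directory.
    if not path or os.path.isabs(path) or ".." in path:
        raise ValueError("must be a non-empty relative path, got %r" % path)
    return path
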
Beispiel #54
0
    def run(self, dry_run=False, pull_image=True, rm_container=True,
            rm_tmpdir=True, move_outputs="move", **kwargs):
        # type: (bool, bool, bool, bool, Text, **Any) -> Union[Tuple[Text, Dict[None, None]], None]
        if not os.path.exists(self.outdir):
            os.makedirs(self.outdir)

        #with open(os.path.join(outdir, "cwl.input.json"), "w") as fp:
        #    json.dump(self.joborder, fp)

        runtime = []  # type: List[Text]

        (docker_req, docker_is_req) = get_feature(self, "DockerRequirement")

        for knownfile in self.pathmapper.files():
            p = self.pathmapper.mapper(knownfile)
            if p.type == "File" and not os.path.isfile(p.resolved):
                raise WorkflowException(
                    u"Input file %s (at %s) not found or is not a regular "
                    "file." % (knownfile, p.resolved))

        img_id = None
        env = None  # type: Union[MutableMapping[Text, Text], MutableMapping[str, str]]
        if docker_req and kwargs.get("use_container") is not False:
            env = os.environ
            img_id = docker.get_from_requirements(docker_req, docker_is_req, pull_image)

        if docker_is_req and img_id is None:
            raise WorkflowException("Docker is required for running this tool.")

        if img_id:
            runtime = ["docker", "run", "-i"]
            for src in self.pathmapper.files():
                vol = self.pathmapper.mapper(src)
                if vol.type == "File":
                    runtime.append(u"--volume=%s:%s:ro" % (vol.resolved, vol.target))
                if vol.type == "CreateFile":
                    createtmp = os.path.join(self.stagedir, os.path.basename(vol.target))
                    with open(createtmp, "w") as f:
                        f.write(vol.resolved.encode("utf-8"))
                    runtime.append(u"--volume=%s:%s:ro" % (createtmp, vol.target))
            runtime.append(u"--volume=%s:%s:rw" % (os.path.realpath(self.outdir), "/var/spool/cwl"))
            runtime.append(u"--volume=%s:%s:rw" % (os.path.realpath(self.tmpdir), "/tmp"))
            runtime.append(u"--workdir=%s" % ("/var/spool/cwl"))
            runtime.append("--read-only=true")
            if (kwargs.get("enable_net", None) is None and
                    kwargs.get("custom_net", None) is not None):
                runtime.append("--net=none")
            elif kwargs.get("custom_net", None) is not None:
                runtime.append("--net={0}".format(kwargs.get("custom_net")))

            if self.stdout:
                runtime.append("--log-driver=none")

            euid = docker_vm_uid() or os.geteuid()
            runtime.append(u"--user=%s" % (euid))

            if rm_container:
                runtime.append("--rm")

            runtime.append("--env=TMPDIR=/tmp")

            # spec currently says "HOME must be set to the designated output
            # directory." but spec might change to designated temp directory.
            # runtime.append("--env=HOME=/tmp")
            runtime.append("--env=HOME=/var/spool/cwl")

            for t, v in self.environment.items():
                runtime.append(u"--env=%s=%s" % (t, v))

            runtime.append(img_id)
        else:
            env = self.environment
            if not os.path.exists(self.tmpdir):
                os.makedirs(self.tmpdir)
            vars_to_preserve = kwargs.get("preserve_environment")
            if vars_to_preserve is not None:
                for key, value in os.environ.items():
                    if key in vars_to_preserve and key not in env:
                        env[key] = value
            env["HOME"] = self.outdir
            env["TMPDIR"] = self.tmpdir

            stageFiles(self.pathmapper, os.symlink)

        stdin = None  # type: Union[IO[Any], int]
        stderr = None  # type: IO[Any]
        stdout = None  # type: IO[Any]

        scr, _ = get_feature(self, "ShellCommandRequirement")

        if scr:
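            # arguments were already shell-quoted when the command line was
            # built, so the log line below must not quote them again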
            shouldquote = lambda x: False
        else:
            shouldquote = needs_shell_quoting_re.search

        _logger.info(u"[job %s] %s$ %s%s%s%s",
                     self.name,
                     self.outdir,
                     " \\\n    ".join([shellescape.quote(Text(arg)) if shouldquote(Text(arg)) else Text(arg) for arg in (runtime + self.command_line)]),
                     u' < %s' % self.stdin if self.stdin else '',
                     u' > %s' % os.path.join(self.outdir, self.stdout) if self.stdout else '',
                     u' 2> %s' % os.path.join(self.outdir, self.stderr) if self.stderr else '')

        if dry_run:
            return (self.outdir, {})

        outputs = {}  # type: Dict[Text,Text]

        try:
            if self.generatefiles["listing"]:
                generatemapper = PathMapper([self.generatefiles], self.outdir,
                                            self.outdir, separateDirs=False)
                _logger.debug(u"[job %s] initial work dir %s", self.name,
                              json.dumps({p: generatemapper.mapper(p) for p in generatemapper.files()}, indent=4))

                def linkoutdir(src, tgt):
                    # Need to make the link to the staged file (may be inside
                    # the container)
                    for _, item in self.pathmapper.items():
                        if src == item.resolved:
                            os.symlink(item.target, tgt)
                            break
                stageFiles(generatemapper, linkoutdir)

            if self.stdin:
                stdin = open(self.pathmapper.reversemap(self.stdin)[1], "rb")
            else:
                stdin = subprocess.PIPE

            if self.stderr:
                abserr = os.path.join(self.outdir, self.stderr)
                dnerr = os.path.dirname(abserr)
                if dnerr and not os.path.exists(dnerr):
                    os.makedirs(dnerr)
                stderr = open(abserr, "wb")
            else:
                stderr = sys.stderr

            if self.stdout:
                absout = os.path.join(self.outdir, self.stdout)
                dn = os.path.dirname(absout)
                if dn and not os.path.exists(dn):
                    os.makedirs(dn)
                stdout = open(absout, "wb")
            else:
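                # apparently deliberate: unredirected tool output goes to
                # stderr so it cannot mix with cwltool's own stdout (note the
                # "is not sys.stderr" close below)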
                stdout = sys.stderr

            sp = subprocess.Popen([Text(x).encode('utf-8') for x in runtime + self.command_line],
                                  shell=False,
                                  close_fds=True,
                                  stdin=stdin,
                                  stderr=stderr,
                                  stdout=stdout,
                                  env=env,
                                  cwd=self.outdir)

            if sp.stdin:
                sp.stdin.close()

            rcode = sp.wait()

            if isinstance(stdin, file):
                stdin.close()

            if stderr is not sys.stderr:
                stderr.close()

            if stdout is not sys.stderr:
                stdout.close()

            if self.successCodes and rcode in self.successCodes:
                processStatus = "success"
            elif self.temporaryFailCodes and rcode in self.temporaryFailCodes:
                processStatus = "temporaryFail"
            elif self.permanentFailCodes and rcode in self.permanentFailCodes:
                processStatus = "permanentFail"
            elif rcode == 0:
                processStatus = "success"
            else:
                processStatus = "permanentFail"

            if self.generatefiles["listing"]:
                def linkoutdir(src, tgt):
                    # Need to make the link to the staged file (may be inside
                    # the container)
                    if os.path.islink(tgt):
                        os.remove(tgt)
                        os.symlink(src, tgt)
                stageFiles(generatemapper, linkoutdir, ignoreWritable=True)

            outputs = self.collect_outputs(self.outdir)

        except OSError as e:
            if e.errno == 2:
                if runtime:
                    _logger.error(u"'%s' not found", runtime[0])
                else:
                    _logger.error(u"'%s' not found", self.command_line[0])
            else:
                _logger.exception("Exception while running job")
            processStatus = "permanentFail"
        except WorkflowException as e:
            _logger.error(u"Error while running job: %s" % e)
            processStatus = "permanentFail"
        except Exception as e:
            _logger.exception("Exception while running job")
            processStatus = "permanentFail"

        if processStatus != "success":
            _logger.warn(u"[job %s] completed %s", self.name, processStatus)
        else:
            _logger.debug(u"[job %s] completed %s", self.name, processStatus)
        _logger.debug(u"[job %s] %s", self.name, json.dumps(outputs, indent=4))

        self.output_callback(outputs, processStatus)

        if self.stagedir and os.path.exists(self.stagedir):
            _logger.debug(u"[job %s] Removing input staging directory %s", self.name, self.stagedir)
            shutil.rmtree(self.stagedir, True)

        if rm_tmpdir:
            _logger.debug(u"[job %s] Removing temporary directory %s", self.name, self.tmpdir)
            shutil.rmtree(self.tmpdir, True)

        if move_outputs == "move" and empty_subtree(self.outdir):
            _logger.debug(u"[job %s] Removing empty output directory %s", self.name, self.outdir)
            shutil.rmtree(self.outdir, True)
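
The log line above quotes arguments only when they need it; a rough sketch of that display step (the regex is an assumption, since needs_shell_quoting_re is defined elsewhere):

import re
import shellescape

# assumption: quote anything outside a conservative safe-character set
needs_quoting = re.compile(r"[^\w/.=:+-]").search

def render_command(args):
    return " ".join(shellescape.quote(a) if needs_quoting(a) else a
                    for a in args)

For example, render_command(["echo", "hello world"]) returns "echo 'hello world'".
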
Beispiel #55
0
 def _print_simple(self, name, value):
     self._stream.write(
             _SHELL_VAR_PREFIX + '%s=%s\n' % (
                 name.upper(), shellescape.quote(value)))
     self._stream.flush()
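
A hedged usage sketch of the same pattern; the prefix value is an assumption, since the real _SHELL_VAR_PREFIX is defined elsewhere in the module:

import sys
import shellescape

_SHELL_VAR_PREFIX = 'EXAMPLE_'  # assumption: stand-in for the module's prefix

def print_simple(stream, name, value):
    # Emit NAME='value' with the value shell-escaped, so the output can be
    # safely eval'd by a shell script.
    stream.write(_SHELL_VAR_PREFIX + '%s=%s\n' %
                 (name.upper(), shellescape.quote(value)))
    stream.flush()

print_simple(sys.stdout, 'device_name', 'My Speaker')
# prints: EXAMPLE_DEVICE_NAME='My Speaker'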