Exemplo n.º 1
0
    def get_reproduction_cmd(self, verb, jid):
        """Get a command line to reproduce this stage with the proper environment."""

        # Define the base env command
        get_env_cmd = 'catkin {} --get-env {}'.format(verb, jid)

        # Add additional env args
        env_overrides_formatted = ' '.join([
            '{}={}'.format(k, cmd_quote(v))
            for k, v in self.env_overrides.items()
        ])

        # Define the actual command to reproduce
        cmd_str = ' '.join([cmd_quote(t) for t in self.async_execute_process_kwargs['cmd']])

        # Define the command to run the subcommand
        env_cmd = 'catkin env -si {} {}'.format(
            env_overrides_formatted,
            cmd_str)

        # Return the full command
        return 'cd {}; {} | {}; cd -'.format(
            self.async_execute_process_kwargs['cwd'],
            get_env_cmd,
            env_cmd)
Exemplo n.º 2
0
 def _get(self, remote_filename, local_path):
     """Download a remote file to local_path via an lftp `get` command."""
     # NOTE(review): remote_path is quoted but remote_filename is appended
     # unquoted, matching the original behavior.
     remote_src = cmd_quote(self.remote_path) + remote_filename
     commandline = "lftp -c \"source %s; get %s -o %s\"" % (
         cmd_quote(self.tempname),
         remote_src,
         cmd_quote(local_path.name),
     )
     log.Debug("CMD: %s" % commandline)
     _, l, e = self.subprocess_popen(commandline)
     log.Debug("STDERR:\n%s" % e)
     log.Debug("STDOUT:\n%s" % l)
Exemplo n.º 3
0
    def upload_files(self, file_defs):
        print "Uploading files ... "

        for curr_file_def in file_defs:
            full_local_filepath, dest_filename, full_remote_dir_path, num_files, overwrite = curr_file_def

            # Find all the files that match our full_local_filepath (which may contain pattern)
            local_files = sorted(glob.glob(full_local_filepath))

            try:
                if len(local_files) > 0:
                    # Ensure the remote dir exits
                    # TODO: How expensive are calls to mkdir if dir already exists, 
                    #       better to check dir exists first?
                    subprocess.check_call("ssh " + remote_account + " 'mkdir -p " + 
                                          full_remote_dir_path + "'", shell=True)
                    curr_file_num = 1
                    for curr_file in local_files:
                        if (num_files is not 0) and (curr_file_num > num_files):
                            break

                        # Deal with the case where we want to alter the destination filename
                        curr_src_full_filename = os.path.basename(curr_file)
                        if dest_filename is "":
                            full_remote_filepath = cmd_quote(os.path.join(full_remote_dir_path, 
                                                       curr_src_full_filename))
                        else:
                            filename, extension = os.path.splitext(curr_src_full_filename)
                            full_remote_filepath = cmd_quote(os.path.join(full_remote_dir_path, 
                                                       dest_filename + extension))
                        
                        # Deal with the case where we do not want to overwrite the dest file
                        file_num = 2
                        if overwrite is False:
                            full_remote_filename = os.path.basename(full_remote_filepath)
                            while subprocess.call(['ssh', remote_account, 'test -e ' + 
                                                   full_remote_filepath]) == 0:
                                filename_no_ext, filename_ext = os.path.splitext(full_remote_filename)

                                full_remote_filepath = cmd_quote(os.path.join(full_remote_dir_path, 
                                            filename_no_ext + "_" + str(file_num) + filename_ext))
                                file_num += 1

                        subprocess.check_call("scp " + curr_file + " " + 
                                 remote_account + ":" + full_remote_filepath, shell=True)

                        curr_file_num += 1

            except subprocess.CalledProcessError as e:
                print "Error uploading files: ", e.returncode
                raise

        print "... upload finished."
Exemplo n.º 4
0
 def _delete(self, filename):
     """Remove a single file from the remote directory via lftp."""
     lftp_script = "source %s; cd %s; rm %s" % (
         cmd_quote(self.tempname),
         cmd_quote(self.remote_path),
         cmd_quote(filename),
     )
     commandline = "lftp -c \"%s\"" % lftp_script
     log.Debug("CMD: %s" % commandline)
     _, l, e = self.subprocess_popen(commandline)
     log.Debug("STDERR:\n%s" % e)
     log.Debug("STDOUT:\n%s" % l)
Exemplo n.º 5
0
 def _put(self, source_path, remote_filename):
     """Upload source_path to the remote directory as remote_filename via lftp.

     Creates the remote directory first (mkdir -p) in case it does not
     exist yet.
     """
     commandline = "lftp -c \"source %s; mkdir -p %s; put %s -o %s\"" % (
         # BUGFIX: tempname was interpolated unquoted here, unlike every
         # other lftp invocation in this backend; quote it for consistency
         # (a temp path containing shell metacharacters would break the
         # command).
         cmd_quote(self.tempname),
         cmd_quote(self.remote_path),
         cmd_quote(source_path.name),
         # NOTE(review): remote_filename is appended unquoted, matching
         # the companion _get method.
         cmd_quote(self.remote_path) + remote_filename
     )
     log.Debug("CMD: %s" % commandline)
     s, l, e = self.subprocess_popen(commandline)
     log.Debug("STATUS: %s" % s)
     log.Debug("STDERR:\n"
               "%s" % (e))
     log.Debug("STDOUT:\n"
               "%s" % (l))
Exemplo n.º 6
0
def format_usage(program_name, description, commands=None, options=()):
    """
    Construct the usage text.

    Parameters
    ----------
        program_name : str
            Usually the name of the python file that contains the experiment.
        description : str
            description of this experiment (usually the docstring).
        commands : dict[str, func]
            Dictionary of supported commands.
            Each entry should be a tuple of (name, function).
        options : list[sacred.commandline_options.CommandLineOption]
            A list of all supported commandline options.

    Returns
    -------
        str
            The complete formatted usage text for this experiment.
            It adheres to the structure required by ``docopt``.

    """
    # Assemble the template fields first, then render in one go.
    fields = {
        'program_name': cmd_quote(program_name),
        'description': description.strip() if description else '',
        'options': _format_options_usage(options),
        'arguments': _format_arguments_usage(options),
        'commands': _format_command_usage(commands),
    }
    return USAGE_TEMPLATE.format(**fields)
Exemplo n.º 7
0
def format_env_dict(environ):
    """Format an environment dict for printing to console similarly to `typeset` builtin."""

    lines = ('typeset -x {}={}'.format(name, cmd_quote(value))
             for name, value in environ.items())
    return '\n'.join(lines)
Exemplo n.º 8
0
 def __parse_argument(self, argument):
     """Shell-quote one CLI argument, preserving --opt=value / -o=value forms.

     Only the value part of an option is quoted; a plain argument is
     quoted whole.
     """
     for pattern, template in (
             (self.long_option_pattern, '--{option}={value}'),
             (self.short_option_pattern, '-{option}={value}')):
         match = pattern.match(argument)
         if match:
             parts = match.groupdict()
             return template.format(
                 option=parts['option'],
                 value=cmd_quote(parts['value']),
             )
     return cmd_quote(argument)
def print_creds(environment_overrides):
    exports = []
    for var, value in environment_overrides.items():
        if value is not None:
            exports.append("export {}={}".format(var, cmd_quote(value)))
        else:
            exports.append("unset {}".format(var))

    print("\n".join(exports))
Exemplo n.º 10
0
    def _list(self):
        """List filenames in the remote directory, creating it if needed.

        Returns the last whitespace-separated field of each long-listing
        line, i.e. the filenames.
        """
        # Do a long listing to avoid connection reset
        # NOTE(review): the original computed
        # `remote_dir = urllib.unquote(self.parsed_url.path)` here but never
        # used the result — and urllib.unquote only exists on Python 2, so
        # the call would crash under Python 3. Removed as dead code.
        quoted_path = cmd_quote(self.remote_path)
        # failing to cd into the folder might be because it was not created already
        commandline = "lftp -c \"source %s; ( cd %s && ls ) || ( mkdir -p %s && cd %s && ls )\"" % (
            cmd_quote(self.tempname),
            quoted_path, quoted_path, quoted_path
        )
        log.Debug("CMD: %s" % commandline)
        _, l, e = self.subprocess_popen(commandline)
        log.Debug("STDERR:\n"
                  "%s" % (e))
        log.Debug("STDOUT:\n"
                  "%s" % (l))

        # Look for our files as the last element of a long list line
        return [x.split()[-1] for x in l.split('\n') if x]
Exemplo n.º 11
0
def convert(fname, pages=None):
    """Extract text from a PDF file.

    Tries the external ``pdftotext`` binary first and falls back to the
    pdfminer-based ``convert_miner`` when the binary is missing or fails.

    :param fname: path to the PDF file
    :param pages: page selection forwarded to ``convert_miner`` (the
        pdftotext path converts the whole document)
    :returns: extracted text (bytes from pdftotext, or whatever
        ``convert_miner`` returns)
    """
    try:
        # attempt to find a pdftotext binary
        cmd = "where" if platform.system() == "Windows" else "which"
        cmd = subprocess.check_output(cmd + ' pdftotext', shell=True)
        # BUGFIX: `where` on Windows can report several matches, one per
        # line — keep only the first.
        matches = cmd.decode('utf8').strip().splitlines()
        cmd = matches[0] if matches else ''
        if not cmd:
            raise EnvironmentError("pdftotext not found")

        # BUGFIX: quote the binary path as well as the filename, in case
        # the install location contains spaces.
        return subprocess.check_output(
            ' '.join([cmd_quote(cmd), cmd_quote(fname), '-']), shell=True)
    except (EnvironmentError, subprocess.CalledProcessError):
        # logging.warning("pdftotext not found, defaulting to pdfminer.")
        return convert_miner(fname, pages=pages)
Exemplo n.º 12
0
def expand_one_verb_alias(sysargs, verb_aliases, used_aliases):
    """Iterate through sysargs looking for expandable verb aliases.

    When a verb alias is found, sysargs is modified to effectively expand the alias.
    The alias is removed from verb_aliases and added to used_aliases.
    After finding and expanding an alias, this function returns True.
    If no alias is found to be expanded, this function returns False.
    """
    cmd = os.path.basename(sys.argv[0])
    for index, arg in enumerate(sysargs):
        if arg.startswith('-'):
            # Not a verb, continue through the arguments
            continue
        if arg in used_aliases:
            # An already-used alias was deleted from verb_aliases, so it
            # cannot be expanded again below.
            print(fmt(
                "@!@{gf}==>@| Expanding alias '@!@{yf}" + arg +
                "@|' was previously expanded, ignoring this time to prevent infinite recursion."
            ))
        if arg in verb_aliases:
            # BUGFIX: was `sysargs[:index - 1]`, which silently dropped the
            # argument immediately before the alias (and produced [] when
            # index == 1).
            before = sysargs[:index]
            after = sysargs[index + 1:]
            sysargs[:] = before + verb_aliases[arg] + after
            # NOTE(review): the "@!@{...}" sequences are catkin's console
            # color markup; the scraped original contained a mangled
            # "[email protected]" artifact in their place, restored here.
            print(fmt(
                "@!@{gf}==>@| Expanding alias "
                "'@!@{yf}{alias}@|' "
                "from '@{yf}{before} @!{alias}@{boldoff}{after}@|' "
                "to '@{yf}{before} @!{expansion}@{boldoff}{after}@|'"
            ).format(
                alias=arg,
                expansion=' '.join([cmd_quote(aarg) for aarg in verb_aliases[arg]]),
                before=' '.join([cmd] + before),
                after=(' '.join([''] + after) if after else '')
            ))
            # Prevent the alias from being used again, to prevent infinite recursion
            used_aliases.append(arg)
            del verb_aliases[arg]
            # Return True since one has been found
            return True
    # BUGFIX: this `return False` was indented inside the loop body, so the
    # scan stopped at the first non-expandable verb and the function
    # returned None when every argument started with '-'. It belongs after
    # the loop.
    return False
Exemplo n.º 13
0
    def __init__(self, parsed_url):
        """Set up an lftp-based duplicity backend for the given URL.

        Probes for the lftp binary, normalizes the scheme/host/path from
        parsed_url, and writes a reusable lftp settings script (SSL, FTP
        mode, timeouts, open command) to a temp file consumed by the other
        methods via `source`.
        """
        duplicity.backend.Backend.__init__(self, parsed_url)

        # we expect an output
        # NOTE(review): if os.popen itself raises, `fout` is never bound and
        # the `if not fout` check below raises NameError — confirm intended.
        try:
            p = os.popen("lftp --version")
            fout = p.read()
            ret = p.close()
        except Exception:
            pass
        # there is no output if lftp not found
        if not fout:
            log.FatalError("LFTP not found:  Please install LFTP.",
                           log.ErrorCode.ftps_lftp_missing)

        # version is the second word of the second part of the first line
        version = fout.split('\n')[0].split(' | ')[1].split()[1]
        log.Notice("LFTP version is %s" % version)

        self.parsed_url = parsed_url

        # Strip the 'lftp+' prefix and map webdav(s) onto http(s) for lftp.
        self.scheme = duplicity.backend.strip_prefix(parsed_url.scheme, 'lftp').lower()
        self.scheme = re.sub('^webdav', 'http', self.scheme)
        self.url_string = self.scheme + '://' + parsed_url.hostname
        if parsed_url.port:
            self.url_string += ":%s" % parsed_url.port

        # Remote path is stored relative (no leading slash) ...
        self.remote_path = re.sub('^/', '', parsed_url.path)

        # Fix up an empty remote path
        if len(self.remote_path) == 0:
            self.remote_path = '/'

        # Use an explicit directory name.
        if self.remote_path[-1] != '/':
            self.remote_path += '/'

        # Build the lftp -u authentication flag if credentials are given.
        self.authflag = ''
        if self.parsed_url.username:
            self.username = self.parsed_url.username
            self.password = self.get_password()
            self.authflag = "-u '%s,%s'" % (self.username, self.password)

        # Passive mode unless the user asked for a 'regular' connection.
        if globals.ftp_connection == 'regular':
            self.conn_opt = 'off'
        else:
            self.conn_opt = 'on'

        # check for cacert file if https
        self.cacert_file = globals.ssl_cacert_file
        if self.scheme == 'https' and not globals.ssl_no_check_certificate:
            cacert_candidates = ["~/.duplicity/cacert.pem",
                                 "~/duplicity_cacert.pem",
                                 "/etc/duplicity/cacert.pem"]
            # look for a default cacert file
            if not self.cacert_file:
                for path in cacert_candidates:
                    path = os.path.expanduser(path)
                    if (os.path.isfile(path)):
                        self.cacert_file = path
                        break

        # save config into a reusable temp file
        # NOTE(review): os.write with str arguments is Python-2 behavior;
        # under Python 3 these writes would need bytes.
        self.tempfile, self.tempname = tempdir.default().mkstemp()
        os.write(self.tempfile, "set ssl:verify-certificate " +
                 ("false" if globals.ssl_no_check_certificate else "true") + "\n")
        if self.cacert_file:
            os.write(self.tempfile, "set ssl:ca-file " + cmd_quote(self.cacert_file) + "\n")
        if globals.ssl_cacert_path:
            os.write(self.tempfile, "set ssl:ca-path " + cmd_quote(globals.ssl_cacert_path) + "\n")
        if self.parsed_url.scheme == 'ftps':
            os.write(self.tempfile, "set ftp:ssl-allow true\n")
            os.write(self.tempfile, "set ftp:ssl-protect-data true\n")
            os.write(self.tempfile, "set ftp:ssl-protect-list true\n")
        elif self.parsed_url.scheme == 'ftpes':
            os.write(self.tempfile, "set ftp:ssl-force on\n")
            os.write(self.tempfile, "set ftp:ssl-protect-data on\n")
            os.write(self.tempfile, "set ftp:ssl-protect-list on\n")
        else:
            os.write(self.tempfile, "set ftp:ssl-allow false\n")
        os.write(self.tempfile, "set http:use-propfind true\n")
        os.write(self.tempfile, "set net:timeout %s\n" % globals.timeout)
        os.write(self.tempfile, "set net:max-retries %s\n" % globals.num_retries)
        os.write(self.tempfile, "set ftp:passive-mode %s\n" % self.conn_opt)
        if log.getverbosity() >= log.DEBUG:
            os.write(self.tempfile, "debug\n")
        # lftp has no 'ftpes' scheme of its own; rewrite it to plain ftp
        # (the ssl-force settings above provide the explicit TLS).
        if self.parsed_url.scheme == 'ftpes':
            os.write(self.tempfile, "open %s %s\n" % (self.authflag, self.url_string.replace('ftpes', 'ftp')))
        else:
            os.write(self.tempfile, "open %s %s\n" % (self.authflag, self.url_string))
        os.close(self.tempfile)
        # print settings in debug mode
        if log.getverbosity() >= log.DEBUG:
            f = open(self.tempname, 'r')
            log.Debug("SETTINGS: \n"
                      "%s" % f.read())
Exemplo n.º 14
0
def catkin_main(sysargs):
    """Entry point for the `catkin` CLI.

    Parses global options, expands verb aliases, splits arguments into
    pre-verb and post-verb groups, then dispatches to the chosen verb's
    `main` function and exits with its return code.

    :param sysargs: command-line arguments without argv[0]; if None,
        ``sys.argv[1:]`` is used instead
    """
    # Initialize config
    try:
        initialize_config()
    except RuntimeError as exc:
        sys.exit("Failed to initialize config: {0}".format(exc))

    # Create a top level parser
    parser = argparse.ArgumentParser(
        description="catkin command", formatter_class=argparse.RawDescriptionHelpFormatter)
    add = parser.add_argument
    add('-a', '--list-aliases', action="store_true", default=False,
        help="Lists the current verb aliases and then quits, all other arguments are ignored")
    add('--test-colors', action='store_true', default=False,
        help="Prints a color test pattern to the screen and then quits, all other arguments are ignored")
    add('--version', action='store_true', default=False,
        help="Prints the catkin_tools version.")
    color_control_group = parser.add_mutually_exclusive_group()
    add = color_control_group.add_argument
    add('--force-color', action='store_true', default=False,
        help='Forces catkin to output in color, even when the terminal does not appear to support it.')
    add('--no-color', action='store_true', default=False,
        help='Forces catkin to not use color in the output, regardless of the detect terminal type.')

    # Deprecated, moved to `catkin locate --shell-verbs
    add('--locate-extra-shell-verbs', action='store_true', help=argparse.SUPPRESS)

    # Generate a list of verbs available
    verbs = list_verbs()

    # Create the subparsers for each verb and collect the argument preprocessors
    argument_preprocessors = create_subparsers(parser, verbs)

    # Get verb aliases
    verb_aliases = get_verb_aliases()

    # Setup sysargs
    sysargs = sys.argv[1:] if sysargs is None else sysargs

    # Get colors config
    no_color = False
    force_color = os.environ.get('CATKIN_TOOLS_FORCE_COLOR', False)
    for arg in sysargs:
        if arg == '--no-color':
            no_color = True
        if arg == '--force-color':
            force_color = True

    # Parses as: no_color or (not force_color and not is_tty(...)) —
    # --no-color always wins; otherwise color is disabled only when not
    # forced and stdout is not a terminal.
    if no_color or not force_color and not is_tty(sys.stdout):
        set_color(False)

    # Check for version
    if '--version' in sysargs:
        print('catkin_tools {} (C) 2014-{} Open Source Robotics Foundation'.format(
            pkg_resources.get_distribution('catkin_tools').version,
            date.today().year)
        )
        print('catkin_tools is released under the Apache License,'
              ' Version 2.0 (http://www.apache.org/licenses/LICENSE-2.0)')
        print('---')
        print('Using Python {}'.format(''.join(sys.version.split('\n'))))
        sys.exit(0)

    # Deprecated option
    if '--locate-extra-shell-verbs' in sysargs:
        print('Please use `catkin locate --shell-verbs` instead of `catkin --locate-extra-shell-verbs`',
              file=sys.stderr)
        sys.exit(0)

    # Check for --test-colors
    # (both scans below stop at the first positional argument, so these
    # flags are only honored before the verb)
    for arg in sysargs:
        if arg == '--test-colors':
            test_colors()
            sys.exit(0)
        if not arg.startswith('-'):
            break

    # Check for --list-aliases
    for arg in sysargs:
        if arg == '--list-aliases' or arg == '-a':
            for alias in sorted(list(verb_aliases.keys())):
                print("{0}: {1}".format(alias, ' '.join([cmd_quote(aarg) for aarg in verb_aliases[alias]])))
            sys.exit(0)
        if not arg.startswith('-'):
            break

    # Do verb alias expansion
    sysargs = expand_verb_aliases(sysargs, verb_aliases)

    # Determine the verb, splitting arguments into pre and post verb
    verb = None
    pre_verb_args = []
    post_verb_args = []
    for index, arg in enumerate(sysargs):
        # If the arg does not start with a `-` then it is a positional argument
        # The first positional argument must be the verb
        if not arg.startswith('-'):
            verb = arg
            post_verb_args = sysargs[index + 1:]
            break
        # If the `-h` or `--help` option comes before the verb, parse_args
        # (argparse prints top-level help and exits here)
        if arg in ['-h', '--help']:
            parser.parse_args(sysargs)
        # Otherwise it is a pre-verb option
        pre_verb_args.append(arg)

    # Error on no verb provided
    if verb is None:
        print(parser.format_usage())
        sys.exit("Error: No verb provided.")
    # Error on unknown verb provided
    if verb not in verbs:
        print(parser.format_usage())
        sys.exit("Error: Unknown verb '{0}' provided.".format(verb))

    # First allow the verb's argument preprocessor to strip any args
    # and return any "extra" information it wants as a dict
    processed_post_verb_args, extras = argument_preprocessors[verb](post_verb_args)
    # Then allow argparse to process the left over post-verb arguments along
    # with the pre-verb arguments and the verb itself
    args = parser.parse_args(pre_verb_args + [verb] + processed_post_verb_args)
    # Extend the argparse result with the extras from the preprocessor
    for key, value in extras.items():
        setattr(args, key, value)

    # Finally call the subparser's main function with the processed args
    # and the extras which the preprocessor may have returned
    sys.exit(args.main(args) or 0)
Exemplo n.º 15
0
    def prime_docker_container(self, runtime, func_arn, env_vars, lambda_cwd):
        """
        Prepares a persistent docker container for a specific function.
        :param runtime: Lamda runtime environment. python2.7, nodejs6.10, etc.
        :param func_arn: The ARN of the lambda function.
        :param env_vars: The environment variables for the lambda.
        :param lambda_cwd: The local directory containing the code for the lambda function.
        :return: ContainerInfo class containing the container name and default entry point.
        """
        with self.docker_container_lock:
            # Get the container name and id.
            container_name = self.get_container_name(func_arn)

            LOG.debug('Priming docker container: %s' % container_name)

            status = self.get_docker_container_status(func_arn)
            # Container is not running or doesn't exist.
            if status < 1:
                # Make sure the container does not exist in any form/state.
                self.destroy_docker_container(func_arn)

                # Shell-quote each env value; env_vars is an iterable of
                # (key, value) pairs.
                env_vars_str = ' '.join(['-e {}={}'.format(k, cmd_quote(v)) for (k, v) in env_vars])

                # Create and start the container
                LOG.debug('Creating container: %s' % container_name)
                cmd = (
                    'docker create'
                    ' --name "%s"'
                    ' --entrypoint /bin/bash'  # Load bash when it starts.
                    ' --interactive'  # Keeps the container running bash.
                    ' -e AWS_LAMBDA_EVENT_BODY="$AWS_LAMBDA_EVENT_BODY"'
                    ' -e HOSTNAME="$HOSTNAME"'
                    ' -e LOCALSTACK_HOSTNAME="$LOCALSTACK_HOSTNAME"'
                    '  %s'  # env_vars
                    ' lambci/lambda:%s'
                ) % (container_name, env_vars_str, runtime)
                LOG.debug(cmd)
                run(cmd, stderr=subprocess.PIPE, outfile=subprocess.PIPE)

                LOG.debug('Copying files to container "%s" from "%s".' % (container_name, lambda_cwd))
                cmd = (
                    'docker cp'
                    ' "%s/." "%s:/var/task"'
                ) % (lambda_cwd, container_name)
                LOG.debug(cmd)
                run(cmd, stderr=subprocess.PIPE, outfile=subprocess.PIPE)

                LOG.debug('Starting container: %s' % container_name)
                cmd = 'docker start %s' % (container_name)
                LOG.debug(cmd)
                run(cmd, stderr=subprocess.PIPE, outfile=subprocess.PIPE)
                # give the container some time to start up
                time.sleep(1)

            # Get the entry point for the image.
            LOG.debug('Getting the entrypoint for image: lambci/lambda:%s' % runtime)
            cmd = (
                'docker image inspect'
                ' --format="{{ .ContainerConfig.Entrypoint }}"'
                ' lambci/lambda:%s'
            ) % (runtime)

            LOG.debug(cmd)
            # NOTE(review): `async` is a reserved keyword from Python 3.7 on,
            # so this line is a SyntaxError there; upstream later renamed the
            # parameter (e.g. to `asynchronous`). Left as-is since the `run`
            # helper's signature is not visible here.
            run_result = run(cmd, async=False, stderr=subprocess.PIPE, outfile=subprocess.PIPE)

            # Inspect prints the entrypoint as "[token ...]"; strip the
            # brackets and surrounding whitespace.
            entry_point = run_result.strip('[]\n\r ')

            LOG.debug('Using entrypoint "%s" for container "%s".' % (entry_point, container_name))
            return ContainerInfo(container_name, entry_point)
Exemplo n.º 16
0
def command(args, extra_args):  # noqa: C901 FIXME!!!
    """SSH into the master node of a ParallelCluster stack.

    Resolves the cluster user and master IP from the CloudFormation stack
    outputs (falling back to the template's OS mappings while the stack is
    still creating), builds the configured ssh command with extra_args
    shell-quoted, and runs it via os.system unless --dryrun is set.
    """
    stack = "parallelcluster-" + args.cluster_name
    config = cfnconfig.ParallelClusterConfig(args)
    # Allow the config to alias the ssh command; otherwise use the default.
    if args.command in config.aliases:
        config_command = config.aliases[args.command]
    else:
        config_command = "ssh {CFN_USER}@{MASTER_IP} {ARGS}"

    cfn = boto3.client(
        "cloudformation",
        region_name=config.region,
        aws_access_key_id=config.aws_access_key_id,
        aws_secret_access_key=config.aws_secret_access_key,
    )
    try:
        stack_result = cfn.describe_stacks(StackName=stack).get("Stacks")[0]
        status = stack_result.get("StackStatus")
        valid_status = ["CREATE_COMPLETE", "UPDATE_COMPLETE", "UPDATE_ROLLBACK_COMPLETE"]
        invalid_status = ["DELETE_COMPLETE", "DELETE_IN_PROGRESS"]

        if status in invalid_status:
            LOGGER.info("Stack status: %s. Cannot SSH while in %s", status, " or ".join(invalid_status))
            sys.exit(1)
        elif status in valid_status:
            # Prefer the public IP; fall back to the private one.
            outputs = stack_result.get("Outputs")
            username = _get_output_value(outputs, "ClusterUser")
            ip = (
                _get_output_value(outputs, "MasterPublicIP")
                if _get_output_value(outputs, "MasterPublicIP")
                else _get_output_value(outputs, "MasterPrivateIP")
            )

            if not username:
                LOGGER.info("Failed to get cluster %s username.", args.cluster_name)
                sys.exit(1)

            if not ip:
                LOGGER.info("Failed to get cluster %s ip.", args.cluster_name)
                sys.exit(1)
        else:
            # Stack is in CREATING, CREATED_FAILED, or ROLLBACK_COMPLETE but MasterServer is running
            ip = _get_master_server_ip(stack, config)
            template = cfn.get_template(StackName=stack)
            mappings = template.get("TemplateBody").get("Mappings").get("OSFeatures")
            base_os = _get_param_value(stack_result.get("Parameters"), "BaseOS")
            username = mappings.get(base_os).get("User")

        # shlex.quote on Python 3, pipes.quote on Python 2.
        try:
            from shlex import quote as cmd_quote
        except ImportError:
            from pipes import quote as cmd_quote

        # build command
        cmd = config_command.format(
            CFN_USER=username, MASTER_IP=ip, ARGS=" ".join(cmd_quote(str(e)) for e in extra_args)
        )

        # run command
        if not args.dryrun:
            os.system(cmd)
        else:
            LOGGER.info(cmd)
    except ClientError as e:
        LOGGER.critical(e.response.get("Error").get("Message"))
        sys.stdout.flush()
        sys.exit(1)
    except KeyboardInterrupt:
        LOGGER.info("\nExiting...")
        sys.exit(0)
Exemplo n.º 17
0
def get_resultspace_environment(result_space_path, base_env=None, quiet=False, cached=True, strict=True):
    """Get the environment variables which result from sourcing another catkin
    workspace's setup files as the string output of `cmake -E environment`.
    This cmake command is used to be as portable as possible.

    :param result_space_path: path to a Catkin result-space whose environment should be loaded, ``str``
    :type result_space_path: str
    :param base_env: environment to start from (defaults to a copy of ``os.environ``)
    :type base_env: dict or None
    :param quiet: don't throw exceptions, ``bool``
    :type quiet: bool
    :param cached: use the cached environment
    :type cached: bool
    :param strict: require the ``.catkin`` file exists in the resultspace
    :type strict: bool

    :returns: a dictionary of environment variables and their values
    """

    # Set base environment to the current environment
    if base_env is None:
        base_env = dict(os.environ)

    # Get the MD5 checksums for the current env hooks
    # TODO: the env hooks path should be defined somewhere
    env_hooks_path = os.path.join(result_space_path, 'etc', 'catkin', 'profile.d')
    if os.path.exists(env_hooks_path):
        env_hooks = [
            md5(open(os.path.join(env_hooks_path, path)).read().encode('utf-8')).hexdigest()
            for path in os.listdir(env_hooks_path)]
    else:
        env_hooks = []

    # Check the cache first, if desired
    if cached and result_space_path in _resultspace_env_cache:
        (cached_base_env, cached_env_hooks, result_env) = _resultspace_env_cache.get(result_space_path)
        if env_hooks == cached_env_hooks and cached_base_env == base_env:
            return dict(result_env)

    # Check to make sure result_space_path is a valid directory
    if not os.path.isdir(result_space_path):
        if quiet:
            return dict()
        raise IOError(
            "Cannot load environment from resultspace \"%s\" because it does not "
            "exist." % result_space_path
        )

    # Check to make sure result_space_path contains a `.catkin` file
    # TODO: `.catkin` should be defined somewhere as an atom in catkin_pkg
    if strict and not os.path.exists(os.path.join(result_space_path, '.catkin')):
        if quiet:
            return dict()
        raise IOError(
            "Cannot load environment from resultspace \"%s\" because it does not "
            "appear to be a catkin-generated resultspace (missing .catkin marker "
            "file)." % result_space_path
        )

    # Determine the shell to use to source the setup file
    shell_path = os.environ.get('SHELL', None)
    if shell_path is None:
        shell_path = DEFAULT_SHELL
        if not os.path.isfile(shell_path):
            raise RuntimeError(
                "Cannot determine shell executable. "
                "The 'SHELL' environment variable is not set and "
                "the default '{0}' does not exist.".format(shell_path)
            )
    (_, shell_name) = os.path.split(shell_path)

    # Use fallback shell if using a non-standard shell
    if shell_name not in ['bash', 'zsh']:
        shell_name = 'bash'

    # Check to make sure result_space_path contains the appropriate setup file
    setup_file_path = os.path.join(result_space_path, 'env.sh')
    if not os.path.exists(setup_file_path):
        if quiet:
            return dict()
        raise IOError(
            "Cannot load environment from resultspace \"%s\" because the "
            "required setup file \"%s\" does not exist." % (result_space_path, setup_file_path)
        )

    # Construct a command list which sources the setup file and prints the env to stdout
    norc_flags = {
        'bash': '--norc',
        'zsh': '-f'
    }

    # env.sh is an executable wrapper that runs the given command inside the
    # resultspace environment; `typeset -px` prints the exported variables.
    command = ' '.join([
        cmd_quote(setup_file_path),
        shell_path,
        norc_flags[shell_name],
        '-c',
        '"typeset -px"'
    ])

    # Define some "blacklisted" environment variables which shouldn't be copied
    blacklisted_keys = ('_', 'PWD')
    env_dict = {}

    try:
        # Run the command synchronously to get the resultspace environment.
        # NOTE: an earlier implementation ran this through execute_process(),
        # but that sometimes failed to capture all output (returned
        # prematurely); the dead `if 0:` branch carrying it has been removed.
        p = subprocess.Popen(command, cwd=os.getcwd(), env=base_env, shell=True, stdout=subprocess.PIPE)
        lines, _ = p.communicate()

        # Extract the environment variables
        env_dict = {
            k: v
            for k, v in parse_env_str(lines).items()
            if k not in blacklisted_keys
        }

        # Check to make sure we got some kind of environment
        if len(env_dict) > 0:
            # Cache the result
            _resultspace_env_cache[result_space_path] = (base_env, env_hooks, env_dict)
        else:
            print("WARNING: Sourced environment from `{}` has no environment variables. Something is wrong.".format(
                setup_file_path))

    except IOError as err:
        print("WARNING: Failed to extract environment from resultspace: {}: {}".format(
            result_space_path, str(err)),
            file=sys.stderr)

    return dict(env_dict)