Example #1
0
def execute(nodes_definition, flow_definitions, flow_name,
            node_args=_DEFAULT_NODE_ARGS, node_args_file=None, node_args_json=False, concurrency=_DEFAULT_CONCURRENCY,
            sleep_time=_DEFAULT_SLEEP_TIME, config_py=None, keep_config_py=False, hide_progressbar=False,
            selective_task_names=None, selective_follow_subflows=False, selective_run_subsequent=False):
    """Execute flows based on YAML configuration in a CLI.

    :param nodes_definition: path to the nodes definition YAML file
    :param flow_definitions: paths to flow definition YAML files
    :param flow_name: name of the flow that should be run
    :param node_args: node arguments passed to the flow (mutually exclusive with node_args_file)
    :param node_args_file: file from which node arguments should be read
    :param node_args_json: if True, parse node arguments as JSON
    :param concurrency: executor concurrency level
    :param sleep_time: executor sleep time
    :param config_py: path to a config.py file to use
    :param keep_config_py: if True, keep the generated config.py file
    :param hide_progressbar: if True, do not show the progressbar during execution
    :param selective_task_names: comma-separated task names for a selective flow run
    :param selective_follow_subflows: if True, follow subflows in the selective run
    :param selective_run_subsequent: if True, run tasks subsequent to the selected ones
    :raises RequestError: on conflicting options or malformed JSON node arguments
    """
    if node_args and node_args_file:
        raise RequestError("Node arguments could be specified by command line argument or a file, but not from both")

    if node_args_file:
        with open(node_args_file, 'r') as f:
            node_args = f.read()

    if node_args_json:
        try:
            node_args = json.loads(node_args)
        except Exception as e:
            raise RequestError("Unable to parse JSON arguments: %s" % str(e)) from e

    executor = Executor(nodes_definition, flow_definitions,
                        concurrency=concurrency, sleep_time=sleep_time,
                        config_py=config_py, keep_config_py=keep_config_py,
                        show_progressbar=not hide_progressbar)

    if selective_task_names:
        executor.run_flow_selective(
            flow_name,
            selective_task_names.split(','),
            node_args,
            follow_subflows=selective_follow_subflows,
            run_subsequent=selective_run_subsequent
        )
    else:
        # The selective-* modifiers are meaningless without --selective-task-names.
        if selective_follow_subflows:
            raise RequestError("Option --selective-follow-subflows requires --selective-task-names set")
        if selective_run_subsequent:
            raise RequestError("Option --selective-run-subsequent requires --selective-task-names set")

        executor.run(flow_name, node_args)
    def execute(self, arguments):
        """Run the scan handler matching the requested ecosystem.

        :param arguments: dictionary with task arguments; requires
            'ecosystem', 'name' and 'version' keys to be present
        :return: result of the ecosystem-specific scan
        :raises RequestError: when the ecosystem is not supported
        """
        for required_key in ('ecosystem', 'name', 'version'):
            self._strict_assert(arguments.get(required_key))

        # Dispatch table: ecosystem name -> scan handler.
        scan_handlers = {
            'maven': self._maven_scan,
            'npm': self._npm_scan,
            'pypi': self._python_scan,
            'nuget': self._nuget_scan,
        }

        handler = scan_handlers.get(arguments['ecosystem'])
        if handler is None:
            raise RequestError('Unsupported ecosystem')
        return handler(arguments)
    def execute(self, arguments):
        """Task code.

        :param arguments: dictionary with task arguments
        :return: {}, results
        :raises RequestError: when the ecosystem is not supported
        """
        for required_key in ('ecosystem', 'name', 'version'):
            self._strict_assert(arguments.get(required_key))

        ecosystem = arguments['ecosystem']
        # nuget has a dedicated scanner; the rest share the victims scan.
        if ecosystem == 'nuget':
            return self._nuget_scan(arguments)
        if ecosystem in ('maven', 'pypi', 'npm'):
            return self._victims_scan(arguments, ecosystem)
        raise RequestError('Unsupported ecosystem')
Example #4
0
    def execute(self, arguments):
        """Task code.

        :param arguments: dictionary with task arguments
        :return: {}, results
        :raises RequestError: when the ecosystem is not supported
        """
        for required_key in ('ecosystem', 'name', 'version'):
            self._strict_assert(arguments.get(required_key))

        rdb = StoragePool.get_connected_storage('BayesianPostgres')
        ecosystem = Ecosystem.by_name(rdb.session, arguments.get('ecosystem'))

        ecosystem_name = arguments['ecosystem']
        # maven/pypi/npm share the victims scan; nuget has its own scanner.
        if ecosystem_name in ('maven', 'pypi', 'npm'):
            return self._victims_scan(arguments, ecosystem)
        if ecosystem_name == 'nuget':
            return self._nuget_scan(arguments)
        raise RequestError('Unsupported ecosystem')
Example #5
0
def migrate(nodes_definition, flow_definitions, old_nodes_definition=None, old_flow_definitions=None, no_meta=False,
            migration_dir=None, use_git=False, no_check=False, tainted_flows=_DEFAULT_TAINTED_FLOW_STRATEGY):
    """Perform migrations on old and new YAML configuration files in flow changes.

    :param nodes_definition: path to the new nodes definition YAML file
    :param flow_definitions: paths to the new flow definition YAML files
    :param old_nodes_definition: path to the old nodes definition YAML file
        (must be given together with old_flow_definitions; disjoint with use_git)
    :param old_flow_definitions: paths to the old flow definition YAML files
    :param no_meta: if True, do not add metadata to the generated migration file
    :param migration_dir: directory in which migration files are kept
    :param use_git: if True, obtain old configuration files from the preceding Git revision
    :param no_check: if True, skip validity checks of old and new configuration files
    :param tainted_flows: name of the tainted flow strategy to use
    :raises RequestError: on conflicting options or invalid configuration files
    """
    # pylint: disable=too-many-branches
    # Old configuration must be supplied either completely or not at all.
    if bool(old_nodes_definition) != bool(old_flow_definitions):
        raise RequestError("Please provide all flow and nodes configuration files or use --git")

    use_old_files = bool(old_flow_definitions)
    if use_old_files and use_git:
        raise RequestError("Option --git is disjoint with explicit configuration file specification")

    if not use_old_files and not use_git:
        raise RequestError("Use --git or explicit old configuration file specification in order "
                           "to access old config files")

    if use_git:
        # Compute version that directly precedes the current master - there is relevant change
        # in any of config files.
        git_hash, depth = git_previous_version(nodes_definition)
        for new_flow_definition_file in flow_definitions:
            new_git_hash, new_depth = git_previous_version(new_flow_definition_file)
            if new_depth < depth:
                git_hash = new_git_hash
                depth = new_depth

        _logger.debug("Using Git hash %r for old config files", git_hash)
        old_nodes_definition = git_previous_version_file(git_hash, nodes_definition)
        old_flow_definitions = list(map(partial(git_previous_version_file, git_hash), flow_definitions))

    try:
        if not no_check:
            try:
                System.from_files(nodes_definition, flow_definitions)
            except Exception as e:
                raise RequestError("There is an error in your new configuration files: {}".format(str(e))) from e

            try:
                System.from_files(old_nodes_definition, old_flow_definitions)
            except Exception as e:
                raise RequestError("There is an error in your old configuration files: {}".format(str(e))) from e

        migrator = Migrator(migration_dir)
        new_migration_file = migrator.create_migration_file(
            old_nodes_definition,
            old_flow_definitions,
            nodes_definition,
            flow_definitions,
            TaintedFlowStrategy.get_option_by_name(tainted_flows),
            not no_meta
        )
    finally:
        if use_git:
            _logger.debug("Removing temporary files")
            # Clean up temporary files produced by git_previous_version_file()
            os.remove(old_nodes_definition)
            for old_flow_definition_file in old_flow_definitions:
                os.remove(old_flow_definition_file)

    _logger.info("New migration file placed in %r", new_migration_file)