Example #1
    def analyze_vulnerable_components(self, vulnerable_packages):
        """Make sure we have all packages with known vulnerabilities ingested.

        Runs non-forced bayesianPriorityFlow analysis.

        :param vulnerable_packages: dict, a dict of vulnerable packages with details
        :return: None
        """
        for ga, data in vulnerable_packages.items():
            if data:
                versions = data[0].get('affected', []) + data[0].get(
                    'not_affected', [])
                for version in versions:
                    node_args = {
                        'ecosystem': 'maven',
                        'force': False,
                        'force_graph_sync': False,
                        'name': ga,
                        'recursive_limit': 0,
                        'version': version
                    }
                    self.log.info(
                        "Scheduling analysis of a package "
                        "with known vulnerabilities: {ga}:{v}".format(
                            ga=ga, v=version))
                    run_flow('bayesianPriorityFlow', node_args)
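For concreteness, here is a minimal sketch of how this method might be invoked. The data below is hypothetical and only mirrors the keys ('affected', 'not_affected') that the loop reads; `analyzer` stands in for whatever object hosts the method:

# Hypothetical input shape: GA coordinates mapped to a list of vulnerability records.
vulnerable_packages = {
    'org.example:demo-lib': [{
        'affected': ['1.0.0', '1.1.0'],
        'not_affected': ['1.2.0'],
    }],
}
analyzer.analyze_vulnerable_components(vulnerable_packages)  # schedules one flow per version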
Example #2
    def run(self, flow_name, node_args=None):
        """Run executor.

        :param flow_name: a flow name that should be run
        :param node_args: arguments for the flow
        """
        self._prepare()
        run_flow(flow_name, node_args)
        self._executor_run()
Example #4
    def test_run_flow(self):
        self.init(edge_table={'flow1': []}, dispatcher_queues={'flow1': 'flow1_queue'})

        flexmock(Dispatcher)
        Dispatcher.should_receive('apply_async').and_return("<dispatcher_id>")

        assert run_flow('flow1', node_args={'foo': 'bar'}) == "<dispatcher_id>"
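This test hints at the mechanics: run_flow looks up the flow's queue in the configured dispatcher_queues mapping and schedules Selinon's Dispatcher Celery task on it. A simplified sketch of that dispatch step follows; this is not Selinon's actual implementation, and Config and Dispatcher are simply the objects the test initializes and mocks:

def run_flow_sketch(flow_name, node_args=None):
    # Simplified: look up the queue configured for this flow; unknown flows fail here.
    queue = Config.dispatcher_queues[flow_name]
    # Schedule the Dispatcher Celery task, mirroring the mocked apply_async above.
    return Dispatcher().apply_async(
        kwargs={'flow_name': flow_name, 'node_args': node_args},
        queue=queue)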
Example #5
def handler(signum, frame):
    logger.debug("Running Liveness Probe")
    if ENABLE_SCHEDULING:
        run_flow('livenessFlow', [None])
    else:
        logger.debug("Liveness probe - livenessFlow"
                     " did not run since selinon is not initialized")

    basedir = os.path.dirname(PROBE_FILE_LOCATION)
    if not os.path.exists(basedir):
        os.makedirs(basedir)

    with open(PROBE_FILE_LOCATION, 'a'):
        os.utime(PROBE_FILE_LOCATION, None)

    logger.debug("Liveness probe - finished")
Example #6
def run_server_flow(flow_name, node_args):
    """To run the worker flow via selinon.

    :param flow_name: Name of the ingestion flow
    :param node_args: Details required by Selinon task manager for triggering a flow.
    :return: Selinon dispatcher ID associated with the started flow.
    """
    return run_flow(flow_name, node_args)
Example #7
    def run_selinon_flow(cls, flow_name, node_args):
        """ Run Selinon flow, connect to broker if necessary

        :param flow_name: name of flow to run
        :param node_args: flow arguments
        :return: celery.AsyncResult describing dispatcher task
        """
        cls.init()
        return run_flow(flow_name, node_args)
Example #8
def server_run_flow(flow_name, flow_args):
    """Run a flow

    :param flow_name: name of flow to be run as stated in YAML config file
    :param flow_args: arguments for the flow
    :return: dispatcher ID handling flow
    """
    # Before we schedule a flow, we have to ensure that we are connected to the broker.
    Setup.connect_if_not_connected()
    return run_flow(flow_name, flow_args)
Example #9
    def run_selinon_flow(self, flow_name, node_args):
        """Connect to broker, if not connected, and run Selinon flow.

        :param flow_name: flow that should be run
        :param node_args: flow arguments
        """
        self._normalize_package_name(node_args)

        if self.job_id:
            node_args['job_id'] = self.job_id

        return run_flow(flow_name, node_args)
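The _normalize_package_name helper is not shown. A plausible sketch of what it might do follows, assuming in-place canonicalization of the package name; the exact rules are an assumption, not the project's actual logic:

def _normalize_package_name(self, node_args):
    # Hypothetical: canonicalize the name to lowercase for ecosystems that require it,
    # mutating node_args in place before the flow is scheduled.
    name = node_args.get('name')
    if name and node_args.get('ecosystem') == 'npm':
        node_args['name'] = name.lower()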
Example #10
    def run_selinon_flow(self, flow_name, node_args):
        """Connect to broker, if not connected, and run Selinon flow

        :param flow_name: flow that should be run
        :param node_args: flow arguments
        """
        self.log.debug("Scheduling Selinon flow '%s' with node_args: '%s'", flow_name, node_args)

        if self.job_id:
            node_args['job_id'] = self.job_id

        return run_flow(flow_name, node_args)
Example #11
    def analyses_selinon_flow(self, name, version, ecosystem):
        """Run Selinon flow for analyses.

        :param name: name of the package to analyse
        :param version: package version
        :param ecosystem: package ecosystem
        :return: dispatcher ID serving flow
        """

        node_args = {'ecosystem': ecosystem, 'name': name, 'version': version}

        self.log.debug("Scheduling Selinon flow '%s' with node_args: '%s'",
                       'bayesianFlow', node_args)
        return run_flow('bayesianFlow', node_args)
Example #12
    def run_selinon_flow(cls, flow_name, node_args):
        """ Run Selinon flow, connect to broker if necessary

        :param flow_name: name of flow to run
        :param node_args: flow arguments
        :return: celery.AsyncResult describing dispatcher task
        """
        if not cls._connected:
            from demo_worker import init
            # It is not necessary to connect to the result backend; we just publish messages.
            init(with_result_backend=False)
            cls._connected = True

        return run_flow(flow_name, node_args)
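For orientation, `_connected` is a class attribute acting as a process-wide guard so the Celery broker is initialized at most once. A minimal skeleton of a class hosting this method might look like the following; the class name is an assumption, and the method body is the one above:

class FlowScheduler:
    # Hypothetical enclosing class for the classmethod above.
    _connected = False  # set once per process after the broker is initialized

    @classmethod
    def run_selinon_flow(cls, flow_name, node_args):
        if not cls._connected:
            from demo_worker import init
            init(with_result_backend=False)  # publish-only: no result backend needed
            cls._connected = True
        return run_flow(flow_name, node_args)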
Example #13
def server_run_flow(flow_name, flow_args):
    """Run a flow.

    :param flow_name: name of flow to be run as stated in YAML config file
    :param flow_args: arguments for the flow
    :return: dispatcher ID handling flow
    """
    logger.info('Running flow {}'.format(flow_name))
    start = datetime.datetime.now()
    init_celery(result_backend=False)
    dispatcher_id = run_flow(flow_name, flow_args)
    elapsed_seconds = (datetime.datetime.now() - start).total_seconds()
    logger.info("It took {t} seconds to start {f} flow.".format(
        t=elapsed_seconds, f=flow_name))
    return dispatcher_id
Example #14
def server_run_flow(flow_name, flow_args):
    """Run a flow.

    :param flow_name: name of flow to be run as stated in YAML config file
    :param flow_args: arguments for the flow
    :return: dispatcher ID handling flow
    """
    logger.debug('Running flow %s', flow_name)
    start = datetime.datetime.now()

    init_celery(result_backend=False)
    dispatcher_id = run_flow(flow_name, flow_args)

    logger.debug('It took %f seconds to start %s flow.',
                 (datetime.datetime.now() - start).total_seconds(), flow_name)
    return dispatcher_id
Example #15
    def run_package_analysis(self, name, ecosystem, version):
        """Run Selinon flow for analyses.

        :param name: name of the package to analyse
        :param version: package version
        :param ecosystem: package ecosystem
        :return: dispatcher ID serving flow
        """
        node_args = {
            'ecosystem': ecosystem,
            'name': name,
            'version': version,
            'recursive_limit': 0
        }

        self.log.info("Scheduling Selinon flow '%s' "
                      "with node_args: '%s'", 'bayesianFlow', node_args)
        return run_flow('bayesianFlow', node_args)
Example #16
def start_workflow(analysis_details):
    """Handle implementation of API for triggering componentApi flow."""
    logger.info('Workflow is called')
    input_data = analysis_details.get('body', {})
    # Check if worker flow activation is disabled.
    if not _INVOKE_API_WORKERS:
        logger.debug('Worker flows are disabled.')
        input_data['message'] = 'Worker flows are disabled.'
        return input_data, 201
    flow_name = input_data.get('flowname')
    node_arguments = input_data
    try:
        dispatcher_id = run_flow(flow_name, node_arguments)
        input_data['dispatcher_id'] = dispatcher_id.id
    except Exception as e:
        logger.error('Exception while initiating the worker flow %s', e)
        return {'message': 'Failed to initiate worker flow.'}, 500
    return input_data, 201
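A minimal sketch of calling this handler directly, with a body shape inferred from the code (the handler reads 'body' and 'flowname'); the flow name and package fields below are placeholders for illustration:

# Hypothetical payload: 'flowname' selects the Selinon flow; everything else
# in 'body' is forwarded verbatim as node_args.
analysis_details = {
    'body': {
        'flowname': 'componentApiFlow',   # assumed flow name for illustration
        'ecosystem': 'maven',
        'name': 'org.example:demo-lib',
        'version': '1.0.0',
    }
}
response, status = start_workflow(analysis_details)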
Example #17
def test():

    from selinon import run_flow, run_flow_selective

    print("===================================================")

    node_dict = {
        'tasks': [{
            'name': 'CheckMessage',
            'queue': 'my_new_flow',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }, {
            'name': 'MessageLength',
            'queue': 'my_new_flow',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }, {
            'name': 'SuccessAction',
            'queue': 'my_new_flow',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }, {
            'name': 'FailureAction',
            'queue': 'my_new_flow',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }],
        'flows': ['my_new_flow'],
        'storages': [{
            'name': 'Redis',
            'import': 'selinon.storages.redis',
            'configuration': {
                'host': 'localhost',
                'port': 6379,
                'db': 1,
                'charset': 'utf-8'
            }
        }],
        'global': {
            'trace': {
                'json': True
            }
        },
        'migration_dir': 'migration_dir'
    }

    flow_definition = [{
        'flow-definitions': [{
            'name': 'my_new_flow',
            'queue': 'my_new_flow',
            'sampling': {
                'name': 'constant',
                'args': {
                    'retry': 10
                }
            },
            'edges': [{
                'from': '',
                'to': 'CheckMessage'
            }, {
                'from': 'CheckMessage',
                'to': 'MessageLength'
            }, {
                'from': 'MessageLength',
                'to': 'SuccessAction',
                'condition': {
                    'name': 'fieldEqual',
                    'args': {
                        'key': 'status',
                        'value': True
                    }
                }
            }, {
                'from': 'MessageLength',
                'to': 'FailureAction',
                'condition': {
                    'name': 'fieldEqual',
                    'args': {
                        'key': 'status',
                        'value': False
                    }
                }
            }],
            'failures': [{
                'nodes': 'MessageLength',
                'fallback': 'FailureAction'
            }]
        }]
    }]

    flow_defi_revse = [{
        'flow-definitions': [{
            'name': 'my_new_flow',
            'queue': 'my_new_flow',
            'sampling': {
                'name': 'constant',
                'args': {
                    'retry': 10
                }
            },
            'edges': [{
                'from': '',
                'to': 'CheckMessage'
            }, {
                'from': 'CheckMessage',
                'to': 'MessageLength'
            }, {
                'from': 'MessageLength',
                'to': 'SuccessAction',
                'condition': {
                    'and': [{
                        'name': 'fieldEqual',
                        'args': {
                            'key': 'status',
                            'value': True
                        }
                    }, {
                        'name': 'fieldEqual',
                        'args': {
                            'key': 'checkvalue',
                            'value': True
                        }
                    }, {
                        'name': 'fieldEqual',
                        'args': {
                            'key': 'length',
                            'value': True
                        }
                    }]
                }
            }, {
                'from': 'MessageLength',
                'to': 'FailureAction',
                'condition': {
                    'name': 'fieldEqual',
                    'args': {
                        'key': 'status',
                        'value': False
                    }
                }
            }]
        }]
    }]

    print(get_selinon_config().dispatcher_queues)

    get_selinon_config().set_config_dict(node_dict, flow_defi_revse)

    print("===================================================")

    dispatcher_id = run_flow('my_new_flow')

    print("===================================================")
Example #18
    def notify(self, notification_string):
        """See parent class."""
        run_flow(SELINON_FLOW_NAME, json.loads(notification_string))
Example #19
def send_selinon(request):

    node_dict = {
        'tasks': [{
            'name': 'CheckMessage',
            'queue': 'hello_task',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }, {
            'name': 'MessageLength',
            'queue': 'hello_task',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }, {
            'name': 'SuccessAction',
            'queue': 'hello_task',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }, {
            'name': 'FailureAction',
            'queue': 'hello_task',
            'import': 'tasks',
            'max_retry': 0,
            'storage': 'Redis'
        }],
        'flows': ['my_new_flow'],
        'storages': [{
            'name': 'Redis',
            'import': 'selinon.storages.redis',
            'configuration': {
                'host': 'localhost',
                'port': 6379,
                'db': 1,
                'charset': 'utf-8'
            }
        }],
        'global': {
            'trace': {
                'json': True
            }
        },
        'migration_dir': 'migration_dir'
    }

    flow_definition = [{
        'flow-definitions': [{
            'name': 'my_new_flow',
            'queue': 'hello_task',
            'sampling': {
                'name': 'constant',
                'args': {
                    'retry': 10
                }
            },
            'edges': [{
                'from': '',
                'to': 'CheckMessage'
            }, {
                'from': 'CheckMessage',
                'to': 'MessageLength'
            }, {
                'from': 'MessageLength',
                'to': 'SuccessAction',
                'condition': {
                    'name': 'fieldEqual',
                    'args': {
                        'key': 'status',
                        'value': True
                    }
                }
            }, {
                'from': 'MessageLength',
                'to': 'FailureAction',
                'condition': {
                    'name': 'fieldEqual',
                    'args': {
                        'key': 'status',
                        'value': False
                    }
                }
            }],
            'failures': [{
                'nodes': 'MessageLength',
                'fallback': 'FailureAction'
            }]
        }]
    }]

    flow_defi_revse = [{
        'flow-definitions': [{
            'name': 'my_new_flow',
            'queue': 'hello_task',
            'sampling': {
                'name': 'constant',
                'args': {
                    'retry': 10
                }
            },
            'edges': [{
                'from': '',
                'to': 'CheckMessage'
            }, {
                'from': 'CheckMessage',
                'to': 'MessageLength'
            }, {
                'from': 'MessageLength',
                'to': 'SuccessAction',
                'condition': {
                    'and': [{
                        'name': 'fieldEqual',
                        'args': {
                            'key': 'status',
                            'value': True
                        }
                    }, {
                        'name': 'fieldEqual',
                        'args': {
                            'key': 'checkvalue',
                            'value': True
                        }
                    }, {
                        'name': 'fieldEqual',
                        'args': {
                            'key': 'length',
                            'value': True
                        }
                    }]
                }
            }, {
                'from': 'MessageLength',
                'to': 'FailureAction',
                'condition': {
                    'name': 'fieldEqual',
                    'args': {
                        'key': 'status',
                        'value': False
                    }
                }
            }]
        }]
    }]

    node_dict1 = copy.deepcopy(node_dict)
    flow_defi_revse1 = copy.deepcopy(flow_defi_revse)

    Config.set_config_dict(node_dict, flow_defi_revse)

    dispatcher_id = run_flow('my_new_flow', {}, node_dict1, flow_defi_revse1)

    dispatcher_id1 = run_flow('my_new_flow', {}, node_dict1, flow_definition)

    return JsonResponse({'status': 'success'})
Example #20
def ingest_epv_into_graph(epv_details):
    """Handle implementation of API for triggering ingestion flow.

    :param epv_details: A dictionary object having list of packages/version as a nested object.
    Ex:
    {
          "ecosystem": "<ecosystem_name>",     (*required)
          "packages": [
            {
              "package": "<package_name_1>",   (*required)
              "version": "<package_version_1>" (*required)
            }, {
              "package": "<package_name_2>",   (*required)
              "version": "<package_version_2>" (*required)
            }
          ],
          "force": false,              (optional)
          "force_graph_sync": true,    (optional)
          "recursive_limit": 0         (optional)
          "source": "<Consumer_of_API>"(optional)
        }
    """
    logger.info('graph_ingestion_:_ingest_epv_into_graph() is called.')
    input_data = epv_details.get('body', {})

    # Check if worker flow activation is disabled.
    if not _INVOKE_API_WORKERS:
        logger.debug('Worker flows are disabled.')
        input_data['message'] = 'Worker flows are disabled.'
        return input_data, 201

    # Check if API consumer is CA or SA and unknown package ingestion flag is disabled.
    if _DISABLE_UNKNOWN_PACKAGE_FLOW and input_data.get('source', '') == 'api':
        logger.debug('Unknown package ingestion is disabled.')
        input_data['message'] = 'Unknown package ingestion is disabled.'
        return input_data, 201

    gh = GithubUtils()
    ecosystem = input_data.get('ecosystem')
    package_list = input_data.get('packages')

    node_arguments = {
        "ecosystem": ecosystem,
        "force": input_data.get('force', True),
        "recursive_limit": input_data.get('recursive_limit', 0),
        "force_graph_sync": input_data.get('force_graph_sync', False)
    }

    # Iterate through packages given for current ecosystem.
    for item in package_list:
        if ecosystem == 'golang':
            _, clean_version = GolangDependencyTreeGenerator.clean_version(
                item.get('version'))
            if gh.is_pseudo_version(clean_version):
                item['error_message'] = 'Golang pseudo version is not supported.'
                continue

        flow_name = 'newPackageFlow' if ecosystem == 'golang' else 'bayesianApiFlow'

        if 'flow_name' in input_data:
            flow_name = input_data['flow_name']

        node_arguments['name'] = item.get('package')
        node_arguments['version'] = item.get('version')

        try:
            # Initiate Selinon flow for current EPV ingestion.
            dispatcher_id = run_flow(flow_name, node_arguments)
            item['dispatcher_id'] = dispatcher_id.id
        except Exception as e:
            logger.error('Exception while initiating the worker flow %s', e)
            return {'message': 'Failed to initiate worker flow.'}, 500

        logger.info('A %s is initiated for eco: %s, pkg: %s, ver: %s',
                    flow_name, ecosystem, item['package'], item['version'])

    return input_data, 201
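For illustration, a direct invocation with a payload matching the documented shape; all values are placeholders:

epv_details = {
    'body': {
        'ecosystem': 'maven',
        'packages': [
            {'package': 'org.example:demo-lib', 'version': '1.0.0'},
        ],
        'force': False,
        'force_graph_sync': True,
        'recursive_limit': 0,
        'source': 'git-refresh',   # placeholder consumer name (anything but 'api')
    }
}
result, status_code = ingest_epv_into_graph(epv_details)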
Example #21
    def test_run_unknown_flow_error(self):
        # We run uninitialized Config
        self.init(edge_table={}, dispatcher_queues=None)

        with pytest.raises(UnknownFlowError):
            run_flow('some_flow', node_args={'foo': 'bar'})
Example #22
    def test_run_flow_error(self):
        # We run uninitialized Config
        self.init(edge_table={}, dispatcher_queues=None)

        with pytest.raises(KeyError):
            run_flow('some_flow', node_args={'foo': 'bar'})
Example #23
def run_server_flow(flow_name, flow_args):
    """Run the worker flow via Selinon."""
    init_celery(result_backend=False)
    init_selinon()
    return run_flow(flow_name, flow_args)
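Finally, a hedged usage sketch for this last variant; the flow name and EPV arguments are placeholders, and a real caller would use names from the YAML config:

# Placeholder arguments for illustration only.
dispatcher = run_server_flow('bayesianFlow', {'ecosystem': 'npm',
                                              'name': 'lodash',
                                              'version': '4.17.21'})
print(dispatcher.id)   # id of the celery.AsyncResult describing the dispatcher task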