def main():
    """Purge execution objects older than a cutoff timestamp.

    Parses CLI options, connects to the database, computes the purge
    cutoff (``--timestamp``, or ``DEFAULT_TIMEDELTA_DAYS`` ago by
    default) and deletes matching executions.
    """
    _monkey_patch()

    cli_opts = [
        cfg.StrOpt('timestamp', default=None,
                   help='Will delete data older than ' +
                   'this timestamp. (default 48 hours). ' +
                   'Example value: 2015-03-13T19:01:27.255542Z'),
        cfg.StrOpt('action-ref', default='',
                   help='action-ref to delete executions for.')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Get config values
    timestamp = cfg.CONF.timestamp
    action_ref = cfg.CONF.action_ref
    username = cfg.CONF.database.username if hasattr(cfg.CONF.database, 'username') else None
    password = cfg.CONF.database.password if hasattr(cfg.CONF.database, 'password') else None

    # Connect to db.
    db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port,
             username=username, password=password)

    if not timestamp:
        # Bug fix: user-supplied timestamps are UTC (trailing "Z"), so the
        # default cutoff must also be computed in UTC rather than local
        # time; otherwise the purge window is off by the local UTC offset.
        now = datetime.utcnow()
        timestamp = now - timedelta(days=DEFAULT_TIMEDELTA_DAYS)
    else:
        # Parses the documented "...Z" form into a naive UTC datetime.
        timestamp = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')

    # Purge models.
    _purge_executions(timestamp=timestamp, action_ref=action_ref)

    # Disconnect from db.
    db_teardown()
Example #2
0
def _setup(argv):
    """Parse config, configure console logging and connect to the db."""
    config.parse_args()

    # 2. setup logging
    logging.basicConfig(
        format='%(asctime)s %(levelname)s [-] %(message)s',
        level=logging.DEBUG)

    if not cfg.CONF.verbose:
        # Note: We still want to print things at the following log levels: INFO, ERROR, CRITICAL
        noisy_levels = [logging.AUDIT, logging.DEBUG]
        for root_handler in logging.getLoggerClass().manager.root.handlers:
            root_handler.addFilter(LogLevelFilter(log_levels=noisy_levels))

    # 3. all other setup which requires config to be parsed and logging to
    # be correctly setup.
    db_conf = cfg.CONF.database
    username = getattr(db_conf, 'username', None)
    password = getattr(db_conf, 'password', None)
    db_setup(db_conf.db_name,
             db_conf.host,
             db_conf.port,
             username=username,
             password=password)
Example #3
0
def main():
    """Diff StackStorm content (sensors, actions, rules) on disk vs. in the db."""
    monkey_patch()

    cli_opts = [
        cfg.BoolOpt("sensors", default=False, help="diff sensor alone."),
        cfg.BoolOpt("actions", default=False, help="diff actions alone."),
        cfg.BoolOpt("rules", default=False, help="diff rules alone."),
        cfg.BoolOpt("all",
                    default=False,
                    help="diff sensors, actions and rules."),
        cfg.BoolOpt("verbose", default=False),
        cfg.BoolOpt(
            "simple",
            default=False,
            # Bug fix: added the missing space between the two concatenated
            # sentences (previously rendered as "missing.It doesn't ...").
            help="In simple mode, tool only tells you if content is missing. " +
            "It doesn't show you content diff between disk and db.",
        ),
        cfg.StrOpt("pack-dir",
                   default=None,
                   help="Path to specific pack to diff."),
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Connect to db.
    db_setup()

    # Diff content
    pack_dir = cfg.CONF.pack_dir or None
    content_diff = not cfg.CONF.simple

    if cfg.CONF.all:
        _diff_sensors(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)
        _diff_actions(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)
        _diff_rules(pack_dir=pack_dir,
                    verbose=cfg.CONF.verbose,
                    content_diff=content_diff)
        return

    if cfg.CONF.sensors:
        _diff_sensors(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)

    if cfg.CONF.actions:
        _diff_actions(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)

    if cfg.CONF.rules:
        _diff_rules(pack_dir=pack_dir,
                    verbose=cfg.CONF.verbose,
                    content_diff=content_diff)

    # Disconnect from db.
    db_teardown()
Example #4
0
def main():
    """Diff StackStorm content (sensors, actions, rules) on disk vs. in the db."""
    monkey_patch()

    cli_opts = [
        cfg.BoolOpt('sensors', default=False, help='diff sensor alone.'),
        cfg.BoolOpt('actions', default=False, help='diff actions alone.'),
        cfg.BoolOpt('rules', default=False, help='diff rules alone.'),
        cfg.BoolOpt('all',
                    default=False,
                    help='diff sensors, actions and rules.'),
        cfg.BoolOpt('verbose', default=False),
        cfg.BoolOpt(
            'simple',
            default=False,
            # Bug fix: added the missing space between the two concatenated
            # sentences (previously rendered as "missing.It doesn't ...").
            help='In simple mode, tool only tells you if content is missing. ' +
            'It doesn\'t show you content diff between disk and db.'),
        cfg.StrOpt('pack-dir',
                   default=None,
                   help='Path to specific pack to diff.')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Connect to db.
    db_setup()

    # Diff content
    pack_dir = cfg.CONF.pack_dir or None
    content_diff = not cfg.CONF.simple

    if cfg.CONF.all:
        _diff_sensors(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)
        _diff_actions(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)
        _diff_rules(pack_dir=pack_dir,
                    verbose=cfg.CONF.verbose,
                    content_diff=content_diff)
        return

    if cfg.CONF.sensors:
        _diff_sensors(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)

    if cfg.CONF.actions:
        _diff_actions(pack_dir=pack_dir,
                      verbose=cfg.CONF.verbose,
                      content_diff=content_diff)

    if cfg.CONF.rules:
        _diff_rules(pack_dir=pack_dir,
                    verbose=cfg.CONF.verbose,
                    content_diff=content_diff)

    # Disconnect from db.
    db_teardown()
def main():
    """Inject trigger instances at a configured rate for a fixed duration.

    Triggers come either from ``--triggers`` or from a YAML schema file
    mapping trigger refs to payloads. One green thread is spawned per
    trigger; arrivals follow an exponential (Poisson) distribution.
    """
    _monkey_patch()

    cli_opts = [
        cfg.IntOpt(
            'rate',
            default=100,
            help='Rate of trigger injection measured in instances in per sec.'
            +
            ' Assumes a default exponential distribution in time so arrival is poisson.'
        ),
        cfg.ListOpt(
            'triggers',
            required=False,
            # Bug fix: added the missing space before "trigger." so the
            # rendered help no longer reads "...more than onetrigger.".
            help='List of triggers for which instances should be fired.' +
            ' Uniform distribution will be followed if there is more than one'
            + ' trigger.'),
        cfg.StrOpt('schema_file',
                   default=None,
                   help='Path to schema file defining trigger and payload.'),
        cfg.IntOpt('duration',
                   default=1,
                   help='Duration of stress test in minutes.')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Get config values
    triggers = cfg.CONF.triggers
    trigger_payload_schema = {}

    if not triggers:
        if (cfg.CONF.schema_file is None or cfg.CONF.schema_file == ''
                or not os.path.exists(cfg.CONF.schema_file)):
            print(
                'Either "triggers" need to be provided or a schema file containing'
                + ' triggers should be provided.')
            return
        with open(cfg.CONF.schema_file) as fd:
            trigger_payload_schema = yaml.safe_load(fd)
            # Materialize the keys view so Python 3 print() shows a list,
            # consistent with the other injector scripts in this file.
            triggers = list(trigger_payload_schema.keys())
            print('Triggers=%s' % triggers)

    rate = cfg.CONF.rate
    rate_per_trigger = int(rate / len(triggers))
    duration = cfg.CONF.duration

    dispatcher_pool = eventlet.GreenPool(len(triggers))

    for trigger in triggers:
        payload = trigger_payload_schema.get(trigger, {})
        dispatcher_pool.spawn(_inject_instances,
                              trigger,
                              rate_per_trigger,
                              duration,
                              payload=payload)
        # Stagger dispatcher start times so they don't run in lockstep.
        eventlet.sleep(random.uniform(0, 1))
    dispatcher_pool.waitall()
    def __init__(
        self,
        pack,
        file_path,
        config=None,
        parameters=None,
        user=None,
        parent_args=None,
        log_level=PYTHON_RUNNER_DEFAULT_LOG_LEVEL,
    ):
        """
        Wrapper constructor: stores action metadata and parses the st2 config.

        :param pack: Name of the pack this action belongs to.
        :type pack: ``str``

        :param file_path: Path to the action module.
        :type file_path: ``str``

        :param config: Pack config.
        :type config: ``dict``

        :param parameters: action parameters.
        :type parameters: ``dict`` or ``None``

        :param user: Name of the user who triggered this action execution.
        :type user: ``str``

        :param parent_args: Command line arguments passed to the parent process.
        :type parent_args: ``list``

        :param log_level: Log level to use for this wrapper's action logger.
        :type log_level: ``str``
        """

        # Normalize optional collection arguments so attribute access is
        # always safe (empty dict / list instead of None).
        self._pack = pack
        self._file_path = file_path
        self._config = config or {}
        self._parameters = parameters or {}
        self._user = user
        self._parent_args = parent_args or []
        self._log_level = log_level

        self._class_name = None
        self._logger = logging.getLogger("PythonActionWrapper")

        # Best-effort: config may already be parsed, or the parent args may
        # be invalid in this context; failure is only logged at debug level.
        try:
            st2common_config.parse_args(args=self._parent_args)
        except Exception as e:
            LOG.debug(
                "Failed to parse config using parent args (parent_args=%s): %s"
                % (str(self._parent_args), six.text_type(e)))

        # Note: We can only set a default user value if one is not provided after parsing the
        # config
        if not self._user:
            # Note: We use late import to avoid performance overhead
            from oslo_config import cfg

            self._user = cfg.CONF.system_user.user
def main():
    """Inject trigger instances at a configured rate or at max throughput.

    In max-throughput mode the rate is ignored and each dispatcher runs
    as fast as it can for the configured duration.
    """
    monkey_patch()

    cli_opts = [
        cfg.IntOpt('rate', default=100,
                   help='Rate of trigger injection measured in instances in per sec.' +
                   ' Assumes a default exponential distribution in time so arrival is poisson.'),
        cfg.ListOpt('triggers', required=False,
                    # Bug fix: added the missing space before "trigger." so the
                    # rendered help no longer reads "...more than onetrigger.".
                    help='List of triggers for which instances should be fired.' +
                    ' Uniform distribution will be followed if there is more than one' +
                    ' trigger.'),
        cfg.StrOpt('schema_file', default=None,
                   help='Path to schema file defining trigger and payload.'),
        cfg.IntOpt('duration', default=60,
                   help='Duration of stress test in seconds.'),
        cfg.BoolOpt('max-throughput', default=False,
                    # Bug fix: "saturize" -> "saturate" (typo in help text).
                    help='If True, "rate" argument will be ignored and this script will try to '
                         'saturate the CPU and achieve max utilization.')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Get config values
    triggers = cfg.CONF.triggers
    trigger_payload_schema = {}

    if not triggers:
        if (cfg.CONF.schema_file is None or cfg.CONF.schema_file == '' or
                not os.path.exists(cfg.CONF.schema_file)):
            print('Either "triggers" need to be provided or a schema file containing' +
                  ' triggers should be provided.')
            return
        with open(cfg.CONF.schema_file) as fd:
            trigger_payload_schema = yaml.safe_load(fd)
            triggers = list(trigger_payload_schema.keys())
            print('Triggers=%s' % triggers)

    rate = cfg.CONF.rate
    rate_per_trigger = int(rate / len(triggers))
    duration = cfg.CONF.duration
    max_throughput = cfg.CONF.max_throughput

    if max_throughput:
        # Rate is meaningless in max-throughput mode; dispatchers spin freely.
        rate = 0
        rate_per_trigger = 0

    dispatcher_pool = eventlet.GreenPool(len(triggers))

    for trigger in triggers:
        payload = trigger_payload_schema.get(trigger, {})
        dispatcher_pool.spawn(_inject_instances, trigger, rate_per_trigger, duration,
                              payload=payload, max_throughput=max_throughput)
        # Stagger dispatcher start times so they don't run in lockstep.
        eventlet.sleep(random.uniform(0, 1))
    dispatcher_pool.waitall()
def main():
    """Entry point: parse config, run the rule migration, clean up.

    Connects to the database, runs ``migrate_rules()`` and tears the
    connection down afterwards.
    """
    config.parse_args()

    # Connect to db.
    db_setup()

    # Migrate rules.
    migrate_rules()

    # Disconnect from db.
    db_teardown()
Example #9
0
def main():
    """Entry point: parse config, run the rule migration, clean up.

    Connects to the database, runs ``migrate_rules()`` and tears the
    connection down afterwards.
    """
    config.parse_args()

    # Connect to db.
    db_setup()

    # Migrate rules.
    migrate_rules()

    # Disconnect from db.
    db_teardown()
Example #10
0
def _setup():
    """Parse config, initialize basic logging and connect to the db."""
    config.parse_args()

    # 2. setup logging.
    logging.basicConfig(format='%(asctime)s %(levelname)s [-] %(message)s',
                        level=logging.DEBUG)

    # 3. all other setup which requires config to be parsed and logging to
    # be correctly setup.
    database = cfg.CONF.database
    db_setup(database.db_name,
             database.host,
             database.port,
             username=getattr(database, 'username', None),
             password=getattr(database, 'password', None))
def main():
    """Parse config, connect to the db, migrate rules and disconnect."""
    config.parse_args()

    # Connect to db.
    database = cfg.CONF.database
    username = getattr(database, 'username', None)
    password = getattr(database, 'password', None)
    db_setup(database.db_name, database.host, database.port,
             username=username, password=password)

    # Migrate rules.
    migrate_rules()

    # Disconnect from db.
    db_teardown()
Example #12
0
def main():
    """Diff content (sensors/actions/rules) between disk and the database."""
    monkey_patch()

    cli_opts = [
        cfg.BoolOpt('sensors', default=False,
                    help='diff sensor alone.'),
        cfg.BoolOpt('actions', default=False,
                    help='diff actions alone.'),
        cfg.BoolOpt('rules', default=False,
                    help='diff rules alone.'),
        cfg.BoolOpt('all', default=False,
                    help='diff sensors, actions and rules.'),
        cfg.BoolOpt('verbose', default=False),
        cfg.BoolOpt('simple', default=False,
                    # Bug fix: added the missing space between the two
                    # concatenated sentences ("missing.It doesn't ...").
                    help='In simple mode, tool only tells you if content is missing. ' +
                         'It doesn\'t show you content diff between disk and db.'),
        cfg.StrOpt('pack-dir', default=None, help='Path to specific pack to diff.')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    username = cfg.CONF.database.username if hasattr(cfg.CONF.database, 'username') else None
    password = cfg.CONF.database.password if hasattr(cfg.CONF.database, 'password') else None

    # Connect to db.
    db_setup(cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port,
             username=username, password=password)

    # Diff content
    pack_dir = cfg.CONF.pack_dir or None
    content_diff = not cfg.CONF.simple

    if cfg.CONF.all:
        _diff_sensors(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)
        _diff_actions(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)
        _diff_rules(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)
        return

    if cfg.CONF.sensors:
        _diff_sensors(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)

    if cfg.CONF.actions:
        _diff_actions(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)

    if cfg.CONF.rules:
        _diff_rules(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)

    # Disconnect from db.
    db_teardown()
Example #13
0
def main():
    """Diff content (sensors/actions/rules) between disk and the database."""
    _monkey_patch()

    cli_opts = [
        cfg.BoolOpt("sensors", default=False, help="diff sensor alone."),
        cfg.BoolOpt("actions", default=False, help="diff actions alone."),
        cfg.BoolOpt("rules", default=False, help="diff rules alone."),
        cfg.BoolOpt("all", default=False, help="diff sensors, actions and rules."),
        cfg.BoolOpt("verbose", default=False),
        cfg.BoolOpt(
            "simple",
            default=False,
            # Bug fix: added the missing space between the two concatenated
            # sentences (previously rendered as "missing.It doesn't ...").
            help="In simple mode, tool only tells you if content is missing. "
            + "It doesn't show you content diff between disk and db.",
        ),
        cfg.StrOpt("pack-dir", default=None, help="Path to specific pack to diff."),
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    username = cfg.CONF.database.username if hasattr(cfg.CONF.database, "username") else None
    password = cfg.CONF.database.password if hasattr(cfg.CONF.database, "password") else None

    # Connect to db.
    db_setup(
        cfg.CONF.database.db_name, cfg.CONF.database.host, cfg.CONF.database.port, username=username, password=password
    )

    # Diff content
    pack_dir = cfg.CONF.pack_dir or None
    content_diff = not cfg.CONF.simple

    if cfg.CONF.all:
        _diff_sensors(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)
        _diff_actions(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)
        _diff_rules(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)
        return

    if cfg.CONF.sensors:
        _diff_sensors(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)

    if cfg.CONF.actions:
        _diff_actions(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)

    if cfg.CONF.rules:
        _diff_rules(pack_dir=pack_dir, verbose=cfg.CONF.verbose, content_diff=content_diff)

    # Disconnect from db.
    db_teardown()
Example #14
0
def main():
    """Analyze rule links starting from a root action and render a graph."""
    _monkey_patch()

    cli_opts = [
        cfg.StrOpt('action_ref', default=None,
                   help='Root action to begin analysis.'),
        cfg.StrOpt('link_trigger_ref', default='core.st2.generic.actiontrigger',
                   help='Root action to begin analysis.'),
        cfg.StrOpt('out_file', default='pipeline')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()
    db_setup()

    # Walk the rule links from the root action, then emit the graph file.
    analyzer = LinksAnalyzer()
    rule_links = analyzer.analyze(cfg.CONF.action_ref, cfg.CONF.link_trigger_ref)
    Grapher().generate_graph(rule_links, cfg.CONF.out_file)
Example #15
0
def main():
    """Analyze rule links starting from a root action and render a graph."""
    monkey_patch()

    cli_opts = [
        cfg.StrOpt('action_ref', default=None,
                   help='Root action to begin analysis.'),
        cfg.StrOpt('link_trigger_ref', default='core.st2.generic.actiontrigger',
                   help='Root action to begin analysis.'),
        cfg.StrOpt('out_file', default='pipeline')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()
    db_setup()

    # Walk the rule links from the root action, then emit the graph file.
    links = LinksAnalyzer().analyze(cfg.CONF.action_ref, cfg.CONF.link_trigger_ref)
    grapher = Grapher()
    grapher.generate_graph(links, cfg.CONF.out_file)
Example #16
0
def _setup(argv):
    """Parse config, set up logging, open the db and register exchanges."""
    config.parse_args()

    logging.basicConfig(format='%(asctime)s %(levelname)s [-] %(message)s',
                        level=logging.DEBUG)

    if not cfg.CONF.verbose:
        # Note: We still want to print things at the following log levels: INFO, ERROR, CRITICAL
        suppressed = [logging.AUDIT, logging.DEBUG]
        for root_handler in logging.getLoggerClass().manager.root.handlers:
            root_handler.addFilter(LogLevelFilter(log_levels=suppressed))

    db_setup()
    register_exchanges()
def main():
    """Inject trigger instances at a configured rate for a fixed duration.

    Triggers come either from ``--triggers`` or from a YAML schema file
    mapping trigger refs to payloads; one green thread per trigger.
    """
    _monkey_patch()

    cli_opts = [
        cfg.IntOpt(
            "rate",
            default=100,
            help="Rate of trigger injection measured in instances in per sec."
            + " Assumes a default exponential distribution in time so arrival is poisson.",
        ),
        cfg.ListOpt(
            "triggers",
            required=False,
            # Bug fix: added the missing space before "trigger." so the
            # rendered help no longer reads "...more than onetrigger.".
            help="List of triggers for which instances should be fired."
            + " Uniform distribution will be followed if there is more than one"
            + " trigger.",
        ),
        cfg.StrOpt("schema_file", default=None, help="Path to schema file defining trigger and payload."),
        cfg.IntOpt("duration", default=1, help="Duration of stress test in minutes."),
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Get config values
    triggers = cfg.CONF.triggers
    trigger_payload_schema = {}

    if not triggers:
        if cfg.CONF.schema_file is None or cfg.CONF.schema_file == "" or not os.path.exists(cfg.CONF.schema_file):
            print('Either "triggers" need to be provided or a schema file containing' + " triggers should be provided.")
            return
        with open(cfg.CONF.schema_file) as fd:
            trigger_payload_schema = yaml.safe_load(fd)
            # Materialize the keys view so Python 3 print() shows a list,
            # consistent with the other injector scripts in this file.
            triggers = list(trigger_payload_schema.keys())
            print("Triggers=%s" % triggers)

    rate = cfg.CONF.rate
    rate_per_trigger = int(rate / len(triggers))
    duration = cfg.CONF.duration

    dispatcher_pool = eventlet.GreenPool(len(triggers))

    for trigger in triggers:
        payload = trigger_payload_schema.get(trigger, {})
        dispatcher_pool.spawn(_inject_instances, trigger, rate_per_trigger, duration, payload=payload)
        # Stagger dispatcher start times so they don't run in lockstep.
        eventlet.sleep(random.uniform(0, 1))
    dispatcher_pool.waitall()
Example #18
0
def _setup(argv):
    """Parse config, configure logging, connect to the db, register exchanges."""
    config.parse_args()

    logging.basicConfig(
        format='%(asctime)s %(levelname)s [-] %(message)s',
        level=logging.DEBUG)

    if not cfg.CONF.verbose:
        # Note: We still want to print things at the following log levels: INFO, ERROR, CRITICAL
        quiet_levels = [logging.AUDIT, logging.DEBUG]
        for existing_handler in logging.getLoggerClass().manager.root.handlers:
            existing_handler.addFilter(LogLevelFilter(log_levels=quiet_levels))

    db_setup()
    register_exchanges()
Example #19
0
    def initialize(self):
        """Parse config (best-effort) and establish the db connection."""
        # 1. Parse config. Bug fix: narrowed the bare "except:" so that
        # SystemExit/KeyboardInterrupt are no longer swallowed; parse
        # failures (e.g. args already parsed) remain non-fatal.
        try:
            config.parse_args()
        except Exception:
            pass

        # 2. Setup db connection
        username = cfg.CONF.database.username if hasattr(
            cfg.CONF.database, 'username') else None
        password = cfg.CONF.database.password if hasattr(
            cfg.CONF.database, 'password') else None
        db_setup(cfg.CONF.database.db_name,
                 cfg.CONF.database.host,
                 cfg.CONF.database.port,
                 username=username,
                 password=password)
Example #20
0
    def __init__(self, pack, file_path, config=None, parameters=None, user=None, parent_args=None,
                 log_level=PYTHON_RUNNER_DEFAULT_LOG_LEVEL):
        """
        Wrapper constructor: stores action metadata and parses the st2 config.

        :param pack: Name of the pack this action belongs to.
        :type pack: ``str``

        :param file_path: Path to the action module.
        :type file_path: ``str``

        :param config: Pack config.
        :type config: ``dict``

        :param parameters: action parameters.
        :type parameters: ``dict`` or ``None``

        :param user: Name of the user who triggered this action execution.
        :type user: ``str``

        :param parent_args: Command line arguments passed to the parent process.
        :type parent_args: ``list``

        :param log_level: Log level to use for this wrapper's action logger.
        :type log_level: ``str``
        """

        # Normalize optional collection arguments so attribute access is
        # always safe (empty dict / list instead of None).
        self._pack = pack
        self._file_path = file_path
        self._config = config or {}
        self._parameters = parameters or {}
        self._user = user
        self._parent_args = parent_args or []
        self._log_level = log_level

        self._class_name = None
        self._logger = logging.getLogger('PythonActionWrapper')

        # Best-effort: config may already be parsed, or the parent args may
        # be invalid in this context; failure is only logged at debug level.
        try:
            st2common_config.parse_args(args=self._parent_args)
        except Exception as e:
            LOG.debug('Failed to parse config using parent args (parent_args=%s): %s' %
                      (str(self._parent_args), str(e)))

        # Note: We can only set a default user value if one is not provided after parsing the
        # config
        if not self._user:
            # Note: We use late import to avoid performance overhead
            from oslo_config import cfg
            self._user = cfg.CONF.system_user.user
def main():
    """Migrate datastore items, exiting non-zero when migration fails."""
    config.parse_args()

    # Connect to db.
    db_setup()

    # Migrate datastore items.
    try:
        migrate_datastore()
        print('SUCCESS: Datastore items migrated successfully.')
        exit_code = 0
    except Exception:
        # Bug fix: narrowed the bare "except:" so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any migration error
        # still aborts with a non-zero exit code.
        print('ABORTED: Datastore migration aborted on first failure.')
        exit_code = 1

    # Disconnect from db.
    db_teardown()
    sys.exit(exit_code)
def main():
    """Migrate datastore items, exiting non-zero when migration fails."""
    config.parse_args()

    # Connect to db.
    db_setup()

    # Migrate datastore items.
    try:
        migrate_datastore()
        print('SUCCESS: Datastore items migrated successfully.')
        exit_code = 0
    except Exception:
        # Bug fix: narrowed the bare "except:" so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any migration error
        # still aborts with a non-zero exit code.
        print('ABORTED: Datastore migration aborted on first failure.')
        exit_code = 1

    # Disconnect from db.
    db_teardown()
    sys.exit(exit_code)
Example #23
0
def _setup():
    """Parse config, configure logging and connect to the database."""
    config.parse_args()

    # 2. setup logging.
    logging.basicConfig(format='%(asctime)s %(levelname)s [-] %(message)s',
                        level=logging.DEBUG)

    # 3. all other setup which requires config to be parsed and logging to
    # be correctly setup.
    db_conf = cfg.CONF.database
    username = getattr(db_conf, 'username', None)
    password = getattr(db_conf, 'password', None)
    db_setup(db_conf.db_name, db_conf.host, db_conf.port,
             username=username, password=password)
Example #24
0
    def __init__(self, pack, file_path, config=None, parameters=None, user=None, parent_args=None,
                 log_level='debug'):
        """
        Wrapper constructor: stores action metadata and parses the st2 config.

        :param pack: Name of the pack this action belongs to.
        :type pack: ``str``

        :param file_path: Path to the action module.
        :type file_path: ``str``

        :param config: Pack config.
        :type config: ``dict``

        :param parameters: action parameters.
        :type parameters: ``dict`` or ``None``

        :param user: Name of the user who triggered this action execution.
        :type user: ``str``

        :param parent_args: Command line arguments passed to the parent process.
        :type parent_args: ``list``

        :param log_level: Log level to use for this wrapper's action logger.
        :type log_level: ``str``
        """

        # Normalize optional collection arguments so attribute access is
        # always safe (empty dict / list instead of None).
        self._pack = pack
        self._file_path = file_path
        self._config = config or {}
        self._parameters = parameters or {}
        self._user = user
        self._parent_args = parent_args or []
        self._log_level = log_level

        self._class_name = None
        self._logger = logging.getLogger('PythonActionWrapper')

        # Best-effort: config may already be parsed, or the parent args may
        # be invalid in this context; failure is only logged at debug level.
        try:
            st2common_config.parse_args(args=self._parent_args)
        except Exception as e:
            LOG.debug('Failed to parse config using parent args (parent_args=%s): %s' %
                      (str(self._parent_args), str(e)))

        # Note: We can only set a default user value if one is not provided after parsing the
        # config
        if not self._user:
            self._user = cfg.CONF.system_user.user
Example #25
0
def main():
    """Purge execution objects older than a cutoff timestamp.

    Parses CLI options, connects to the database, computes the purge
    cutoff (``--timestamp``, or ``DEFAULT_TIMEDELTA_DAYS`` ago by
    default) and deletes matching executions.
    """
    _monkey_patch()

    cli_opts = [
        cfg.StrOpt('timestamp',
                   default=None,
                   help='Will delete data older than ' +
                   'this timestamp. (default 48 hours). ' +
                   'Example value: 2015-03-13T19:01:27.255542Z'),
        cfg.StrOpt('action-ref',
                   default='',
                   help='action-ref to delete executions for.')
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Get config values
    timestamp = cfg.CONF.timestamp
    action_ref = cfg.CONF.action_ref
    username = cfg.CONF.database.username if hasattr(cfg.CONF.database,
                                                     'username') else None
    password = cfg.CONF.database.password if hasattr(cfg.CONF.database,
                                                     'password') else None

    # Connect to db.
    db_setup(cfg.CONF.database.db_name,
             cfg.CONF.database.host,
             cfg.CONF.database.port,
             username=username,
             password=password)

    if not timestamp:
        # Bug fix: user-supplied timestamps are UTC (trailing "Z"), so the
        # default cutoff must also be computed in UTC rather than local
        # time; otherwise the purge window is off by the local UTC offset.
        now = datetime.utcnow()
        timestamp = now - timedelta(days=DEFAULT_TIMEDELTA_DAYS)
    else:
        # Parses the documented "...Z" form into a naive UTC datetime.
        timestamp = datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%fZ')

    # Purge models.
    _purge_executions(timestamp=timestamp, action_ref=action_ref)

    # Disconnect from db.
    db_teardown()
Example #26
0
def main():
    """Analyze rule links starting from a root action and emit a graph file."""
    monkey_patch()

    cli_opts = [
        cfg.StrOpt("action_ref",
                   default=None,
                   help="Root action to begin analysis."),
        cfg.StrOpt(
            "link_trigger_ref",
            default="core.st2.generic.actiontrigger",
            help="Root action to begin analysis.",
        ),
        cfg.StrOpt("out_file", default="pipeline"),
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()
    db_setup()

    # Walk the rule links from the root action, then render the graph.
    analyzer = LinksAnalyzer()
    links = analyzer.analyze(cfg.CONF.action_ref, cfg.CONF.link_trigger_ref)
    Grapher().generate_graph(links, cfg.CONF.out_file)
Example #27
0
def _setup(argv):
    """Parse config, configure logging and open the db connection."""
    config.parse_args()

    # 2. setup logging
    logging.basicConfig(format='%(asctime)s %(levelname)s [-] %(message)s',
                        level=logging.DEBUG)

    if not cfg.CONF.verbose:
        # Note: We still want to print things at the following log levels: INFO, ERROR, CRITICAL
        filtered_levels = [logging.AUDIT, logging.DEBUG]
        for root_handler in logging.getLoggerClass().manager.root.handlers:
            root_handler.addFilter(LogLevelFilter(log_levels=filtered_levels))

    # 3. all other setup which requires config to be parsed and logging to
    # be correctly setup.
    db_conf = cfg.CONF.database
    db_setup(db_conf.db_name, db_conf.host, db_conf.port,
             username=getattr(db_conf, 'username', None),
             password=getattr(db_conf, 'password', None))
Example #28
0
def main():
    """CLI entry point: fire trigger instances at a configured rate (stress test).

    Triggers come either from the ``triggers`` option or from a YAML schema
    file mapping trigger refs to payloads. One greenthread is spawned per
    trigger; each injects instances for ``duration`` seconds.
    """
    monkey_patch()

    cli_opts = [
        cfg.IntOpt(
            "rate",
            default=100,
            help="Rate of trigger injection measured in instances in per sec."
                 " Assumes a default exponential distribution in time so arrival is poisson.",
        ),
        cfg.ListOpt(
            "triggers",
            required=False,
            # NOTE: fixed missing space which rendered as "onetrigger."
            help="List of triggers for which instances should be fired."
                 " Uniform distribution will be followed if there is more than"
                 " one trigger.",
        ),
        cfg.StrOpt(
            "schema_file",
            default=None,
            help="Path to schema file defining trigger and payload.",
        ),
        cfg.IntOpt("duration",
                   default=60,
                   help="Duration of stress test in seconds."),
        cfg.BoolOpt(
            "max-throughput",
            default=False,
            # NOTE: fixed "saturize" typo.
            help='If True, "rate" argument will be ignored and this script will try to '
                 "saturate the CPU and achieve max utilization.",
        ),
    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # Get config values
    triggers = cfg.CONF.triggers
    trigger_payload_schema = {}

    if not triggers:
        if (cfg.CONF.schema_file is None or cfg.CONF.schema_file == ""
                or not os.path.exists(cfg.CONF.schema_file)):
            print(
                'Either "triggers" need to be provided or a schema file containing'
                " triggers should be provided.")
            return
        with open(cfg.CONF.schema_file) as fd:
            trigger_payload_schema = yaml.safe_load(fd)
            triggers = list(trigger_payload_schema.keys())
            print("Triggers=%s" % triggers)

    if not triggers:
        # Guard: an empty/blank schema file used to cause a ZeroDivisionError
        # in the rate_per_trigger computation below.
        print("No triggers to fire.")
        return

    rate = cfg.CONF.rate
    rate_per_trigger = int(rate / len(triggers))
    duration = cfg.CONF.duration
    max_throughput = cfg.CONF.max_throughput

    if max_throughput:
        # Rate of 0 signals _inject_instances to run unthrottled.
        rate = 0
        rate_per_trigger = 0

    # One greenthread per trigger; each injects independently.
    dispatcher_pool = eventlet.GreenPool(len(triggers))

    for trigger in triggers:
        payload = trigger_payload_schema.get(trigger, {})
        dispatcher_pool.spawn(
            _inject_instances,
            trigger,
            rate_per_trigger,
            duration,
            payload=payload,
            max_throughput=max_throughput,
        )
        # Stagger greenthread start times so injection isn't phase-aligned.
        eventlet.sleep(random.uniform(0, 1))
    dispatcher_pool.waitall()
Exemple #29
0
 def _setup():
     """Parse the st2 config, ignoring failures (e.g. config already parsed)."""
     try:
         config.parse_args()
     except Exception:
         # Narrowed from a bare ``except`` so SystemExit / KeyboardInterrupt
         # are never silently swallowed.
         pass
Exemple #30
0
 def initialize(self):
     """Best-effort st2 config parse; failure (already parsed) is ignored."""
     try:
         config.parse_args()
     except Exception:
         # Narrowed from a bare ``except`` so SystemExit / KeyboardInterrupt
         # are never silently swallowed.
         pass
Exemple #31
0
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import absolute_import

from st2common.runners import get_available_backends
from st2common.runners import get_backend_instance

from st2common import config
config.parse_args()

# Enumerate installed runner backends up front so failures surface early.
runner_names = get_available_backends()

print('Available / installed action runners:')
for runner_name in runner_names:
    driver = get_backend_instance(runner_name)
    instance = driver.get_runner()
    metadata = driver.get_metadata()
    print('- %s (runner_module=%s,cls=%s)' %
          (runner_name, metadata['runner_module'], instance.__class__))
Exemple #32
0
 def initialize(self):
     """Best-effort st2 config parse; failure (already parsed) is ignored."""
     try:
         config.parse_args()
     except Exception:
         # Narrowed from a bare ``except`` so SystemExit / KeyboardInterrupt
         # are never silently swallowed.
         pass
Exemple #33
0
}

# Options which should be removed (scrubbed) from the st2 config before it is
# included in the debug output, keyed by config section.
ST2_CONF_OPTIONS_TO_REMOVE = {
    'database': ['username', 'password'],
    'messaging': ['url']
}

# Placeholder value substituted for each scrubbed (sensitive) option.
REMOVE_VALUE_NAME = '**removed**'

# Name template for the generated debug tarball; hostname and date are
# interpolated at runtime.
OUTPUT_FILENAME_TEMPLATE = 'st2-debug-output-%(hostname)s-%(date)s.tar.gz'

# strftime format used for the date placeholder in the output filename.
DATE_FORMAT = '%Y-%m-%d-%H%M%S'

# Best-effort config parse with no CLI args; failure (e.g. config already
# parsed elsewhere) is deliberately ignored.
try:
    config.parse_args(args=[])
except Exception:
    pass


def setup_logging():
    """Configure the module-level LOG to emit INFO+ records to stdout."""
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s  %(levelname)s - %(message)s'))

    # The logger's own INFO threshold is what actually filters records here.
    root = LOG
    root.setLevel(logging.INFO)
    root.addHandler(stream_handler)

Exemple #34
0
}

# Options which should be removed (scrubbed) from the st2 config before it is
# included in the debug output, keyed by config section.
ST2_CONF_OPTIONS_TO_REMOVE = {
    'database': ['username', 'password'],
    'messaging': ['url']
}

# Placeholder value substituted for each scrubbed (sensitive) option.
REMOVE_VALUE_NAME = '**removed**'

# Name template for the generated debug tarball; hostname and date are
# interpolated at runtime.
OUTPUT_FILENAME_TEMPLATE = 'st2-debug-output-%(hostname)s-%(date)s.tar.gz'

# strftime format used for the date placeholder in the output filename.
DATE_FORMAT = '%Y-%m-%d-%H%M%S'

# Best-effort config parse with no CLI args; failure (e.g. config already
# parsed elsewhere) is deliberately ignored.
try:
    config.parse_args(args=[])
except Exception:
    pass


def setup_logging():
    """Send INFO-and-above records from the module-level LOG to stdout."""
    formatter = logging.Formatter('%(asctime)s  %(levelname)s - %(message)s')

    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(formatter)

    # Effective threshold is INFO: the logger filters before the handler sees
    # anything, so the handler's DEBUG level is permissive by design.
    LOG.setLevel(logging.INFO)
    LOG.addHandler(handler)

Exemple #35
0
    for group_id in group_ids:
        member_ids = list(coordinator.get_members(group_id).get())
        member_ids = [member_id.decode('utf-8') for member_id in member_ids]

        print('Members in group "%s" (%s):' % (group_id, len(member_ids)))

        for member_id in member_ids:
            capabilities = coordinator.get_member_capabilities(group_id, member_id).get()
            print(' - %s (capabilities=%s)' % (member_id, str(capabilities)))


def do_register_cli_opts(opts, ignore_errors=False):
    """Register CLI options with oslo.config.

    :param opts: Iterable of oslo.config option objects to register.
    :param ignore_errors: When True, swallow registration failures (e.g. a
                          duplicate option); otherwise re-raise them.
    """
    for opt in opts:
        try:
            cfg.CONF.register_cli_opt(opt)
        except Exception:
            # Narrowed from a bare ``except`` so SystemExit and
            # KeyboardInterrupt are never swallowed.
            if not ignore_errors:
                raise


if __name__ == '__main__':
    # Register the optional --group-id CLI option, parse config, then list
    # coordination group members via main().
    cli_opts = [
        cfg.StrOpt('group-id', default=None,
                   help='If provided, only list members for that group.'),

    ]
    do_register_cli_opts(cli_opts)
    config.parse_args()

    # oslo.config exposes the 'group-id' opt as the group_id attribute.
    main(group_id=cfg.CONF.group_id)
import argparse

from kombu import Exchange

from st2common import config

from st2common.transport import utils as transport_utils
from st2common.transport.publishers import PoolPublisher


def main(exchange, routing_key, payload):
    """Publish *payload* to topic exchange *exchange* under *routing_key*."""
    topic_exchange = Exchange(exchange, type='topic')
    pool_publisher = PoolPublisher(urls=transport_utils.get_messaging_urls())
    pool_publisher.publish(payload=payload,
                           exchange=topic_exchange,
                           routing_key=routing_key)


if __name__ == '__main__':
    # Parse st2 config with no extra CLI args. Pass a list, not a dict:
    # the args value is forwarded to an argparse-style parser which expects a
    # sequence of argument strings (matches the args=[] convention used
    # elsewhere in the codebase).
    config.parse_args(args=[])

    parser = argparse.ArgumentParser(description='Queue producer')
    parser.add_argument('--exchange', required=True,
                        help='Exchange to publish the message to')
    parser.add_argument('--routing-key', required=True,
                        help='Routing key to use')
    parser.add_argument('--payload', required=True,
                        help='Message payload')
    args = parser.parse_args()

    main(exchange=args.exchange, routing_key=args.routing_key,
         payload=args.payload)
Exemple #37
0
def _setup():
    """Parse the st2 config and enable DEBUG logging."""
    config.parse_args()

    # 2. setup logging.
    log_format = '%(asctime)s %(levelname)s [-] %(message)s'
    logging.basicConfig(format=log_format, level=logging.DEBUG)
Exemple #38
0
from kombu import Connection, Exchange
from oslo_config import cfg

from st2common import config

from st2common.transport.publishers import PoolPublisher


def main(exchange, routing_key, payload):
    """Publish *payload* to topic exchange *exchange* under *routing_key*.

    NOTE(review): the ``Connection`` context manager below is opened but its
    handle is never used - presumably PoolPublisher manages its own
    connections; confirm whether the ``with`` block is actually needed.
    """
    exchange = Exchange(exchange, type='topic')
    publisher = PoolPublisher(cfg.CONF.messaging.url)

    with Connection(cfg.CONF.messaging.url):
        publisher.publish(payload=payload, exchange=exchange,
                          routing_key=routing_key)


if __name__ == '__main__':
    # Parse st2 config with no extra CLI args. Pass a list, not a dict:
    # the args value is forwarded to an argparse-style parser which expects a
    # sequence of argument strings (matches the args=[] convention used
    # elsewhere in the codebase).
    config.parse_args(args=[])

    parser = argparse.ArgumentParser(description='Queue producer')
    parser.add_argument('--exchange', required=True,
                        help='Exchange to publish the message to')
    parser.add_argument('--routing-key', required=True,
                        help='Routing key to use')
    parser.add_argument('--payload', required=True,
                        help='Message payload')
    args = parser.parse_args()

    main(exchange=args.exchange, routing_key=args.routing_key,
         payload=args.payload)
Exemple #39
0
 def _setup():
     """Parse the st2 config, ignoring failures (e.g. config already parsed)."""
     try:
         config.parse_args()
     except Exception:
         # Narrowed from a bare ``except`` so SystemExit / KeyboardInterrupt
         # are never silently swallowed.
         pass