# Stdlib imports assumed by this excerpt: threading, time, uuid.uuid4.
# The remaining names (Execution, get_new_output_file, init, load_plugin,
# Level, State, Keyword) come from the enclosing Tigres module.
def __init__(self, name=None, log_dest=None, execution=Execution.LOCAL_THREAD,
             env=None, **kwargs):
    """Initialize a new Tigres Program.

    :param name: Workflow name. If not given, a random one will be chosen.
    :type name: str
    :param log_dest: Log destination, passed as `dest` to
        :func:`monitoring.log.init()`
    :param execution: The execution plugin to use.
        Default: :class:`Execution.LOCAL_THREAD`
    :param env: Environment variables for task execution
    :type env: dict
    :param kwargs: Additional keywords. Those with a `log_` prefix are
        passed to :func:`monitoring.log.init()` with the prefix stripped.
    """
    self._lock = threading.Lock()
    if name is None:
        # Autogenerate a name from the current time
        name = "Tigres{:x}".format(int(time.time()))
    # All Tasks are registered here
    self._tigres_objects = {}
    self._env = env
    # All state is registered here
    self._work = {}
    self._name = name
    self._identifier = str(uuid4())
    # Init monitoring: strip the 'log_' prefix from matching keywords
    # and forward them to the monitoring init() call.
    monitoring_kw = {k[4:]: v for k, v in kwargs.items()
                     if k.startswith("log_") and k != "log_dest"}
    self._log_dest = log_dest
    if log_dest is None:
        self._log_dest = get_new_output_file(
            basename='tigres-{}'.format(name))
    init(self._log_dest, self.name, self._identifier, **monitoring_kw)
    self._log(Level.INFO, "start", state=State.RUN)
    self._log(Level.INFO, Keyword.pfx + "init_program",
              message="initializing program")
    # This must happen after the logging is initialized.
    # We might want to revisit whether we need to log
    # every time a name is registered with the program.
    self._root_sequence_work = self.register_sequence_work(name)
    load_plugin(execution)
    self._log(Level.INFO, Keyword.pfx + "load_execution", message=execution)
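# --- Usage sketch (illustration only, not from the original source) ---
# Assuming this __init__ belongs to the Tigres Program class and that
# Execution is importable from the same package, construction might look
# like the following; 'log_level' is a hypothetical example of the
# 'log_'-prefix forwarding described in the docstring:
#
#     program = Program(name="my-workflow",
#                       execution=Execution.LOCAL_THREAD,
#                       env={"MY_VAR": "1"},
#                       log_level="INFO")  # forwarded to init() as level="INFO"
#
# If name is omitted, a hex-timestamp name like "Tigres5f3e2a10" is
# generated, and the log destination defaults to a new output file
# named 'tigres-<name>...' via get_new_output_file().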
# Stdlib imports needed by this entry point. The names g_log, log,
# check_main, user_main, and graph_main come from the enclosing module.
import argparse
import logging
import os
import sys
from urllib import parse


def main(cmdline=sys.argv[1:]):
    """Program entry point.

    :return: Status of run, 0=OK
    :rtype: int
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-b', '--badok', dest='bad_ok', action='store_true',
                        help='Ignore un-parseable records')
    parser.add_argument('-v', '--verbose', dest='vb', action='count',
                        default=0,
                        help='Increase log message verbosity (to stderr)')
    url_help = "Log path or URL, e.g., /var/log/mylogfile."
    subp = parser.add_subparsers(help='Valid query modes', title='Query mode')

    # 'check' mode
    check_parser = subp.add_parser(
        'check', help='Check the status of a task, template, or workflow')
    check_parser.add_argument('-a', '--all', dest='multiple',
                              action='store_true',
                              help='Return multiple (all) results '
                                   '[default=last one only]')
    check_parser.add_argument('-n', '--name', dest='names', action='append',
                              metavar='EXPR',
                              help='Name of component to look for, as text '
                                   'or regular expression')
    check_parser.add_argument('--task', dest='task_t', action='store_true',
                              help='Component is a Tigres task')
    check_parser.add_argument('--template', dest='tmpl_t',
                              action='store_true',
                              help='Component is a Tigres template')
    check_parser.add_argument('--program', dest='program_t',
                              action='store_true',
                              help='Component is a Tigres program')
    check_parser.add_argument('url', help=url_help)
    check_parser.set_defaults(main_fn=check_main)

    # 'query' mode
    user_parser = subp.add_parser(
        'query', help='Query the logs',
        description="""
query expressions are in the form: <field> <operation> <value>.
<field> is the name of the field in the log record, e.g., 'level' or
any user-defined field. <operation> is a boolean operation. Defined
operations are: >, >=, <, <=, =, ~. The last one is the regular
expression operator. <value> is the value to match against. The first
four (inequalities) require numeric values, '=' does an exact string
match, and '~' makes its value into a regular expression.""",
        epilog=r"""
examples:
  'foo > 1.5' will find records where field foo is greater than 1.5,
  ignoring records where foo is not a number.
  'foo ~ 1\.\d' will find records where field foo is a '1' followed by
  a decimal point followed by some other digit.
""",
        formatter_class=argparse.RawDescriptionHelpFormatter)
    user_parser.add_argument('-e', '--expr', dest='exprs', action='append',
                             default=[],
                             help='Query expression (repeatable). Each record '
                                  'is matched against ALL provided '
                                  'expressions.')
    user_parser.add_argument('-f', '--format', action='store', dest='fmt',
                             default='kvp',
                             help='Output format: kvp (default), json, table')
    user_parser.add_argument('-F', '--fields', action='store', dest='fields',
                             default=None,
                             help='Comma-separated list of fields to show, '
                                  'in addition to timestamp and level '
                                  '(default=ALL)')
    user_parser.add_argument('-n', '--page', metavar='ROWS', dest='pagelen',
                             action='store', type=int, default=40,
                             help="For 'table', page length (default=40)")
    user_parser.add_argument('-s', '--shorten', metavar='N', dest='idlen',
                             action='store', type=int, default=36,
                             help="For 'table', shorten identifiers to N "
                                  "characters (default=36, the length of a "
                                  "full UUID)")
    user_parser.add_argument('url', help=url_help)
    user_parser.set_defaults(main_fn=user_main)

    # 'graph' mode
    # TODO: finish help text for 'graph'
    graph_parser = subp.add_parser(
        'graph', help='Write DOT output from the specified execution log')
    graph_parser.add_argument('url', help=url_help)
    graph_parser.add_argument('-n', '--number', dest='prgm_id',
                              metavar='program_id',
                              help='Program id of the execution to generate '
                                   'a graph for. Defaults to the most recent '
                                   'program id logged.')
    graph_parser.add_argument('-o', '--outdir', dest='out_path',
                              help='Path for output. If no filename is '
                                   'specified, a default will be provided.')
    graph_parser.set_defaults(main_fn=graph_main)

    args = parser.parse_args(cmdline)

    # Sanity check: the path component of the URL must exist on disk
    parts = parse.urlparse(args.url)
    if not os.path.exists(parts.path):
        parser.error("File for URL path '{}' not found".format(parts.path))

    # Set up self-logging to stderr; -v/-vv/-vvv raises the verbosity
    hndlr = logging.StreamHandler()
    hndlr.setFormatter(logging.Formatter(
        "[%(levelname)s] log.py %(asctime)s %(message)s"))
    g_log.addHandler(hndlr)
    if args.vb > 2:
        g_log.setLevel(logging.DEBUG)
    elif args.vb > 1:
        g_log.setLevel(logging.INFO)
    elif args.vb > 0:
        g_log.setLevel(logging.WARN)
    else:
        g_log.setLevel(logging.ERROR)

    # Initialize the monitoring lib in read-only mode
    log.init(args.url, readonly=True)
    # Dispatch to the function registered for the chosen mode
    return args.main_fn(args)