def load_rule_contents(rule_file: Path, single_only=False) -> list:
    """Load a rule file from multiple formats.

    Supports Kibana ndjson/jsonl exports, TOML, and whatever formats
    ``load_dump`` handles (yaml/json).

    :param rule_file: path to the rule file
    :param single_only: if True, raise when the file contains more than one rule
    :return: a list of rule dictionaries (never empty; ``[{}]`` for an empty file)
    :raises ValueError: when multiple rules are found with ``single_only`` set,
        or when the parsed contents are neither a dict nor a list
    """
    extension = rule_file.suffix
    raw_text = rule_file.read_text()

    if extension in ('.ndjson', '.jsonl'):
        # kibana exported rule object is ndjson with the export metadata on the last line
        contents = [json.loads(line) for line in raw_text.splitlines()]

        if len(contents) > 1 and 'exported_count' in contents[-1]:
            contents.pop(-1)

        if single_only and len(contents) > 1:
            raise ValueError('Multiple rules not allowed')

        return contents or [{}]
    elif extension == '.toml':
        rule = pytoml.loads(raw_text)
    else:
        rule = load_dump(rule_file)

    if isinstance(rule, dict):
        return [rule]
    elif isinstance(rule, list):
        # Bug fix: previously single_only was only enforced for ndjson/jsonl
        # inputs; a toml/yaml file holding a list of rules slipped through.
        if single_only and len(rule) > 1:
            raise ValueError('Multiple rules not allowed')
        return rule
    else:
        raise ValueError(f"Expected a list or dictionary in {rule_file}")
def query(args):
    """Run an EQL query over events read from a file or from stdin."""
    # Source of events: the named input file when provided, stdin otherwise.
    event_stream = (stream_file_events(args.file, args.format, args.encoding)
                    if args.file else stream_stdin_events(args.format))

    engine_config = {'print': True}
    if args.config:
        engine_config.update(load_dump(args.config))

    engine = PythonEngine(engine_config)

    try:
        parsed = parse_query(args.query, implied_any=True, implied_base=True)
        engine.add_query(parsed)
    except EqlError as error:
        # Report the parse/registration failure and exit with status 2.
        print(error, file=sys.stderr)
        sys.exit(2)

    engine.stream_events(event_stream, finalize=False)
    engine.finalize()
def build(args):
    """Convert an EQL engine with analytics to a target language."""
    engine_config = load_dump(args.config) if args.config else {}

    # The output format is the file extension without its leading separator.
    target = os.path.splitext(args.output_file)[1][len(os.extsep):]

    with use_schema(engine_config.get('schema')):
        if '*' in args.input_file:
            # Wildcard input path: gather analytics from every matching file.
            analytics = []
            for matched_file in glob.glob(args.input_file):
                analytics.extend(load_analytics(matched_file))
        else:
            analytics = load_analytics(args.input_file)

        if target in ('yml', 'yaml', 'json'):
            # Data formats are written via the analytics serializer directly.
            save_analytics(analytics, args.output_file)
        else:
            rendered = render_engine(analytics, engine_type=target,
                                     config=engine_config,
                                     analytics_only=args.analytics_only)
            with open(args.output_file, "w") as output:
                output.write(rendered)
def reset_schema():
    """Reset the schema to the default loaded from SCHEMA_FILE.

    NOTE: the former ``global _schema`` declaration here was dead code —
    this function never assigns ``_schema`` itself; ``update_schema``
    is responsible for mutating that module-level state.
    """
    update_schema(load_dump(SCHEMA_FILE))
def load_analytic(filename):
    """Read a single analytic definition from *filename* and parse it."""
    return parse_analytic(load_dump(filename))
def load_analytics(filename):
    """Read one or more analytic definitions from *filename* and parse them."""
    contents = load_dump(filename)

    if isinstance(contents, dict):
        # A mapping wraps the actual list under the 'analytics' key.
        contents = contents['analytics']  # type: list

    return parse_analytics(contents)