def _init_logging(self):
    log_file = self.config['log_file']
    log_level = self.config['log_level'].upper()
    if log_level == 'NONE':
        handler = logging.NullHandler()
    elif log_file == 'stderr':
        handler = logging.StreamHandler(sys.stderr)
    elif log_file == 'stdout':
        handler = logging.StreamHandler(sys.stdout)
    else:
        handler = logging.handlers.RotatingFileHandler(
            config_location() + log_file,
            maxBytes=10 * 1024 * 1024,
            backupCount=5)
    log_level = getattr(logging, log_level, logging.WARNING)
    formatter = logging.Formatter(
        '%(asctime)s (%(process)d/%(threadName)s) '
        '%(name)s %(levelname)s - %(message)s')
    handler.setFormatter(formatter)
    root_logger = logging.getLogger(__package__)
    root_logger.addHandler(handler)
    root_logger.setLevel(log_level)
def __call__(self, app, **options):
    config_dir = config_location()
    if not os.path.exists(config_dir):
        raise FileNotFoundError(
            f'Config directory does not exist: {config_dir!r}. '
            f'Please create it before downloading API spec files')
    existing_kibana_dirs = [
        os.path.join(config_dir, d) for d in os.listdir(config_dir)
        if d.startswith('kibana-')
    ]
    if existing_kibana_dirs:
        raise RuntimeError(
            f'Existing {"directory" if len(existing_kibana_dirs) == 1 else "directories"} '
            f'found for API specs: {existing_kibana_dirs}. '
            f'Please remove {"it" if len(existing_kibana_dirs) == 1 else "them"} '
            f'before downloading new spec files.')

    import urllib.request
    import zipfile
    import io

    kibana_version = options.get('version', '7.9.1')
    kibana_release_url = f'https://github.com/elastic/kibana/archive/v{kibana_version}.zip'
    app.display.info(
        f'Downloading from {kibana_release_url} ... This may take a few minutes ...'
    )
    data = urllib.request.urlopen(kibana_release_url).read()
    zf = zipfile.ZipFile(io.BytesIO(data))
    for info in zf.infolist():
        if 'spec_definitions' in info.filename:
            zf.extract(info, path=config_dir)
    app.completer.init_api_specs()
    return f'Version {kibana_version} API spec files are ready'
def __call__(self, app, **options):
    if not options:
        return {'location': config_location(), 'config': app.config}
    extra_config = {}
    for key, value in options.items():
        parent = extra_config
        key_components = key.split('.')
        for key_component in key_components[:-1]:
            child = parent.get(key_component)
            if child is None:
                parent[key_component] = {}
            elif not isinstance(child, dict):
                _logger.warning(
                    f'Config key {key!r} conflicts. '
                    f'Value of {key_component!r} is not a dict, '
                    f'but {type(child)!r}')
                parent = None
                break
            parent = parent[key_component]
        if isinstance(parent, dict):
            parent[key_components[-1]] = value
    # TODO: saner merge that does not change data type, e.g. from dict to primitive and vice versa
    app.config.merge(ConfigObj(extra_config))
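The TODO above flags that the final merge can silently change a value's type, for example replacing a nested dict with a primitive. Below is a minimal sketch of a type-preserving recursive merge under that assumption; deep_merge is a hypothetical helper for illustration, not part of peek, and ConfigObj's own merge semantics may differ.

# Hypothetical helper for illustration only; not part of peek.
def deep_merge(base: dict, extra: dict) -> dict:
    """Merge extra into base without letting a dict and a primitive overwrite each other."""
    for key, value in extra.items():
        both_dicts = isinstance(base.get(key), dict) and isinstance(value, dict)
        if both_dicts:
            deep_merge(base[key], value)  # recurse into nested sections
        elif key in base and isinstance(base[key], dict) != isinstance(value, dict):
            raise ValueError(f'Refusing to change type of config key {key!r}')
        else:
            base[key] = value  # same-kind value: plain overwrite
    return base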
def __init__(self, history_max=HIST_MAX):
    super().__init__()
    self.history_max = history_max
    db_file = expanduser(config_location() + 'history')
    ensure_dir_exists(db_file)
    self.conn = sqlite3.connect(db_file)
    self.conn.execute(
        'CREATE TABLE IF NOT EXISTS history '
        '(id INTEGER PRIMARY KEY AUTOINCREMENT, content TEXT NOT NULL, timestamp INTEGER NOT NULL)'
    )
    self.conn.execute(
        'CREATE TABLE IF NOT EXISTS connection '
        '(name TEXT PRIMARY KEY, data TEXT NOT NULL, timestamp INTEGER NOT NULL)'
    )
    self._maintain_size()
    self.conn.commit()
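_maintain_size is invoked above but not shown in this excerpt. A plausible sketch is given below, assuming it simply trims the history table down to history_max rows; the actual implementation in peek may differ.

# Hypothetical sketch of _maintain_size; assumes it keeps only the newest
# history_max rows. The real method may behave differently.
def _maintain_size(self):
    self.conn.execute(
        'DELETE FROM history WHERE id NOT IN '
        '(SELECT id FROM history ORDER BY id DESC LIMIT ?)',
        (self.history_max,))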
def init_api_specs(self):
    from peek import __file__ as package_root
    package_root = os.path.dirname(package_root)
    kibana_dir = self.app.config['kibana_dir']
    if not kibana_dir:
        config_dir = config_location()
        if os.path.exists(config_dir):
            kibana_dirs = [
                os.path.join(config_dir, d) for d in os.listdir(config_dir)
                if d.startswith('kibana-')
            ]
            if kibana_dirs:
                kibana_dir = kibana_dirs[0]
    if not kibana_dir:
        kibana_dir = os.path.join(package_root, 'specs', 'kibana-7.8.1')
    _logger.info(
        f'Attempt to build Elasticsearch API specs from: {kibana_dir}')
    return ApiSpec(self.app, kibana_dir)
def build_js_specs(kibana_dir, use_cache_file):
    # The cache is not for efficiency, but rather because spec building from TypeScript
    # files is hacky and likely to go wrong with new Kibana releases. Cache the result so
    # that at least a usable version remains available until a fix is ready for new releases.
    cached_extended_specs_file = os.path.expanduser(config_location()) + 'extended_specs.es'
    source = None
    if use_cache_file and os.path.exists(cached_extended_specs_file):
        _logger.info(f'Found cached extended specs file: {cached_extended_specs_file!r}')
        with open(cached_extended_specs_file) as ins:
            source = ins.read()
    spec_parser = JsSpecParser(kibana_dir, source=source)
    nodes = spec_parser.parse()
    spec_evaluator = JsSpecEvaluator()
    specs = spec_evaluator.visit(nodes)
    if use_cache_file and source is None:
        spec_parser.save(cached_extended_specs_file)
    _logger.info('Completed building extended specs')
    return specs
def main():
    """Console script for peek."""
    parser = argparse.ArgumentParser()
    parser.add_argument('input', nargs='*', help='script files')
    parser.add_argument('--config', default=config_location() + 'peekrc',
                        help='Configuration file to load')
    parser.add_argument('-e', '--extra-config-option', action='append',
                        help='Extra configuration option to override')
    parser.add_argument('--name', help='A friendly name for the connection')
    parser.add_argument('--hosts', default=argparse.SUPPRESS,
                        help='ES hosts to connect to (default localhost:9200)')
    parser.add_argument('--cloud_id', help='Elastic Cloud ID')
    parser.add_argument('--username', help='Username')
    parser.add_argument('--password', help='Password')
    parser.add_argument('--api_key', help='API key of format id:key')
    parser.add_argument('--token', help='Token for authentication')
    parser.add_argument('--use_ssl', action='store_true', default=argparse.SUPPRESS,
                        help='Enable TLS for connecting to ES')
    parser.add_argument('--verify_certs', action='store_true', default=argparse.SUPPRESS,
                        help='Verify server certificate')
    parser.add_argument('--assert_hostname', action='store_true', default=argparse.SUPPRESS,
                        help='Verify hostname')
    parser.add_argument('--ca_certs', help='Location of CA certificates')
    parser.add_argument('--client_cert', help='Location of client certificate')
    parser.add_argument('--client_key', help='Location of client private key')
    parser.add_argument('--force_prompt', action='store_true', default=argparse.SUPPRESS,
                        help='Force prompting for password')
    parser.add_argument('--no_prompt', action='store_true', default=argparse.SUPPRESS,
                        help='Do not prompt for password')
    parser.add_argument('-z', '--zero_connection', action='store_true',
                        help='Start the session with no connection')
    parser.add_argument('-V', '--version', action='version', version=__version__)

    ns = parser.parse_args()

    isatty = sys.stdin.isatty()
    batch_mode = (not isatty) or bool(ns.input)
    peek = PeekApp(
        batch_mode=batch_mode,
        config_file=ns.config,
        extra_config_options=ns.extra_config_option,
        cli_ns=ns,
    )
    if not batch_mode:
        peek.run()
    else:
        if ns.input:
            for f in ns.input:
                with open(f) as ins:
                    peek.process_input(ins.read())
        else:
            stdin_read = sys.stdin.read()
            peek.process_input(stdin_read)
    return 0
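For reference, a few example invocations of this entry point, assuming the package installs a peek console script (flags as defined by the parser above):

# Example invocations (illustrative; assumes a "peek" console script is installed):
#
#   peek                                      # interactive session, default localhost:9200
#   peek --hosts es01:9200 --username elastic --force_prompt
#   peek queries.es                           # batch mode: run statements from a file
#   cat queries.es | peek                     # batch mode: read statements from stdin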