def get_sql_store(self):
    """Return the SQL store object for this instance, lazily creating it
    from the configured graph_cache file on first access.
    """
    if self.sql_store is None:
        connection = sqlite3.connect(self.graph_cache, check_same_thread=False)
        # don't do this for now, since we get the aliases as keys() which
        # would require further mapping:
        # connection.row_factory = sqlite3.Row
        self.sql_store = sqlstore.SqliteStore(dbfile=self.graph_cache,
                                              conn=connection,
                                              loglevel=self.loglevel)
    return self.sql_store
def run(input_files: KGTKFiles, **options):
    """Run Kypher query according to the provided command-line arguments.

    Reads one or more KGTK input files, runs the Kypher query described by
    `options` against them via a SQLite-backed graph cache, and writes the
    result (or query plan, with --explain) as tab-separated KGTK to `output`.
    Raises KGTKException on user errors or wrapped internal errors.
    """
    try:
        import_modules()
        debug = options.get('_debug', False)
        expert = options.get('_expert', False)
        # log level 2 requires both debug and expert mode:
        loglevel = 2 if (debug and expert) else (1 if debug else 0)
        if debug:
            # only dump the raw options when debugging; an unconditional
            # print here would pollute the query's stdout output:
            print('OPTIONS:', options)

        # normalize path objects to strings:
        inputs = [str(f) for f in KGTKArgumentParser.get_input_file_list(input_files)]
        if len(inputs) == 0:
            raise KGTKException('At least one input needs to be supplied')

        output = options.get('output')
        if output == '-':
            output = sys.stdout
        if isinstance(output, str):
            output = sqlstore.open_to_write(output, mode='wt')

        parameters = parse_query_parameters(regular=options.get('regular_paras') or [],
                                            string=options.get('string_paras') or [],
                                            lqstring=options.get('lqstring_paras') or [])

        # initialize before the try so the finally clause below cannot hit an
        # UnboundLocalError (masking the real error) if store creation fails:
        store = None
        try:
            graph_cache = options.get('graph_cache_file')
            store = sqlstore.SqliteStore(graph_cache,
                                         create=not os.path.exists(graph_cache),
                                         loglevel=loglevel)

            query = kyquery.KgtkQuery(inputs, store, loglevel=loglevel,
                                      options=options.get('input_file_options'),
                                      query=options.get('query'),
                                      match=options.get('match'),
                                      where=options.get('where'),
                                      ret=options.get('return_'),
                                      order=options.get('order'),
                                      skip=options.get('skip'),
                                      limit=options.get('limit'),
                                      parameters=parameters,
                                      index=options.get('index'))

            explain = options.get('explain')
            if explain is not None:
                result = query.explain(explain)
                output.write(result)
            else:
                result = query.execute()
                # we are forcing \n line endings here instead of \r\n, since those
                # can be re/imported efficiently with the new SQLite import command;
                # we force `escapechar' back to None to avoid generation of double
                # backslashes as in 'Buffalo \'66', which in turn will now raise errors
                # if separators in fields are encountered (which seems what we want):
                csvwriter = csv.writer(output, dialect=None, delimiter='\t',
                                       quoting=csv.QUOTE_NONE, quotechar=None,
                                       lineterminator='\n',
                                       escapechar=None)
                if not options.get('no_header'):
                    csvwriter.writerow(query.result_header)
                csvwriter.writerows(result)
            output.flush()
        finally:
            if store is not None:
                store.close()
            if output is not None and output is not sys.stdout:
                output.close()
    except sh.SignalException_SIGPIPE:
        # hack to work around Python3 issue when stdout is gone when we try to report an exception;
        # without this we get an ugly 'Exception ignored...' msg when we quit with head or a pager:
        sys.stdout = os.fdopen(1)
    except KGTKException as e:
        raise e
    except Exception as e:
        raise KGTKException(str(e) + '\n')
def run(input_files: KGTKFiles, **options):
    """Run a Kypher query as directed by the supplied command-line options."""
    try:
        options = preprocess_query_options(input_files=input_files, **options)
        show_cache = options.get('show_cache')
        inputs = options.get('input_files')
        if len(inputs) == 0 and not show_cache:
            raise KGTKException('At least one input needs to be supplied')

        output = options['output']
        loglevel = options.get('loglevel')

        # keep a handle outside the try so cleanup can run safely even if
        # store creation itself fails:
        store = None
        try:
            graph_cache = options.get('graph_cache_file')
            store = sqlstore.SqliteStore(graph_cache,
                                         create=not os.path.exists(graph_cache),
                                         loglevel=loglevel,
                                         readonly=options.get('readonly'))

            if show_cache:
                # describe the cache contents and exit without running a query:
                store.describe_meta_tables(out=sys.stdout)
                return

            # NOTE(review): this executes a user-supplied import spec inside the
            # sqlstore namespace; only safe with trusted command-line input:
            extra_imports = options.get('import')
            if extra_imports:
                exec('import ' + extra_imports, sqlstore.__dict__)

            query = kyquery.KgtkQuery(inputs, store, loglevel=loglevel,
                                      options=options.get('input_file_options'),
                                      query=options.get('query'),
                                      match=options.get('match'),
                                      where=options.get('where'),
                                      optionals=options.get('optionals'),
                                      with_=options.get('with'),
                                      ret=options.get('return'),
                                      order=options.get('order'),
                                      skip=options.get('skip'),
                                      limit=options.get('limit'),
                                      parameters=options.get('parameters'),
                                      index=options.get('index_mode'),
                                      force=options.get('force'))

            explanation = options.get('explain')
            if explanation is not None:
                output.write(query.explain(explanation))
            else:
                result = query.execute()
                # force \n line endings (not \r\n) so results can be re/imported
                # efficiently with the new SQLite import command; escapechar stays
                # None to avoid generating double backslashes as in 'Buffalo \'66',
                # which in turn will now raise errors if separators are encountered
                # inside fields (which seems what we want):
                writer = csv.writer(output, dialect=None, delimiter='\t',
                                    quoting=csv.QUOTE_NONE, quotechar=None,
                                    lineterminator='\n',
                                    escapechar=None)
                if not options.get('no_header'):
                    writer.writerow(query.result_header)
                writer.writerows(result)
            output.flush()
        finally:
            if store is not None:
                store.close()
            if output is not None and output is not sys.stdout:
                output.close()
    except sh.SignalException_SIGPIPE:
        # hack to work around Python3 issue when stdout is gone when we try to report an exception;
        # without this we get an ugly 'Exception ignored...' msg when we quit with head or a pager:
        sys.stdout = os.fdopen(1)
    except KGTKException as e:
        raise e
    except Exception as e:
        raise KGTKException(str(e) + '\n')