def load_csv(ctx, mapping, output, context, csv_file):
    """ Load entities from a CSV file.

    Reads the YAML mapping spec and applies it to the rows of the CSV,
    loading the resulting entities into the shared graph. If ``output``
    is given, the graph is dumped to that location afterwards.
    """
    graph = ctx.obj['GRAPH']
    mapping_uri = ensure_uri(mapping)
    source_uri = ensure_uri(csv_file)
    log.debug('Loading data from %r', source_uri)
    mapping_spec = read_yaml_uri(mapping_uri)
    load_mapped_csv(graph, source_uri, mapping_spec, context_id=context)
    if output is not None:
        save_dump(graph, output)
def query(ctx, input, output, context, query_file):
    """ Run an MQL query and store the results.

    Loads each input dump into the graph, reads the query either from
    ``query_file`` or — when none is given — from standard input, then
    executes it and writes the JSON results to ``output``.
    """
    graph = ctx.obj['GRAPH']
    for uri in input:
        load_dump(graph, ensure_uri(uri))
    if query_file is not None:
        query = read_yaml_uri(ensure_uri(query_file))
    else:
        # BUGFIX: PyYAML has no ``yaml.loads``; the original call raised
        # AttributeError whenever the query came from stdin. ``safe_load``
        # parses the stream and avoids executing arbitrary YAML tags.
        query = yaml.safe_load(sys.stdin)
    save_query_json(graph, query, output, context_id=context)
def cli(ctx, debug, config):
    """ JSON graph-based data processing utility.

    Root command: configures logging, loads the optional YAML config,
    and stores a shared ``Graph`` instance on the click context object.
    """
    if not ctx.obj:
        ctx.obj = {}
    ctx.obj['DEBUG'] = debug

    log_format = '[%(levelname)-8s] %(name)-12s: %(message)s'
    log_level = logging.DEBUG if debug else logging.INFO
    logging.basicConfig(level=log_level, format=log_format)
    # requests is chatty at INFO; keep it quiet unless it warns.
    logging.getLogger('requests').setLevel(logging.WARNING)

    if config is None:
        # No config file: fall back to the working directory as base URI.
        config_data = None
        config = path_to_uri(os.getcwd())
    else:
        config_data = read_yaml_uri(ensure_uri(config))
    ctx.obj['GRAPH'] = Graph(config=config_data, base_uri=config)
def merge(ctx, input, output):
    """ Combine multiple graph files.

    Each input dump is loaded into the shared graph, and the merged
    result is written to ``output``.
    """
    graph = ctx.obj['GRAPH']
    for source in input:
        load_dump(graph, ensure_uri(source))
    save_dump(graph, output)
def dump_json(ctx, input, types, output, depth):
    """ Generate JSON of registered schemas.

    Loads every input dump into the shared graph, then serialises the
    entities of the given ``types`` to JSON at ``output``, traversing
    nested objects up to ``depth`` levels.
    """
    graph = ctx.obj['GRAPH']
    for source in input:
        load_dump(graph, ensure_uri(source))
    save_json_dump(graph, output, types, depth=depth)