def import_model(path):
    # We need to use both the metadata and the created model, as we do not
    # want to reproduce the model creation here
    global MODEL

    logger = get_logger()
    logger.setLevel("INFO")
    logger.info("importing model from %s" % path)

    metadata = read_model_metadata(path)

    # Register each cube in the global CUBES registry under a stringified id
    cube_list = metadata.pop("cubes", [])
    for i, cube in enumerate(cube_list):
        cube_id = i + 1
        cube["id"] = cube_id
        CUBES[str(cube_id)] = cube

    # Fix up and register each dimension in the global DIMENSIONS registry
    dim_list = metadata.pop("dimensions", [])
    for i, dim in enumerate(dim_list):
        dim = fix_dimension_metadata(dim)
        dim_id = i + 1
        dim["id"] = dim_id
        DIMENSIONS[str(dim_id)] = dim

    MODEL = metadata
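# A minimal sketch, not part of the original module, of the module-level state
# and imports that import_model() assumes. The import locations and initial
# values are assumptions for illustration: get_logger() and
# read_model_metadata() are taken to come from the cubes package, and count()
# from itertools (used by the later variant of import_model).
from itertools import count

from cubes import get_logger, read_model_metadata

# Registries populated by import_model(), keyed by stringified numeric id,
# plus the remaining model metadata
CUBES = {}
DIMENSIONS = {}
MODEL = None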
def create_workspace(config_file):
    global WORKSPACE
    global ENGINE

    logger = get_logger()
    logger.setLevel("INFO")
    logger.info("creating workspace from %s" % config_file)

    WORKSPACE = Workspace(config=config_file)
    ENGINE = engine(WORKSPACE)
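# A hedged usage sketch, not from the original source: the configuration file
# name "slicer.ini" is an assumption and the cube name "contracts" is borrowed
# from the Flask example below. Workspace.browser() and browser.aggregate()
# are the standard cubes calls for obtaining a browser and an aggregate,
# assuming a Workspace created from a slicer-style configuration file.
def _example_create_workspace_usage():
    create_workspace("slicer.ini")
    browser = WORKSPACE.browser("contracts")
    result = browser.aggregate()
    print(result.summary)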
def import_model(path):
    # We need to use both the metadata and the created model, as we do not
    # want to reproduce the model creation here
    global MODEL

    cube_id_sequence = count(1)
    dimension_id_sequence = count(1)

    logger = get_logger()
    logger.setLevel("INFO")
    logger.info("importing model from %s" % path)

    metadata = read_model_metadata(path)

    # Register each cube in the global CUBES registry under a stringified id
    cube_list = metadata.pop("cubes", [])
    for cube in cube_list:
        cube_id = next(cube_id_sequence)
        cube["id"] = cube_id
        CUBES[str(cube_id)] = cube

    # Expand and register each dimension in the global DIMENSIONS registry
    dim_list = metadata.pop("dimensions", [])
    for dim in dim_list:
        dim = expand_dimension_metadata(dim)
        dim_id = next(dimension_id_sequence)
        dim["id"] = dim_id
        DIMENSIONS[str(dim_id)] = dim

    MODEL = metadata

    # Convert joins (of known types)
    # TODO: currently we assume that all joins are SQL joins as we have no way
    # to determine the actual store and therefore the backend used for
    # interpreting this model
    joins = metadata.pop("joins", [])
    for join in joins:
        if "detail" in join:
            join["detail"] = _fix_sql_join_value(join["detail"])
        if "master" in join:
            join["master"] = _fix_sql_join_value(join["master"])
        join["__type__"] = "sql"

    MODEL["joins"] = joins
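# A speculative sketch, not the real implementation (which is not shown here),
# of what a helper like _fix_sql_join_value() might do: assuming a SQL join
# reference can arrive either as a "table.column" (or "schema.table.column")
# string or as a mapping, it would normalize the value into the mapping form
# expected by the SQL backend. The name is suffixed with _sketch to make clear
# this is an illustration, not the original helper.
def _fix_sql_join_value_sketch(value):
    if isinstance(value, dict):
        return value
    parts = str(value).split(".")
    if len(parts) == 3:
        return {"schema": parts[0], "table": parts[1], "column": parts[2]}
    elif len(parts) == 2:
        return {"table": parts[0], "column": parts[1]}
    else:
        return {"column": parts[0]}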
from flask import Flask, render_template, request, g
import cubes
import os.path
import sqlalchemy
import logging

logger = cubes.get_logger()
logger.setLevel(logging.DEBUG)

app = Flask(__name__)

#
# Data we are going to browse and logical model of the data
#
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
MODEL_PATH = os.path.join(APP_ROOT, "vvo_model.json")
DB_PATH = os.path.join(APP_ROOT, "vvo_data.sqlite")
DB_URL = "sqlite:///" + DB_PATH
CUBE_NAME = "contracts"

# Some global variables. We do not have to care about Flask-provided thread
# safety here, as they are non-mutable.
workspace = None
model = None


@app.route("/")