def create_app(**config):
    """Create and configure the SpenDB Flask application.

    Configuration precedence (lowest to highest): ``default_settings``,
    the file named by the ``SPENDB_SETTINGS`` environment variable (if
    set), then the keyword arguments passed to this factory.
    """
    app = Flask(__name__, static_folder='../spendb.ui')
    app.config.from_object(default_settings)
    app.config.from_envvar('SPENDB_SETTINGS', silent=True)
    app.config.update(config)

    # BUG FIX: ``Flask.jinja_options`` is a class-level attribute, so the
    # original in-place ``extend`` of the shared 'extensions' list leaked
    # the added extensions into every Flask app in the process and
    # duplicated them on each call to this factory. Rebind a per-instance
    # copy with a freshly-built list instead.
    app.jinja_options = dict(app.jinja_options)
    app.jinja_options['extensions'] = list(app.jinja_options['extensions']) + [
        formencode_jinja2.formfill,
        'jinja2.ext.i18n'
    ]

    # Wire up the Flask extensions; order mirrors the original setup.
    for extension in (db, babel, cache, mail, assets, login_manager,
                      data_manager, pages):
        extension.init_app(app)
    migrate.init_app(app, db, directory=app.config.get('ALEMBIC_DIR'))
    cors.init_app(app, resources=r'/api/*', supports_credentials=True,
                  methods=['GET', 'HEAD', 'OPTIONS'])

    # Attach the cubes analytical workspace backed by the custom
    # "spending" store/model provider.
    ws = Workspace()
    ext.model_provider("spending", metadata={})
    ext.store("spending")
    ws.register_default_store('spending', model_provider='spending')
    app.cubes_workspace = ws
    return app
def create_browser():
    """Build and return a cubes browser for the ``ratings`` cube.

    The workspace is backed by the local ``data.sqlite`` database and
    the model described in ``movie_ratings_model.json``.
    """
    # workspace = Workspace(config="slicer.ini")
    print("Creating Workspace and model")
    ws = Workspace()
    ws.register_default_store("sql", url="sqlite:///data.sqlite")
    ws.import_model("movie_ratings_model.json")
    return ws.browser("ratings")
def __init__(self):
    """Attach a cubes browser for the ``ratings`` cube to this instance."""
    print("Creating Workspace and model")
    ws = Workspace()
    ws.register_default_store("sql", url="sqlite:///data.sqlite")
    ws.import_model("movie_ratings_model.json")
    self.browser = ws.browser("ratings")
def setUp(self):
    """Prepare a workspace with an SQL store, a stub table and two models."""
    super(SlicerModelTestCase, self).setUp()

    workspace = Workspace()
    workspace.register_default_store("sql", url=TEST_DB_URL)
    self.ws = workspace
    self.slicer.cubes_workspace = workspace

    # Satisfy browser with empty tables
    # TODO: replace this once we have data
    metadata = workspace.get_store("default").metadata
    sales = Table("sales", metadata)
    sales.append_column(Column("id", Integer))
    sales.create()

    workspace.import_model(self.model_path("model.json"))
    workspace.import_model(self.model_path("sales_no_date.json"))
def setUp(self):
    """Set up the slicer test fixture: SQL store, empty fact table, models."""
    super(SlicerModelTestCase, self).setUp()

    workspace = Workspace()
    workspace.register_default_store("sql", url=TEST_DB_URL)
    self.ws = workspace
    self.slicer.cubes_workspace = workspace

    # Satisfy browser with empty tables
    # TODO: replace this once we have data
    default_store = workspace.get_store("default")
    stub = Table("sales", default_store.metadata)
    stub.append_column(Column("id", Integer))
    stub.create()

    for model_file in ("model.json", "sales_no_date.json"):
        workspace.import_model(self.model_path(model_file))
def create_workspace(self, store=None, model=None):
    """Create a shared workspace for a test case.

    `store` is a dictionary describing the default store; its optional
    ``"type"`` key (default ``"sql"``) selects the backend and the
    remaining keys are passed through as store options. When no `store`
    is given but the test case has an `engine` set, that engine is
    reused as the default SQL store.

    `model` is either a filename relative to ``tests/models`` or a
    model dictionary; when provided it is imported into the workspace.
    """
    workspace = Workspace()

    if store:
        options = dict(store)
        backend = options.pop("type", "sql")
        workspace.register_default_store(backend, **options)
    elif self.engine:
        workspace.register_default_store("sql", engine=self.engine)

    if model:
        source = self.model_path(model) if isinstance(model, compat.string_type) else model
        workspace.import_model(source)

    return workspace
# BUG FIX: the original used Python 2 `print` statements, which are a
# syntax error under Python 3. `print_function` keeps the script working
# on both interpreter lines.
from __future__ import print_function

from cubes import Workspace, Cell, PointCut

# 1. Create a workspace
workspace = Workspace()
workspace.register_default_store("sql", url="sqlite:///data.sqlite")
# NOTE(review): `add_model` is the older cubes API name; newer code in
# this project uses `import_model` — confirm the installed cubes version.
workspace.add_model("model.json")

# 2. Get a browser
browser = workspace.browser("irbd_balance")

# 3. Play with aggregates — no cell means the whole cube is summarized.
result = browser.aggregate()

print("Total\n"
      "----------------------")
print("Record count: %8d" % result.summary["record_count"])
print("Total amount: %8d" % result.summary["amount_sum"])

# # 4. Drill-down through a dimension
#
# print("\n"
#       "Drill Down by Category (top-level Item hierarchy)\n"
#       "=================================================")
# result = browser.aggregate(drilldown=["item"])
# print(("%-20s%10s%10s\n" + "-" * 40) % ("Category", "Count", "Total"))
#
import os.path

BASE = os.path.dirname(os.path.abspath(__file__))

from cubes import Workspace, Cell, PointCut
from datetime import datetime, timedelta
import sys
import json
from django.http import JsonResponse

#-------------------------------------------------------------#
# Workspace over the local SQLite data; browse the Facebook-posts
# cube defined by modal.json.
workspace = Workspace()
workspace.register_default_store(
    "sql", url="sqlite:///" + os.path.join(BASE, "myData.sqlite"))
workspace.import_model(os.path.join(BASE, "modal.json"))
browser = workspace.browser("FB_POSTS_DATA")

#-------------------------------------------------------------#
# BUG FIX: the original computed `now - 1 day` and then used `d.day - 6`
# as the day component, which yields an invalid (zero or negative) day
# during the first six days of a month. Subtracting a full 7-day
# timedelta lets datetime handle month/year boundaries correctly while
# targeting the same date (7 days ago).
d = datetime.now() - timedelta(days=7)
cut = PointCut("pub_date", [d.year, d.month, d.day], None)
cell = Cell(browser.cube, cuts=[cut])

#-------------------------------------------------------------#
def get_post_by_shares():
    # Aggregate over the 7-day cell, drilled down by post name.
    # NOTE(review): this function appears truncated in the visible
    # source — it builds `shares` but no further body is shown here.
    result = browser.aggregate(cell, drilldown=["name"])
    shares = []
from cubes import StaticModelProvider
from cubes import Workspace

# Workspace backed by the local SQLite database BI.db.
workspace = Workspace()
workspace.register_default_store("sql", url="sqlite:///BI.db")

# Inline cubes model: one cube ("clase") over four dimensions.
# Labels are Spanish runtime strings and are intentionally untouched.
# NOTE(review): this literal is truncated in the visible source — the
# "franja" attribute list and the closing braces continue beyond this
# excerpt.
dicc = {
    "cubes": [{
        "name": "clase",
        "label": "Clases",
        "dimensions": ["franja", "seccion", "salon", "curso"]
    }],
    "dimensions": [{
        "name": "franja",
        "label": "Franja",
        "attributes": [{
            "name": "dia",
            "label": "Dia"
        }, {
            "name": "hora_inicio",
            "label": "Hora de inicio",
        }, {
            "name": "minuto_inicio",
            "label": "Minuto de inicio",
        }, {
            "name": "hora_fin",
            "label": "Hora de finalizacion",
        }, {
            "name": "minuto_fin",
            "label": "Minuto de finalizacion",
# table_name="ibrd_balance", # fields=[ # ("category", "string"), # ("category_label", "string"), # ("subcategory", "string"), # ("subcategory_label", "string"), # ("line_item", "string"), # ("year", "integer"), # ("amount", "integer")], # create_id=True # ) from cubes import Workspace, PointCut, Cell workspace = Workspace() workspace.register_default_store( "sql", url="postgresql://*****:*****@localhost/willowood") workspace.import_model("SalesTable.json") browser = workspace.browser("salestable") result = browser.aggregate() print(result.summary["record_count"]) print(result.summary["Qty"]) print(result.summary["Value"]) cube = browser.cube # result = browser.aggregate(drilldown=["billing_date"]) # # for record in result: # print(' record: ', record)
from __future__ import print_function
from cubes import Workspace, Cell, PointCut

# Workspace over the Formula 1 SQLite data set.
workspace = Workspace()
workspace.register_default_store("sql", url="sqlite:///data_sqlite/f1.sqlite")
workspace.import_model("models/model.json")
browser = workspace.browser("qualifying")

# Empty point cuts: no restriction, but both dimensions take part in the cell.
cut1 = PointCut("drivers", [])
cut2 = PointCut("races", [])
cell = Cell(browser.cube, cuts=[cut1, cut2])

# Aggregate drilled down by driver and race, materialized once.
result = browser.aggregate(cell, drilldown=["drivers", "races"])
list_res = list(result)


def filter_racer(data, name, year):
    """Return rows for driver `name` in season `year`, best position first."""
    matches = [
        row for row in data
        if row['drivers.surname'] == name and row['races.year'] == year
    ]
    return sorted(matches, key=lambda row: row['position_min'])


for line in filter_racer(list_res, 'Hamilton', 2009):
    print(line)
from cubes import Workspace, Cell # 1. Create a workspace workspace = Workspace() workspace.register_default_store("sql", url="sqlite:///vvo_data.sqlite", dimension_prefix="dm_", fact_prefix="ft_") workspace.import_model("procurements.cubesmodel") # 2. Get a browser browser = workspace.browser("contracts") cube = browser.cube # workspace = cubes.create_workspace("sql", model, url="postgres://localhost/ep2012", # schema="vvo", # dimension_prefix="dm_", # fact_prefix="ft_", # denormalized_view_schema="views", # use_denormalization=False, # denormalized_view_prefix="mft_") def drilldown(cell, dimension): """Drill-down and aggregate recursively through als levels of `dimension`. This function is like recursively traversing directories on a file system and aggregating the file sizes, for example. * `cell` - cube cell to drill-down * `dimension` - dimension to be traversed through all levels """