def createCube(self):
    """Build the cubes workspace over the MySQL warehouse and keep a browser
    for the "tweet" cube on the instance."""
    ws = Workspace()
    # Default store points at the local MySQL data warehouse.
    ws.register_default_store("sql", url="mysql://root:@localhost/datawarehouse")
    metadata = cubes.read_model_metadata_bundle("../CubeModelisation/model/")
    ws.import_model(metadata)
    self.workspace = ws
    self.browserTweet = ws.browser("tweet")
def create_app(**config):
    """Application factory: build the Flask app, bind every extension, and
    attach a cubes workspace with the custom "spending" store as default.

    Keyword arguments override both the default settings and any values read
    from the SPENDB_SETTINGS-pointed file.
    """
    app = Flask(__name__)
    # Layered configuration: package defaults, then an optional env-pointed
    # settings file, then explicit keyword overrides (highest priority).
    app.config.from_object(default_settings)
    app.config.from_envvar('SPENDB_SETTINGS', silent=True)
    app.config.update(config)
    # Bind all Flask extensions to this app instance.
    db.init_app(app)
    babel.init_app(app)
    cache.init_app(app)
    mail.init_app(app)
    login_manager.init_app(app)
    data_manager.init_app(app)
    pages.init_app(app)
    migrate.init_app(app, db, directory=app.config.get('ALEMBIC_DIR'))
    # CORS only for the API namespace; read-only verbs plus OPTIONS preflight.
    cors.init_app(app, resources=r'/api/*', supports_credentials=True,
                  methods=['GET', 'HEAD', 'OPTIONS'])
    # Cubes OLAP workspace: register the project-specific "spending"
    # provider/store pair and make it the workspace default.
    ws = Workspace()
    ext.model_provider("spending", metadata={})
    ext.store("spending")
    ws.register_default_store('spending', model_provider='spending')
    app.cubes_workspace = ws
    return app
def setUp(self):
    """Attach a remote slicer (credentials from the environment) to a fresh
    workspace and cache its cube list."""
    workspace = Workspace()
    workspace.add_slicer(
        "myslicer",
        "http://localhost:5010",
        username=os.environ.get("SLICER_USERNAME"),
        password=os.environ.get("SLICER_PASSWORD"),
    )
    self.w = workspace
    self.cube_list = self.w.list_cubes()
def __init__(self):
    """Build a SQLite-backed workspace and keep a browser for "ratings"."""
    print("Creating Workspace and model")
    ws = Workspace()
    ws.register_default_store("sql", url="sqlite:///data.sqlite")
    ws.import_model("movie_ratings_model.json")
    # Only the browser is retained; the workspace itself is not stored.
    self.browser = ws.browser("ratings")
def create_browser():
    """Return an aggregation browser for the "ratings" cube backed by the
    local SQLite store."""
    print("Creating Workspace and model")
    ws = Workspace()
    ws.register_default_store("sql", url="sqlite:///data.sqlite")
    ws.import_model("movie_ratings_model.json")
    return ws.browser("ratings")
def get_cubes_workspace(self):
    """Build a workspace wired to the Niamoto warehouse: fact and dimension
    tables live in separate schemas from the settings module."""
    ws = Workspace()
    ws.register_default_store(
        "sql",
        url=Connector.get_database_url(),
        schema=settings.NIAMOTO_FACT_TABLES_SCHEMA,
        dimension_schema=settings.NIAMOTO_DIMENSIONS_SCHEMA,
    )
    # The cubes model is generated dynamically rather than read from a file.
    ws.import_model(self.generate_cubes_model())
    return ws
def create_workspace(config_file):
    """Initialise the module-level WORKSPACE and ENGINE from `config_file`.

    Side effects: rebinds the globals WORKSPACE and ENGINE.
    """
    global WORKSPACE
    global ENGINE
    logger = get_logger()
    logger.setLevel("INFO")
    # Fix: "cretating" typo in the original message; also use lazy %-args so
    # the string is only interpolated when INFO logging is enabled.
    logger.info("creating workspace from %s", config_file)
    WORKSPACE = Workspace(config=config_file)
    ENGINE = engine(WORKSPACE)
def main():
    """Open the death_fact cube from slicer.ini and run both analyses."""
    settings = ConfigParser()
    settings.read("slicer.ini")
    workspace = Workspace(config=settings)
    # The browser performs the actual aggregations and data queries.
    browser = workspace.browser('death_fact')
    cube = browser.cube
    # Hand the browser to the analysis routines.
    compare_kill_distances(browser, cube)
    count_match_deaths(browser, cube)
def analiza_temperatura(request):
    """Django view: aggregate the temperature cube by the requested dimension.

    When the GET flag "czy_analiza" is present, drill down by the first
    matching dimension parameter and render the aggregation rows; otherwise
    render with an empty context.
    """
    if request.GET.get("czy_analiza", None):
        print("Super dokonaj analizy!")
        # Workspace over the local SQLite store.
        workspace = Workspace()
        workspace.register_default_store("sql", url="sqlite:///db.sqlite3")
        # Load the logical model and open the browser.
        workspace.import_model("model.json")
        browser = workspace.browser("analiza_temperatura")
        # Pick the drilldown dimension from the first matching GET parameter.
        # (Replaces the original duplicated if/elif ladder.)
        res = None
        po_czym = None
        for dim in ("wiek_pacjenta", "data_pomiaru", "kontynent",
                    "kraj", "obszar"):
            if request.GET.get(dim, None):
                res = browser.aggregate(drilldown=[dim])
                po_czym = dim
                break
        # BUGFIX: the original raised NameError on `res` / `po_czym` when
        # "czy_analiza" was set but no dimension parameter matched; fall back
        # to the empty context instead of crashing.
        if res is None:
            return render(request, "aplikacja/analiza/temperatura.html", {})
        # Collect the aggregation records (also echoed to stdout for debug).
        lista = []
        print(res.summary)
        for r in res:
            print(type(r))
            print(r)
            lista.append(r)
        print(type(res))
        cont = {"agre_list": lista, "czy_analiza": True, "po_czym": po_czym}
    else:
        cont = {}
        print("Lipa")
    return render(request, "aplikacja/analiza/temperatura.html", cont)
def setUp(self):
    """Prepare a slicer test app backed by a SQL workspace with a stub
    fact table."""
    super(SlicerModelTestCase, self).setUp()
    workspace = Workspace()
    workspace.register_default_store("sql", url=TEST_DB_URL)
    self.ws = workspace
    self.slicer.cubes_workspace = workspace
    # The browser needs the fact table to exist; create a minimal stand-in.
    # TODO: replace this once we have data
    store = workspace.get_store("default")
    sales = Table("sales", store.metadata)
    sales.append_column(Column("id", Integer))
    sales.create()
    for model_name in ("model.json", "sales_no_date.json"):
        workspace.import_model(self.model_path(model_name))
def create_app(**config):
    """Application factory for OpenSpending: Flask app, URL converters,
    extensions, a per-request auth/CSRF guard, and the cubes workspace.

    Keyword arguments override both default settings and any values read
    from the OPENSPENDING_SETTINGS-pointed file.
    """
    app = Flask(__name__)
    # Custom routing: namespaced rules plus format / no-dot URL converters.
    app.url_rule_class = NamespaceRouteRule
    app.url_map.converters['fmt'] = FormatConverter
    app.url_map.converters['nodot'] = NoDotConverter
    # Layered configuration: defaults, env-pointed file, explicit overrides.
    app.config.from_object(default_settings)
    app.config.from_envvar('OPENSPENDING_SETTINGS', silent=True)
    app.config.update(config)
    app.jinja_options['extensions'].extend(
        [formencode_jinja2.formfill, 'jinja2.ext.i18n'])
    # Bind extensions to this app instance.
    db.init_app(app)
    cache.init_app(app)
    mail.init_app(app)
    assets.init_app(app)
    login_manager.init_app(app)
    configure_uploads(app, (sourcefiles, ))

    @app.before_request
    def require_basic_auth(*args, **kwargs):
        # When lockdown is forced, anonymous users may only reach the
        # lockdown page and the health-check endpoint.
        LOCKDOWN_FORCE = app.config['LOCKDOWN_FORCE']
        if not current_user.is_authenticated() and request.path not in [
            "/lockdown", "/__ping__"
        ] and LOCKDOWN_FORCE:
            return redirect("/lockdown", code=302)
        # Imported here to avoid a circular import at module load time
        # — presumably; verify against the package layout.
        from openspending.model.search import SearchForm
        g.search_form = SearchForm()
        # CSRF guard for POSTs: the submitted token (form field, falling back
        # to the JSON body) must match the one stored in the session.
        if request.method == "POST" and request.path not in ["/lockdown"]:
            token = session.get('csrf_token', None)
            resquesttoken = request.form.get('csrf_token', None)
            if request.json and not resquesttoken:
                resquesttoken = request.json.get('csrf_token')
            if not token or resquesttoken != token:
                abort(403)

    # Cubes workspace with the OpenSpending-specific store as default.
    with app.app_context():
        app.cubes_workspace = Workspace()
        app.cubes_workspace.register_default_store('OpenSpendingStore')
    return app
def create_workspace(self, store=None, model=None):
    """Create a shared workspace for tests.

    `store` is a dictionary describing the default store; its "type" key
    defaults to "sql".  `model` is either a filename relative to
    ``tests/models`` or a model dictionary.  When no store is given but the
    class carries an `engine` (e.g. a SQL engine set up by the fixture),
    that engine becomes the default SQL store.
    """
    workspace = Workspace()
    if store:
        options = dict(store)          # copy: we pop from it below
        kind = options.pop("type", "sql")
        workspace.register_default_store(kind, **options)
    elif self.engine:
        workspace.register_default_store("sql", engine=self.engine)
    if model:
        if isinstance(model, compat.string_type):
            model = self.model_path(model)
        workspace.import_model(model)
    return workspace
def test_base_ignorance(self):
    """Skipping the base model must leave built-in dimensions undefined."""
    workspace = Workspace(load_base_model=False)
    with self.assertRaises(NoSuchDimensionError):
        workspace.dimension("base_time")
def get_browser():
    """Return a browser for the module-global `cube_name`, defaulting to the
    first cube the workspace lists when none was given on the command line."""
    global cube_name
    if not cube_name:
        # Get the first cube in the list
        cubes = workspace.list_cubes()
        cube_name = cubes[0]["name"]
    return workspace.browser(cube_name)


if __name__ == "__main__":
    # Command line: required config .ini path, optional cube name.
    parser = argparse.ArgumentParser(description='Cubes model browser.')
    parser.add_argument('config', help='server configuration .ini file')
    parser.add_argument('cube', nargs='?', default=None, help='cube name')
    args = parser.parse_args()
    # NOTE(review): SafeConfigParser is the Python 2 spelling; it is
    # deprecated/removed on Python 3 — confirm the intended interpreter.
    config = ConfigParser.SafeConfigParser()
    try:
        config.read(args.config)
    except Exception as e:
        raise Exception("Unable to load config: %s" % e)
    cube_name = args.cube
    workspace = Workspace(config)
    app.debug = True
    app.run()
# Are we at the very detailed level? is_last = hierarchy.is_last(next_level) # Finally, we render it return render_template('report.html', dimensions=cube.dimensions, dimension=dimension, levels=levels, next_level=next_level, result=result, cell=cell, is_last=is_last, details=details) # @app.before_first_request # def initialize_model(): # print 'initialize' # workspace = Workspace(config='slicer.ini') if __name__ == "__main__": workspace = Workspace(config='slicer.ini') workspace.register_default_store( "sql", url="sqlite:///../hello_world/data.sqlite") #,dimension_prefix="dm_") # Create a Slicer and register it at http://localhost:5000/slicer app.register_blueprint(slicer, url_prefix="/slicer", config="slicer.ini") app.run(debug=True)
# Go to the ../hello_world directory and do: python prepare_data.py # # Instructions: # # Just run this file: # # python table.py # Output: # * standard input – text table # * table.html # * cross_table.html # from cubes import Workspace, create_formatter workspace = Workspace("slicer.ini") # Create formatters text_formatter = create_formatter("text_table") html_formatter = create_formatter("simple_html_table") html_cross_formatter = create_formatter("html_cross_table") # Get the browser and data browser = workspace.browser("irbd_balance") result = browser.aggregate(drilldown=["item"]) result = result.cached() # # 1. Create text output
from cubes import Workspace
from cubes.compat import ConfigParser

print("Python Cubes - Test1")

# Build the workspace from the slicer configuration file.
parser = ConfigParser()
parser.read("slicer.ini")
ws = Workspace(config=parser)

# Load the logical model and open a browser for the POMIAR cube.
ws.import_model("model.json")
browser = ws.browser("POMIAR")

# Aggregate with a drilldown on GRUPA.
result = browser.aggregate(drilldown=["GRUPA"])

# Overall summary first, then one record per group.
print(result.summary)
for record in result:
    print(record)
def test_select_hierarchies(self):
    """The built-in date dimension ships with fewer hierarchies than the
    time dimension."""
    workspace = Workspace()
    time_dim = workspace.dimension("base_time")
    date_dim = workspace.dimension("base_date")
    self.assertLess(len(date_dim.hierarchies), len(time_dim.hierarchies))
import os

from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy

from app.celery.celery import NotifyCelery
from cubes import Workspace

# Module-level extension singletons; bound to the app in create_app().
migrate = Migrate()
db = SQLAlchemy()
notify_celery = NotifyCelery()
notify_workspace = Workspace()


def create_app(application):
    """Configure `application` for the current NOTIFY_ENVIRONMENT and bind
    the extensions; returns the configured app.

    Raises KeyError if NOTIFY_ENVIRONMENT is unset or unknown to `configs`.
    """
    from app.config import configs

    notify_environment = os.environ['NOTIFY_ENVIRONMENT']

    application.config.from_object(configs[notify_environment])

    # init_app(application)
    notify_celery.init_app(application)
    db.init_app(application)
    migrate.init_app(application, db=db)

    # NOTE(review): register_blueprint is not defined in this module's
    # visible scope — presumably imported or defined elsewhere; verify.
    register_blueprint(application)

    return application
from cubes import Workspace

print("Python Cubes Test!")

# Workspace over the local SQLite database.
ws = Workspace()
ws.register_default_store("sql", url="sqlite:///db.sqlite3")

# Load the model and open a browser for the temperature-analysis cube.
ws.import_model("model.json")
browser = ws.browser("analiza_temperatura")

# Aggregate with a drilldown by continent.
result = browser.aggregate(drilldown=["kontynent"])

# Overall summary first, then one record per group.
print(result.summary)
for record in result:
    print(record)
def test_base_existence(self):
    """A default workspace must provide the built-in base_time dimension."""
    workspace = Workspace()
    dimension = workspace.dimension("base_time")
    self.assertEqual(dimension.name, "base_time")