def __init__(self, **options):
    """Initialize the SQL store.

    Keyword options are copied and coerced through `coalesce_options`
    against `OPTION_TYPES`. Schema and SQLAlchemy metadata start unset
    and are resolved later.
    """
    super(SQLStore, self).__init__(**options)

    # Coerce a private copy of the options, leaving the caller's dict
    # untouched.
    self.options = coalesce_options(dict(options), OPTION_TYPES)

    self.logger = get_logger()

    self.schema = None
    self._metadata = None
def __init__(self, cube, store, locale=None, calendar=None, **options):
    """Create a Django-backed aggregation browser for `cube`.

    The Django model class name comes from the store, unless the cube's
    `browser_options` override it with a `class_name` entry.
    """
    super(DjangoBrowser, self).__init__(cube, store)
    self.logger = get_logger()
    self.cube = cube
    # Locale support is not implemented
    self.locale = locale or cube.locale

    # A cube-level browser option takes precedence over the store default.
    override = self.cube.browser_options.get('class_name')
    self.class_name = override if override else store.class_name

    # Used in `provide_aggregate`
    self.include_summary = options.get("include_summary", True)
    self.include_cell_count = options.get("include_cell_count", True)
    self.safe_labels = options.get("safe_labels", False)
    self.label_counter = 1

    # Whether to ignore cells where at least one aggregate is NULL.
    # (The option key spelling "agregates" is kept for compatibility.)
    self.exclude_null_agregates = options.get("exclude_null_agregates", True)

    self.mapper = DjangoMapper(self.cube, self.class_name, self.locale)
    self.model = get_model(*self.class_name.split('.'))
def __init__(self, cube, store, locale=None, calendar=None, **options):
    """Create a MongoDB aggregation browser for `cube`.

    Database and collection default to the store's settings and can be
    overridden per cube through `browser_options`. The timezone is
    resolved from cube options, then browser options, then UTC.
    """
    super(MongoBrowser, self).__init__(cube, store)
    self.logger = get_logger()

    # Cube browser options override the store-level database/collection.
    database = store.database
    if cube.browser_options.get('database'):
        database = cube.browser_options.get('database')

    collection = store.collection
    if cube.browser_options.get('collection'):
        collection = cube.browser_options.get('collection')

    self.data_store = store.client[database][collection]
    self.mapper = MongoCollectionMapper(cube, database, collection, locale)

    self.timezone = pytz.timezone(cube.browser_options.get('timezone')
                                  or options.get('timezone')
                                  or 'UTC')

    self.datesupport = MongoDateSupport(self.logger, calendar)

    if "__query__" in self.cube.mappings:
        # Fixed: Logger.warn() is a deprecated alias of warning();
        # the message also misspelled "deprecated".
        self.logger.warning("mongo: __query__ in mappings is deprecated, "
                            "use browser_options.filter instead")

    self.query_filter = options.get("filter", None)
def __init__(self, cube, store, locale=None, calendar=None, **options):
    """Create a MongoDB aggregation browser for `cube`.

    Database and collection default to the store's settings and can be
    overridden per cube through `browser_options`. The timezone is
    resolved from cube options, then browser options, then UTC.
    """
    super(MongoBrowser, self).__init__(cube, store)
    self.logger = get_logger()

    # Cube browser options override the store-level database/collection.
    database = store.database
    if cube.browser_options.get('database'):
        database = cube.browser_options.get('database')

    collection = store.collection
    if cube.browser_options.get('collection'):
        collection = cube.browser_options.get('collection')

    self.data_store = store.client[database][collection]
    self.mapper = MongoCollectionMapper(cube, database, collection, locale)

    self.timezone = pytz.timezone(cube.browser_options.get('timezone')
                                  or options.get('timezone')
                                  or 'UTC')

    self.datesupport = MongoDateSupport(self.logger, calendar)

    if "__query__" in self.cube.mappings:
        # Fixed: Logger.warn() is a deprecated alias of warning();
        # the message also misspelled "deprecated".
        self.logger.warning("mongo: __query__ in mappings is deprecated, "
                            "use browser_options.filter instead")

    self.query_filter = options.get("filter", None)
def __init__(self, email=None, key_file=None, account_id=None,
             account_name=None, web_property=None, category=None,
             view_id=None, **options):
    """Create a Google Analytics store.

    Requires `email` and `key_file` (service-account credentials).
    `account_name` and `account_id` are mutually exclusive.

    Optional `default_start_date` / `default_end_date` options are
    "Y-M-D" strings, stored as split component lists.

    Raises:
        ConfigurationError: when credentials are missing or both account
            name and id are provided.
    """
    super(GoogleAnalyticsStore, self).__init__(**options)

    self.logger = get_logger()

    self.service = None
    self.credentials = None
    self.category = category

    if not email:
        raise ConfigurationError("Google Analytics: email is required")
    if not key_file:
        raise ConfigurationError("Google Analytics: key_file is required")

    if account_name and account_id:
        raise ConfigurationError("Both account_name and account_id "
                                 "provided. Use only one or none.")

    with open(key_file) as f:
        self.key = f.read()

    self.email = email
    self.account_id = account_id
    self.web_property_id = web_property
    self.web_property = None
    self.profile_id = view_id
    self.profile = None

    date = options.get("default_start_date")
    if date:
        self.default_start_date = date.split("-")
    else:
        self.default_start_date = None

    # BUG FIX: this previously re-read "default_start_date", so any
    # configured default end date was silently ignored.
    date = options.get("default_end_date")
    if date:
        self.default_end_date = date.split("-")
    else:
        self.default_end_date = None

    self.credentials = SignedJwtAssertionCredentials(
        self.email, self.key,
        scope="https://www.googleapis.com/auth/analytics.readonly")

    # TODO: make this lazy
    self._authorize()
    self._initialize_account(account_name, account_id)
def __init__(self, api_key, api_secret, category=None, tz=None, **options):
    """Create a Mixpanel store.

    When `tz` is not given, the local timezone name reported by
    `time.strftime('%Z')` is used instead.
    """
    super(MixpanelStore, self).__init__(**options)

    self.mixpanel = Mixpanel(api_key, api_secret)
    self.category = category or "Mixpanel Events"

    # Fall back to the local timezone name, then resolve either way.
    if tz is None:
        tz = time.strftime('%Z', time.localtime())
    self.tz = pytz.timezone(tz)

    self.logger = get_logger()
def __init__(self, *args, **kwargs):
    """Initialize the provider with empty Google Analytics metadata caches."""
    super(GoogleAnalyticsModelProvider, self).__init__(*args, **kwargs)

    self.logger = get_logger()
    # NOTE(review): forces DEBUG level unconditionally — looks like a
    # development leftover; confirm before shipping.
    self.logger.setLevel("DEBUG")

    # Caches of GA metadata, populated later by the provider.
    self.ga_concepts = {}
    self.ga_measures = {}
    self.ga_dimensions = {}
    self.ga_cubes = []

    # Bidirectional cube name <-> GA group mapping.
    self.cube_to_group = {}
    self.group_to_cube = {}
def __init__(self, cube, store, locale=None, **options):
    """Create a Google Analytics aggregation browser for `cube`.

    Default report date range is taken from the store; the start date
    falls back to the module-level `_DEFAULT_START_DATE` when the store
    does not provide one.
    """
    self.store = store
    self.cube = cube
    self.locale = locale
    self.logger = get_logger()
    # NOTE(review): forces DEBUG level unconditionally — looks like a
    # development leftover; confirm before shipping.
    self.logger.setLevel("DEBUG")

    self.mapper = GoogleAnalyticsMapper(cube, locale)

    # Note: Make sure that we have our own calendar copy, not workspace
    # calendar (we don't want to rewrite anything shared)
    self.calendar = Calendar(timezone=self.store.timezone)

    self.default_start_date = self.store.default_start_date \
        or _DEFAULT_START_DATE
    self.default_end_date = self.store.default_end_date
def __init__(self, cube, store, locale=None, **options):
    """Creates a Mixpanel aggregation browser.

    Requirements and limitations:

    * `time` dimension should always be present in the drilldown
    * only one other dimension is allowed for drilldown
    * range cuts assume numeric dimensions
    * unable to drill-down on `year` level, will default to `month`
    """
    self.store = store
    self.cube = cube
    self.options = options
    self.logger = get_logger()
    self.timezone = self.store.tz

    # Expose every cube dimension to the mapper as a candidate Mixpanel
    # property dimension.
    dim_names = [dim.name for dim in cube.dimensions]
    self.mapper = MixpanelMapper(cube, cube.mappings,
                                 property_dimensions=dim_names)
# Expand environment variables in the Heroku slicer config template and
# write the substituted copy alongside it; the server reads the copy.
with open(BASE_DIR + "/ostrovacubes/heroku_slicer.ini", 'r') as inp, \
        open(BASE_DIR + "/ostrovacubes/heroku_slicer_subst.ini", 'w+') as temp:
    for ln in inp:
        temp.write(os.path.expandvars(ln))

# Must be set before the slicer config is read below.
os.environ.setdefault("SLICER_CONFIG",
                      BASE_DIR + "/ostrovacubes/heroku_slicer_subst.ini")

django_application = get_wsgi_application()
# Serve static files straight from the WSGI application.
django_application = DjangoWhiteNoise(django_application)

config = read_slicer_config(os.environ["SLICER_CONFIG"])
# NOTE(review): error level is used here for ordinary startup messages —
# presumably to guarantee visibility regardless of log level; confirm.
logging.error("init cubes")

# initialize logging
#if config.has_option("server","log"):
logging.error("init logging:" + config.get("server", "log", fallback="CONSOLE"))
lg = get_logger(config.get("server", "log", fallback=None))
lg.error("logging test")
lg.setLevel(logging.DEBUG)

cubes_application = create_server(config)

# Enable Flask-style debug mode when SLICER_DEBUG is truthy.
debug = os.environ.get("SLICER_DEBUG")
if debug and str_to_bool(debug):
    cubes_application.debug = True

# Mount the cubes server under /cubes_backend next to the Django app.
application = DispatcherMiddleware(django_application,
                                   {'/cubes_backend': cubes_application})
from ..errors import ModelError, TemplateRequired, CubesError, BackendError
from ..errors import NoSuchDimensionError, NoSuchCubeError
from .localization import LocalizationContext
from .cube import Cube
from .dimension import Dimension
from cubes.logging import get_logger

__all__ = [
    "ModelProvider",
    "StaticModelProvider",
    "link_cube",
    "find_dimension",
]

LOG = get_logger()

# Proposed Provider API:
#     Provider.cube() – in abstract class
#     Provider.provide_cube() – in concrete class, provides a Cube object
#                               that might be modified later
#     Provider.provide_dimension()
#     Provider.link_cube(cube, locale)
#     Provider.find_dimension(cube, locale)
#
# Provider is bound to a namespace.

# TODO: add tests
# TODO: needs to be reviewed

def link_cube(cube,