def select_id_or_insert_multi(self, report_id, tags_series_spec_list):
    """Return a series_id for every (tags, series_spec) pair, inserting
    series_def rows for the pairs that don't exist yet (SQLite backend).

    :param report_id: the report's ID the series definitions belong to
    :param tags_series_spec_list: a list of (tags, series_spec) tuples
    :return: a list of series IDs, in input order
    """
    res = []
    with cursor() as cur:
        for (tags, series_spec) in tags_series_spec_list:
            # Fast path: the definition already exists.
            cur.execute(
                """SELECT series_id FROM series_def WHERE series_spec=? AND report_id=? AND tags=?""",
                [serialize.mjson(series_spec), report_id, tags])
            row = cur.fetchone()
            if row:
                res.append(row['series_id'])
                continue
            series_id = gen_timeuuid()
            # Conditional insert: the WHERE NOT EXISTS clause appears intended
            # to avoid duplicating a definition created concurrently.
            # NOTE(review): the guard compares series_id (the freshly generated
            # time-UUID) rather than series_spec, so it seemingly can never
            # match an existing row -- confirm the intended uniqueness key.
            cur.execute(
                """INSERT INTO series_def (report_id, tags, series_id, series_spec, from_rid, to_rid) SELECT ?, ?, ?, ?, ?, ? WHERE NOT EXISTS (SELECT 1 FROM series_def WHERE report_id=? AND tags=? AND series_id=?)""",
                [
                    report_id, tags, series_id, serialize.mjson(series_spec),
                    None, None, report_id, tags, series_id
                ])
            # lastrowid is set only when the INSERT actually inserted a row.
            if cur.lastrowid:
                res.append(series_id)
                continue
            # The insert was skipped -- another writer created the definition;
            # re-read its series_id.
            cur.execute(
                """SELECT series_id FROM series_def WHERE series_spec=? AND report_id=? AND tags=?""",
                [serialize.mjson(series_spec), report_id, tags])
            res.append(cur.fetchone()['series_id'])
    return res
def test_mjson(self):
    """mjson() rejects non-serializable objects and emits compact JSON."""
    # Unknown types must raise instead of producing garbage output.
    self.assertRaises(TypeError, serialize.mjson, object())
    nested = {'a': {'b': [1, 2, 3]}}
    self.assertEqual('{"a":{"b":[1,2,3]}}', serialize.mjson(nested))
def update_user_data(self, d):
    """Update the ``mqe.user.user_data`` column holding a custom
    JSON-serializable dictionary.

    The ``d`` is a dictionary which items will be assigned to the
    ``user_data`` dictionary.
    """
    assert isinstance(d, dict)
    row = c.dao.UserDAO.select(self.user_id)
    merged = serialize.json_loads(row['user_data'])
    merged.update(d)
    serialized = serialize.mjson(merged)
    c.dao.UserDAO.update_user_data(self.user_id, serialized)
    self['user_data'] = serialized
    # clear cached value
    del self.user_data
def select_id_or_insert_multi(self, report_id, tags_series_spec_list):
    """Return a series_id for each (tags, series_spec) pair, inserting
    definitions that don't exist yet (Cassandra backend)."""
    # Fire all the lookups in parallel against the by-series-spec table.
    selects = []
    for (tags, series_spec) in tags_series_spec_list:
        selects.append(bind(
            """SELECT series_id FROM mqe.series_def_by_series_spec WHERE report_id=? AND tags_repr=? AND series_spec=?""",
            [report_id, tags_repr_from_tags(tags),
             serialize.mjson(series_spec)]))
    rows_per_query = c.cass.execute_parallel(selects)
    res = [rows[0]['series_id'] if rows else None for rows in rows_per_query]

    # Insert the missing specs and patch their IDs back into position.
    missing_idxs = [i for i, series_id in enumerate(res) if series_id is None]
    inserted_ids = self.insert_multi(
        report_id, [tags_series_spec_list[i] for i in missing_idxs])
    for idx, series_id in zip(missing_idxs, inserted_ids):
        res[idx] = series_id
    return res
def select_default_series_spec_options(report_id, series_spec_list):
    """Return default options for a list of :class:`SeriesSpec` objects as a list
    of dictionaries with the keys:

    * ``name`` - the suggested name of the series
    * ``color`` - the suggested color of the series

    """
    assert isinstance(series_spec_list, list)
    lookup_keys = [
        serialize.mjson(series_spec_for_default_options(ss))
        for ss in series_spec_list
    ]
    # The same serialized spec can appear multiple times; remember every
    # position it occupies so each one receives the options.
    positions_by_key = defaultdict(list)
    for idx, key in enumerate(lookup_keys):
        positions_by_key[key].append(idx)

    res = [{} for _ in series_spec_list]
    options_list = c.dao.OptionsDAO.select_multi(report_id, 'SeriesSpec',
                                                 lookup_keys)
    for series_spec_raw, default_options_raw in options_list:
        if not default_options_raw:
            continue
        for idx in positions_by_key[series_spec_raw]:
            res[idx] = serialize.json_loads(default_options_raw)
    return res
def insert_dashboard(self, dashboard_name, dashboard_options=None):
    """Insert a dashboard having the passed ``dashboard_name`` and
    ``dashboard_options`` (a JSON-serializable value, default: ``{}``).

    :return: the new :class:`Dashboard`, or ``None`` if the insert failed
    """
    from mqe import layouts

    # A literal ``{}`` default would be a shared mutable default argument;
    # use None as the sentinel and build a fresh dict per call.
    if dashboard_options is None:
        dashboard_options = {}
    row = c.dao.DashboardDAO.insert(self.owner_id, dashboard_name,
                                    serialize.mjson(dashboard_options))
    if not row:
        return None
    dashboard = Dashboard(row)
    change_dashboards_ordering(
        self.owner_id, self.dashboard_id_ordering + [dashboard.dashboard_id])
    # Each new dashboard starts with an empty layout.
    empty_layout = layouts.Layout()
    empty_layout.set(self.owner_id, row['dashboard_id'], None)
    log.info('Inserted new dashboard dashboard_id=%s name=%r',
             dashboard.dashboard_id, dashboard.dashboard_name)
    self.reload()
    fire_signal(new_dashboard, dashboard=dashboard)
    # Reuse the Dashboard already built above instead of constructing a
    # second identical instance.
    return dashboard
def update(self, dashboard_name=None, dashboard_options=None):
    """Update the dashboard's name (if non-``None``) and/or options (if
    non-``None`` and passed as a JSON-serializable document)"""
    serialized_options = None
    if dashboard_options is not None:
        serialized_options = serialize.mjson(dashboard_options)
    c.dao.DashboardDAO.update(self.owner_id, self.dashboard_id,
                              dashboard_name, serialized_options)
def set_all_dashboards_ordering(self, owner_id, dashboard_id_list):
    """Persist the owner's full dashboard ordering (SQLite backend)."""
    props = dict(
        owner_id=owner_id,
        dashboard_id_ordering=serialize.mjson(dashboard_id_list))
    with cursor() as cur:
        cur.execute(*replace('all_dashboards_properties', props))
def insert(cls, owner_id, report_id, dashboard_id, tile_config, skip_db=False):
    """Insert and return a new :class:`Tile` specified using the
    :data:`tile_config`. If ``skip_db`` is ``True``, the tile is not
    inserted into the database."""
    cls._postprocess_tile_config(tile_config)
    cls._validate_tile_config(tile_config)
    # Base tile_options; explicit entries from tile_config['tile_options']
    # override these defaults below.
    tile_options = {
        'tw_type': tile_config.get('tw_type', DEFAULT_TW_TYPE),
        'owner_id': owner_id,
        'report_id': report_id,
        'tags': tile_config.get('tags', []),
    }
    tile_options.update(tile_config.get('tile_options', {}))
    # Build an in-memory tile (tile_id=None) so the tilewidget machinery can
    # compute series configs and post-process options before any DB write.
    partial_tile = Tile({'dashboard_id': dashboard_id, 'tile_id': None,
                         'tile_options': serialize.mjson(tile_options)})
    partial_tile.tile_options['series_configs'] = \
        partial_tile.tilewidget.get_series_configs(tile_config['series_spec_list'])
    cls._postprocess_tile_options(partial_tile.tile_options)
    partial_tile.tilewidget.postprocess_new_tile_options(tile_config)
    # When skip_db is set the caller gets the fully prepared tile without
    # persisting it.
    if skip_db:
        return partial_tile
    return Tile.insert_with_tile_options(dashboard_id, partial_tile.tile_options)
def test_custom_types(self):
    """Round-tripping through mjson()/json_loads() preserves custom types."""
    sample = {
        'id': uuid.uuid1(),
        'id2': uuid.uuid4(),
        'dt': datetime.datetime.utcnow(),
        'da': datetime.datetime.utcnow(),
        'ss': SeriesSpec(2, 0, dict(op='eq', args=['monique'])),
    }
    self.assertEqual(sample, serialize.json_loads(serialize.mjson(sample)))

    # A bare date comes back as a datetime at midnight.
    now = datetime.datetime.utcnow()
    self.assertEqual(util.datetime_from_date(now.date()),
                     serialize.json_loads(serialize.mjson(now.date())))

    # The "external" dumper renders UUIDs as plain hex strings.
    external = json.loads(
        serialize.json_dumps_external(util.dictwithout(sample, 'ss')))
    self.assertEqual(sample['id2'].hex, external['id2'])
def update_default_options(tile):
    """Include the tile's :data:`tile_options` in a pool of default options
    belonging to the owner of the tile"""
    series_specs = tile.series_specs()
    if not series_specs:
        return
    lookup_keys = [
        serialize.mjson(series_spec_for_default_options(ss))
        for ss in series_specs
    ]
    old_options_list = c.dao.OptionsDAO.select_multi(tile.report_id,
                                                     'SeriesSpec', lookup_keys)
    # Deserialize the stored (series spec -> options) pairs.
    default_options_by_ss = {}
    for ss_do_raw, options_raw in old_options_list:
        stored_ss = series_spec_for_default_options(
            serialize.json_loads(ss_do_raw))
        default_options_by_ss[stored_ss] = serialize.json_loads(options_raw)

    colors = tile.tile_options.get('colors')
    to_set = []
    for i, ss in enumerate(series_specs):
        ss_do = series_spec_for_default_options(ss)
        old_options = default_options_by_ss.get(ss_do, {})
        new_options = {}
        name = ss.params.get('name')
        if name:
            new_options['name'] = name
        color = util.safeget(colors, i)
        if color:
            new_options['color'] = color
        # Never lose a previously remembered color.
        if old_options.get('color') and not new_options.get('color'):
            new_options['color'] = old_options['color']
        # Only write back entries that actually changed.
        if old_options != new_options:
            to_set.append((serialize.mjson(ss_do),
                           serialize.mjson(new_options)))

    if to_set:
        c.dao.OptionsDAO.set_multi(tile.report_id, 'SeriesSpec', to_set)
        log.debug('Updated default options from tile %s', tile)
def get_series_configs(self, series_spec_list):
    """Build a series config (spec plus deterministic series_id) for each
    spec, capped at ``mqeconfig.MAX_SERIES``."""
    tags = self.tile_options['tags']
    configs = []
    for series_spec in series_spec_list[:mqeconfig.MAX_SERIES]:
        # Deriving the UUID from the serialized (spec, tags) pair makes the
        # same input always map to the same series_id.
        key = mjson({'series_spec': series_spec, 'tags': tags})
        configs.append({
            'series_spec': series_spec,
            'series_id': util.uuid_for_string(key),
        })
    return configs
def insert_multi(self, report_id, tags_series_spec_list):
    """Insert a series_def row per (tags, series_spec) pair; return the
    generated series IDs in input order (SQLite backend)."""
    ids = []
    with cursor() as cur:
        for (tags, series_spec) in tags_series_spec_list:
            new_id = gen_timeuuid()
            row = dict(report_id=report_id,
                       tags=tags,
                       series_id=new_id,
                       series_spec=serialize.mjson(series_spec))
            cur.execute(*insert('series_def', row))
            ids.append(new_id)
    return ids
def insert_with_tile_options_multi(cls, dashboard_id, tile_options_list):
    """Insert and return multiple :class:`Tile` objects at once. A
    :class:`Tile` is created for each :data:`tile_options` from the list."""
    assert isinstance(tile_options_list, list)
    if not tile_options_list:
        return []
    for tile_options in tile_options_list:
        cls._postprocess_tile_options(tile_options)
    # All tiles in one call share the owner of the first options document.
    owner_id = tile_options_list[0]['owner_id']
    serialized = [serialize.mjson(to) for to in tile_options_list]
    rows = c.dao.TileDAO.insert_multi(owner_id, dashboard_id, serialized)
    return [Tile(row) for row in rows]
def rows_it():
    # Generator yielding series-value rows extracted from report instances.
    # NOTE(review): relies on closure variables (instances_it, info,
    # series_def) defined in the enclosing function, which is outside this
    # view -- presumably ``info`` reports fetch progress back to the caller.
    for ri in instances_it:
        # Track the range of instance IDs seen; assumes instances_it yields
        # oldest-first -- TODO confirm against the producer.
        if info['oldest_rid_fetched'] is None:
            info['oldest_rid_fetched'] = ri.report_instance_id
        info['newest_rid_fetched'] = ri.report_instance_id
        info['count'] += 1
        cell = series_def.series_spec.get_cell(ri)
        if cell:
            row = dict(report_instance_id=ri.report_instance_id,
                       json_value=serialize.mjson(cell.value))
            # Attach the column header when the table defines one.
            header = ri.table.header(cell.colno)
            if header:
                row['header'] = header
            yield row
def insert_multi(self, report_id, tags_series_spec_list):
    """Insert series definitions and return the generated series IDs in
    input order (Cassandra backend)."""
    if not tags_series_spec_list:
        return []
    queries = []
    ids = []
    for (tags, series_spec) in tags_series_spec_list:
        new_id = gen_timeuuid()
        tags_repr = tags_repr_from_tags(tags)
        spec_raw = serialize.mjson(series_spec)
        # Each definition is written to two tables: the primary one and a
        # lookup table keyed by the serialized spec.
        queries.append(insert('mqe.series_def',
                              dict(report_id=report_id,
                                   tags_repr=tags_repr,
                                   series_id=new_id,
                                   series_spec=spec_raw)))
        queries.append(insert('mqe.series_def_by_series_spec',
                              dict(report_id=report_id,
                                   tags_repr=tags_repr,
                                   series_spec=spec_raw,
                                   series_id=new_id)))
        ids.append(new_id)
    c.cass.execute_parallel(queries)
    return ids
def fetch_move_tile_data():
    """Return data for the "move tile" dialog: the tile's report name and a
    rendered dashboard <select> excluding the tile's own dashboard."""
    # Parse the request body once instead of calling get_json() per field.
    payload = request.get_json()
    dashboard_id = payload['dashboard_id']
    tile_id = payload['tile_id']
    check_access(lambda: auth.access_dashboard(dashboard_id))
    dbs = dashboards.OwnerDashboards(auth.logged_owner_id())
    # Candidate target dashboards: every dashboard except the current one.
    options = [(serialize.mjson(db.dashboard_id), db.dashboard_name)
               for db in dbs.dashboards
               if db.dashboard_id != dashboard_id]
    tile = tiles.Tile.select(dashboard_id, tile_id)
    if not tile:
        return error('Invalid tile')
    result = dict(
        report_name=tile.report.report_name,
        has_options=bool(options),
        html_dashboard_select=get_template_attribute('m.html', 'select')(
            'move-to-dashboard-select', options),
    )
    return success(result=result)
def insert(self, email, password):
    """Create a user row plus its by-email index row and return the new
    user; raises ValueError when the email is already registered."""
    if self.select_by_email(email) is not None:
        raise ValueError('User exists: %r' % email)
    user_id = gen_uuid()
    user_row = {
        'user_id': user_id,
        'email': email,
        # Only the password hash is ever stored.
        'password': werkzeug.security.generate_password_hash(password),
        'created': datetime.datetime.utcnow(),
        'user_data': serialize.mjson({}),
    }
    email_row = {'email': email, 'user_id': user_id}
    # Write both rows atomically.
    c.cass.execute(batch(
        insert('mqe.user', user_row),
        insert('mqe.user_by_email', email_row),
    ))
    return self.select(user_id)
def __hash__(self):
    """Hash via the canonical JSON serialization of the object."""
    serialized = serialize.mjson(self)
    return hash(serialized)
def todata(s):
    """Coerce a value to its stored form: '' for None, strings unchanged,
    anything else serialized to JSON."""
    if s is None:
        return ''
    return s if isinstance(s, basestring) else serialize.mjson(s)
def set(self, owner_id=None, dashboard_id=None, old_layout_id=None):
    """Set a new layout definition for the dashboard (replacing the existing
    one), using the current content of the :attr:`layout_dict`. The parameters
    are optional - if not specified, the current values of :attr:`owner_id`,
    :attr:`dashboard_id` and :attr:`layout_id` are used.

    :param owner_id: the owner ID of the dashboard
    :param dashboard_id: the dashboard's ID
    :param old_layout_id: ``None`` if this should be a new layout definition
        for the dashboard, ``layout_id`` of the existing layout otherwise
    :return: a ``layout_id`` of a newly set layout if the operation was
        successful, ``None`` otherwise (ie. when the passed ``old_layout_id``
        didn't match the version in the database)
    """
    owner_id = owner_id or self.owner_id
    if not owner_id:
        raise ValueError('owner_id not set in Layout and not passed as an argument')
    dashboard_id = dashboard_id or self.dashboard_id
    if not dashboard_id:
        raise ValueError('dashboard_id not set in Layout and not passed as an argument')
    old_layout_id = old_layout_id or self.layout_id

    # a layout def is a layout_dict serialized as a list of items. The list is
    # sorted by tile creation time (but this assumption should not be
    # generally made).
    new_layout_def = serialize.mjson(sorted(self.layout_dict.items(),
                                            key=lambda (tile_id, vo): tile_id.time))

    # Merge old layout_props with new data
    old_layout_props_row = c.dao.LayoutDAO.select(owner_id, dashboard_id,
                                                  ['layout_props'])
    # No stored row while the caller expected an existing layout version:
    # treat it as a failed compare-and-set.
    if not old_layout_props_row and old_layout_id:
        return None
    if old_layout_props_row and old_layout_props_row['layout_props']:
        old_layout_props = serialize.json_loads(old_layout_props_row['layout_props'])
    else:
        old_layout_props = {'by_tile_id': []}

    # Reuse stored per-tile props when available; otherwise take them from
    # tiles included in this Layout object, and fetch the rest from the DB.
    by_tile_id = {}
    old_by_tile_id = dict(old_layout_props['by_tile_id'])
    tile_ids_to_fetch = []
    for tile_id in self.layout_dict:
        if tile_id in old_by_tile_id:
            by_tile_id[tile_id] = old_by_tile_id[tile_id]
        elif tile_id in self._included_tiles:
            by_tile_id[tile_id] = self.props_of_tile(self._included_tiles[tile_id])
        else:
            tile_ids_to_fetch.append(tile_id)
    tile_dict = Tile.select_multi(dashboard_id, tile_ids_to_fetch)
    for tile_id, tile in tile_dict.items():
        by_tile_id[tile.tile_id] = self.props_of_tile(tile)

    # Compute data for sscreator and tpcreator
    sscs_data = set()
    master_data = set()
    for props in by_tile_id.values():
        if props.get('sscs'):
            #sscs_data.add((props['report_id'], tuple(props['tags'])))
            sscs_data.add(props['report_id'])
        if props.get('is_master'):
            master_data.add(props['report_id'])

    new_layout_props = serialize.mjson({'by_tile_id': by_tile_id.items()})

    # Set the new layout. The DAO performs a versioned (compare-and-set
    # style) update keyed on old_layout_id -- presumably to detect
    # concurrent modifications; confirm against the DAO implementation.
    new_layout_id = gen_timeuuid()
    res = c.dao.LayoutDAO.set(owner_id, dashboard_id, old_layout_id,
                              new_layout_id, new_layout_def, new_layout_props)
    if not res:
        log.info('Setting new layout failed')
        return None

    # Insert layout_by_report for sscs and tpcreator
    c.dao.LayoutDAO.insert_layout_by_report_multi(owner_id, sscs_data, [],
                                                  'sscs', dashboard_id,
                                                  new_layout_id)
    c.dao.LayoutDAO.insert_layout_by_report_multi(owner_id, master_data, [],
                                                  'tpcreator', dashboard_id,
                                                  new_layout_id)

    self.layout_id = new_layout_id
    return new_layout_id
def process_input(self, input_string, tags=None, created=None, input_type='any',
                  ip_options=None, force_header=None, extra_ri_data=None,
                  handle_tpcreator=True, handle_sscreator=True):
    """Process an input string - parse it into a table and create a report
    instance belonging to the report.

    :param str|unicode input_string: the input string
    :param list tags: a list of string tags attached to the report instance
    :param ~datetime.datetime created: an explicit creation datetime of the
        report instance (default: the current datetime)
    :param str input_type: input type (see :func:`mqetables.parseany.parse_input`)
    :param dict ip_options: extra parser options (see
        :func:`mqetables.parsing.InputParser`)
    :param force_header: a list of header rows indexes to set as a header
        (defaults to auto-detection)
    :param extra_ri_data: a custom JSON-serializable document attached to
        the report instance
    :param handle_tpcreator: whether to handle TPCreator for the created
        report instance by calling :func:`~mqe.tpcreator.handle_tpcreator`
    :param handle_sscreator: whether to handle SSCS by calling
        :func:`~mqe.sscreator.handle_sscreator`
    :return: an :class:`InputProcessingResult`
    """
    assert isinstance(input_string, (str, unicode))

    # disallow 'created' in the future
    now = datetime.datetime.utcnow()
    if created is not None and created.tzinfo:
        created = util.make_tz_naive(created)
    if created is not None and created.year < 2000:
        raise ValueError('created cannot be before the year 2000')
    if created is not None and created < now:
        # An explicit past datetime: encode it in the instance ID.
        report_instance_id = util.uuid_with_dt(created)
        custom_created = True
    else:
        custom_created = False
        report_instance_id = gen_timeuuid()
        created = util.datetime_from_uuid1(report_instance_id)

    if tags is None:
        tags = []
    # A ``{}`` default argument would be a shared mutable default; use None
    # as the sentinel and normalize here instead.
    if ip_options is None:
        ip_options = {}

    parsing_result = parseany.parse_input(input_string, input_type, ip_options)
    table = mqeconfig.get_table_from_parsing_result(parsing_result)
    if table is None:
        return InputProcessingResult(None, parsing_result)

    if force_header is not None:
        log.debug('Overwriting header detection due to force_header')
        # Keep only indexes that are valid for the parsed table.
        table.header_idxs = [
            i for i in force_header if util.valid_index(table.num_rows, i)
        ]
        table.header_idxs_source = parsing.HEADER_IDXS_SOURCE_USER

    ri_data_dict = {
        'table': table,
    }
    result_desc = self._get_result_desc(parsing_result)
    if result_desc:
        ri_data_dict['result_desc'] = result_desc

    report_instance_row = c.dao.ReportInstanceDAO.insert(
        owner_id=self.owner_id,
        report_id=self.report_id,
        report_instance_id=report_instance_id,
        tags=tags,
        ri_data=serialize.mjson(ri_data_dict),
        input_string=parsing_result.input_string,
        extra_ri_data=serialize.mjson(extra_ri_data) if extra_ri_data else None,
        custom_created=custom_created)
    report_instance = ReportInstance(report_instance_row)
    log.info(
        'Created new report instance report_id=%s report_name=%r tags=%s '
        'report_instance_id=%s created=%s', self.report_id, self.report_name,
        tags, report_instance_id, report_instance.created)

    if tags and handle_tpcreator:
        from mqe import tpcreator
        tpcreator.handle_tpcreator(self.owner_id, self.report_id,
                                   report_instance)
    if handle_sscreator:
        from mqe import sscreator
        sscreator.handle_sscreator(self.owner_id, self.report_id,
                                   report_instance)
    if custom_created:
        # A backdated instance may invalidate cached series data.
        from mqe import dataseries
        dataseries.clear_series_defs(self.report_id, util.powerset(tags))
    return InputProcessingResult(report_instance, parsing_result)
def insert_series_values(series_def, report, from_dt, to_dt, after=None,
                         limit=None):
    """Compute and store values of a series for report instances from a
    datetime range (or after a given report_instance_id), updating the
    series' from_rid/to_rid fetch bookkeeping.

    :param series_def: the series definition whose values are computed
    :param report: the report the instances are fetched from
    :param from_dt: start of the datetime range (ignored when ``after`` is set)
    :param to_dt: end of the datetime range (ignored when ``after`` is set)
    :param after: fetch only instances after this report_instance_id
    :param limit: maximal number of instances to fetch (default:
        ``mqeconfig.MAX_SERIES_POINTS``)
    """
    assert after or (from_dt is not None and to_dt is not None)
    log.debug(
        'insert_series_values report_id=%s sd.from_dt=%s sd.to_dt=%s from_dt=%s'
        'to_dt=%s after=%s limit=%s', report.report_id, series_def.from_dt,
        series_def.to_dt, from_dt, to_dt, after, limit)
    instances = report.fetch_instances(
        after=after,
        from_dt=from_dt if not after else None,
        to_dt=to_dt if not after else None,
        limit=limit or mqeconfig.MAX_SERIES_POINTS,
        tags=series_def.tags,
        columns=['report_instance_id', 'ri_data'])
    if not instances:
        return

    # Extract a series value from each instance the series spec matches.
    data = []
    for ri in instances:
        cell = series_def.series_spec.get_cell(ri)
        if cell:
            row = dict(report_instance_id=ri.report_instance_id,
                       json_value=serialize.mjson(cell.value))
            header = ri.table.header(cell.colno)
            if header:
                row['header'] = header
            data.append(row)
    log.info(
        'Inserting %d series values from %d instances report_name=%r series_id=%s',
        len(data), len(instances), report.report_name, series_def.series_id)
    c.dao.SeriesValueDAO.insert_multi(series_def.series_id, data)

    # Assumes fetch_instances returns instances oldest-first -- TODO confirm.
    oldest_rid_fetched = instances[0].report_instance_id
    newest_rid_fetched = instances[-1].report_instance_id

    # from_rid stores minimal uuid from dt for which we fetched instances,
    # while to_rid stores an actual latest report_instance_id in the series.
    # However, generally it's not expected to_rid can always be a real
    # report_instance_id
    if from_dt is not None:
        oldest_rid_stored = util.min_uuid_with_dt(from_dt)
    else:
        oldest_rid_stored = oldest_rid_fetched
    # Widen the covered range only when the new bounds extend it.
    if series_def.from_rid is None or \
            util.uuid_lt(oldest_rid_stored, series_def.from_rid):
        log.debug('Updating series_def_id=%s from_rid_dt=%s',
                  series_def.series_id,
                  util.datetime_from_uuid1(oldest_rid_stored))
        series_def.update_from_rid(oldest_rid_stored)
    if series_def.to_rid is None or \
            util.uuid_lt(series_def.to_rid, newest_rid_fetched):
        log.debug('Updating series_def_id=%s to_rid_dt=%s',
                  series_def.series_id,
                  util.datetime_from_uuid1(newest_rid_fetched))
        series_def.update_to_rid(newest_rid_fetched)
def set_all_dashboards_ordering(self, owner_id, dashboard_id_list):
    """Overwrite the owner's dashboard ordering (Cassandra backend)."""
    ordering_raw = serialize.mjson(dashboard_id_list)
    c.cass.execute(
        """UPDATE mqe.all_dashboards_properties SET dashboard_id_ordering=? WHERE owner_id=?""",
        [ordering_raw, owner_id])