Example #1
def _should_expire_tile(tile, min_valid, optimize_check):
    max_dt = None
    if optimize_check:
        latest_report_instance_id = tile.report.fetch_latest_instance_id(tile.tags)
        if latest_report_instance_id:
            max_dt = util.datetime_from_uuid1(latest_report_instance_id)
    else:
        tile_data = tile.tilewidget.get_tile_data()
        last_points = [sd['data_points'][-1] for sd in tile_data['series_data'] if sd['data_points']]
        if last_points:
            max_dt = max(p.dt for p in last_points)
    if max_dt is None:
        # if there's no data, use the tile's creation datetime
        max_dt = util.datetime_from_uuid1(tile.tile_id)
    return max_dt < min_valid
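The min_valid argument is a datetime cutoff: a tile expires when its newest data point (or, when it has no data, its creation time) is older than the cutoff. A minimal sketch of how a caller might derive the cutoff from a maximum allowed data age, mirroring Example #3 below:

import datetime

# Hypothetical setting: treat a tile as stale after an hour without new data.
max_seconds_without_data = 3600
min_valid = datetime.datetime.utcnow() - datetime.timedelta(
    seconds=max_seconds_without_data)
# _should_expire_tile(tile, min_valid, optimize_check=True) is then evaluated per tile.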
Example #2
def report_instance_for_viewer():
    report_name = request.get_json()['report_name']
    tags = request.get_json().get('tags')
    curr_report_instance_id = request.get_json()['curr_report_instance_id']
    direction = request.get_json().get('direction')
    search_date = request.get_json().get('search_date')

    check_access(lambda: auth.access_profile())

    report = Report.select_by_name(auth.logged_owner_id(), report_name)

    if not curr_report_instance_id:
        curr_report_instance_id = report.fetch_latest_instance_id(tags)
        if not curr_report_instance_id:
            return error()
    if not direction:
        if search_date is not None:
            ri = report.find_report_instance_by_dt(search_date, tags)
        else:
            ri = report.fetch_single_instance(curr_report_instance_id)
    elif direction == 'next':
        ri = report.fetch_next_instance(curr_report_instance_id, tags)
    elif direction == 'prev':
        ri = report.fetch_prev_instance(curr_report_instance_id, tags)
    else:
        return error('Wrong direction')
    res = {}
    res['report_id'] = report.report_id
    res['report_has_tags'] = report.has_tags()
    if ri:
        res['html_newest_table'] = get_template_attribute('m.html', 'table_as_html_table')(ri.table)
        res['created_raw'] = datetime_from_uuid1(ri.report_instance_id)
        res['created'] = format_datetime(datetime_from_uuid1(ri.report_instance_id))
        res['tags'] = Markup(' '.join('<span class="selected-tag-name clickable">%s</span>' % tag for tag in ri.all_tags))
        res['curr_report_instance_id'] = ri.report_instance_id
        res['has_next'] = report.fetch_next_instance(ri.report_instance_id, tags) is not None
        res['has_prev'] = report.fetch_prev_instance(ri.report_instance_id, tags) is not None
    else:
        res['html_newest_table'] = ''
        res['created_raw'] = ''
        res['created'] = ''
        res['tags'] = ''
        res['curr_report_instance_id'] = None
        res['has_next'] = False
        res['has_prev'] = False

    return success(result=res)
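The handler reads all of its parameters from the request's JSON body; a request could look roughly like the dict below (field names are taken from the code above, the values are hypothetical):

payload = {
    'report_name': 'requests',        # name of an existing report
    'tags': ['p1:10'],                # optional list of tags to filter by
    'curr_report_instance_id': None,  # falsy: start from the latest instance
    'direction': 'prev',              # 'next', 'prev', or omitted to fetch a single instance
    'search_date': None,              # consulted only when no direction is given
}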
Example #3
def _should_expire_tile(tile, max_seconds_without_data):
    tile_data = tile.tilewidget.get_tile_data(limit=1)
    last_points = [
        sd['data_points'][-1] for sd in tile_data['series_data']
        if sd['data_points']
    ]
    if not last_points:
        max_dt = util.datetime_from_uuid1(tile.tile_id)
    else:
        max_dt = max(p.dt for p in last_points)
    min_valid = datetime.datetime.utcnow() - datetime.timedelta(
        seconds=max_seconds_without_data)
    if max_dt >= min_valid:
        return False
    return True
Example #4
    def test_expire_tiles_without_data_losing_sscreated(self):
        rd = ReportData('r')

        ss = dataseries.SeriesSpec(0, -1, dict(op='eq', args=['0']))
        tile_config1 = {
            'series_spec_list': [ss],
            'tags': ['p1:10'],
            'tile_options': {
                'tpcreator_uispec':
                tpcreator.suggested_tpcreator_uispec(['p1:10']),
                'sscs': ss
            }
        }

        tile1 = Tile.insert(rd.owner_id, rd.report.report_id, rd.dashboard_id,
                            tile_config1)
        place_tile(tile1)

        #rd.report.process_input('1', tags=['p1:10'])

        rd.report.process_input('1\n2\n', tags=['p1:11'])
        rd.report.process_input('1\n2\n3\n', tags=['p1:12'])

        self.assertEqual(3, len(rd.layout().layout_dict))
        self.assertEqual(3, len(rd.get_tile_by_tags(['p1:12']).series_specs()))

        tile1_created_ago = (datetime.datetime.utcnow() -
                             util.datetime_from_uuid1(tile1.tile_id))
        tiles.expire_tiles_without_data(
            rd.layout().tile_dict.keys(),
            tile1_created_ago.total_seconds() - 0.00001,
            rd.layout().layout_id)
        self.assertEqual(2, len(rd.layout().layout_dict))
        self.assertTrue(rd.layout_has_tags([['p1:11'], ['p1:12']]))
        master_tile = rd.get_tile_by_tags(['p1:11'])
        self.assertTrue(master_tile.is_master_tile())
        self.assertEqual(master_tile.tile_id,
                         rd.get_tile_by_tags(['p1:12']).get_master_tile_id())
        self.assertEqual(3, len(rd.get_tile_by_tags(['p1:12']).series_specs()))
Example #5
def insert_series_values(series_def, report, from_dt, to_dt, after=None, limit=None):
    assert after or (from_dt is not None and to_dt is not None)

    log.debug('insert_series_values report_id=%s sd.from_dt=%s sd.to_dt=%s from_dt=%s '
              'to_dt=%s after=%s limit=%s', report.report_id, series_def.from_dt,
              series_def.to_dt, from_dt, to_dt, after, limit)

    instances_it = report.fetch_instances_iter(after=after,
                                               from_dt=from_dt if not after else None,
                                               to_dt=to_dt if not after else None,
                                               limit=limit or mqeconfig.MAX_SERIES_POINTS,
                                               tags=series_def.tags,
                                               columns=['report_instance_id', 'ri_data'])
    info = dict(oldest_rid_fetched=None,
                newest_rid_fetched=None,
                count=0)

    def rows_it():
        for ri in instances_it:
            if info['oldest_rid_fetched'] is None:
                info['oldest_rid_fetched'] = ri.report_instance_id
            info['newest_rid_fetched'] = ri.report_instance_id
            info['count'] += 1

            cell = series_def.series_spec.get_cell(ri)
            if cell:
                row = dict(report_instance_id=ri.report_instance_id,
                           json_value=serialize.mjson(cell.value))
                header = ri.table.header(cell.colno)
                if header:
                    row['header'] = header
                yield row

    c.dao.SeriesValueDAO.insert_multi(series_def.series_id, rows_it())

    if info['count'] == 0:
        return

    log.info('Inserted %d series values report_name=%r series_id=%s',
             info['count'], report.report_name, series_def.series_id)


    # from_rid stores the minimal uuid for the dt from which we fetched instances,
    # while to_rid stores the actual latest report_instance_id in the series. Note,
    # though, that to_rid is not guaranteed to always be a real report_instance_id.
    if from_dt is not None:
        oldest_rid_stored = util.min_uuid_with_dt(from_dt)
    else:
        oldest_rid_stored = info['oldest_rid_fetched']

    if series_def.from_rid is None or \
            util.uuid_lt(oldest_rid_stored, series_def.from_rid):
        log.debug('Updating series_def_id=%s from_rid_dt=%s', series_def.series_id,
                  util.datetime_from_uuid1(oldest_rid_stored))
        series_def.update_from_rid(oldest_rid_stored)

    if series_def.to_rid is None or \
            util.uuid_lt(series_def.to_rid, info['newest_rid_fetched']):
        log.debug('Updating series_def_id=%s to_rid_dt=%s', series_def.series_id,
                  util.datetime_from_uuid1(info['newest_rid_fetched']))
        series_def.update_to_rid(info['newest_rid_fetched'])
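util.uuid_lt orders version-1 UUIDs; a plausible sketch, assuming it compares primarily by the embedded timestamp (the real helper may break ties differently):

def uuid_lt(u1, u2):
    # Order time-based UUIDs by their 60-bit timestamp, using the raw bytes as a
    # tie-breaker so the ordering is total.
    return (u1.time, u1.bytes) < (u2.time, u2.bytes)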
Example #6
 def to_dt(self):
     """A :class:`~datetime.datetime` up to which this data series has data"""
     if self.to_rid is None:
         return None
     return util.datetime_from_uuid1(self.to_rid)
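Most of these examples revolve around util.datetime_from_uuid1, which recovers the timestamp embedded in a version-1 UUID. A minimal sketch of such a helper, assuming plain RFC 4122 semantics (the library's implementation may differ in details such as timezone handling):

import datetime
import uuid

# Version-1 UUIDs count 100-nanosecond intervals since 1582-10-15 (the Gregorian epoch).
GREGORIAN_EPOCH = datetime.datetime(1582, 10, 15)

def datetime_from_uuid1(u):
    return GREGORIAN_EPOCH + datetime.timedelta(microseconds=u.time // 10)

print(datetime_from_uuid1(uuid.uuid1()))  # roughly the current UTC time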
Example #7
    def insert(self, owner_id, report_id, report_instance_id, tags, ri_data,
               input_string, extra_ri_data, custom_created):
        created = util.datetime_from_uuid1(report_instance_id)

        with cursor() as cur:
            first_row = None
            tags_powerset = util.powerset(tags[:mqeconfig.MAX_TAGS])
            for tags_subset in tags_powerset:
                row = dict(report_id=report_id,
                           tags=tags_subset,
                           report_instance_id=report_instance_id,
                           ri_data=ri_data,
                           input_string=input_string,
                           all_tags=tags,
                           extra_ri_data=extra_ri_data)
                if first_row is None:
                    first_row = row
                cur.execute(*insert('report_instance', row))

                cur.execute(
                    """INSERT OR IGNORE INTO report_instance_day (report_id, tags, day)
                               VALUES (?, ?, ?)""",
                    [report_id, tags_subset,
                     created.date()])

            if first_row:
                # report counts

                cur.execute(
                    """UPDATE report SET
                               report_instance_count = report_instance_count + 1
                               WHERE report_id=?""", [report_id])

                diskspace = self._compute_ri_diskspace(first_row)
                cur.execute(
                    """UPDATE report SET
                               report_instance_diskspace = report_instance_diskspace + ?
                               WHERE report_id=?""", [diskspace, report_id])

                # owner counts
                cur.execute(
                    """SELECT 1 FROM report_data_for_owner WHERE owner_id=?""",
                    [owner_id])
                if not cur.fetchone():
                    try:
                        cur.execute(
                            """INSERT INTO report_data_for_owner (owner_id)
                                       VALUES (?)""", [owner_id])
                    except sqlite3.IntegrityError:
                        pass

                cur.execute(
                    """UPDATE report_data_for_owner
                               SET report_instance_count=report_instance_count+1
                               WHERE owner_id=?""", [owner_id])

                cur.execute(
                    """UPDATE report_data_for_owner
                               SET report_instance_diskspace=report_instance_diskspace+?
                               WHERE owner_id=?""", [diskspace, owner_id])

                for tag in tags:
                    cur.execute(
                        """INSERT OR IGNORE INTO report_tag (report_id, tag)
                                   VALUES (?, ?)""", [report_id, tag])

            return first_row
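The insert writes one report_instance row per subset of the instance's tags (util.powerset), so a later query filtered by any tag combination can match a row directly. A plausible sketch of such a powerset helper (assumption: it returns every subset, including the empty one, as a list):

from itertools import combinations

def powerset(items):
    # e.g. powerset(['a', 'b']) -> [[], ['a'], ['b'], ['a', 'b']]
    return [list(combo)
            for size in range(len(items) + 1)
            for combo in combinations(items, size)]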
Example #8
    def _delete_ris(self, owner_id, report_id, tags, ris, update_counters):
        qs = []
        tags_days = set()
        all_tags_subsets = set()

        with cursor() as cur:
            for ri in ris:
                tags_powerset = util.powerset(ri['all_tags'])
                cur.execute("""DELETE FROM report_instance WHERE report_id=?
                               AND tags IN {in_p} AND report_instance_id=?""".format(in_p=in_params(tags_powerset)),
                            [report_id] + tags_powerset + [ri['report_instance_id']])
                day = util.datetime_from_uuid1(ri['report_instance_id']).date()
                for tags_subset in tags_powerset:
                    tags_days.add((tuple(tags_subset), day))
                    all_tags_subsets.add(tuple(tags_subset))

            if update_counters:
                total_diskspace = sum(self._compute_ri_diskspace(ri) for ri in ris)
                cur.execute("""UPDATE report
                               SET report_instance_count = report_instance_count - ?
                               WHERE report_id=?""",
                            [len(ris), report_id])
                cur.execute("""UPDATE report
                               SET report_instance_diskspace = report_instance_diskspace - ?
                               WHERE report_id=?""",
                            [total_diskspace, report_id])
                cur.execute("""UPDATE report_data_for_owner
                               SET report_instance_count=report_instance_count - ?
                               WHERE owner_id=?""",
                            [len(ris), owner_id])
                cur.execute("""UPDATE report_data_for_owner
                               SET report_instance_diskspace=report_instance_diskspace - ?
                               WHERE owner_id=?""",
                            [total_diskspace, owner_id])


            ### Delete days for which report instances no longer exist

            for day_tags, day in tags_days:
                cur.execute("""SELECT report_instance_id FROM report_instance
                               WHERE report_id=? AND tags=? AND 
                               report_instance_id > ? AND report_instance_id < ?
                               LIMIT 1""",
                            [report_id, list(day_tags),
                             util.min_uuid_with_dt(datetime.datetime.combine(day,
                                                            datetime.datetime.min.time())),
                             util.max_uuid_with_dt(datetime.datetime.combine(day,
                                                            datetime.datetime.max.time()))])
                if not cur.fetchall():
                    cur.execute("""DELETE FROM report_instance_day
                                   WHERE report_id=? AND tags=? AND day=?""",
                                [report_id, list(day_tags), day])


            ### Delete tags for which report instances no longer exist

            tags_present = set()
            for tags, _ in tags_days:
                for tag in tags:
                    tags_present.add(tag)

            for tag in tags_present:
                cur.execute("""SELECT report_id FROM report_instance_day
                               WHERE report_id=? AND tags=?
                               LIMIT 1""",
                            [report_id, [tag]])
                if cur.fetchall():
                    continue
                cur.execute("""DELETE FROM report_tag
                               WHERE report_id=? AND tag=?""",
                            [report_id, tag])


            return len(ris), [list(ts) for ts in all_tags_subsets]
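The day-cleanup query brackets a whole day between util.min_uuid_with_dt and util.max_uuid_with_dt. A rough sketch of such helpers, assuming they build the smallest and largest version-1 UUIDs carrying a given timestamp (the real implementation may encode the bounds differently):

import datetime
import uuid

GREGORIAN_EPOCH = datetime.datetime(1582, 10, 15)

def _uuid1_ticks(dt):
    # 100-nanosecond intervals since the UUID epoch
    delta = dt - GREGORIAN_EPOCH
    return (delta.days * 86400 + delta.seconds) * 10**7 + delta.microseconds * 10

def _uuid_with_dt(dt, clock_seq_hi, clock_seq_low, node):
    t = _uuid1_ticks(dt)
    fields = (t & 0xFFFFFFFF,                 # time_low
              (t >> 32) & 0xFFFF,             # time_mid
              ((t >> 48) & 0x0FFF) | 0x1000,  # time_hi + version 1
              clock_seq_hi, clock_seq_low, node)
    return uuid.UUID(fields=fields)

def min_uuid_with_dt(dt):
    return _uuid_with_dt(dt, 0x80, 0x00, 0)               # smallest RFC 4122 clock_seq/node

def max_uuid_with_dt(dt):
    return _uuid_with_dt(dt, 0xBF, 0xFF, 0xFFFFFFFFFFFF)  # largest clock_seq/node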
Example #9
 def dt(self):
     return util.datetime_from_uuid1(self.rid)
Example #10
def insert_series_values(series_def,
                         report,
                         from_dt,
                         to_dt,
                         after=None,
                         limit=None):
    assert after or (from_dt is not None and to_dt is not None)

    log.debug(
        'insert_series_values report_id=%s sd.from_dt=%s sd.to_dt=%s from_dt=%s '
        'to_dt=%s after=%s limit=%s', report.report_id, series_def.from_dt,
        series_def.to_dt, from_dt, to_dt, after, limit)

    instances = report.fetch_instances(
        after=after,
        from_dt=from_dt if not after else None,
        to_dt=to_dt if not after else None,
        limit=limit or mqeconfig.MAX_SERIES_POINTS,
        tags=series_def.tags,
        columns=['report_instance_id', 'ri_data'])
    if not instances:
        return
    data = []
    for ri in instances:
        cell = series_def.series_spec.get_cell(ri)
        if cell:
            row = dict(report_instance_id=ri.report_instance_id,
                       json_value=serialize.mjson(cell.value))
            header = ri.table.header(cell.colno)
            if header:
                row['header'] = header
            data.append(row)
    log.info(
        'Inserting %d series values from %d instances report_name=%r series_id=%s',
        len(data), len(instances), report.report_name, series_def.series_id)
    c.dao.SeriesValueDAO.insert_multi(series_def.series_id, data)

    oldest_rid_fetched = instances[0].report_instance_id
    newest_rid_fetched = instances[-1].report_instance_id

    # from_rid stores the minimal uuid for the dt from which we fetched instances,
    # while to_rid stores the actual latest report_instance_id in the series. Note,
    # though, that to_rid is not guaranteed to always be a real report_instance_id.
    if from_dt is not None:
        oldest_rid_stored = util.min_uuid_with_dt(from_dt)
    else:
        oldest_rid_stored = oldest_rid_fetched

    if series_def.from_rid is None or \
            util.uuid_lt(oldest_rid_stored, series_def.from_rid):
        log.debug('Updating series_def_id=%s from_rid_dt=%s',
                  series_def.series_id,
                  util.datetime_from_uuid1(oldest_rid_stored))
        series_def.update_from_rid(oldest_rid_stored)

    if series_def.to_rid is None or \
            util.uuid_lt(series_def.to_rid, newest_rid_fetched):
        log.debug('Updating series_def_id=%s to_rid_dt=%s',
                  series_def.series_id,
                  util.datetime_from_uuid1(newest_rid_fetched))
        series_def.update_to_rid(newest_rid_fetched)
Example #11
 def created(self):
     """Creation datetime"""
     return util.datetime_from_uuid1(self.report_instance_id)
Example #12
    def process_input(self,
                      input_string,
                      tags=None,
                      created=None,
                      input_type='any',
                      ip_options={},
                      force_header=None,
                      extra_ri_data=None,
                      handle_tpcreator=True,
                      handle_sscreator=True):
        """Process an input string - parse it into a table and create a report instance belonging
        to the report.

        :param str|unicode input_string: the input string
        :param list tags: a list of string tags attached to the report instance
        :param ~datetime.datetime created: an explicit creation datetime of the report instance (
            default: the current datetime)
        :param str input_type: input type (see :func:`mqetables.parseany.parse_input`)
        :param dict ip_options: extra parser options (see :func:`mqetables.parsing.InputParser`)
        :param force_header: a list of header rows indexes to set as a header (defaults to
            auto-detection)
        :param extra_ri_data: a custom JSON-serializable document attached to the report instance
        :param handle_tpcreator: whether to handle TPCreator for the created report instance
            by calling :func:`~mqe.tpcreator.handle_tpcreator`
        :param handle_sscreator: whether to handle SSCS by calling :func:`~mqe.sscreator.handle_sscreator`
        :return: an :class:`InputProcessingResult`
        """
        assert isinstance(input_string, (str, unicode))

        # disallow 'created' in the future
        now = datetime.datetime.utcnow()
        if created is not None and created.tzinfo:
            created = util.make_tz_naive(created)

        if created is not None and created.year < 2000:
            raise ValueError('created cannot be before the year 2000')

        if created is not None and created < now:
            report_instance_id = util.uuid_with_dt(created)
            custom_created = True
        else:
            custom_created = False
            report_instance_id = gen_timeuuid()
            created = util.datetime_from_uuid1(report_instance_id)

        if tags is None:
            tags = []

        parsing_result = parseany.parse_input(input_string, input_type,
                                              ip_options)
        table = mqeconfig.get_table_from_parsing_result(parsing_result)
        if table is None:
            return InputProcessingResult(None, parsing_result)

        if force_header is not None:
            log.debug('Overwriting header detection due to force_header')
            table.header_idxs = [
                i for i in force_header if util.valid_index(table.num_rows, i)
            ]
            table.header_idxs_source = parsing.HEADER_IDXS_SOURCE_USER

        ri_data_dict = {
            'table': table,
        }
        result_desc = self._get_result_desc(parsing_result)
        if result_desc:
            ri_data_dict['result_desc'] = result_desc

        report_instance_row = c.dao.ReportInstanceDAO.insert(
            owner_id=self.owner_id,
            report_id=self.report_id,
            report_instance_id=report_instance_id,
            tags=tags,
            ri_data=serialize.mjson(ri_data_dict),
            input_string=parsing_result.input_string,
            extra_ri_data=serialize.mjson(extra_ri_data)
            if extra_ri_data else None,
            custom_created=custom_created)

        report_instance = ReportInstance(report_instance_row)

        log.info(
            'Created new report instance report_id=%s report_name=%r tags=%s '
            'report_instance_id=%s created=%s', self.report_id,
            self.report_name, tags, report_instance_id,
            report_instance.created)

        if tags and handle_tpcreator:
            from mqe import tpcreator
            tpcreator.handle_tpcreator(self.owner_id, self.report_id,
                                       report_instance)

        if handle_sscreator:
            from mqe import sscreator
            sscreator.handle_sscreator(self.owner_id, self.report_id,
                                       report_instance)

        if custom_created:
            from mqe import dataseries
            dataseries.clear_series_defs(self.report_id, util.powerset(tags))

        return InputProcessingResult(report_instance, parsing_result)
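Based on the docstring, a call could look roughly like this ('report' is an existing Report object, see Example #2 for how one can be looked up; the input text and tags are made up):

import datetime

result = report.process_input(
    'name cpu\nweb1 10\nweb2 20\n',
    tags=['env:prod'],
    created=datetime.datetime.utcnow() - datetime.timedelta(minutes=5),  # backdated instance
    force_header=[0],  # treat the first input row as the header
)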