def insert_series_sample(data_service, val=None, *vals):
    series = Series(ENTITY, METRIC)
    series.tags = {TAG: TAG_VALUE}
    series.add_samples(Sample(val, datetime.now()))
    # Insert any additional values with a delay between samples
    # so that each sample receives a distinct timestamp.
    for v in vals:
        time.sleep(WAIT_TIME + 2)
        series.add_samples(Sample(v, datetime.now()))

    # print('insertion =', series)

    return data_service.insert(series)
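
# Minimal usage sketch (hypothetical wiring): assumes `connection` comes from
# atsd_client.connect_url(...) and that the module-level ENTITY, METRIC, TAG,
# TAG_VALUE, and WAIT_TIME constants are defined.
svc = SeriesService(connection)
insert_series_sample(svc, 1, 2, 3)  # inserts 1, then 2 and 3 after short delays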
    def test_insert_retrieve_versioning(self):

        test_status = 'pyapi.status'
        now = datetime.now()

        series = Series(ENTITY, VERSION_METRIC)
        val = random.randint(0, VALUE - 1)
        series.add_samples(
            Sample(value=val,
                   time=now - timedelta(seconds=2),
                   version={'status': test_status}))

        sf = SeriesFilter(metric=VERSION_METRIC)
        ef = EntityFilter(entity=ENTITY)
        df = DateFilter(start_date=now - timedelta(hours=1), end_date=now)
        vf = VersioningFilter(versioned=True)
        query = SeriesQuery(series_filter=sf,
                            entity_filter=ef,
                            date_filter=df,
                            versioning_filter=vf)

        successful = self.service.insert(series)
        time.sleep(WAIT_TIME)
        # The query returns a list; unpack the single matching series.
        series, = self.service.query(query)
        # print(series)
        last_sample = series.data[-1]

        self.assertTrue(successful)
        self.assertEqual(last_sample.v, val)
        self.assertIsNotNone(last_sample.version)
        self.assertEqual(last_sample.version['status'], test_status)
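
    # Hedged follow-on sketch (not in the original test): under a metric with
    # versioning enabled in ATSD, re-inserting a sample at the same timestamp
    # adds a new version instead of overwriting the old value. The status
    # label below is hypothetical.
    def sketch_insert_second_version(self, val, now):
        revised = Series(ENTITY, VERSION_METRIC)
        revised.add_samples(
            Sample(value=val + 1,
                   time=now - timedelta(seconds=2),
                   version={'status': 'pyapi.revised'}))
        return self.service.insert(revised)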
Example #4
def main(filename,
         atsd_url,
         username,
         password,
         stat_as_tag,
         entity_name=None,
         timestamp=None):
    if timestamp is None:
        timestamp = datetime.now()
    conn = connect_url(atsd_url, username, password)
    series_service = SeriesService(conn)
    with open(filename) as f:
        entries = json.load(f)

    for entry in entries:
        # Derive the entity from the benchmark class name unless one is given.
        benchmark_name_split = entry['benchmark'].split('.')
        entity = benchmark_name_split[-2] if entity_name is None else entity_name
        metric_prefix = ('jmh.' + entry['mode'] + '.' +
                         entry['primaryMetric']['scoreUnit'])
        tags = {'method': benchmark_name_split[-1]}
        if not stat_as_tag:
            metric = metric_prefix + '.avg'
        else:
            metric = metric_prefix
            tags['stat'] = 'avg'

        # Insert the average score.
        series_service.insert(
            Series(entity, metric,
                   [Sample(entry['primaryMetric']['score'], timestamp)],
                   tags))

        # Insert each score percentile, mapping 0.0/100.0 to min/max.
        for key, value in entry['primaryMetric']['scorePercentiles'].items():
            if key == '0.0':
                stat = 'min'
            elif key == '100.0':
                stat = 'max'
            else:
                stat = key
            if not stat_as_tag:
                metric = metric_prefix + '.' + stat
            else:
                metric = metric_prefix
                tags['stat'] = stat
            series_service.insert(
                Series(entity, metric, [Sample(value, timestamp)], tags))
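
# Hypothetical invocation; the original script's argument parsing is not
# shown, and all values below are illustrative placeholders.
if __name__ == '__main__':
    main('jmh-result.json',
         'https://atsd_hostname:8443',
         'username',
         'password',
         stat_as_tag=True)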
    def setUp(self):
        """
        Insert series to open the alert.
        """
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(VALUE, datetime.now()))
        self._series_service = SeriesService(self.connection)
        self._series_service.insert(series)
        time.sleep(self.wait_time)
    @classmethod
    def setUpClass(cls):
        """
        Insert series.
        """
        super().setUpClass()
        series = Series(NAME, METRIC)
        series.add_samples(Sample(1, datetime.now()))
        series_service = SeriesService(cls.connection)
        series_service.insert(series)
        time.sleep(cls.wait_time)
    def test_fields_match(self):
        """
        Check fields of Series model were set as expected.
        """
        sample = Sample(1, datetime.now())
        series = Series(ENTITY, METRIC, tags={TAG: TAG_VALUE})
        series.add_samples(sample)
        self.assertEqual(ENTITY, series.entity)
        self.assertEqual(METRIC, series.metric)
        self.assertEqual({TAG: TAG_VALUE}, series.tags)
        self.assertEqual([sample], series.data)
    def test_history_query(self):
        # Insert series to close the alert.
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(-1, datetime.now()))
        self._series_service.insert(series)

        time.sleep(self.wait_time)

        # ef and df are EntityFilter/DateFilter instances defined elsewhere.
        query = AlertHistoryQuery(entity_filter=ef,
                                  date_filter=df,
                                  rule=RULE,
                                  metric=METRIC)
        result = self.service.history_query(query)
        # print(result)
        self.assertIsNotNone(result)
        self.assertGreater(len(result), 0)
        a = result[0]
        self.common_checks(a)
    def test_series(self):
        # Insert series.
        series = Series(ENTITY, NAME)
        series.add_samples(Sample(1, datetime.now()))
        series_service = SeriesService(self.connection)
        series_service.insert(series)

        time.sleep(self.wait_time)

        result = self.service.series(metric=NAME,
                                     min_insert_date='current_hour')
        # print(result)
        self.assertIsNotNone(result)
        self.assertGreater(len(result), 0)
        series = result[0]
        self.assertIsNotNone(series)
        self.assertIsInstance(series, Series)
        self.assertEqual(NAME, series.metric)
        self.assertEqual(ENTITY, series.entity)
        self.assertEqual({}, series.tags)
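        # Note: 'current_hour' is an ATSD calendar keyword, so the lookup only
        # matches series that received data during the current hour; that is
        # why the test inserts a fresh sample before querying.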
    def tearDown(self):
        # Insert series to close the alert.
        series = Series(ENTITY, METRIC)
        series.add_samples(Sample(-1, datetime.now()))
        self._series_service.insert(series)
        time.sleep(self.wait_time)
Example #11
    batches_left = BATCH_COUNT
    total_asset_duration[entity_name] = timedelta()

    while batches_left > 0:
        procedure_name = ''

        if proc == 0:
            # Start a new batch: refresh metric behaviour and procedure
            # durations. The x= parameter stores the sample's text value
            # (the batch id); the numeric value stays None.
            stage_2_leaps, metrics = update_metrics_behaviour()
            procedures, td = update_durations(stage_2_leaps)
            total_asset_duration[entity_name] += timedelta(hours=td)
            if SAVE_AS_COMMANDS:
                commands.append('series e:%s x:%s=%s d:%s' %
                                (entity_name, entity_name + UNIT_BATCH_ID_SUFFIX,
                                 batch_id, to_iso(t)))
            else:
                series.append(Series(entity_name,
                                     entity_name + UNIT_BATCH_ID_SUFFIX,
                                     data=[Sample(time=t, x=batch_id, value=None)]))
            batch_id += 1
            procedure_name = 'Stage 1 Static Drying'
        elif procedures[proc][0] == 'Inactive':
            if SAVE_AS_COMMANDS:
                commands.append('series e:%s x:%s=%s d:%s' %
                                (entity_name, entity_name + UNIT_BATCH_ID_SUFFIX,
                                 'Inactive', to_iso(t)))
            else:
                series.append(Series(entity_name,
                                     entity_name + UNIT_BATCH_ID_SUFFIX,
                                     data=[Sample(time=t, x='Inactive', value=None)]))
            procedure_name = 'Inactive'
            batches_left -= 1
        elif procedures[proc][0] == 'Stage 2: Enable agitator 0':
            procedure_name = 'Stage 2 Intermittent Agitation'
        elif procedures[proc][0] == 'Stage 3: Product Cooled Down':
Example #12
    while batches_left > 0:
        procedure_name = ''

        if proc == 0:
            stage_2_leaps, metrics = update_metrics_behaviour()
            procedures, td = update_durations(stage_2_leaps)
            total_asset_duration[entity_name] += timedelta(hours=td)
            if SAVE_AS_COMMANDS:
                commands.append('series e:%s x:%s=%s d:%s' %
                                (entity_name, entity_name +
                                 UNIT_BATCH_ID_SUFFIX, batch_id, to_iso(t)))
            else:
                series.append(
                    Series(entity_name,
                           entity_name + UNIT_BATCH_ID_SUFFIX,
                           data=[Sample(time=t, x=batch_id, value=None)]))
            batch_id += 1
            procedure_name = 'Stage 1 Static Drying'
        elif procedures[proc][0] == 'Inactive':
            if SAVE_AS_COMMANDS:
                commands.append('series e:%s x:%s=%s d:%s' %
                                (entity_name, entity_name +
                                 UNIT_BATCH_ID_SUFFIX, 'Inactive', to_iso(t)))
            else:
                series.append(
                    Series(entity_name,
                           entity_name + UNIT_BATCH_ID_SUFFIX,
                           data=[Sample(time=t, x='Inactive', value=None)]))
            procedure_name = 'Inactive'
            batches_left -= 1
        elif procedures[proc][0] == 'Stage 2: Enable agitator 0':
Example #13
series = []

for asset in assets:
    proc = 0
    t = next_time(None, MIN_TIME)

    dataSplines = SplineHolder()
    dataCommandSplines.append([asset, dataSplines])

    while t < MAX_TIME:
        iso = t.isoformat()
        if proc == 0:
            series.append(
                Series(asset['id'],
                       'axi.Unit_BatchID',
                       data=[Sample(time=iso, x=batch_id, value=None)]))
            batch_id += 1
        elif procedures[proc][0] == 'Inactive':
            series.append(
                Series(asset['id'],
                       'axi.Unit_BatchID',
                       data=[Sample(time=iso, x='Inactive', value=None)]))
        series.append(
            Series(asset['id'],
                   'axi.Unit_Procedure',
                   data=[Sample(time=iso, x=procedures[proc][0], value=None)]))
        next_t = next_time(procedures[proc], t)
        for [metric, splines] in metrics:
            dataSplines.put_spline(t, next_t, metric,
                                   splines[procedures[proc][0]])
        proc = (proc + 1) % len(procedures)
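
# Note: `proc` cycles round-robin through the procedure list, and each
# iteration advances the simulated time `t` to the start of the next
# procedure via next_time().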
Example #14
start_date = '2018-01-01T00:00:00Z'
end_date = '2019-01-01T00:00:01Z'

# prepare series_query and execute it
sf = SeriesFilter(metric=metric_name)
ef = EntityFilter(entity=entity_name)
df = DateFilter(start_date=start_date, end_date=end_date)
query = SeriesQuery(series_filter=sf, entity_filter=ef, date_filter=df)
series_list = svc.query(query)

for series in series_list:
    # Update sample timestamps by shifting each date forward year by year.
    updated_data = []
    for sample in series.data:
        current_date = sample.get_date()
        for i in range(1, year_count + 1):
            try:
                # increment year
                new_date = current_date.replace(year=current_date.year + i)
                updated_data.append(
                    Sample(value=sample.v,
                           time=new_date,
                           version=sample.version))
            except ValueError:
                # The shifted date does not exist (e.g. Feb 29 in a
                # non-leap year); skip the sample.
                # Uncomment to log skipped dates:
                # print('%s, year shift is %s' % (current_date, i))
                continue
    series.data = updated_data
    series.aggregate = None
    svc.insert(series)
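
# For reference, the ValueError branch above fires for calendar dates that do
# not exist in the shifted year, most commonly Feb 29:
try:
    datetime(2020, 2, 29).replace(year=2021)  # 2021 is not a leap year
except ValueError:
    pass  # such samples are skipped, as in the loop above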